diff --git a/internal/threatfeed/handler-logs.go b/internal/threatfeed/handler-logs.go
index fc76566..96a44c9 100644
--- a/internal/threatfeed/handler-logs.go
+++ b/internal/threatfeed/handler-logs.go
@@ -1,6 +1,7 @@
 package threatfeed
 
 import (
+	"cmp"
 	"encoding/json"
 	"html/template"
 	"io"
@@ -22,14 +23,52 @@ func handleLogsMain(w http.ResponseWriter, r *http.Request) {
 func handleLogs(w http.ResponseWriter, r *http.Request) {
 	switch r.PathValue("logtype") {
 	case "http":
-		handleLogHTTP(w)
+		switch r.PathValue("subtype") {
+		case "":
+			handleLogHTTP(w)
+		case "ip":
+			displayStats(w, httpIPStats{})
+		case "useragent":
+			displayStats(w, httpUserAgentStats{})
+		case "path":
+			displayStats(w, httpPathStats{})
+		case "query":
+			displayStats(w, httpQueryStats{})
+		case "method":
+			displayStats(w, httpMethodStats{})
+		case "host":
+			displayStats(w, httpHostStats{})
+		default:
+			handleNotFound(w, r)
+		}
 	case "ssh":
-		handleLogSSH(w)
+		switch r.PathValue("subtype") {
+		case "":
+			handleLogSSH(w)
+		case "ip":
+			displayStats(w, sshIPStats{})
+		case "client":
+			displayStats(w, sshClientStats{})
+		case "username":
+			displayStats(w, sshUsernameStats{})
+		case "password":
+			displayStats(w, sshPasswordStats{})
+		default:
+			handleNotFound(w, r)
+		}
 	default:
 		handleNotFound(w, r)
 	}
 }
 
+// displayLogErrorPage serves an error page when there is a problem parsing
+// log files.
+func displayLogErrorPage(w http.ResponseWriter, err error) {
+	w.WriteHeader(http.StatusInternalServerError)
+	tmpl := template.Must(template.ParseFS(templates, "templates/logs-error.html", "templates/nav.html"))
+	_ = tmpl.ExecuteTemplate(w, "logs-error.html", map[string]any{"Error": err, "NavData": "logs"})
+}
+
 // handleLogSSH serves the SSH honeypot logs as a web page. It opens the
 // honeypot log files, parses the data to JSON, and passes the result to an
 // HTML template for rendering.
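Note: the subtype routing above relies on the wildcard route patterns of Go 1.22's http.ServeMux. The "GET /logs/{logtype}/{subtype}" pattern registered in server.go later in this diff captures both segments, and r.PathValue("subtype") returns the empty string when only the two-segment /logs/{logtype} route matched, which is why the `case "":` branches fall back to the full log pages. A minimal, standalone sketch of that mechanism (illustrative only, not part of this patch; the port is arbitrary):

	package main

	import (
		"fmt"
		"net/http"
	)

	func main() {
		mux := http.NewServeMux()
		// Wildcard segments in the pattern are captured by name and read
		// back in the handler with Request.PathValue (Go 1.22+).
		mux.HandleFunc("GET /logs/{logtype}/{subtype}", func(w http.ResponseWriter, r *http.Request) {
			fmt.Fprintf(w, "logtype=%s subtype=%s\n", r.PathValue("logtype"), r.PathValue("subtype"))
		})
		// GET /logs/ssh/ip responds with: logtype=ssh subtype=ip
		_ = http.ListenAndServe(":8080", mux)
	}
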
@@ -37,9 +76,7 @@ func handleLogSSH(w http.ResponseWriter) {
 	l := logFiles{}
 	reader, err := l.open()
 	if err != nil {
-		w.WriteHeader(http.StatusInternalServerError)
-		tmpl := template.Must(template.ParseFS(templates, "templates/logs-error.html", "templates/nav.html"))
-		_ = tmpl.ExecuteTemplate(w, "logs-error.html", map[string]any{"Error": err, "NavData": "logs"})
+		displayLogErrorPage(w, err)
 		return
 	}
 	defer l.close()
@@ -59,14 +96,13 @@ func handleLogSSH(w http.ResponseWriter) {
 	data := make([]Log, 0, maxResults+1)
 	for d.More() {
 		var entry Log
-		if err := d.Decode(&entry); err != nil {
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "ssh" {
 			continue
 		}
-		if entry.EventType == "ssh" {
-			data = append(data, entry)
-			if len(data) > maxResults {
-				data = data[1:]
-			}
+		data = append(data, entry)
+		if len(data) > maxResults {
+			data = data[1:]
 		}
 	}
 	slices.Reverse(data)
@@ -82,9 +118,7 @@ func handleLogHTTP(w http.ResponseWriter) {
 	l := logFiles{}
 	reader, err := l.open()
 	if err != nil {
-		w.WriteHeader(http.StatusInternalServerError)
-		tmpl := template.Must(template.ParseFS(templates, "templates/logs-error.html", "templates/nav.html"))
-		_ = tmpl.ExecuteTemplate(w, "logs-error.html", map[string]any{"Error": err, "NavData": "logs"})
+		displayLogErrorPage(w, err)
 		return
 	}
 	defer l.close()
@@ -104,14 +138,13 @@ func handleLogHTTP(w http.ResponseWriter) {
 	data := make([]Log, 0, maxResults+1)
 	for d.More() {
 		var entry Log
-		if err := d.Decode(&entry); err != nil {
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "http" {
 			continue
 		}
-		if entry.EventType == "http" {
-			data = append(data, entry)
-			if len(data) > maxResults {
-				data = data[1:]
-			}
+		data = append(data, entry)
+		if len(data) > maxResults {
+			data = data[1:]
 		}
 	}
 	slices.Reverse(data)
@@ -120,6 +153,292 @@ func handleLogHTTP(w http.ResponseWriter) {
 	_ = tmpl.ExecuteTemplate(w, "logs-http.html", map[string]any{"Data": data, "NavData": "logs"})
 }
 
+// displayStats handles the processing and rendering of statistics for a given
+// field. It reads honeypot log data, counts the occurrences of `field` and
+// displays the results.
+func displayStats(w http.ResponseWriter, field fieldCounter) {
+	l := logFiles{}
+	reader, err := l.open()
+	if err != nil {
+		displayLogErrorPage(w, err)
+		return
+	}
+	defer l.close()
+
+	fieldCounts := field.count(reader)
+
+	results := []statsResult{}
+	for k, v := range fieldCounts {
+		results = append(results, statsResult{Field: k, Count: v})
+	}
+	slices.SortFunc(results, func(a, b statsResult) int {
+		return cmp.Or(
+			-cmp.Compare(a.Count, b.Count),
+			cmp.Compare(a.Field, b.Field),
+		)
+	})
+
+	tmpl := template.Must(template.ParseFS(templates, "templates/logs-stats.html", "templates/nav.html"))
+	_ = tmpl.ExecuteTemplate(
+		w,
+		"logs-stats.html",
+		map[string]any{
+			"Data":    results,
+			"Header":  field.fieldName(),
+			"NavData": "logs",
+		},
+	)
+}
+
+// statsResult holds a specific value for field and its associated count.
+type statsResult struct {
+	Field string
+	Count int
+}
+
+// fieldCounter is an interface that defines methods for counting occurrences
+// of specific fields.
+type fieldCounter interface {
+	count(io.Reader) map[string]int
+	fieldName() string
+}
+
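Note: the comparator passed to slices.SortFunc in displayStats above orders results by count descending (hence the negated cmp.Compare) and breaks ties by the field value ascending, because cmp.Or returns its first non-zero argument. A small self-contained illustration of that ordering, using made-up sample values (not part of this patch):

	package main

	import (
		"cmp"
		"fmt"
		"slices"
	)

	type statsResult struct {
		Field string
		Count int
	}

	func main() {
		results := []statsResult{
			{Field: "203.0.113.7", Count: 3},
			{Field: "198.51.100.2", Count: 9},
			{Field: "192.0.2.10", Count: 3},
		}
		slices.SortFunc(results, func(a, b statsResult) int {
			// Highest count first; equal counts fall back to the field value.
			return cmp.Or(
				-cmp.Compare(a.Count, b.Count),
				cmp.Compare(a.Field, b.Field),
			)
		})
		fmt.Println(results)
		// Prints: [{198.51.100.2 9} {192.0.2.10 3} {203.0.113.7 3}]
	}
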
+// sshIPStats is the log structure for extracting SSH IP data.
+type sshIPStats struct {
+	EventType string `json:"event_type"`
+	SourceIP  string `json:"source_ip"`
+}
+
+func (sshIPStats) fieldName() string { return "Source IP" }
+
+func (sshIPStats) count(r io.Reader) map[string]int {
+	fieldCounts := map[string]int{}
+	d := json.NewDecoder(r)
+	for d.More() {
+		var entry sshIPStats
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "ssh" {
+			continue
+		}
+		fieldCounts[entry.SourceIP]++
+	}
+	return fieldCounts
+}
+
+// sshClientStats is the log structure for extracting SSH client data.
+type sshClientStats struct {
+	EventType string `json:"event_type"`
+	Details   struct {
+		Client string `json:"ssh_client"`
+	} `json:"event_details"`
+}
+
+func (sshClientStats) fieldName() string { return "SSH Client" }
+
+func (sshClientStats) count(r io.Reader) map[string]int {
+	fieldCounts := map[string]int{}
+	d := json.NewDecoder(r)
+	for d.More() {
+		var entry sshClientStats
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "ssh" {
+			continue
+		}
+		fieldCounts[entry.Details.Client]++
+	}
+	return fieldCounts
+}
+
+// sshUsernameStats is the log structure for extracting SSH username data.
+type sshUsernameStats struct {
+	EventType string `json:"event_type"`
+	Details   struct {
+		Username string `json:"username"`
+	} `json:"event_details"`
+}
+
+func (sshUsernameStats) fieldName() string { return "Username" }
+
+func (sshUsernameStats) count(r io.Reader) map[string]int {
+	fieldCounts := map[string]int{}
+	d := json.NewDecoder(r)
+	for d.More() {
+		var entry sshUsernameStats
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "ssh" {
+			continue
+		}
+		fieldCounts[entry.Details.Username]++
+	}
+	return fieldCounts
+}
+
+// sshPasswordStats is the log structure for extracting SSH password data.
+type sshPasswordStats struct {
+	EventType string `json:"event_type"`
+	Details   struct {
+		Password string `json:"password"`
+	} `json:"event_details"`
+}
+
+func (sshPasswordStats) fieldName() string { return "Password" }
+
+func (sshPasswordStats) count(r io.Reader) map[string]int {
+	fieldCounts := map[string]int{}
+	d := json.NewDecoder(r)
+	for d.More() {
+		var entry sshPasswordStats
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "ssh" {
+			continue
+		}
+		fieldCounts[entry.Details.Password]++
+	}
+	return fieldCounts
+}
+
+// httpIPStats is the log structure for extracting HTTP IP data.
+type httpIPStats struct {
+	EventType string `json:"event_type"`
+	SourceIP  string `json:"source_ip"`
+}
+
+func (httpIPStats) fieldName() string { return "Source IP" }
+
+func (httpIPStats) count(r io.Reader) map[string]int {
+	fieldCounts := map[string]int{}
+	d := json.NewDecoder(r)
+	for d.More() {
+		var entry httpIPStats
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "http" {
+			continue
+		}
+		fieldCounts[entry.SourceIP]++
+	}
+	return fieldCounts
+}
+
+// httpUserAgentStats is the log structure for extracting HTTP user-agent data.
+type httpUserAgentStats struct {
+	EventType string `json:"event_type"`
+	Details   struct {
+		UserAgent string `json:"user_agent"`
+	} `json:"event_details"`
+}
+
+func (httpUserAgentStats) fieldName() string { return "User-Agent" }
+
+func (httpUserAgentStats) count(r io.Reader) map[string]int {
+	fieldCounts := map[string]int{}
+	d := json.NewDecoder(r)
+	for d.More() {
+		var entry httpUserAgentStats
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "http" {
+			continue
+		}
+		fieldCounts[entry.Details.UserAgent]++
+	}
+	return fieldCounts
+}
+
+// httpPathStats is the log structure for extracting HTTP path data.
+type httpPathStats struct {
+	EventType string `json:"event_type"`
+	Details   struct {
+		Path string `json:"path"`
+	} `json:"event_details"`
+}
+
+func (httpPathStats) fieldName() string { return "Path" }
+
+func (httpPathStats) count(r io.Reader) map[string]int {
+	fieldCounts := map[string]int{}
+	d := json.NewDecoder(r)
+	for d.More() {
+		var entry httpPathStats
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "http" {
+			continue
+		}
+		fieldCounts[entry.Details.Path]++
+	}
+	return fieldCounts
+}
+
+// httpQueryStats is the log structure for extracting HTTP query string data.
+type httpQueryStats struct {
+	EventType string `json:"event_type"`
+	Details   struct {
+		Query string `json:"query"`
+	} `json:"event_details"`
+}
+
+func (httpQueryStats) fieldName() string { return "Query String" }
+
+func (httpQueryStats) count(r io.Reader) map[string]int {
+	fieldCounts := map[string]int{}
+	d := json.NewDecoder(r)
+	for d.More() {
+		var entry httpQueryStats
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "http" {
+			continue
+		}
+		fieldCounts[entry.Details.Query]++
+	}
+	return fieldCounts
+}
+
+// httpMethodStats is the log structure for extracting HTTP method data.
+type httpMethodStats struct {
+	EventType string `json:"event_type"`
+	Details   struct {
+		Method string `json:"method"`
+	} `json:"event_details"`
+}
+
+func (httpMethodStats) fieldName() string { return "HTTP Method" }
+
+func (httpMethodStats) count(r io.Reader) map[string]int {
+	fieldCounts := map[string]int{}
+	d := json.NewDecoder(r)
+	for d.More() {
+		var entry httpMethodStats
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "http" {
+			continue
+		}
+		fieldCounts[entry.Details.Method]++
+	}
+	return fieldCounts
+}
+
+// httpHostStats is the log structure for extracting HTTP host header data.
+type httpHostStats struct {
+	EventType string `json:"event_type"`
+	Details   struct {
+		Host string `json:"host"`
+	} `json:"event_details"`
+}
+
+func (httpHostStats) fieldName() string { return "Host Header" }
+
+func (httpHostStats) count(r io.Reader) map[string]int {
+	fieldCounts := map[string]int{}
+	d := json.NewDecoder(r)
+	for d.More() {
+		var entry httpHostStats
+		err := d.Decode(&entry)
+		if err != nil || entry.EventType != "http" {
+			continue
+		}
+		fieldCounts[entry.Details.Host]++
+	}
+	return fieldCounts
+}
+
 // logFiles represents open honeypot log files and their associate io.Reader.
 type logFiles struct {
 	files []*os.File
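Note: because every count implementation above is a pure function of an io.Reader, the new stats types can be unit-tested without touching real log files. A possible test sketch inside the threatfeed package (hypothetical file and test names, sample addresses from documentation ranges; not part of this patch):

	// handler-logs_test.go (suggested)
	package threatfeed

	import (
		"strings"
		"testing"
	)

	func TestSSHIPStatsCount(t *testing.T) {
		// Two ssh events from the same source and one http event that the
		// ssh counter is expected to ignore.
		logs := strings.NewReader(`
			{"event_type": "ssh", "source_ip": "203.0.113.7"}
			{"event_type": "ssh", "source_ip": "203.0.113.7"}
			{"event_type": "http", "source_ip": "198.51.100.2"}
		`)
		counts := sshIPStats{}.count(logs)
		if got := counts["203.0.113.7"]; got != 2 {
			t.Errorf("count for 203.0.113.7 = %d, want 2", got)
		}
		if _, ok := counts["198.51.100.2"]; ok {
			t.Error("http event should not be counted by the ssh counter")
		}
	}
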
mux.HandleFunc("GET /logs", enforcePrivateIP(handleLogsMain)) mux.HandleFunc("GET /logs/{logtype}", enforcePrivateIP(handleLogs)) + mux.HandleFunc("GET /logs/{logtype}/{subtype}", enforcePrivateIP(handleLogs)) srv := &http.Server{ Addr: ":" + c.ThreatFeed.Port, diff --git a/internal/threatfeed/templates/css/style.css b/internal/threatfeed/templates/css/style.css index 8ff39a6..d2dab63 100644 --- a/internal/threatfeed/templates/css/style.css +++ b/internal/threatfeed/templates/css/style.css @@ -239,11 +239,11 @@ ul.no-bullets { } ul.log-list { + font-family: 'Menlo', 'Consolas', 'Monaco', 'Liberation Mono', 'Lucida Console', monospace; font-size: 1.1rem; - line-height: 2.5rem; + line-height: 1.75rem; list-style-type: none; - margin-bottom: 0.3rem; - padding-left: 0; + padding: 0; } /* ======= */ @@ -294,29 +294,15 @@ thead th a:active { color: #9cf; text-decoration: none; text-underline-offset: 0.3rem; - padding: 0.75rem 0.8rem; - min-width: 100rem; - border-radius: 8px; -} - -.log-list .icon { - margin-right: 0.6rem; - vertical-align: middle; - padding-bottom: 0.3rem; - color: #aab; + padding: 0.3rem 0; } .log-list a:hover { - outline: 2px solid #7de; - color: #fb0; -} - -.log-list a:hover .icon { - color: #fff; + text-decoration: underline; } .log-list a:active { - background-color: #112; + text-decoration: none; } /* ===== */ @@ -704,46 +690,69 @@ table.live-logs td.tooltip:hover pre.tooltip-content { color: #ff5; } +/* ===== */ +/* Stats */ +/* ===== */ +/* Count */ +.logs-stats tbody td:nth-child(1) { + color: #ee6; + text-align: right; +} + +.logs-stats tbody tr:nth-child(odd) td:nth-child(1) { + color: #ff5; +} + +/* Value */ +.logs-stats tbody td:nth-child(2) { + color: #1ee; + overflow-wrap: anywhere; +} + +.logs-stats tbody tr:nth-child(odd) td:nth-child(2) { + color: #5ff; +} + /* ======== */ /* SSH Logs */ /* ======== */ /* Time */ .logs-ssh tbody td:nth-child(1) { - color: #8b949e; + color: #778; white-space: nowrap; } .logs-ssh tbody tr:nth-child(odd) td:nth-child(1) { - color: #ccc; + color: #99a; } /* Source IP */ .logs-ssh tbody td:nth-child(2) { - color: #48e3ff; + color: #1ee; } .logs-ssh tbody tr:nth-child(odd) td:nth-child(2) { - color: #aaffff; + color: #5ff; } /* Username */ .logs-ssh tbody td:nth-child(3) { - color: #b8c1ff; + color: #ed7; overflow-wrap: anywhere; } .logs-ssh tbody tr:nth-child(odd) td:nth-child(3) { - color: #c8e1ff; + color: #ff5; } /* Password */ .logs-ssh tbody td:nth-child(4) { - color: #ffff55; + color: #8bf; overflow-wrap: anywhere; } .logs-ssh tbody tr:nth-child(odd) td:nth-child(4) { - color: #eedc82; + color: #69e; } /* ========= */ @@ -751,40 +760,40 @@ table.live-logs td.tooltip:hover pre.tooltip-content { /* ========= */ /* Time */ .logs-http tbody td:nth-child(1) { - color: #8b949e; + color: #778; white-space: nowrap; } .logs-http tbody tr:nth-child(odd) td:nth-child(1) { - color: #ccc; + color: #99a; } /* Source IP */ .logs-http tbody td:nth-child(2) { - color: #48e3ff; + color: #1ee; } .logs-http tbody tr:nth-child(odd) td:nth-child(2) { - color: #aaffff; + color: #5ff; } /* Method */ .logs-http tbody td:nth-child(3) { - color: #b8c1ff; + color: #8bf; } .logs-http tbody tr:nth-child(odd) td:nth-child(3) { - color: #c8e1ff; + color: #69e; } /* Path */ .logs-http tbody td:nth-child(4) { - color: #ffff55; + color: #ed7; overflow-wrap: anywhere; } .logs-http tbody tr:nth-child(odd) td:nth-child(4) { - color: #eedc82; + color: #ff5; } /* ============= */ @@ -965,6 +974,10 @@ table.live-logs td.tooltip:hover pre.tooltip-content { .live-logs 
 	.live-logs td:nth-child(3) {
 		margin-bottom: 2rem;
 	}
+
+	pre.tooltip-content {
+		top: 0;
+	}
 }
 
 @media (max-width: 550px) {
diff --git a/internal/threatfeed/templates/logs-stats.html b/internal/threatfeed/templates/logs-stats.html
new file mode 100644
index 0000000..a17d71a
--- /dev/null
+++ b/internal/threatfeed/templates/logs-stats.html
@@ -0,0 +1,47 @@
+ + + + + + Deceptifeed + + + +
+ {{template "nav" .NavData}} +
+
+ {{if .Data}} + + + + + + + {{range .Data}} + {{end}} + +
Count + {{.Header}} +
{{.Count}}{{.Field}}
+ {{else}} +

No log data found

+ {{end}} +
+ + + + \ No newline at end of file diff --git a/internal/threatfeed/templates/logs.html b/internal/threatfeed/templates/logs.html index e7bdc8a..4b1cdee 100644 --- a/internal/threatfeed/templates/logs.html +++ b/internal/threatfeed/templates/logs.html @@ -14,21 +14,21 @@

Honeypot Logs

+
  • SSH Logs
  • +
  • HTTP Logs
  • + +
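Note: adding another statistic later only requires a new type satisfying fieldCounter plus a case in the subtype switch of handleLogs. A hypothetical sketch inside the threatfeed package (imports as in handler-logs.go), assuming the SSH log records carried an event_details field named "protocol_version"; that field is not confirmed by this patch and all names below are illustrative:

	// sshVersionStats would count SSH protocol versions (hypothetical).
	type sshVersionStats struct {
		EventType string `json:"event_type"`
		Details   struct {
			Version string `json:"protocol_version"` // assumed field name
		} `json:"event_details"`
	}

	func (sshVersionStats) fieldName() string { return "Protocol Version" }

	func (sshVersionStats) count(r io.Reader) map[string]int {
		fieldCounts := map[string]int{}
		d := json.NewDecoder(r)
		for d.More() {
			var entry sshVersionStats
			if err := d.Decode(&entry); err != nil || entry.EventType != "ssh" {
				continue
			}
			fieldCounts[entry.Details.Version]++
		}
		return fieldCounts
	}

	// ...and in handleLogs, under case "ssh":
	//	case "version":
	//		displayStats(w, sshVersionStats{})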