diff --git a/common/httpx/csp.go b/common/httpx/csp.go
index c356cfcb8..8950fd775 100644
--- a/common/httpx/csp.go
+++ b/common/httpx/csp.go
@@ -5,7 +5,7 @@ import (
 	"strings"
 
 	"github.com/PuerkitoBio/goquery"
-	"github.com/projectdiscovery/httpx/common/slice"
+	mapsutil "github.com/projectdiscovery/utils/maps"
 	stringsutil "github.com/projectdiscovery/utils/strings"
 )
@@ -49,7 +49,7 @@ func (h *HTTPX) CSPGrab(r *Response) *CSPData {
 	}
 
 	if len(domains) > 0 {
-		return &CSPData{Domains: slice.ToSlice(domains)}
+		return &CSPData{Domains: mapsutil.GetKeys(domains)}
 	}
 	return nil
 }
diff --git a/common/httpx/filter.go b/common/httpx/filter.go
index 846b962d9..e553abcbb 100644
--- a/common/httpx/filter.go
+++ b/common/httpx/filter.go
@@ -2,7 +2,8 @@ package httpx
 
 import (
 	"regexp"
-	"strings"
+
+	stringsutil "github.com/projectdiscovery/utils/strings"
 )
 
 // Filter defines a generic filter interface to apply to responses
@@ -17,13 +18,7 @@ type FilterString struct {
 
 // Filter a response with strings filtering
 func (f FilterString) Filter(response *Response) (bool, error) {
-	for _, keyword := range f.Keywords {
-		if strings.Contains(response.Raw, keyword) {
-			return true, nil
-		}
-	}
-
-	return false, nil
+	return stringsutil.ContainsAnyI(response.Raw, f.Keywords...), nil
 }
 
 // FilterRegex defines a filter of type regex
diff --git a/common/httpx/pipeline.go b/common/httpx/pipeline.go
index f4130ef3f..b6b7b7817 100644
--- a/common/httpx/pipeline.go
+++ b/common/httpx/pipeline.go
@@ -6,6 +6,8 @@ import (
 	"net"
 	"strings"
 	"time"
+
+	stringsutil "github.com/projectdiscovery/utils/strings"
 )
 
 // SupportPipeline checks if the target host supports HTTP1.1 pipelining by sending x probes
@@ -48,7 +50,7 @@ func (h *HTTPX) SupportPipeline(protocol, method, host string, port int) bool {
 
 	// The check is very naive, but it works most of the times
 	for _, s := range strings.Split(string(reply), "\n\n") {
-		if strings.Contains(s, "HTTP/1.1") || strings.Contains(s, "HTTP/1.0") {
+		if stringsutil.ContainsAnyI(s, "HTTP/1.1", "HTTP/1.0") {
 			gotReplies++
 		}
 	}
diff --git a/common/slice/doc.go b/common/slice/doc.go
deleted file mode 100644
index 021761516..000000000
--- a/common/slice/doc.go
+++ /dev/null
@@ -1,2 +0,0 @@
-// Package slice contains a set of utilities to deal with slices
-package slice
diff --git a/common/slice/slice.go b/common/slice/slice.go
deleted file mode 100644
index ce837b9b4..000000000
--- a/common/slice/slice.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package slice
-
-// IntSliceContains check if a slice contains the specified int value
-func IntSliceContains(sl []int, v int) bool {
-	for _, vv := range sl {
-		if vv == v {
-			return true
-		}
-	}
-	return false
-}
-
-// UIntSliceContains check if a slice contains the specified uint value
-func UInt32SliceContains(sl []uint32, v uint32) bool {
-	for _, vv := range sl {
-		if vv == v {
-			return true
-		}
-	}
-	return false
-}
-
-// StringSliceContains check if a slice contains the specified int value
-func StringSliceContains(sl []string, v string) bool {
-	for _, vv := range sl {
-		if vv == v {
-			return true
-		}
-	}
-	return false
-}
-
-// ToSlice creates a slice with all string keys from a map
-func ToSlice(m map[string]struct{}) (s []string) {
-	for k := range m {
-		s = append(s, k)
-	}
-
-	return
-}
diff --git a/runner/options.go b/runner/options.go
index 8a94fceb9..ee7f8e468 100644
--- a/runner/options.go
+++ b/runner/options.go
@@ -23,11 +23,11 @@ import (
 	customport "github.com/projectdiscovery/httpx/common/customports"
 	fileutilz "github.com/projectdiscovery/httpx/common/fileutil"
 	"github.com/projectdiscovery/httpx/common/httpx"
-	"github.com/projectdiscovery/httpx/common/slice"
 	"github.com/projectdiscovery/httpx/common/stringz"
 	"github.com/projectdiscovery/utils/auth/pdcp"
 	"github.com/projectdiscovery/utils/env"
 	fileutil "github.com/projectdiscovery/utils/file"
+	sliceutil "github.com/projectdiscovery/utils/slice"
 	stringsutil "github.com/projectdiscovery/utils/strings"
 	updateutils "github.com/projectdiscovery/utils/update"
 )
@@ -663,7 +663,7 @@ func (options *Options) ValidateOptions() error {
 
 	if options.Hashes != "" {
 		for _, hashType := range strings.Split(options.Hashes, ",") {
-			if !slice.StringSliceContains([]string{"md5", "sha1", "sha256", "sha512", "mmh3", "simhash"}, strings.ToLower(hashType)) {
+			if !sliceutil.Contains([]string{"md5", "sha1", "sha256", "sha512", "mmh3", "simhash"}, strings.ToLower(hashType)) {
 				gologger.Error().Msgf("Unsupported hash type: %s\n", hashType)
 			}
 		}
diff --git a/runner/runner.go b/runner/runner.go
index b07bb76d7..0ced57044 100644
--- a/runner/runner.go
+++ b/runner/runner.go
@@ -62,7 +62,6 @@ import (
 	fileutilz "github.com/projectdiscovery/httpx/common/fileutil"
 	"github.com/projectdiscovery/httpx/common/httputilz"
 	"github.com/projectdiscovery/httpx/common/httpx"
-	"github.com/projectdiscovery/httpx/common/slice"
 	"github.com/projectdiscovery/httpx/common/stringz"
 	"github.com/projectdiscovery/mapcidr"
 	"github.com/projectdiscovery/rawhttp"
@@ -786,15 +785,9 @@ func (r *Runner) RunEnumeration() {
 	}
 
 	for resp := range output {
-
 		if r.options.SniName != "" {
 			resp.SNI = r.options.SniName
 		}
-		// call the callback function if any
-		// be careful and check for result.Err
-		if r.options.OnResult != nil {
-			r.options.OnResult(resp)
-		}
 
 		if resp.Err != nil {
 			// Change the error message if any port value passed explicitly
@@ -807,15 +800,6 @@ func (r *Runner) RunEnumeration() {
 			continue
 		}
 
-		if indexFile != nil {
-			indexData := fmt.Sprintf("%s %s (%d %s)\n", resp.StoredResponsePath, resp.URL, resp.StatusCode, http.StatusText(resp.StatusCode))
-			_, _ = indexFile.WriteString(indexData)
-		}
-		if indexScreenshotFile != nil && resp.ScreenshotPathRel != "" {
-			indexData := fmt.Sprintf("%s %s (%d %s)\n", resp.ScreenshotPathRel, resp.URL, resp.StatusCode, http.StatusText(resp.StatusCode))
-			_, _ = indexScreenshotFile.WriteString(indexData)
-		}
-
 		// apply matchers and filters
 		if r.options.OutputFilterCondition != "" || r.options.OutputMatchCondition != "" {
 			if r.options.OutputMatchCondition != "" {
@@ -836,46 +820,46 @@ func (r *Runner) RunEnumeration() {
 			logFilteredErrorPage(resp.URL)
 			continue
 		}
-		if len(r.options.filterStatusCode) > 0 && slice.IntSliceContains(r.options.filterStatusCode, resp.StatusCode) {
+		if len(r.options.filterStatusCode) > 0 && sliceutil.Contains(r.options.filterStatusCode, resp.StatusCode) {
 			continue
 		}
-		if len(r.options.filterContentLength) > 0 && slice.IntSliceContains(r.options.filterContentLength, resp.ContentLength) {
+		if len(r.options.filterContentLength) > 0 && sliceutil.Contains(r.options.filterContentLength, resp.ContentLength) {
 			continue
 		}
-		if len(r.options.filterLinesCount) > 0 && slice.IntSliceContains(r.options.filterLinesCount, resp.Lines) {
+		if len(r.options.filterLinesCount) > 0 && sliceutil.Contains(r.options.filterLinesCount, resp.Lines) {
 			continue
 		}
-		if len(r.options.filterWordsCount) > 0 && slice.IntSliceContains(r.options.filterWordsCount, resp.Words) {
+		if len(r.options.filterWordsCount) > 0 && sliceutil.Contains(r.options.filterWordsCount, resp.Words) {
 			continue
 		}
 		if r.options.filterRegex != nil && r.options.filterRegex.MatchString(resp.Raw) {
 			continue
 		}
-		if r.options.OutputFilterString != "" && strings.Contains(strings.ToLower(resp.Raw), strings.ToLower(r.options.OutputFilterString)) {
+		if r.options.OutputFilterString != "" && stringsutil.ContainsAnyI(resp.Raw, r.options.OutputFilterString) {
 			continue
 		}
 		if len(r.options.OutputFilterFavicon) > 0 && stringsutil.EqualFoldAny(resp.FavIconMMH3, r.options.OutputFilterFavicon...) {
 			continue
 		}
-		if len(r.options.matchStatusCode) > 0 && !slice.IntSliceContains(r.options.matchStatusCode, resp.StatusCode) {
+		if len(r.options.matchStatusCode) > 0 && !sliceutil.Contains(r.options.matchStatusCode, resp.StatusCode) {
 			continue
 		}
-		if len(r.options.matchContentLength) > 0 && !slice.IntSliceContains(r.options.matchContentLength, resp.ContentLength) {
+		if len(r.options.matchContentLength) > 0 && !sliceutil.Contains(r.options.matchContentLength, resp.ContentLength) {
 			continue
 		}
 		if r.options.matchRegex != nil && !r.options.matchRegex.MatchString(resp.Raw) {
 			continue
 		}
-		if r.options.OutputMatchString != "" && !strings.Contains(strings.ToLower(resp.Raw), strings.ToLower(r.options.OutputMatchString)) {
+		if r.options.OutputMatchString != "" && !stringsutil.ContainsAnyI(resp.Raw, r.options.OutputMatchString) {
 			continue
 		}
 		if len(r.options.OutputMatchFavicon) > 0 && !stringsutil.EqualFoldAny(resp.FavIconMMH3, r.options.OutputMatchFavicon...) {
 			continue
 		}
-		if len(r.options.matchLinesCount) > 0 && !slice.IntSliceContains(r.options.matchLinesCount, resp.Lines) {
+		if len(r.options.matchLinesCount) > 0 && !sliceutil.Contains(r.options.matchLinesCount, resp.Lines) {
 			continue
 		}
-		if len(r.options.matchWordsCount) > 0 && !slice.IntSliceContains(r.options.matchWordsCount, resp.Words) {
+		if len(r.options.matchWordsCount) > 0 && !sliceutil.Contains(r.options.matchWordsCount, resp.Words) {
 			continue
 		}
 		if len(r.options.OutputMatchCdn) > 0 && !stringsutil.EqualFoldAny(resp.CDNName, r.options.OutputMatchCdn...) {
@@ -884,6 +868,72 @@ func (r *Runner) RunEnumeration() {
 		if len(r.options.OutputFilterCdn) > 0 && stringsutil.EqualFoldAny(resp.CDNName, r.options.OutputFilterCdn...) {
 			continue
 		}
+
+		// call the callback function if any
+		// be careful and check for result.Err
+		if r.options.OnResult != nil {
+			r.options.OnResult(resp)
+		}
+
+		// store responses or chain in directory
+		URL, _ := urlutil.Parse(resp.URL)
+		domainFile := resp.Method + ":" + URL.EscapedString()
+		hash := hashes.Sha1([]byte(domainFile))
+		domainResponseFile := fmt.Sprintf("%s.txt", hash)
+		screenshotResponseFile := fmt.Sprintf("%s.png", hash)
+		hostFilename := strings.ReplaceAll(URL.Host, ":", "_")
+		domainResponseBaseDir := filepath.Join(r.options.StoreResponseDir, "response")
+		domainScreenshotBaseDir := filepath.Join(r.options.StoreResponseDir, "screenshot")
+		responseBaseDir := filepath.Join(domainResponseBaseDir, hostFilename)
+		screenshotBaseDir := filepath.Join(domainScreenshotBaseDir, hostFilename)
+
+		var responsePath, screenshotPath, screenshotPathRel string
+		// store response
+		if r.scanopts.StoreResponse || r.scanopts.StoreChain {
+			responsePath = fileutilz.AbsPathOrDefault(filepath.Join(responseBaseDir, domainResponseFile))
+			// URL.EscapedString returns that can be used as filename
+			respRaw := resp.Raw
+			reqRaw := resp.RequestRaw
+			if len(respRaw) > r.scanopts.MaxResponseBodySizeToSave {
+				respRaw = respRaw[:r.scanopts.MaxResponseBodySizeToSave]
+			}
+			data := reqRaw
+			if r.options.StoreChain && resp.Response.HasChain() {
+				data = append(data, append([]byte("\n"), []byte(resp.Response.GetChain())...)...)
+			}
+			data = append(data, respRaw...)
+			data = append(data, []byte("\n\n\n")...)
+			data = append(data, []byte(resp.URL)...)
+			_ = fileutil.CreateFolder(responseBaseDir)
+			writeErr := os.WriteFile(responsePath, data, 0644)
+			if writeErr != nil {
+				gologger.Error().Msgf("Could not write response at path '%s', to disk: %s", responsePath, writeErr)
+			}
+			resp.StoredResponsePath = responsePath
+		}
+
+		if r.scanopts.Screenshot {
+			screenshotPath = fileutilz.AbsPathOrDefault(filepath.Join(screenshotBaseDir, screenshotResponseFile))
+			screenshotPathRel = filepath.Join(hostFilename, screenshotResponseFile)
+			_ = fileutil.CreateFolder(screenshotBaseDir)
+			err := os.WriteFile(screenshotPath, resp.ScreenshotBytes, 0644)
+			if err != nil {
+				gologger.Error().Msgf("Could not write screenshot at path '%s', to disk: %s", screenshotPath, err)
+			}
+
+			resp.ScreenshotPath = screenshotPath
+			resp.ScreenshotPathRel = screenshotPathRel
+		}
+
+		if indexFile != nil {
+			indexData := fmt.Sprintf("%s %s (%d %s)\n", resp.StoredResponsePath, resp.URL, resp.StatusCode, http.StatusText(resp.StatusCode))
+			_, _ = indexFile.WriteString(indexData)
+		}
+		if indexScreenshotFile != nil && resp.ScreenshotPathRel != "" {
+			indexData := fmt.Sprintf("%s %s (%d %s)\n", resp.ScreenshotPathRel, resp.URL, resp.StatusCode, http.StatusText(resp.StatusCode))
+			_, _ = indexScreenshotFile.WriteString(indexData)
+		}
+
 		if r.options.OutputMatchResponseTime != "" {
 			filterOps := FilterOperator{flag: "-mrt, -match-response-time"}
 			operator, value, err := filterOps.Parse(r.options.OutputMatchResponseTime)
@@ -1915,41 +1965,6 @@ retry:
 		builder.WriteRune(']')
 	}
 
-	// store responses or chain in directory
-	domainFile := method + ":" + URL.EscapedString()
-	hash := hashes.Sha1([]byte(domainFile))
-	domainResponseFile := fmt.Sprintf("%s.txt", hash)
-	screenshotResponseFile := fmt.Sprintf("%s.png", hash)
-	hostFilename := strings.ReplaceAll(URL.Host, ":", "_")
-	domainResponseBaseDir := filepath.Join(scanopts.StoreResponseDirectory, "response")
-	domainScreenshotBaseDir := filepath.Join(scanopts.StoreResponseDirectory, "screenshot")
-	responseBaseDir := filepath.Join(domainResponseBaseDir, hostFilename)
-	screenshotBaseDir := filepath.Join(domainScreenshotBaseDir, hostFilename)
-
-	var responsePath, screenshotPath, screenshotPathRel string
-	// store response
-	if scanopts.StoreResponse || scanopts.StoreChain {
-		responsePath = fileutilz.AbsPathOrDefault(filepath.Join(responseBaseDir, domainResponseFile))
-		// URL.EscapedString returns that can be used as filename
-		respRaw := resp.Raw
-		reqRaw := requestDump
-		if len(respRaw) > scanopts.MaxResponseBodySizeToSave {
-			respRaw = respRaw[:scanopts.MaxResponseBodySizeToSave]
-		}
-		data := reqRaw
-		if scanopts.StoreChain && resp.HasChain() {
-			data = append(data, append([]byte("\n"), []byte(resp.GetChain())...)...)
-		}
-		data = append(data, respRaw...)
-		data = append(data, []byte("\n\n\n")...)
-		data = append(data, []byte(fullURL)...)
-		_ = fileutil.CreateFolder(responseBaseDir)
-		writeErr := os.WriteFile(responsePath, data, 0644)
-		if writeErr != nil {
-			gologger.Error().Msgf("Could not write response at path '%s', to disk: %s", responsePath, writeErr)
-		}
-	}
-
 	parsed, err := r.parseURL(fullURL)
 	if err != nil {
 		return Result{URL: fullURL, Input: origInput, Err: errors.Wrap(err, "could not parse url")}
@@ -1988,14 +2003,6 @@ retry:
 		if err != nil {
 			gologger.Warning().Msgf("Could not take screenshot '%s': %s", fullURL, err)
 		} else {
-			screenshotPath = fileutilz.AbsPathOrDefault(filepath.Join(screenshotBaseDir, screenshotResponseFile))
-			screenshotPathRel = filepath.Join(hostFilename, screenshotResponseFile)
-			_ = fileutil.CreateFolder(screenshotBaseDir)
-			err := os.WriteFile(screenshotPath, screenshotBytes, 0644)
-			if err != nil {
-				gologger.Error().Msgf("Could not write screenshot at path '%s', to disk: %s", screenshotPath, err)
-			}
-
			pHash, err = calculatePerceptionHash(screenshotBytes)
 			if err != nil {
 				gologger.Warning().Msgf("%v: %s", err, fullURL)
@@ -2034,66 +2041,63 @@ retry:
 		builder.WriteRune(']')
 	}
 
-	// We now have headless body. We can use it for tech detection
-	result := Result{
-		Timestamp:          time.Now(),
-		Request:            request,
-		ResponseHeaders:    responseHeaders,
-		RawHeaders:         rawResponseHeaders,
-		Scheme:             parsed.Scheme,
-		Port:               finalPort,
-		Path:               finalPath,
-		Raw:                resp.Raw,
-		URL:                fullURL,
-		Input:              origInput,
-		ContentLength:      resp.ContentLength,
-		ChainStatusCodes:   chainStatusCodes,
-		Chain:              chainItems,
-		StatusCode:         resp.StatusCode,
-		Location:           resp.GetHeaderPart("Location", ";"),
-		ContentType:        resp.GetHeaderPart("Content-Type", ";"),
-		Title:              title,
-		str:                builder.String(),
-		VHost:              isvhost,
-		WebServer:          serverHeader,
-		ResponseBody:       serverResponseRaw,
-		BodyPreview:        bodyPreview,
-		WebSocket:          isWebSocket,
-		TLSData:            resp.TLSData,
-		CSPData:            resp.CSPData,
-		Pipeline:           pipeline,
-		HTTP2:              http2,
-		Method:             method,
-		Host:               ip,
-		A:                  ips4,
-		AAAA:               ips6,
-		CNAMEs:             cnames,
-		CDN:                isCDN,
-		CDNName:            cdnName,
-		ResponseTime:       resp.Duration.String(),
-		Technologies:       technologies,
-		FinalURL:           finalURL,
-		FavIconMMH3:        faviconMMH3,
-		FaviconPath:        faviconPath,
-		Hashes:             hashesMap,
-		Extracts:           extractResult,
-		Jarm:               jarmhash,
-		Lines:              resp.Lines,
-		Words:              resp.Words,
-		ASN:                asnResponse,
-		ExtractRegex:       extractRegex,
-		StoredResponsePath: responsePath,
-		ScreenshotBytes:    screenshotBytes,
-		ScreenshotPath:     screenshotPath,
-		ScreenshotPathRel:  screenshotPathRel,
-		HeadlessBody:       headlessBody,
+		Timestamp:        time.Now(),
+		Request:          request,
+		ResponseHeaders:  responseHeaders,
+		RawHeaders:       rawResponseHeaders,
+		Scheme:           parsed.Scheme,
+		Port:             finalPort,
+		Path:             finalPath,
+		Raw:              resp.Raw,
+		URL:              fullURL,
+		Input:            origInput,
+		ContentLength:    resp.ContentLength,
+		ChainStatusCodes: chainStatusCodes,
+		Chain:            chainItems,
+		StatusCode:       resp.StatusCode,
+		Location:         resp.GetHeaderPart("Location", ";"),
+		ContentType:      resp.GetHeaderPart("Content-Type", ";"),
+		Title:            title,
+		str:              builder.String(),
+		VHost:            isvhost,
+		WebServer:        serverHeader,
+		ResponseBody:     serverResponseRaw,
+		BodyPreview:      bodyPreview,
+		WebSocket:        isWebSocket,
+		TLSData:          resp.TLSData,
+		CSPData:          resp.CSPData,
+		Pipeline:         pipeline,
+		HTTP2:            http2,
+		Method:           method,
+		Host:             ip,
+		A:                ips4,
+		AAAA:             ips6,
+		CNAMEs:           cnames,
+		CDN:              isCDN,
+		CDNName:          cdnName,
+		ResponseTime:     resp.Duration.String(),
+		Technologies:     technologies,
+		FinalURL:         finalURL,
+		FavIconMMH3:      faviconMMH3,
+		FaviconPath:      faviconPath,
+		Hashes:           hashesMap,
+		Extracts:         extractResult,
+		Jarm:             jarmhash,
+		Lines:            resp.Lines,
+		Words:            resp.Words,
+		ASN:              asnResponse,
+		ExtractRegex:     extractRegex,
+		ScreenshotBytes:  screenshotBytes,
+		HeadlessBody:     headlessBody,
 		KnowledgeBase: map[string]interface{}{
 			"PageType": r.errorPageClassifier.Classify(respData),
 			"pHash":    pHash,
 		},
 		TechnologyDetails: technologyDetails,
 		Resolvers:         resolvers,
+		RequestRaw:        requestDump,
+		Response:          resp,
 	}
 	return result
 }
diff --git a/runner/types.go b/runner/types.go
index fc6f3287f..1928081eb 100644
--- a/runner/types.go
+++ b/runner/types.go
@@ -90,7 +90,10 @@ type Result struct {
 	KnowledgeBase map[string]interface{} `json:"knowledgebase,omitempty" csv:"knowledgebase"`
 	Resolvers     []string               `json:"resolvers,omitempty" csv:"resolvers"`
 
+	// Internal Fields
 	TechnologyDetails map[string]wappalyzer.AppInfo `json:"-" csv:"-"`
+	RequestRaw        []byte                        `json:"-" csv:"-"`
+	Response          *httpx.Response               `json:"-" csv:"-"`
 }
 
 // function to get dsl variables from result struct
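Usage note (not part of the diff): a minimal library-mode sketch of how a consumer observes this change, assuming the runner.Options / runner.New / OnResult API shown in the httpx README; the Methods, InputTargetHost, StoreResponse, and StoreResponseDir field names are taken from that example and from this diff, not verified against a specific release. With this patch, OnResult fires only for results that pass matchers and filters, and responses/screenshots are written and indexed only for those same results.

// Hypothetical library-mode consumer of httpx; names assume the README example.
package main

import (
	"log"

	"github.com/projectdiscovery/goflags"
	"github.com/projectdiscovery/httpx/runner"
)

func main() {
	options := runner.Options{
		Methods:          "GET",
		InputTargetHost:  goflags.StringSlice{"scanme.sh"},
		StoreResponse:    true,           // -sr: only matched/filtered results are stored after this change
		StoreResponseDir: "httpx-output", // responses under <dir>/response, screenshots under <dir>/screenshot
		OnResult: func(r runner.Result) {
			// The callback now runs inside RunEnumeration after matchers and
			// filters, so filtered-out results never reach it.
			if r.Err != nil {
				log.Printf("error on %s: %s", r.Input, r.Err)
				return
			}
			log.Printf("%s [%d]", r.URL, r.StatusCode)
		},
	}

	if err := options.ValidateOptions(); err != nil {
		log.Fatal(err)
	}

	httpxRunner, err := runner.New(&options)
	if err != nil {
		log.Fatal(err)
	}
	defer httpxRunner.Close()

	httpxRunner.RunEnumeration()
}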