pax_global_header 0000666 0000000 0000000 00000000064 13706265214 0014520 g ustar 00root root 0000000 0000000 52 comment=d05cf280aefdd4a13e530915708c768ce446187a
ffuf-1.1.0/ 0000775 0000000 0000000 00000000000 13706265214 0012445 5 ustar 00root root 0000000 0000000 ffuf-1.1.0/.gitignore 0000664 0000000 0000000 00000000014 13706265214 0014430 0 ustar 00root root 0000000 0000000 /ffuf
.idea
ffuf-1.1.0/.goreleaser.yml 0000664 0000000 0000000 00000000636 13706265214 0015403 0 ustar 00root root 0000000 0000000 builds:
- binary: ffuf
goos:
- linux
- windows
- freebsd
- openbsd
- darwin
goarch:
- amd64
- 386
- arm
- arm64
ignore:
- goos: freebsd
goarch: arm64
archives:
- id: tgz
format: tar.gz
replacements:
darwin: macOS
format_overrides:
- goos: windows
format: zip
signs:
- artifacts: checksum
ffuf-1.1.0/CHANGELOG.md 0000664 0000000 0000000 00000014614 13706265214 0014264 0 ustar 00root root 0000000 0000000 ## Changelog
- master
- New
- Changed
- v1.1.0
- New
- New CLI flag `-maxtime-job` to set max. execution time per job.
- Changed behaviour of `-maxtime`, can now be used for entire process.
- A new flag `-ignore-body` so ffuf does not fetch the response content. Default value=false.
- Added the wordlists to the header information.
- Added support to output "all" formats (specify the path/filename sans file extension and ffuf will add the appropriate suffix for the filetype)
- Changed
- Fixed a bug related to the autocalibration feature making the random seed initialization also to take place before autocalibration needs it.
- Added tls renegotiation flag to fix #193 in http.Client
- Fixed HTML report to display select/combo-box for rows per page (and increased default from 10 to 250 rows).
- Added Host information to JSON output file
- Fixed request method when supplying request file
- Fixed crash with 3XX responses that weren't redirects (304 Not Modified, 300 Multiple Choices etc)
- v1.0.2
- Changed
- Write POST request data properly to file when ran with `-od`.
- Fixed a bug by using header canonicaliztion related to HTTP headers being case insensitive.
- Properly handle relative redirect urls with `-recursion`
- Calculate req/sec correctly for when using recursion
- When `-request` is used, allow the user to override URL using `-u`
- v1.0.1
- Changed
- Fixed a bug where regex matchers and filters would fail if `-od` was used to store the request & response contents.
- v1.0
- New
- New CLI flag `-ic` to ignore comments from wordlist.
- New CLI flags `-request` to specify the raw request file to build the actual request from and `-request-proto` to define the new request format.
- New CLI flag `-od` (output directory) to enable writing requests and responses for matched results to a file for postprocessing or debugging purposes.
- New CLI flag `-maxtime` to limit the running time of ffuf
- New CLI flags `-recursion` and `-recursion-depth` to control recursive ffuf jobs if directories are found. This requires the `-u` to end with FUZZ keyword.
- New CLI flag `-replay-proxy` to replay matched requests using a custom proxy.
- Changed
- Limit the use of `-e` (extensions) to a single keyword: FUZZ
- Regexp matching and filtering (-mr/-fr) allow using keywords in patterns
- Take 429 responses into account when -sa (stop on all error cases) is used
- Remove -k flag support, convert to dummy flag #134
- Write configuration to output JSON
- Better help text.
- If any matcher is set, ignore -mc default value.
- v0.12
- New
- Added a new flag to select a multi wordlist operation mode: `--mode`, possible values: `clusterbomb` and `pitchfork`.
- Added a new output file format eJSON, for always base64 encoding the input data.
- Redirect location is always shown in the output files (when using `-o`)
- Full URL is always shown in the output files (when using `-o`)
- HTML output format got [DataTables](https://datatables.net/) support allowing realtime searches, sorting by column etc.
- New CLI flag `-v` for verbose output. Including full URL, and redirect location.
- SIGTERM monitoring, in order to catch keyboard interrupts an such, to be able to write `-o` files before exiting.
- Changed
- Fixed a bug in the default multi wordlist mode
- Fixed JSON output regression, where all the input data was always encoded in base64
- `--debug-log` no correctly logs connection errors
- Removed `-l` flag in favor of `-v`
- More verbose information in banner shown in startup.
- v0.11
- New
- New CLI flag: -l, shows target location of redirect responses
- New CLI flac: -acc, custom auto-calibration strings
- New CLI flag: -debug-log, writes the debug logging to the specified file.
- New CLI flags -ml and -fl, filters/matches line count in response
- Ability to use multiple wordlists / keywords by defining multiple -w command line flags. The if no keyword is defined, the default is FUZZ to keep backwards compatibility. Example: `-w "wordlists/custom.txt:CUSTOM" -H "RandomHeader: CUSTOM"`.
- Changed
- New CLI flag: -i, dummy flag that does nothing. for compatibility with copy as curl.
- New CLI flag: -b/--cookie, cookie data for compatibility with copy as curl.
- New Output format are available: HTML and Markdown table.
- New CLI flag: -l, shows target location of redirect responses
- Filtering and matching by status code, response size or word count now allow using ranges in addition to single values
- The internal logging information to be discarded, and can be written to a file with the new `-debug-log` flag.
- v0.10
- New
- New CLI flag: -ac to autocalibrate response size and word filters based on few preset URLs.
- New CLI flag: -timeout to specify custom timeouts for all HTTP requests.
- New CLI flag: --data for compatibility with copy as curl functionality of browsers.
- New CLI flag: --compressed, dummy flag that does nothing. for compatibility with copy as curl.
- New CLI flags: --input-cmd, and --input-num to handle input generation using external commands. Mutators for example. Environment variable FFUF_NUM will be updated on every call of the command.
- When --input-cmd is used, display position instead of the payload in results. The output file (of all formats) will include the payload in addition to the position however.
- Changed
- Wordlist can also be read from standard input
- Defining -d or --data implies POST method if -X doesn't set it to something else than GET
- v0.9
- New
- New output file formats: CSV and eCSV (CSV with base64 encoded input field to avoid CSV breakage with payloads containing a comma)
- New CLI flag to follow redirects
- Erroring connections will be retried once
- Error counter in status bar
- New CLI flags: -se (stop on spurious errors) and -sa (stop on all errors, implies -se and -sf)
- New CLI flags: -e to provide a list of extensions to add to wordlist entries, and -D to provide DirSearch wordlist format compatibility.
- Wildcard option for response status code matcher.
- v0.8
- New
- New CLI flag to write output to a file in JSON format
- New CLI flag to stop on spurious 403 responses
- Changed
- Regex matching / filtering now matches the headers alongside of the response body
ffuf-1.1.0/CONTRIBUTORS.md 0000664 0000000 0000000 00000001665 13706265214 0014734 0 ustar 00root root 0000000 0000000 # Contributors
* [bjhulst](https://github.com/bjhulst)
* [bsysop](https://twitter.com/bsysop)
* [ccsplit](https://github.com/ccsplit)
* [codingo](https://github.com/codingo)
* [c_sto](https://github.com/c-sto)
* [Damian89](https://github.com/Damian89)
* [Daviey](https://github.com/Daviey)
* [delic](https://github.com/delic)
* [eur0pa](https://github.com/eur0pa)
* [fang0654](https://github.com/fang0654)
* [helpermika](https://github.com/helpermika)
* [Ice3man543](https://github.com/Ice3man543)
* [JamTookTheBait](https://github.com/JamTookTheBait)
* [joohoi](https://github.com/joohoi)
* [jvesiluoma](https://github.com/jvesiluoma)
* [lc](https://github.com/lc)
* [nnwakelam](https://twitter.com/nnwakelam)
* [oh6hay](https://github.com/oh6hay)
* [putsi](https://github.com/putsi)
* [SakiiR](https://github.com/SakiiR)
* [seblw](https://github.com/seblw)
* [Shaked](https://github.com/Shaked)
* [SolomonSklash](https://github.com/SolomonSklash)
ffuf-1.1.0/LICENSE 0000664 0000000 0000000 00000002057 13706265214 0013456 0 ustar 00root root 0000000 0000000 MIT License
Copyright (c) 2020 Joona Hoikkala
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
ffuf-1.1.0/README.md 0000664 0000000 0000000 00000021752 13706265214 0013733 0 ustar 00root root 0000000 0000000 ```
/'___\ /'___\ /'___\
/\ \__/ /\ \__/ __ __ /\ \__/
\ \ ,__\\ \ ,__\/\ \/\ \ \ \ ,__\
\ \ \_/ \ \ \_/\ \ \_\ \ \ \ \_/
\ \_\ \ \_\ \ \____/ \ \_\
\/_/ \/_/ \/___/ \/_/
```
# ffuf - Fuzz Faster U Fool
A fast web fuzzer written in Go.
## Installation
- [Download](https://github.com/ffuf/ffuf/releases/latest) a prebuilt binary from [releases page](https://github.com/ffuf/ffuf/releases/latest), unpack and run!
or
- If you have go compiler installed: `go get github.com/ffuf/ffuf`
The only dependency of ffuf is Go 1.11. No dependencies outside of Go standard library are needed.
## Example usage
### Typical directory discovery
[](https://asciinema.org/a/211350)
By using the FUZZ keyword at the end of URL (`-u`):
```
ffuf -w /path/to/wordlist -u https://target/FUZZ
```
### Virtual host discovery (without DNS records)
[](https://asciinema.org/a/211360)
Assuming that the default virtualhost response size is 4242 bytes, we can filter out all the responses of that size (`-fs 4242`)while fuzzing the Host - header:
```
ffuf -w /path/to/vhost/wordlist -u https://target -H "Host: FUZZ" -fs 4242
```
### GET parameter fuzzing
GET parameter name fuzzing is very similar to directory discovery, and works by defining the `FUZZ` keyword as a part of the URL. This also assumes an response size of 4242 bytes for invalid GET parameter name.
```
ffuf -w /path/to/paramnames.txt -u https://target/script.php?FUZZ=test_value -fs 4242
```
If the parameter name is known, the values can be fuzzed the same way. This example assumes a wrong parameter value returning HTTP response code 401.
```
ffuf -w /path/to/values.txt -u https://target/script.php?valid_name=FUZZ -fc 401
```
### POST data fuzzing
This is a very straightforward operation, again by using the `FUZZ` keyword. This example is fuzzing only part of the POST request. We're again filtering out the 401 responses.
```
ffuf -w /path/to/postdata.txt -X POST -d "username=admin\&password=FUZZ" -u https://target/login.php -fc 401
```
### Maximum execution time
If you don't want ffuf to run indefinitely, you can use the `-maxtime`. This stops __the entire__ process after a given time (in seconds).
```
ffuf -w /path/to/wordlist -u https://target/FUZZ -maxtime 60
```
When working with recursion, you can control the maxtime __per job__ using `-maxtime-job`. This will stop the current job after a given time (in seconds) and continue with the next one. New jobs are created when the recursion functionality detects a subdirectory.
```
ffuf -w /path/to/wordlist -u https://target/FUZZ -maxtime-job 60 -recursion -recursion-depth 2
```
It is also possible to combine both flags limiting the per job maximum execution time as well as the overall execution time. If you do not use recursion then both flags behave equally.
### Using external mutator to produce test cases
For this example, we'll fuzz JSON data that's sent over POST. [Radamsa](https://gitlab.com/akihe/radamsa) is used as the mutator.
When `--input-cmd` is used, ffuf will display matches as their position. This same position value will be available for the callee as an environment variable `$FFUF_NUM`. We'll use this position value as the seed for the mutator. Files example1.txt and example2.txt contain valid JSON payloads. We are matching all the responses, but filtering out response code `400 - Bad request`:
```
ffuf --input-cmd 'radamsa --seed $FFUF_NUM example1.txt example2.txt' -H "Content-Type: application/json" -X POST -u https://ffuf.io.fi/ -mc all -fc 400
```
It of course isn't very efficient to call the mutator for each payload, so we can also pre-generate the payloads, still using [Radamsa](https://gitlab.com/akihe/radamsa) as an example:
```
# Generate 1000 example payloads
radamsa -n 1000 -o %n.txt example1.txt example2.txt
# This results into files 1.txt ... 1000.txt
# Now we can just read the payload data in a loop from file for ffuf
ffuf --input-cmd 'cat $FFUF_NUM.txt' -H "Content-Type: application/json" -X POST -u https://ffuf.io.fi/ -mc all -fc 400
```
## Usage
To define the test case for ffuf, use the keyword `FUZZ` anywhere in the URL (`-u`), headers (`-H`), or POST data (`-d`).
```
Fuzz Faster U Fool - v1.0
HTTP OPTIONS:
-H Header `"Name: Value"`, separated by colon. Multiple -H flags are accepted.
-X HTTP method to use (default: GET)
-b Cookie data `"NAME1=VALUE1; NAME2=VALUE2"` for copy as curl functionality.
-d POST data
-r Follow redirects (default: false)
-recursion Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it. (default: false)
-recursion-depth Maximum recursion depth. (default: 0)
-replay-proxy Replay matched requests using this proxy.
-timeout HTTP request timeout in seconds. (default: 10)
-u Target URL
-x HTTP Proxy URL
GENERAL OPTIONS:
-V Show version information. (default: false)
-ac Automatically calibrate filtering options (default: false)
-acc Custom auto-calibration string. Can be used multiple times. Implies -ac
-c Colorize output. (default: false)
-maxtime Maximum running time in seconds for the entire process. (default: 0)
-maxtime-job Maximum running time in seconds per job. (default: 0)
-p Seconds of `delay` between requests, or a range of random delay. For example "0.1" or "0.1-2.0"
-s Do not print additional information (silent mode) (default: false)
-sa Stop on all error cases. Implies -sf and -se. (default: false)
-se Stop on spurious errors (default: false)
-sf Stop when > 95% of responses return 403 Forbidden (default: false)
-t Number of concurrent threads. (default: 40)
-v Verbose output, printing full URL and redirect location (if any) with the results. (default: false)
MATCHER OPTIONS:
-mc Match HTTP status codes, or "all" for everything. (default: 200,204,301,302,307,401,403)
-ml Match amount of lines in response
-mr Match regexp
-ms Match HTTP response size
-mw Match amount of words in response
FILTER OPTIONS:
-fc Filter HTTP status codes from response. Comma separated list of codes and ranges
-fl Filter by amount of lines in response. Comma separated list of line counts and ranges
-fr Filter regexp
-fs Filter HTTP response size. Comma separated list of sizes and ranges
-fw Filter by amount of words in response. Comma separated list of word counts and ranges
INPUT OPTIONS:
-D DirSearch wordlist compatibility mode. Used in conjunction with -e flag. (default: false)
-e Comma separated list of extensions. Extends FUZZ keyword.
-ic Ignore wordlist comments (default: false)
-input-cmd Command producing the input. --input-num is required when using this input method. Overrides -w.
-input-num Number of inputs to test. Used in conjunction with --input-cmd. (default: 100)
-mode Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork (default: clusterbomb)
-request File containing the raw http request
-request-proto Protocol to use along with raw request (default: https)
-w Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD'
OUTPUT OPTIONS:
-debug-log Write all of the internal logging to the specified file.
-o Write output to file
-od Directory path to store matched results to.
-of Output file format. Available formats: json, ejson, html, md, csv, ecsv (default: json)
EXAMPLE USAGE:
Fuzz file paths from wordlist.txt, match all responses but filter out those with content-size 42.
Colored, verbose output.
ffuf -w wordlist.txt -u https://example.org/FUZZ -mc all -fs 42 -c -v
Fuzz Host-header, match HTTP 200 responses.
ffuf -w hosts.txt -u https://example.org/ -H "Host: FUZZ" -mc 200
Fuzz POST JSON data. Match all responses not containing text "error".
ffuf -w entries.txt -u https://example.org/ -X POST -H "Content-Type: application/json" \
-d '{"name": "FUZZ", "anotherkey": "anothervalue"}' -fr "error"
Fuzz multiple locations. Match only responses reflecting the value of "VAL" keyword. Colored.
ffuf -w params.txt:PARAM -w values.txt:VAL -u https://example.org/?PARAM=VAL -mr "VAL" -c
More information and examples: https://github.com/ffuf/ffuf
```
## Helper scripts and advanced payloads
See [ffuf-scripts](https://github.com/ffuf/ffuf-scripts) repository for helper scripts and payload generators
for different workflows and usage scenarios.
## License
ffuf is released under MIT license. See [LICENSE](https://github.com/ffuf/ffuf/blob/master/LICENSE).
ffuf-1.1.0/go.mod 0000664 0000000 0000000 00000000045 13706265214 0013552 0 ustar 00root root 0000000 0000000 module github.com/ffuf/ffuf
go 1.11
ffuf-1.1.0/help.go 0000664 0000000 0000000 00000012031 13706265214 0013721 0 ustar 00root root 0000000 0000000 package main
import (
"flag"
"fmt"
"os"
"github.com/ffuf/ffuf/pkg/ffuf"
)
type UsageSection struct {
Name string
Description string
Flags []UsageFlag
Hidden bool
ExpectedFlags []string
}
//PrintSection prints out the section name, description and each of the flags
func (u *UsageSection) PrintSection(max_length int, extended bool) {
// Do not print if extended usage not requested and section marked as hidden
if !extended && u.Hidden {
return
}
fmt.Printf("%s:\n", u.Name)
for _, f := range u.Flags {
f.PrintFlag(max_length)
}
fmt.Printf("\n")
}
type UsageFlag struct {
Name string
Description string
Default string
}
//PrintFlag prints out the flag name, usage string and default value
func (f *UsageFlag) PrintFlag(max_length int) {
// Create format string, used for padding
format := fmt.Sprintf(" -%%-%ds %%s", max_length)
if f.Default != "" {
format = format + " (default: %s)\n"
fmt.Printf(format, f.Name, f.Description, f.Default)
} else {
format = format + "\n"
fmt.Printf(format, f.Name, f.Description)
}
}
func Usage() {
u_http := UsageSection{
Name: "HTTP OPTIONS",
Description: "Options controlling the HTTP request and its parts.",
Flags: make([]UsageFlag, 0),
Hidden: false,
ExpectedFlags: []string{"H", "X", "b", "d", "r", "u", "recursion", "recursion-depth", "replay-proxy", "timeout", "ignore-body", "x"},
}
u_general := UsageSection{
Name: "GENERAL OPTIONS",
Description: "",
Flags: make([]UsageFlag, 0),
Hidden: false,
ExpectedFlags: []string{"ac", "acc", "c", "maxtime", "maxtime-job", "p", "s", "sa", "se", "sf", "t", "v", "V"},
}
u_compat := UsageSection{
Name: "COMPATIBILITY OPTIONS",
Description: "Options to ensure compatibility with other pieces of software.",
Flags: make([]UsageFlag, 0),
Hidden: true,
ExpectedFlags: []string{"compressed", "cookie", "data", "data-ascii", "data-binary", "i", "k"},
}
u_matcher := UsageSection{
Name: "MATCHER OPTIONS",
Description: "Matchers for the response filtering.",
Flags: make([]UsageFlag, 0),
Hidden: false,
ExpectedFlags: []string{"mc", "ml", "mr", "ms", "mw"},
}
u_filter := UsageSection{
Name: "FILTER OPTIONS",
Description: "Filters for the response filtering.",
Flags: make([]UsageFlag, 0),
Hidden: false,
ExpectedFlags: []string{"fc", "fl", "fr", "fs", "fw"},
}
u_input := UsageSection{
Name: "INPUT OPTIONS",
Description: "Options for input data for fuzzing. Wordlists and input generators.",
Flags: make([]UsageFlag, 0),
Hidden: false,
ExpectedFlags: []string{"D", "ic", "input-cmd", "input-num", "mode", "request", "request-proto", "e", "w"},
}
u_output := UsageSection{
Name: "OUTPUT OPTIONS",
Description: "Options for output. Output file formats, file names and debug file locations.",
Flags: make([]UsageFlag, 0),
Hidden: false,
ExpectedFlags: []string{"debug-log", "o", "of", "od"},
}
sections := []UsageSection{u_http, u_general, u_compat, u_matcher, u_filter, u_input, u_output}
// Populate the flag sections
max_length := 0
flag.VisitAll(func(f *flag.Flag) {
found := false
for i, section := range sections {
if strInSlice(f.Name, section.ExpectedFlags) {
sections[i].Flags = append(sections[i].Flags, UsageFlag{
Name: f.Name,
Description: f.Usage,
Default: f.DefValue,
})
found = true
}
}
if !found {
fmt.Printf("DEBUG: Flag %s was found but not defined in help.go.\n", f.Name)
os.Exit(1)
}
if len(f.Name) > max_length {
max_length = len(f.Name)
}
})
fmt.Printf("Fuzz Faster U Fool - v%s\n\n", ffuf.VERSION)
// Print out the sections
for _, section := range sections {
section.PrintSection(max_length, false)
}
// Usage examples.
fmt.Printf("EXAMPLE USAGE:\n")
fmt.Printf(" Fuzz file paths from wordlist.txt, match all responses but filter out those with content-size 42.\n")
fmt.Printf(" Colored, verbose output.\n")
fmt.Printf(" ffuf -w wordlist.txt -u https://example.org/FUZZ -mc all -fs 42 -c -v\n\n")
fmt.Printf(" Fuzz Host-header, match HTTP 200 responses.\n")
fmt.Printf(" ffuf -w hosts.txt -u https://example.org/ -H \"Host: FUZZ\" -mc 200\n\n")
fmt.Printf(" Fuzz POST JSON data. Match all responses not containing text \"error\".\n")
fmt.Printf(" ffuf -w entries.txt -u https://example.org/ -X POST -H \"Content-Type: application/json\" \\\n")
fmt.Printf(" -d '{\"name\": \"FUZZ\", \"anotherkey\": \"anothervalue\"}' -fr \"error\"\n\n")
fmt.Printf(" Fuzz multiple locations. Match only responses reflecting the value of \"VAL\" keyword. Colored.\n")
fmt.Printf(" ffuf -w params.txt:PARAM -w values.txt:VAL -u https://example.org/?PARAM=VAL -mr \"VAL\" -c\n\n")
fmt.Printf(" More information and examples: https://github.com/ffuf/ffuf\n\n")
}
func strInSlice(val string, slice []string) bool {
for _, v := range slice {
if v == val {
return true
}
}
return false
}
ffuf-1.1.0/main.go 0000664 0000000 0000000 00000045775 13706265214 0013742 0 ustar 00root root 0000000 0000000 package main
import (
"bufio"
"context"
"flag"
"fmt"
"io/ioutil"
"log"
"net/textproto"
"net/url"
"os"
"runtime"
"strconv"
"strings"
"github.com/ffuf/ffuf/pkg/ffuf"
"github.com/ffuf/ffuf/pkg/filter"
"github.com/ffuf/ffuf/pkg/input"
"github.com/ffuf/ffuf/pkg/output"
"github.com/ffuf/ffuf/pkg/runner"
)
type cliOptions struct {
extensions string
delay string
filterStatus string
filterSize string
filterRegexp string
filterWords string
filterLines string
matcherStatus string
matcherSize string
matcherRegexp string
matcherWords string
matcherLines string
proxyURL string
replayProxyURL string
request string
requestProto string
URL string
outputFormat string
ignoreBody bool
wordlists multiStringFlag
inputcommands multiStringFlag
headers multiStringFlag
cookies multiStringFlag
AutoCalibrationStrings multiStringFlag
showVersion bool
debugLog string
}
type multiStringFlag []string
func (m *multiStringFlag) String() string {
return ""
}
func (m *multiStringFlag) Set(value string) error {
*m = append(*m, value)
return nil
}
func main() {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
conf := ffuf.NewConfig(ctx)
opts := cliOptions{}
var ignored bool
flag.BoolVar(&conf.IgnoreWordlistComments, "ic", false, "Ignore wordlist comments")
flag.StringVar(&opts.extensions, "e", "", "Comma separated list of extensions. Extends FUZZ keyword.")
flag.BoolVar(&conf.DirSearchCompat, "D", false, "DirSearch wordlist compatibility mode. Used in conjunction with -e flag.")
flag.Var(&opts.headers, "H", "Header `\"Name: Value\"`, separated by colon. Multiple -H flags are accepted.")
flag.StringVar(&opts.URL, "u", "", "Target URL")
flag.Var(&opts.wordlists, "w", "Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD'")
flag.BoolVar(&ignored, "k", false, "Dummy flag for backwards compatibility")
flag.StringVar(&opts.delay, "p", "", "Seconds of `delay` between requests, or a range of random delay. For example \"0.1\" or \"0.1-2.0\"")
flag.StringVar(&opts.filterStatus, "fc", "", "Filter HTTP status codes from response. Comma separated list of codes and ranges")
flag.StringVar(&opts.filterSize, "fs", "", "Filter HTTP response size. Comma separated list of sizes and ranges")
flag.StringVar(&opts.filterRegexp, "fr", "", "Filter regexp")
flag.StringVar(&opts.filterWords, "fw", "", "Filter by amount of words in response. Comma separated list of word counts and ranges")
flag.StringVar(&opts.filterLines, "fl", "", "Filter by amount of lines in response. Comma separated list of line counts and ranges")
flag.StringVar(&conf.Data, "d", "", "POST data")
flag.StringVar(&conf.Data, "data", "", "POST data (alias of -d)")
flag.StringVar(&conf.Data, "data-ascii", "", "POST data (alias of -d)")
flag.StringVar(&conf.Data, "data-binary", "", "POST data (alias of -d)")
flag.BoolVar(&conf.Colors, "c", false, "Colorize output.")
flag.BoolVar(&ignored, "compressed", true, "Dummy flag for copy as curl functionality (ignored)")
flag.Var(&opts.inputcommands, "input-cmd", "Command producing the input. --input-num is required when using this input method. Overrides -w.")
flag.IntVar(&conf.InputNum, "input-num", 100, "Number of inputs to test. Used in conjunction with --input-cmd.")
flag.StringVar(&conf.InputMode, "mode", "clusterbomb", "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork")
flag.BoolVar(&ignored, "i", true, "Dummy flag for copy as curl functionality (ignored)")
flag.Var(&opts.cookies, "b", "Cookie data `\"NAME1=VALUE1; NAME2=VALUE2\"` for copy as curl functionality.")
flag.Var(&opts.cookies, "cookie", "Cookie data (alias of -b)")
flag.StringVar(&opts.matcherStatus, "mc", "200,204,301,302,307,401,403", "Match HTTP status codes, or \"all\" for everything.")
flag.StringVar(&opts.matcherSize, "ms", "", "Match HTTP response size")
flag.StringVar(&opts.matcherRegexp, "mr", "", "Match regexp")
flag.StringVar(&opts.matcherWords, "mw", "", "Match amount of words in response")
flag.StringVar(&opts.matcherLines, "ml", "", "Match amount of lines in response")
flag.StringVar(&opts.proxyURL, "x", "", "HTTP Proxy URL")
flag.StringVar(&opts.request, "request", "", "File containing the raw http request")
flag.StringVar(&opts.requestProto, "request-proto", "https", "Protocol to use along with raw request")
flag.StringVar(&conf.Method, "X", "GET", "HTTP method to use")
flag.StringVar(&conf.OutputFile, "o", "", "Write output to file")
flag.StringVar(&opts.outputFormat, "of", "json", "Output file format. Available formats: json, ejson, html, md, csv, ecsv (or, 'all' for all formats)")
flag.StringVar(&conf.OutputDirectory, "od", "", "Directory path to store matched results to.")
flag.BoolVar(&conf.IgnoreBody, "ignore-body", false, "Do not fetch the response content.")
flag.BoolVar(&conf.Quiet, "s", false, "Do not print additional information (silent mode)")
flag.BoolVar(&conf.StopOn403, "sf", false, "Stop when > 95% of responses return 403 Forbidden")
flag.BoolVar(&conf.StopOnErrors, "se", false, "Stop on spurious errors")
flag.BoolVar(&conf.StopOnAll, "sa", false, "Stop on all error cases. Implies -sf and -se.")
flag.BoolVar(&conf.FollowRedirects, "r", false, "Follow redirects")
flag.BoolVar(&conf.Recursion, "recursion", false, "Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it.")
flag.IntVar(&conf.RecursionDepth, "recursion-depth", 0, "Maximum recursion depth.")
flag.StringVar(&opts.replayProxyURL, "replay-proxy", "", "Replay matched requests using this proxy.")
flag.BoolVar(&conf.AutoCalibration, "ac", false, "Automatically calibrate filtering options")
flag.Var(&opts.AutoCalibrationStrings, "acc", "Custom auto-calibration string. Can be used multiple times. Implies -ac")
flag.IntVar(&conf.Threads, "t", 40, "Number of concurrent threads.")
flag.IntVar(&conf.Timeout, "timeout", 10, "HTTP request timeout in seconds.")
flag.IntVar(&conf.MaxTime, "maxtime", 0, "Maximum running time in seconds for entire process.")
flag.IntVar(&conf.MaxTimeJob, "maxtime-job", 0, "Maximum running time in seconds per job.")
flag.BoolVar(&conf.Verbose, "v", false, "Verbose output, printing full URL and redirect location (if any) with the results.")
flag.BoolVar(&opts.showVersion, "V", false, "Show version information.")
flag.StringVar(&opts.debugLog, "debug-log", "", "Write all of the internal logging to the specified file.")
flag.Usage = Usage
flag.Parse()
if opts.showVersion {
fmt.Printf("ffuf version: %s\n", ffuf.VERSION)
os.Exit(0)
}
if len(opts.debugLog) != 0 {
f, err := os.OpenFile(opts.debugLog, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
fmt.Fprintf(os.Stderr, "Disabling logging, encountered error(s): %s\n", err)
log.SetOutput(ioutil.Discard)
} else {
log.SetOutput(f)
defer f.Close()
}
} else {
log.SetOutput(ioutil.Discard)
}
if err := prepareConfig(&opts, &conf); err != nil {
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
Usage()
os.Exit(1)
}
job, err := prepareJob(&conf)
if err != nil {
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
Usage()
os.Exit(1)
}
if err := prepareFilters(&opts, &conf); err != nil {
fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
Usage()
os.Exit(1)
}
if err := filter.CalibrateIfNeeded(job); err != nil {
fmt.Fprintf(os.Stderr, "Error in autocalibration, exiting: %s\n", err)
os.Exit(1)
}
// Job handles waiting for goroutines to complete itself
job.Start()
}
func prepareJob(conf *ffuf.Config) (*ffuf.Job, error) {
job := &ffuf.Job{
Config: conf,
}
errs := ffuf.NewMultierror()
var err error
inputprovider, err := input.NewInputProvider(conf)
if err != nil {
errs.Add(err)
}
// TODO: implement error handling for runnerprovider and outputprovider
// We only have http runner right now
job.Runner = runner.NewRunnerByName("http", conf, false)
if len(conf.ReplayProxyURL) > 0 {
job.ReplayRunner = runner.NewRunnerByName("http", conf, true)
}
// Initialize the correct inputprovider
for _, v := range conf.InputProviders {
err = inputprovider.AddProvider(v)
if err != nil {
errs.Add(err)
}
}
job.Input = inputprovider
// We only have stdout outputprovider right now
job.Output = output.NewOutputProviderByName("stdout", conf)
return job, errs.ErrorOrNil()
}
func prepareFilters(parseOpts *cliOptions, conf *ffuf.Config) error {
errs := ffuf.NewMultierror()
// If any other matcher is set, ignore -mc default value
matcherSet := false
statusSet := false
warningIgnoreBody := false
flag.Visit(func(f *flag.Flag) {
if f.Name == "mc" {
statusSet = true
}
if f.Name == "ms" {
matcherSet = true
warningIgnoreBody = true
}
if f.Name == "ml" {
matcherSet = true
warningIgnoreBody = true
}
if f.Name == "mr" {
matcherSet = true
}
if f.Name == "mw" {
matcherSet = true
warningIgnoreBody = true
}
})
if statusSet || !matcherSet {
if err := filter.AddMatcher(conf, "status", parseOpts.matcherStatus); err != nil {
errs.Add(err)
}
}
if parseOpts.filterStatus != "" {
if err := filter.AddFilter(conf, "status", parseOpts.filterStatus); err != nil {
errs.Add(err)
}
}
if parseOpts.filterSize != "" {
warningIgnoreBody = true
if err := filter.AddFilter(conf, "size", parseOpts.filterSize); err != nil {
errs.Add(err)
}
}
if parseOpts.filterRegexp != "" {
if err := filter.AddFilter(conf, "regexp", parseOpts.filterRegexp); err != nil {
errs.Add(err)
}
}
if parseOpts.filterWords != "" {
warningIgnoreBody = true
if err := filter.AddFilter(conf, "word", parseOpts.filterWords); err != nil {
errs.Add(err)
}
}
if parseOpts.filterLines != "" {
warningIgnoreBody = true
if err := filter.AddFilter(conf, "line", parseOpts.filterLines); err != nil {
errs.Add(err)
}
}
if parseOpts.matcherSize != "" {
if err := filter.AddMatcher(conf, "size", parseOpts.matcherSize); err != nil {
errs.Add(err)
}
}
if parseOpts.matcherRegexp != "" {
if err := filter.AddMatcher(conf, "regexp", parseOpts.matcherRegexp); err != nil {
errs.Add(err)
}
}
if parseOpts.matcherWords != "" {
if err := filter.AddMatcher(conf, "word", parseOpts.matcherWords); err != nil {
errs.Add(err)
}
}
if parseOpts.matcherLines != "" {
if err := filter.AddMatcher(conf, "line", parseOpts.matcherLines); err != nil {
errs.Add(err)
}
}
if conf.IgnoreBody && warningIgnoreBody {
fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n")
}
return errs.ErrorOrNil()
}
func prepareConfig(parseOpts *cliOptions, conf *ffuf.Config) error {
//TODO: refactor in a proper flag library that can handle things like required flags
errs := ffuf.NewMultierror()
var err error
var err2 error
if len(parseOpts.URL) == 0 && parseOpts.request == "" {
errs.Add(fmt.Errorf("-u flag or -request flag is required"))
}
// prepare extensions
if parseOpts.extensions != "" {
extensions := strings.Split(parseOpts.extensions, ",")
conf.Extensions = extensions
}
// Convert cookies to a header
if len(parseOpts.cookies) > 0 {
parseOpts.headers.Set("Cookie: " + strings.Join(parseOpts.cookies, "; "))
}
//Prepare inputproviders
for _, v := range parseOpts.wordlists {
var wl []string
if runtime.GOOS == "windows" {
// Try to ensure that Windows file paths like C:\path\to\wordlist.txt:KEYWORD are treated properly
if ffuf.FileExists(v) {
// The wordlist was supplied without a keyword parameter
wl = []string{v}
} else {
filepart := v[:strings.LastIndex(v, ":")]
if ffuf.FileExists(filepart) {
wl = []string{filepart, v[strings.LastIndex(v, ":")+1:]}
} else {
// The file was not found. Use full wordlist parameter value for more concise error message down the line
wl = []string{v}
}
}
} else {
wl = strings.SplitN(v, ":", 2)
}
if len(wl) == 2 {
conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
Name: "wordlist",
Value: wl[0],
Keyword: wl[1],
})
} else {
conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
Name: "wordlist",
Value: wl[0],
Keyword: "FUZZ",
})
}
}
for _, v := range parseOpts.inputcommands {
ic := strings.SplitN(v, ":", 2)
if len(ic) == 2 {
conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
Name: "command",
Value: ic[0],
Keyword: ic[1],
})
conf.CommandKeywords = append(conf.CommandKeywords, ic[0])
} else {
conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
Name: "command",
Value: ic[0],
Keyword: "FUZZ",
})
conf.CommandKeywords = append(conf.CommandKeywords, "FUZZ")
}
}
if len(conf.InputProviders) == 0 {
errs.Add(fmt.Errorf("Either -w or --input-cmd flag is required"))
}
// Prepare the request using body
if parseOpts.request != "" {
err := parseRawRequest(parseOpts, conf)
if err != nil {
errmsg := fmt.Sprintf("Could not parse raw request: %s", err)
errs.Add(fmt.Errorf(errmsg))
}
}
//Prepare URL
if parseOpts.URL != "" {
conf.Url = parseOpts.URL
}
//Prepare headers and make canonical
for _, v := range parseOpts.headers {
hs := strings.SplitN(v, ":", 2)
if len(hs) == 2 {
// trim and make canonical
// except if used in custom defined header
var CanonicalNeeded bool = true
for _, a := range conf.CommandKeywords {
if a == hs[0] {
CanonicalNeeded = false
}
}
// check if part of InputProviders
if CanonicalNeeded {
for _, b := range conf.InputProviders {
if b.Keyword == hs[0] {
CanonicalNeeded = false
}
}
}
if CanonicalNeeded {
var CanonicalHeader string = textproto.CanonicalMIMEHeaderKey(strings.TrimSpace(hs[0]))
conf.Headers[CanonicalHeader] = strings.TrimSpace(hs[1])
} else {
conf.Headers[strings.TrimSpace(hs[0])] = strings.TrimSpace(hs[1])
}
} else {
errs.Add(fmt.Errorf("Header defined by -H needs to have a value. \":\" should be used as a separator"))
}
}
//Prepare delay
d := strings.Split(parseOpts.delay, "-")
if len(d) > 2 {
errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
} else if len(d) == 2 {
conf.Delay.IsRange = true
conf.Delay.HasDelay = true
conf.Delay.Min, err = strconv.ParseFloat(d[0], 64)
conf.Delay.Max, err2 = strconv.ParseFloat(d[1], 64)
if err != nil || err2 != nil {
errs.Add(fmt.Errorf("Delay range min and max values need to be valid floats. For example: 0.1-0.5"))
}
} else if len(parseOpts.delay) > 0 {
conf.Delay.IsRange = false
conf.Delay.HasDelay = true
conf.Delay.Min, err = strconv.ParseFloat(parseOpts.delay, 64)
if err != nil {
errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
}
}
// Verify proxy url format
if len(parseOpts.proxyURL) > 0 {
_, err := url.Parse(parseOpts.proxyURL)
if err != nil {
errs.Add(fmt.Errorf("Bad proxy url (-x) format: %s", err))
} else {
conf.ProxyURL = parseOpts.proxyURL
}
}
// Verify replayproxy url format
if len(parseOpts.replayProxyURL) > 0 {
_, err := url.Parse(parseOpts.replayProxyURL)
if err != nil {
errs.Add(fmt.Errorf("Bad replay-proxy url (-replay-proxy) format: %s", err))
} else {
conf.ReplayProxyURL = parseOpts.replayProxyURL
}
}
//Check the output file format option
if conf.OutputFile != "" {
//No need to check / error out if output file isn't defined
outputFormats := []string{"all", "json", "ejson", "html", "md", "csv", "ecsv"}
found := false
for _, f := range outputFormats {
if f == parseOpts.outputFormat {
conf.OutputFormat = f
found = true
}
}
if !found {
errs.Add(fmt.Errorf("Unknown output file format (-of): %s", parseOpts.outputFormat))
}
}
// Auto-calibration strings
if len(parseOpts.AutoCalibrationStrings) > 0 {
conf.AutoCalibrationStrings = parseOpts.AutoCalibrationStrings
}
// Using -acc implies -ac
if len(conf.AutoCalibrationStrings) > 0 {
conf.AutoCalibration = true
}
// Handle copy as curl situation where POST method is implied by --data flag. If method is set to anything but GET, NOOP
if len(conf.Data) > 0 &&
conf.Method == "GET" &&
//don't modify the method automatically if a request file is being used as input
len(parseOpts.request) == 0 {
conf.Method = "POST"
}
conf.CommandLine = strings.Join(os.Args, " ")
for _, provider := range conf.InputProviders {
if !keywordPresent(provider.Keyword, conf) {
errmsg := fmt.Sprintf("Keyword %s defined, but not found in headers, method, URL or POST data.", provider.Keyword)
errs.Add(fmt.Errorf(errmsg))
}
}
// Do checks for recursion mode
if conf.Recursion {
if !strings.HasSuffix(conf.Url, "FUZZ") {
errmsg := fmt.Sprintf("When using -recursion the URL (-u) must end with FUZZ keyword.")
errs.Add(fmt.Errorf(errmsg))
}
}
return errs.ErrorOrNil()
}
func parseRawRequest(parseOpts *cliOptions, conf *ffuf.Config) error {
file, err := os.Open(parseOpts.request)
if err != nil {
return fmt.Errorf("could not open request file: %s", err)
}
defer file.Close()
r := bufio.NewReader(file)
s, err := r.ReadString('\n')
if err != nil {
return fmt.Errorf("could not read request: %s", err)
}
parts := strings.Split(s, " ")
if len(parts) < 3 {
return fmt.Errorf("malformed request supplied")
}
// Set the request Method
conf.Method = parts[0]
for {
line, err := r.ReadString('\n')
line = strings.TrimSpace(line)
if err != nil || line == "" {
break
}
p := strings.SplitN(line, ":", 2)
if len(p) != 2 {
continue
}
if strings.EqualFold(p[0], "content-length") {
continue
}
conf.Headers[strings.TrimSpace(p[0])] = strings.TrimSpace(p[1])
}
// Handle case with the full http url in path. In that case,
// ignore any host header that we encounter and use the path as request URL
if strings.HasPrefix(parts[1], "http") {
parsed, err := url.Parse(parts[1])
if err != nil {
return fmt.Errorf("could not parse request URL: %s", err)
}
conf.Url = parts[1]
conf.Headers["Host"] = parsed.Host
} else {
// Build the request URL from the request
conf.Url = parseOpts.requestProto + "://" + conf.Headers["Host"] + parts[1]
}
// Set the request body
b, err := ioutil.ReadAll(r)
if err != nil {
return fmt.Errorf("could not read request body: %s", err)
}
conf.Data = string(b)
return nil
}
func keywordPresent(keyword string, conf *ffuf.Config) bool {
//Search for keyword from HTTP method, URL and POST data too
if strings.Index(conf.Method, keyword) != -1 {
return true
}
if strings.Index(conf.Url, keyword) != -1 {
return true
}
if strings.Index(conf.Data, keyword) != -1 {
return true
}
for k, v := range conf.Headers {
if strings.Index(k, keyword) != -1 {
return true
}
if strings.Index(v, keyword) != -1 {
return true
}
}
return false
}
ffuf-1.1.0/pkg/ 0000775 0000000 0000000 00000000000 13706265214 0013226 5 ustar 00root root 0000000 0000000 ffuf-1.1.0/pkg/ffuf/ 0000775 0000000 0000000 00000000000 13706265214 0014154 5 ustar 00root root 0000000 0000000 ffuf-1.1.0/pkg/ffuf/config.go 0000664 0000000 0000000 00000007356 13706265214 0015763 0 ustar 00root root 0000000 0000000 package ffuf
import (
"context"
)
type Config struct {
Headers map[string]string `json:"headers"`
Extensions []string `json:"extensions"`
DirSearchCompat bool `json:"dirsearch_compatibility"`
Method string `json:"method"`
Url string `json:"url"`
Data string `json:"postdata"`
Quiet bool `json:"quiet"`
Colors bool `json:"colors"`
InputProviders []InputProviderConfig `json:"inputproviders"`
CommandKeywords []string `json:"-"`
InputNum int `json:"cmd_inputnum"`
InputMode string `json:"inputmode"`
OutputDirectory string `json:"outputdirectory"`
OutputFile string `json:"outputfile"`
OutputFormat string `json:"outputformat"`
IgnoreBody bool `json:"ignorebody"`
IgnoreWordlistComments bool `json:"ignore_wordlist_comments"`
StopOn403 bool `json:"stop_403"`
StopOnErrors bool `json:"stop_errors"`
StopOnAll bool `json:"stop_all"`
FollowRedirects bool `json:"follow_redirects"`
AutoCalibration bool `json:"autocalibration"`
AutoCalibrationStrings []string `json:"autocalibration_strings"`
Timeout int `json:"timeout"`
ProgressFrequency int `json:"-"`
Delay optRange `json:"delay"`
Filters map[string]FilterProvider `json:"filters"`
Matchers map[string]FilterProvider `json:"matchers"`
Threads int `json:"threads"`
Context context.Context `json:"-"`
ProxyURL string `json:"proxyurl"`
ReplayProxyURL string `json:"replayproxyurl"`
CommandLine string `json:"cmdline"`
Verbose bool `json:"verbose"`
MaxTime int `json:"maxtime"`
MaxTimeJob int `json:"maxtime_job"`
Recursion bool `json:"recursion"`
RecursionDepth int `json:"recursion_depth"`
}
type InputProviderConfig struct {
Name string `json:"name"`
Keyword string `json:"keyword"`
Value string `json:"value"`
}
func NewConfig(ctx context.Context) Config {
var conf Config
conf.Context = ctx
conf.Headers = make(map[string]string)
conf.Method = "GET"
conf.Url = ""
conf.Data = ""
conf.Quiet = false
conf.IgnoreWordlistComments = false
conf.StopOn403 = false
conf.StopOnErrors = false
conf.StopOnAll = false
conf.FollowRedirects = false
conf.InputProviders = make([]InputProviderConfig, 0)
conf.CommandKeywords = make([]string, 0)
conf.AutoCalibrationStrings = make([]string, 0)
conf.InputNum = 0
conf.InputMode = "clusterbomb"
conf.ProxyURL = ""
conf.Filters = make(map[string]FilterProvider)
conf.Matchers = make(map[string]FilterProvider)
conf.Delay = optRange{0, 0, false, false}
conf.Extensions = make([]string, 0)
conf.Timeout = 10
// Progress update frequency, in milliseconds
conf.ProgressFrequency = 100
conf.DirSearchCompat = false
conf.Verbose = false
conf.MaxTime = 0
conf.MaxTimeJob = 0
conf.Recursion = false
conf.RecursionDepth = 0
return conf
}
ffuf-1.1.0/pkg/ffuf/const.go 0000664 0000000 0000000 00000000127 13706265214 0015631 0 ustar 00root root 0000000 0000000 package ffuf
const (
//VERSION holds the current version number
VERSION = "1.1.0"
)
ffuf-1.1.0/pkg/ffuf/interfaces.go 0000664 0000000 0000000 00000002076 13706265214 0016633 0 ustar 00root root 0000000 0000000 package ffuf
//FilterProvider is a generic interface for both Matchers and Filters
type FilterProvider interface {
Filter(response *Response) (bool, error)
Repr() string
}
//RunnerProvider is an interface for request executors
type RunnerProvider interface {
Prepare(input map[string][]byte) (Request, error)
Execute(req *Request) (Response, error)
}
//InputProvider interface handles the input data for RunnerProvider
type InputProvider interface {
AddProvider(InputProviderConfig) error
Next() bool
Position() int
Reset()
Value() map[string][]byte
Total() int
}
//InternalInputProvider interface handles providing input data to InputProvider
type InternalInputProvider interface {
Keyword() string
Next() bool
Position() int
ResetPosition()
IncrementPosition()
Value() []byte
Total() int
}
//OutputProvider is responsible of providing output from the RunnerProvider
type OutputProvider interface {
Banner() error
Finalize() error
Progress(status Progress)
Info(infostring string)
Error(errstring string)
Warning(warnstring string)
Result(resp Response)
}
ffuf-1.1.0/pkg/ffuf/job.go 0000664 0000000 0000000 00000023654 13706265214 0015267 0 ustar 00root root 0000000 0000000 package ffuf
import (
"fmt"
"log"
"math/rand"
"os"
"os/signal"
"sync"
"syscall"
"time"
)
//Job ties together Config, Runner, Input and Output
type Job struct {
Config *Config
ErrorMutex sync.Mutex
Input InputProvider
Runner RunnerProvider
ReplayRunner RunnerProvider
Output OutputProvider
Counter int
ErrorCounter int
SpuriousErrorCounter int
Total int
Running bool
RunningJob bool
Count403 int
Count429 int
Error string
startTime time.Time
startTimeJob time.Time
queuejobs []QueueJob
queuepos int
currentDepth int
}
type QueueJob struct {
Url string
depth int
}
func NewJob(conf *Config) Job {
var j Job
j.Counter = 0
j.ErrorCounter = 0
j.SpuriousErrorCounter = 0
j.Running = false
j.RunningJob = false
j.queuepos = 0
j.queuejobs = make([]QueueJob, 0)
j.currentDepth = 0
return j
}
//incError increments the error counter
func (j *Job) incError() {
j.ErrorMutex.Lock()
defer j.ErrorMutex.Unlock()
j.ErrorCounter++
j.SpuriousErrorCounter++
}
//inc403 increments the 403 response counter
func (j *Job) inc403() {
j.ErrorMutex.Lock()
defer j.ErrorMutex.Unlock()
j.Count403++
}
// inc429 increments the 429 response counter
func (j *Job) inc429() {
j.ErrorMutex.Lock()
defer j.ErrorMutex.Unlock()
j.Count429++
}
//resetSpuriousErrors resets the spurious error counter
func (j *Job) resetSpuriousErrors() {
j.ErrorMutex.Lock()
defer j.ErrorMutex.Unlock()
j.SpuriousErrorCounter = 0
}
//Start the execution of the Job
func (j *Job) Start() {
if j.startTime.IsZero() {
j.startTime = time.Now()
}
// Add the default job to job queue
j.queuejobs = append(j.queuejobs, QueueJob{Url: j.Config.Url, depth: 0})
rand.Seed(time.Now().UnixNano())
j.Total = j.Input.Total()
defer j.Stop()
j.Running = true
j.RunningJob = true
//Show banner if not running in silent mode
if !j.Config.Quiet {
j.Output.Banner()
}
// Monitor for SIGTERM and do cleanup properly (writing the output files etc)
j.interruptMonitor()
for j.jobsInQueue() {
j.prepareQueueJob()
if j.queuepos > 1 && !j.RunningJob {
// Print info for queued recursive jobs
j.Output.Info(fmt.Sprintf("Scanning: %s", j.Config.Url))
}
j.Input.Reset()
j.startTimeJob = time.Now()
j.RunningJob = true
j.Counter = 0
j.startExecution()
}
j.Output.Finalize()
}
func (j *Job) jobsInQueue() bool {
if j.queuepos < len(j.queuejobs) {
return true
}
return false
}
func (j *Job) prepareQueueJob() {
j.Config.Url = j.queuejobs[j.queuepos].Url
j.currentDepth = j.queuejobs[j.queuepos].depth
j.queuepos += 1
}
func (j *Job) startExecution() {
var wg sync.WaitGroup
wg.Add(1)
go j.runProgress(&wg)
//Limiter blocks after reaching the buffer, ensuring limited concurrency
limiter := make(chan bool, j.Config.Threads)
for j.Input.Next() {
// Check if we should stop the process
j.CheckStop()
if !j.Running {
defer j.Output.Warning(j.Error)
break
}
limiter <- true
nextInput := j.Input.Value()
nextPosition := j.Input.Position()
wg.Add(1)
j.Counter++
go func() {
defer func() { <-limiter }()
defer wg.Done()
j.runTask(nextInput, nextPosition, false)
if j.Config.Delay.HasDelay {
var sleepDurationMS time.Duration
if j.Config.Delay.IsRange {
sTime := j.Config.Delay.Min + rand.Float64()*(j.Config.Delay.Max-j.Config.Delay.Min)
sleepDurationMS = time.Duration(sTime * 1000)
} else {
sleepDurationMS = time.Duration(j.Config.Delay.Min * 1000)
}
time.Sleep(sleepDurationMS * time.Millisecond)
}
}()
if !j.RunningJob {
defer j.Output.Warning(j.Error)
return
}
}
wg.Wait()
j.updateProgress()
return
}
func (j *Job) interruptMonitor() {
sigChan := make(chan os.Signal, 2)
signal.Notify(sigChan, os.Interrupt, syscall.SIGTERM)
go func() {
for _ = range sigChan {
j.Error = "Caught keyboard interrupt (Ctrl-C)\n"
j.Stop()
}
}()
}
func (j *Job) runProgress(wg *sync.WaitGroup) {
defer wg.Done()
totalProgress := j.Input.Total()
for j.Counter <= totalProgress {
if !j.Running {
break
}
j.updateProgress()
if j.Counter == totalProgress {
return
}
if !j.RunningJob {
return
}
time.Sleep(time.Millisecond * time.Duration(j.Config.ProgressFrequency))
}
}
func (j *Job) updateProgress() {
prog := Progress{
StartedAt: j.startTimeJob,
ReqCount: j.Counter,
ReqTotal: j.Input.Total(),
QueuePos: j.queuepos,
QueueTotal: len(j.queuejobs),
ErrorCount: j.ErrorCounter,
}
j.Output.Progress(prog)
}
func (j *Job) isMatch(resp Response) bool {
matched := false
for _, m := range j.Config.Matchers {
match, err := m.Filter(&resp)
if err != nil {
continue
}
if match {
matched = true
}
}
// The response was not matched, return before running filters
if !matched {
return false
}
for _, f := range j.Config.Filters {
fv, err := f.Filter(&resp)
if err != nil {
continue
}
if fv {
return false
}
}
return true
}
func (j *Job) runTask(input map[string][]byte, position int, retried bool) {
req, err := j.Runner.Prepare(input)
req.Position = position
if err != nil {
j.Output.Error(fmt.Sprintf("Encountered an error while preparing request: %s\n", err))
j.incError()
log.Printf("%s", err)
return
}
resp, err := j.Runner.Execute(&req)
if err != nil {
if retried {
j.incError()
log.Printf("%s", err)
} else {
j.runTask(input, position, true)
}
return
}
if j.SpuriousErrorCounter > 0 {
j.resetSpuriousErrors()
}
if j.Config.StopOn403 || j.Config.StopOnAll {
// Increment Forbidden counter if we encountered one
if resp.StatusCode == 403 {
j.inc403()
}
}
if j.Config.StopOnAll {
// increment 429 counter if the response code is 429
if j.Config.StopOnAll {
if resp.StatusCode == 429 {
j.inc429()
}
}
}
if j.isMatch(resp) {
// Re-send request through replay-proxy if needed
if j.ReplayRunner != nil {
replayreq, err := j.ReplayRunner.Prepare(input)
replayreq.Position = position
if err != nil {
j.Output.Error(fmt.Sprintf("Encountered an error while preparing replayproxy request: %s\n", err))
j.incError()
log.Printf("%s", err)
} else {
_, _ = j.ReplayRunner.Execute(&replayreq)
}
}
j.Output.Result(resp)
// Refresh the progress indicator as we printed something out
j.updateProgress()
}
if j.Config.Recursion && len(resp.GetRedirectLocation(false)) > 0 {
j.handleRecursionJob(resp)
}
return
}
//handleRecursionJob adds a new recursion job to the job queue if a new directory is found
func (j *Job) handleRecursionJob(resp Response) {
if (resp.Request.Url + "/") != resp.GetRedirectLocation(true) {
// Not a directory, return early
return
}
if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth {
// We have yet to reach the maximum recursion depth
recUrl := resp.Request.Url + "/" + "FUZZ"
newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1}
j.queuejobs = append(j.queuejobs, newJob)
j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl))
} else {
j.Output.Warning(fmt.Sprintf("Directory found, but recursion depth exceeded. Ignoring: %s", resp.GetRedirectLocation(true)))
}
}
//CalibrateResponses returns slice of Responses for randomly generated filter autocalibration requests
func (j *Job) CalibrateResponses() ([]Response, error) {
cInputs := make([]string, 0)
rand.Seed(time.Now().UnixNano())
if len(j.Config.AutoCalibrationStrings) < 1 {
cInputs = append(cInputs, "admin"+RandomString(16)+"/")
cInputs = append(cInputs, ".htaccess"+RandomString(16))
cInputs = append(cInputs, RandomString(16)+"/")
cInputs = append(cInputs, RandomString(16))
} else {
cInputs = append(cInputs, j.Config.AutoCalibrationStrings...)
}
results := make([]Response, 0)
for _, input := range cInputs {
inputs := make(map[string][]byte, 0)
for _, v := range j.Config.InputProviders {
inputs[v.Keyword] = []byte(input)
}
req, err := j.Runner.Prepare(inputs)
if err != nil {
j.Output.Error(fmt.Sprintf("Encountered an error while preparing request: %s\n", err))
j.incError()
log.Printf("%s", err)
return results, err
}
resp, err := j.Runner.Execute(&req)
if err != nil {
return results, err
}
// Only calibrate on responses that would be matched otherwise
if j.isMatch(resp) {
results = append(results, resp)
}
}
return results, nil
}
// CheckStop stops the job if stopping conditions are met
func (j *Job) CheckStop() {
if j.Counter > 50 {
// We have enough samples
if j.Config.StopOn403 || j.Config.StopOnAll {
if float64(j.Count403)/float64(j.Counter) > 0.95 {
// Over 95% of requests are 403
j.Error = "Getting an unusual amount of 403 responses, exiting."
j.Stop()
}
}
if j.Config.StopOnErrors || j.Config.StopOnAll {
if j.SpuriousErrorCounter > j.Config.Threads*2 {
// Most of the requests are erroring
j.Error = "Receiving spurious errors, exiting."
j.Stop()
}
}
if j.Config.StopOnAll && (float64(j.Count429)/float64(j.Counter) > 0.2) {
// Over 20% of responses are 429
j.Error = "Getting an unusual amount of 429 responses, exiting."
j.Stop()
}
}
// Check for runtime of entire process
if j.Config.MaxTime > 0 {
dur := time.Now().Sub(j.startTime)
runningSecs := int(dur / time.Second)
if runningSecs >= j.Config.MaxTime {
j.Error = "Maximum running time for entire process reached, exiting."
j.Stop()
}
}
// Check for runtime of current job
if j.Config.MaxTimeJob > 0 {
dur := time.Now().Sub(j.startTimeJob)
runningSecs := int(dur / time.Second)
if runningSecs >= j.Config.MaxTimeJob {
j.Error = "Maximum running time for this job reached, continuing with next job if one exists."
j.Next()
}
}
}
//Stop the execution of the Job
func (j *Job) Stop() {
j.Running = false
return
}
//Stop current, resume to next
func (j *Job) Next() {
j.RunningJob = false
return
}
ffuf-1.1.0/pkg/ffuf/multierror.go 0000664 0000000 0000000 00000001031 13706265214 0016702 0 ustar 00root root 0000000 0000000 package ffuf
import (
"fmt"
)
type Multierror struct {
errors []error
}
//NewMultierror returns a new Multierror
func NewMultierror() Multierror {
return Multierror{}
}
func (m *Multierror) Add(err error) {
m.errors = append(m.errors, err)
}
func (m *Multierror) ErrorOrNil() error {
var errString string
if len(m.errors) > 0 {
errString += fmt.Sprintf("%d errors occured.\n", len(m.errors))
for _, e := range m.errors {
errString += fmt.Sprintf("\t* %s\n", e)
}
return fmt.Errorf("%s", errString)
}
return nil
}
ffuf-1.1.0/pkg/ffuf/optrange.go 0000664 0000000 0000000 00000003201 13706265214 0016316 0 ustar 00root root 0000000 0000000 package ffuf
import (
"encoding/json"
"fmt"
"strconv"
"strings"
)
//optRange stores either a single float, in which case the value is stored in min and IsRange is false,
//or a range of floats, in which case IsRange is true
type optRange struct {
Min float64
Max float64
IsRange bool
HasDelay bool
}
type optRangeJSON struct {
Value string `json:"value"`
}
func (o *optRange) MarshalJSON() ([]byte, error) {
value := ""
if o.Min == o.Max {
value = fmt.Sprintf("%.2f", o.Min)
} else {
value = fmt.Sprintf("%.2f-%.2f", o.Min, o.Max)
}
return json.Marshal(&optRangeJSON{
Value: value,
})
}
func (o *optRange) UnmarshalJSON(b []byte) error {
var inc optRangeJSON
err := json.Unmarshal(b, &inc)
if err != nil {
return err
}
return o.Initialize(inc.Value)
}
//Initialize sets up the optRange from string value
func (o *optRange) Initialize(value string) error {
var err, err2 error
d := strings.Split(value, "-")
if len(d) > 2 {
return fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\"")
} else if len(d) == 2 {
o.IsRange = true
o.HasDelay = true
o.Min, err = strconv.ParseFloat(d[0], 64)
o.Max, err2 = strconv.ParseFloat(d[1], 64)
if err != nil || err2 != nil {
return fmt.Errorf("Delay range min and max values need to be valid floats. For example: 0.1-0.5")
}
} else if len(value) > 0 {
o.IsRange = false
o.HasDelay = true
o.Min, err = strconv.ParseFloat(value, 64)
if err != nil {
return fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\"")
}
}
return nil
}
ffuf-1.1.0/pkg/ffuf/progress.go 0000664 0000000 0000000 00000000241 13706265214 0016344 0 ustar 00root root 0000000 0000000 package ffuf
import (
"time"
)
type Progress struct {
StartedAt time.Time
ReqCount int
ReqTotal int
QueuePos int
QueueTotal int
ErrorCount int
}
ffuf-1.1.0/pkg/ffuf/request.go 0000664 0000000 0000000 00000000662 13706265214 0016177 0 ustar 00root root 0000000 0000000 package ffuf
// Request holds the meaningful data that is passed for runner for making the query
type Request struct {
Method string
Host string
Url string
Headers map[string]string
Data []byte
Input map[string][]byte
Position int
Raw string
}
func NewRequest(conf *Config) Request {
var req Request
req.Method = conf.Method
req.Url = conf.Url
req.Headers = make(map[string]string)
return req
}
ffuf-1.1.0/pkg/ffuf/response.go 0000664 0000000 0000000 00000002447 13706265214 0016350 0 ustar 00root root 0000000 0000000 package ffuf
import (
"net/http"
"net/url"
)
// Response struct holds the meaningful data returned from request and is meant for passing to filters
type Response struct {
StatusCode int64
Headers map[string][]string
Data []byte
ContentLength int64
ContentWords int64
ContentLines int64
Cancelled bool
Request *Request
Raw string
ResultFile string
}
// GetRedirectLocation returns the redirect location for a 3xx redirect HTTP response
func (resp *Response) GetRedirectLocation(absolute bool) string {
redirectLocation := ""
if resp.StatusCode >= 300 && resp.StatusCode <= 399 {
if loc, ok := resp.Headers["Location"]; ok {
if len(loc) > 0 {
redirectLocation = loc[0]
}
}
}
if absolute {
redirectUrl, err := url.Parse(redirectLocation)
if err != nil {
return redirectLocation
}
baseUrl, err := url.Parse(resp.Request.Url)
if err != nil {
return redirectLocation
}
redirectLocation = baseUrl.ResolveReference(redirectUrl).String()
}
return redirectLocation
}
func NewResponse(httpresp *http.Response, req *Request) Response {
var resp Response
resp.Request = req
resp.StatusCode = int64(httpresp.StatusCode)
resp.Headers = httpresp.Header
resp.Cancelled = false
resp.Raw = ""
resp.ResultFile = ""
return resp
}
ffuf-1.1.0/pkg/ffuf/util.go 0000664 0000000 0000000 00000001563 13706265214 0015465 0 ustar 00root root 0000000 0000000 package ffuf
import (
"math/rand"
"os"
)
//used for random string generation in calibration function
var chars = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
//RandomString returns a random string of length of parameter n
func RandomString(n int) string {
s := make([]rune, n)
for i := range s {
s[i] = chars[rand.Intn(len(chars))]
}
return string(s)
}
//UniqStringSlice returns an unordered slice of unique strings. The duplicates are dropped
func UniqStringSlice(inslice []string) []string {
found := map[string]bool{}
for _, v := range inslice {
found[v] = true
}
ret := []string{}
for k, _ := range found {
ret = append(ret, k)
}
return ret
}
//FileExists checks if the filepath exists and is not a directory
func FileExists(path string) bool {
md, err := os.Stat(path)
if os.IsNotExist(err) {
return false
}
return !md.IsDir()
}
ffuf-1.1.0/pkg/ffuf/valuerange.go 0000664 0000000 0000000 00000001711 13706265214 0016634 0 ustar 00root root 0000000 0000000 package ffuf
import (
"fmt"
"regexp"
"strconv"
)
type ValueRange struct {
Min, Max int64
}
func ValueRangeFromString(instr string) (ValueRange, error) {
// is the value a range
minmax := regexp.MustCompile("^(\\d+)\\-(\\d+)$").FindAllStringSubmatch(instr, -1)
if minmax != nil {
// yes
minval, err := strconv.ParseInt(minmax[0][1], 10, 0)
if err != nil {
return ValueRange{}, fmt.Errorf("Invalid value: %s", minmax[0][1])
}
maxval, err := strconv.ParseInt(minmax[0][2], 10, 0)
if err != nil {
return ValueRange{}, fmt.Errorf("Invalid value: %s", minmax[0][2])
}
if minval >= maxval {
return ValueRange{}, fmt.Errorf("Minimum has to be smaller than maximum")
}
return ValueRange{minval, maxval}, nil
} else {
// no, a single value or something else
intval, err := strconv.ParseInt(instr, 10, 0)
if err != nil {
return ValueRange{}, fmt.Errorf("Invalid value: %s", instr)
}
return ValueRange{intval, intval}, nil
}
}
ffuf-1.1.0/pkg/filter/ 0000775 0000000 0000000 00000000000 13706265214 0014513 5 ustar 00root root 0000000 0000000 ffuf-1.1.0/pkg/filter/filter.go 0000664 0000000 0000000 00000004642 13706265214 0016335 0 ustar 00root root 0000000 0000000 package filter
import (
"fmt"
"strconv"
"strings"
"github.com/ffuf/ffuf/pkg/ffuf"
)
func NewFilterByName(name string, value string) (ffuf.FilterProvider, error) {
if name == "status" {
return NewStatusFilter(value)
}
if name == "size" {
return NewSizeFilter(value)
}
if name == "word" {
return NewWordFilter(value)
}
if name == "line" {
return NewLineFilter(value)
}
if name == "regexp" {
return NewRegexpFilter(value)
}
return nil, fmt.Errorf("Could not create filter with name %s", name)
}
//AddFilter adds a new filter to Config
func AddFilter(conf *ffuf.Config, name string, option string) error {
newf, err := NewFilterByName(name, option)
if err == nil {
conf.Filters[name] = newf
}
return err
}
//AddMatcher adds a new matcher to Config
func AddMatcher(conf *ffuf.Config, name string, option string) error {
newf, err := NewFilterByName(name, option)
if err == nil {
conf.Matchers[name] = newf
}
return err
}
//CalibrateIfNeeded runs a self-calibration task for filtering options (if needed) by requesting random resources and acting accordingly
func CalibrateIfNeeded(j *ffuf.Job) error {
if !j.Config.AutoCalibration {
return nil
}
// Handle the calibration
responses, err := j.CalibrateResponses()
if err != nil {
return err
}
if len(responses) > 0 {
calibrateFilters(j, responses)
}
return nil
}
func calibrateFilters(j *ffuf.Job, responses []ffuf.Response) {
sizeCalib := make([]string, 0)
wordCalib := make([]string, 0)
lineCalib := make([]string, 0)
for _, r := range responses {
if r.ContentLength > 0 {
// Only add if we have an actual size of responses
sizeCalib = append(sizeCalib, strconv.FormatInt(r.ContentLength, 10))
}
if r.ContentWords > 0 {
// Only add if we have an actual word length of response
wordCalib = append(wordCalib, strconv.FormatInt(r.ContentWords, 10))
}
if r.ContentLines > 1 {
// Only add if we have an actual word length of response
lineCalib = append(lineCalib, strconv.FormatInt(r.ContentLines, 10))
}
}
//Remove duplicates
sizeCalib = ffuf.UniqStringSlice(sizeCalib)
wordCalib = ffuf.UniqStringSlice(wordCalib)
lineCalib = ffuf.UniqStringSlice(lineCalib)
if len(sizeCalib) > 0 {
AddFilter(j.Config, "size", strings.Join(sizeCalib, ","))
}
if len(wordCalib) > 0 {
AddFilter(j.Config, "word", strings.Join(wordCalib, ","))
}
if len(lineCalib) > 0 {
AddFilter(j.Config, "line", strings.Join(lineCalib, ","))
}
}
ffuf-1.1.0/pkg/filter/filter_test.go 0000664 0000000 0000000 00000002023 13706265214 0017363 0 ustar 00root root 0000000 0000000 package filter
import (
"testing"
)
func TestNewFilterByName(t *testing.T) {
scf, _ := NewFilterByName("status", "200")
if _, ok := scf.(*StatusFilter); !ok {
t.Errorf("Was expecting statusfilter")
}
szf, _ := NewFilterByName("size", "200")
if _, ok := szf.(*SizeFilter); !ok {
t.Errorf("Was expecting sizefilter")
}
wf, _ := NewFilterByName("word", "200")
if _, ok := wf.(*WordFilter); !ok {
t.Errorf("Was expecting wordfilter")
}
lf, _ := NewFilterByName("line", "200")
if _, ok := lf.(*LineFilter); !ok {
t.Errorf("Was expecting linefilter")
}
ref, _ := NewFilterByName("regexp", "200")
if _, ok := ref.(*RegexpFilter); !ok {
t.Errorf("Was expecting regexpfilter")
}
}
func TestNewFilterByNameError(t *testing.T) {
_, err := NewFilterByName("status", "invalid")
if err == nil {
t.Errorf("Was expecing an error")
}
}
func TestNewFilterByNameNotFound(t *testing.T) {
_, err := NewFilterByName("nonexistent", "invalid")
if err == nil {
t.Errorf("Was expecing an error with invalid filter name")
}
}
ffuf-1.1.0/pkg/filter/lines.go 0000664 0000000 0000000 00000003003 13706265214 0016150 0 ustar 00root root 0000000 0000000 package filter
import (
"encoding/json"
"fmt"
"strconv"
"strings"
"github.com/ffuf/ffuf/pkg/ffuf"
)
type LineFilter struct {
Value []ffuf.ValueRange
}
func NewLineFilter(value string) (ffuf.FilterProvider, error) {
var intranges []ffuf.ValueRange
for _, sv := range strings.Split(value, ",") {
vr, err := ffuf.ValueRangeFromString(sv)
if err != nil {
return &LineFilter{}, fmt.Errorf("Line filter or matcher (-fl / -ml): invalid value: %s", sv)
}
intranges = append(intranges, vr)
}
return &LineFilter{Value: intranges}, nil
}
func (f *LineFilter) MarshalJSON() ([]byte, error) {
value := make([]string, 0)
for _, v := range f.Value {
if v.Min == v.Max {
value = append(value, strconv.FormatInt(v.Min, 10))
} else {
value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max))
}
}
return json.Marshal(&struct {
Value string `json:"value"`
}{
Value: strings.Join(value, ","),
})
}
func (f *LineFilter) Filter(response *ffuf.Response) (bool, error) {
linesSize := len(strings.Split(string(response.Data), "\n"))
for _, iv := range f.Value {
if iv.Min <= int64(linesSize) && int64(linesSize) <= iv.Max {
return true, nil
}
}
return false, nil
}
func (f *LineFilter) Repr() string {
var strval []string
for _, iv := range f.Value {
if iv.Min == iv.Max {
strval = append(strval, strconv.Itoa(int(iv.Min)))
} else {
strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
}
}
return fmt.Sprintf("Response lines: %s", strings.Join(strval, ","))
}
ffuf-1.1.0/pkg/filter/lines_test.go 0000664 0000000 0000000 00000002161 13706265214 0017213 0 ustar 00root root 0000000 0000000 package filter
import (
"strings"
"testing"
"github.com/ffuf/ffuf/pkg/ffuf"
)
func TestNewLineFilter(t *testing.T) {
f, _ := NewLineFilter("200,301,400-410,500")
linesRepr := f.Repr()
if strings.Index(linesRepr, "200,301,400-410,500") == -1 {
t.Errorf("Word filter was expected to have 4 values")
}
}
func TestNewLineFilterError(t *testing.T) {
_, err := NewLineFilter("invalid")
if err == nil {
t.Errorf("Was expecting an error from errenous input data")
}
}
func TestLineFiltering(t *testing.T) {
f, _ := NewLineFilter("200,301,402-450,500")
for i, test := range []struct {
input int64
output bool
}{
{200, true},
{301, true},
{500, true},
{4, false},
{444, true},
{302, false},
{401, false},
{402, true},
{450, true},
{451, false},
} {
var data []string
for i := int64(0); i < test.input; i++ {
data = append(data, "A")
}
resp := ffuf.Response{Data: []byte(strings.Join(data, " "))}
filterReturn, _ := f.Filter(&resp)
if filterReturn != test.output {
t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn)
}
}
}
ffuf-1.1.0/pkg/filter/regex.go 0000664 0000000 0000000 00000002363 13706265214 0016160 0 ustar 00root root 0000000 0000000 package filter
import (
"encoding/json"
"fmt"
"regexp"
"strings"
"github.com/ffuf/ffuf/pkg/ffuf"
)
type RegexpFilter struct {
Value *regexp.Regexp
valueRaw string
}
func NewRegexpFilter(value string) (ffuf.FilterProvider, error) {
re, err := regexp.Compile(value)
if err != nil {
return &RegexpFilter{}, fmt.Errorf("Regexp filter or matcher (-fr / -mr): invalid value: %s", value)
}
return &RegexpFilter{Value: re, valueRaw: value}, nil
}
func (f *RegexpFilter) MarshalJSON() ([]byte, error) {
return json.Marshal(&struct {
Value string `json:"value"`
}{
Value: f.valueRaw,
})
}
func (f *RegexpFilter) Filter(response *ffuf.Response) (bool, error) {
matchheaders := ""
for k, v := range response.Headers {
for _, iv := range v {
matchheaders += k + ": " + iv + "\r\n"
}
}
matchdata := []byte(matchheaders)
matchdata = append(matchdata, response.Data...)
pattern := f.valueRaw
for keyword, inputitem := range response.Request.Input {
pattern = strings.Replace(pattern, keyword, regexp.QuoteMeta(string(inputitem)), -1)
}
matched, err := regexp.Match(pattern, matchdata)
if err != nil {
return false, nil
}
return matched, nil
}
func (f *RegexpFilter) Repr() string {
return fmt.Sprintf("Regexp: %s", f.valueRaw)
}
ffuf-1.1.0/pkg/filter/regexp_test.go 0000664 0000000 0000000 00000002007 13706265214 0017372 0 ustar 00root root 0000000 0000000 package filter
import (
"strings"
"testing"
"github.com/ffuf/ffuf/pkg/ffuf"
)
func TestNewRegexpFilter(t *testing.T) {
f, _ := NewRegexpFilter("s([a-z]+)arch")
statusRepr := f.Repr()
if strings.Index(statusRepr, "s([a-z]+)arch") == -1 {
t.Errorf("Status filter was expected to have a regexp value")
}
}
func TestNewRegexpFilterError(t *testing.T) {
_, err := NewRegexpFilter("r((")
if err == nil {
t.Errorf("Was expecting an error from errenous input data")
}
}
func TestRegexpFiltering(t *testing.T) {
f, _ := NewRegexpFilter("s([a-z]+)arch")
for i, test := range []struct {
input string
output bool
}{
{"search", true},
{"text and search", true},
{"sbarch in beginning", true},
{"midd scarch le", true},
{"s1arch", false},
{"invalid", false},
} {
resp := ffuf.Response{Data: []byte(test.input)}
filterReturn, _ := f.Filter(&resp)
if filterReturn != test.output {
t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn)
}
}
}
ffuf-1.1.0/pkg/filter/size.go 0000664 0000000 0000000 00000002721 13706265214 0016016 0 ustar 00root root 0000000 0000000 package filter
import (
"encoding/json"
"fmt"
"strconv"
"strings"
"github.com/ffuf/ffuf/pkg/ffuf"
)
type SizeFilter struct {
Value []ffuf.ValueRange
}
func NewSizeFilter(value string) (ffuf.FilterProvider, error) {
var intranges []ffuf.ValueRange
for _, sv := range strings.Split(value, ",") {
vr, err := ffuf.ValueRangeFromString(sv)
if err != nil {
return &SizeFilter{}, fmt.Errorf("Size filter or matcher (-fs / -ms): invalid value: %s", sv)
}
intranges = append(intranges, vr)
}
return &SizeFilter{Value: intranges}, nil
}
func (f *SizeFilter) MarshalJSON() ([]byte, error) {
value := make([]string, 0)
for _, v := range f.Value {
if v.Min == v.Max {
value = append(value, strconv.FormatInt(v.Min, 10))
} else {
value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max))
}
}
return json.Marshal(&struct {
Value string `json:"value"`
}{
Value: strings.Join(value, ","),
})
}
func (f *SizeFilter) Filter(response *ffuf.Response) (bool, error) {
for _, iv := range f.Value {
if iv.Min <= response.ContentLength && response.ContentLength <= iv.Max {
return true, nil
}
}
return false, nil
}
func (f *SizeFilter) Repr() string {
var strval []string
for _, iv := range f.Value {
if iv.Min == iv.Max {
strval = append(strval, strconv.Itoa(int(iv.Min)))
} else {
strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
}
}
return fmt.Sprintf("Response size: %s", strings.Join(strval, ","))
}
ffuf-1.1.0/pkg/filter/size_test.go 0000664 0000000 0000000 00000001725 13706265214 0017060 0 ustar 00root root 0000000 0000000 package filter
import (
"strings"
"testing"
"github.com/ffuf/ffuf/pkg/ffuf"
)
func TestNewSizeFilter(t *testing.T) {
f, _ := NewSizeFilter("1,2,3,444,5-90")
sizeRepr := f.Repr()
if strings.Index(sizeRepr, "1,2,3,444,5-90") == -1 {
t.Errorf("Size filter was expected to have 5 values")
}
}
func TestNewSizeFilterError(t *testing.T) {
_, err := NewSizeFilter("invalid")
if err == nil {
t.Errorf("Was expecting an error from errenous input data")
}
}
func TestFiltering(t *testing.T) {
f, _ := NewSizeFilter("1,2,3,5-90,444")
for i, test := range []struct {
input int64
output bool
}{
{1, true},
{2, true},
{3, true},
{4, false},
{5, true},
{70, true},
{90, true},
{91, false},
{444, true},
} {
resp := ffuf.Response{ContentLength: test.input}
filterReturn, _ := f.Filter(&resp)
if filterReturn != test.output {
t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn)
}
}
}
ffuf-1.1.0/pkg/filter/status.go 0000664 0000000 0000000 00000003602 13706265214 0016366 0 ustar 00root root 0000000 0000000 package filter
import (
"encoding/json"
"fmt"
"strconv"
"strings"
"github.com/ffuf/ffuf/pkg/ffuf"
)
const AllStatuses = 0
type StatusFilter struct {
Value []ffuf.ValueRange
}
func NewStatusFilter(value string) (ffuf.FilterProvider, error) {
var intranges []ffuf.ValueRange
for _, sv := range strings.Split(value, ",") {
if sv == "all" {
intranges = append(intranges, ffuf.ValueRange{AllStatuses, AllStatuses})
} else {
vr, err := ffuf.ValueRangeFromString(sv)
if err != nil {
return &StatusFilter{}, fmt.Errorf("Status filter or matcher (-fc / -mc): invalid value %s", sv)
}
intranges = append(intranges, vr)
}
}
return &StatusFilter{Value: intranges}, nil
}
func (f *StatusFilter) MarshalJSON() ([]byte, error) {
value := make([]string, 0)
for _, v := range f.Value {
if v.Min == 0 && v.Max == 0 {
value = append(value, "all")
} else {
if v.Min == v.Max {
value = append(value, strconv.FormatInt(v.Min, 10))
} else {
value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max))
}
}
}
return json.Marshal(&struct {
Value string `json:"value"`
}{
Value: strings.Join(value, ","),
})
}
func (f *StatusFilter) Filter(response *ffuf.Response) (bool, error) {
for _, iv := range f.Value {
if iv.Min == AllStatuses && iv.Max == AllStatuses {
// Handle the "all" case
return true, nil
}
if iv.Min <= response.StatusCode && response.StatusCode <= iv.Max {
return true, nil
}
}
return false, nil
}
func (f *StatusFilter) Repr() string {
var strval []string
for _, iv := range f.Value {
if iv.Min == AllStatuses && iv.Max == AllStatuses {
strval = append(strval, "all")
} else if iv.Min == iv.Max {
strval = append(strval, strconv.Itoa(int(iv.Min)))
} else {
strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
}
}
return fmt.Sprintf("Response status: %s", strings.Join(strval, ","))
}
ffuf-1.1.0/pkg/filter/status_test.go 0000664 0000000 0000000 00000002023 13706265214 0017421 0 ustar 00root root 0000000 0000000 package filter
import (
"strings"
"testing"
"github.com/ffuf/ffuf/pkg/ffuf"
)
func TestNewStatusFilter(t *testing.T) {
f, _ := NewStatusFilter("200,301,400-410,500")
statusRepr := f.Repr()
if strings.Index(statusRepr, "200,301,400-410,500") == -1 {
t.Errorf("Status filter was expected to have 4 values")
}
}
func TestNewStatusFilterError(t *testing.T) {
_, err := NewStatusFilter("invalid")
if err == nil {
t.Errorf("Was expecting an error from errenous input data")
}
}
func TestStatusFiltering(t *testing.T) {
f, _ := NewStatusFilter("200,301,400-498,500")
for i, test := range []struct {
input int64
output bool
}{
{200, true},
{301, true},
{500, true},
{4, false},
{399, false},
{400, true},
{444, true},
{498, true},
{499, false},
{302, false},
} {
resp := ffuf.Response{StatusCode: test.input}
filterReturn, _ := f.Filter(&resp)
if filterReturn != test.output {
t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn)
}
}
}
ffuf-1.1.0/pkg/filter/words.go 0000664 0000000 0000000 00000003002 13706265214 0016173 0 ustar 00root root 0000000 0000000 package filter
import (
"encoding/json"
"fmt"
"strconv"
"strings"
"github.com/ffuf/ffuf/pkg/ffuf"
)
type WordFilter struct {
Value []ffuf.ValueRange
}
func NewWordFilter(value string) (ffuf.FilterProvider, error) {
var intranges []ffuf.ValueRange
for _, sv := range strings.Split(value, ",") {
vr, err := ffuf.ValueRangeFromString(sv)
if err != nil {
return &WordFilter{}, fmt.Errorf("Word filter or matcher (-fw / -mw): invalid value: %s", sv)
}
intranges = append(intranges, vr)
}
return &WordFilter{Value: intranges}, nil
}
func (f *WordFilter) MarshalJSON() ([]byte, error) {
value := make([]string, 0)
for _, v := range f.Value {
if v.Min == v.Max {
value = append(value, strconv.FormatInt(v.Min, 10))
} else {
value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max))
}
}
return json.Marshal(&struct {
Value string `json:"value"`
}{
Value: strings.Join(value, ","),
})
}
func (f *WordFilter) Filter(response *ffuf.Response) (bool, error) {
wordsSize := len(strings.Split(string(response.Data), " "))
for _, iv := range f.Value {
if iv.Min <= int64(wordsSize) && int64(wordsSize) <= iv.Max {
return true, nil
}
}
return false, nil
}
func (f *WordFilter) Repr() string {
var strval []string
for _, iv := range f.Value {
if iv.Min == iv.Max {
strval = append(strval, strconv.Itoa(int(iv.Min)))
} else {
strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
}
}
return fmt.Sprintf("Response words: %s", strings.Join(strval, ","))
}
ffuf-1.1.0/pkg/filter/words_test.go 0000664 0000000 0000000 00000002161 13706265214 0017237 0 ustar 00root root 0000000 0000000 package filter
import (
"strings"
"testing"
"github.com/ffuf/ffuf/pkg/ffuf"
)
func TestNewWordFilter(t *testing.T) {
f, _ := NewWordFilter("200,301,400-410,500")
wordsRepr := f.Repr()
if strings.Index(wordsRepr, "200,301,400-410,500") == -1 {
t.Errorf("Word filter was expected to have 4 values")
}
}
func TestNewWordFilterError(t *testing.T) {
_, err := NewWordFilter("invalid")
if err == nil {
t.Errorf("Was expecting an error from errenous input data")
}
}
func TestWordFiltering(t *testing.T) {
f, _ := NewWordFilter("200,301,402-450,500")
for i, test := range []struct {
input int64
output bool
}{
{200, true},
{301, true},
{500, true},
{4, false},
{444, true},
{302, false},
{401, false},
{402, true},
{450, true},
{451, false},
} {
var data []string
for i := int64(0); i < test.input; i++ {
data = append(data, "A")
}
resp := ffuf.Response{Data: []byte(strings.Join(data, " "))}
filterReturn, _ := f.Filter(&resp)
if filterReturn != test.output {
t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn)
}
}
}
ffuf-1.1.0/pkg/input/ 0000775 0000000 0000000 00000000000 13706265214 0014365 5 ustar 00root root 0000000 0000000 ffuf-1.1.0/pkg/input/command.go 0000664 0000000 0000000 00000003030 13706265214 0016326 0 ustar 00root root 0000000 0000000 package input
import (
"bytes"
"os"
"os/exec"
"strconv"
"github.com/ffuf/ffuf/pkg/ffuf"
)
type CommandInput struct {
config *ffuf.Config
count int
keyword string
command string
}
func NewCommandInput(keyword string, value string, conf *ffuf.Config) (*CommandInput, error) {
var cmd CommandInput
cmd.keyword = keyword
cmd.config = conf
cmd.count = 0
cmd.command = value
return &cmd, nil
}
//Keyword returns the keyword assigned to this InternalInputProvider
func (c *CommandInput) Keyword() string {
return c.keyword
}
//Position will return the current position in the input list
func (c *CommandInput) Position() int {
return c.count
}
//ResetPosition will reset the current position of the InternalInputProvider
func (c *CommandInput) ResetPosition() {
c.count = 0
}
//IncrementPosition increments the current position in the inputprovider
func (c *CommandInput) IncrementPosition() {
c.count += 1
}
//Next will increment the cursor position, and return a boolean telling if there's iterations left
func (c *CommandInput) Next() bool {
if c.count >= c.config.InputNum {
return false
}
return true
}
//Value returns the input from command stdoutput
func (c *CommandInput) Value() []byte {
var stdout bytes.Buffer
os.Setenv("FFUF_NUM", strconv.Itoa(c.count))
cmd := exec.Command(SHELL_CMD, SHELL_ARG, c.command)
cmd.Stdout = &stdout
err := cmd.Run()
if err != nil {
return []byte("")
}
return stdout.Bytes()
}
//Total returns the size of wordlist
func (c *CommandInput) Total() int {
return c.config.InputNum
}
ffuf-1.1.0/pkg/input/const.go 0000664 0000000 0000000 00000000126 13706265214 0016041 0 ustar 00root root 0000000 0000000 // +build !windows
package input
const (
SHELL_CMD = "/bin/sh"
SHELL_ARG = "-c"
)
ffuf-1.1.0/pkg/input/const_windows.go 0000664 0000000 0000000 00000000125 13706265214 0017612 0 ustar 00root root 0000000 0000000 // +build windows
package input
const (
SHELL_CMD = "cmd.exe"
SHELL_ARG = "/C"
)
ffuf-1.1.0/pkg/input/input.go 0000664 0000000 0000000 00000007325 13706265214 0016062 0 ustar 00root root 0000000 0000000 package input
import (
"fmt"
"github.com/ffuf/ffuf/pkg/ffuf"
)
type MainInputProvider struct {
Providers []ffuf.InternalInputProvider
Config *ffuf.Config
position int
msbIterator int
}
func NewInputProvider(conf *ffuf.Config) (ffuf.InputProvider, error) {
validmode := false
for _, mode := range []string{"clusterbomb", "pitchfork"} {
if conf.InputMode == mode {
validmode = true
}
}
if !validmode {
return &MainInputProvider{}, fmt.Errorf("Input mode (-mode) %s not recognized", conf.InputMode)
}
return &MainInputProvider{Config: conf, msbIterator: 0}, nil
}
func (i *MainInputProvider) AddProvider(provider ffuf.InputProviderConfig) error {
if provider.Name == "command" {
newcomm, _ := NewCommandInput(provider.Keyword, provider.Value, i.Config)
i.Providers = append(i.Providers, newcomm)
} else {
// Default to wordlist
newwl, err := NewWordlistInput(provider.Keyword, provider.Value, i.Config)
if err != nil {
return err
}
i.Providers = append(i.Providers, newwl)
}
return nil
}
//Position will return the current position of progress
func (i *MainInputProvider) Position() int {
return i.position
}
//Next will increment the cursor position, and return a boolean telling if there's inputs left
func (i *MainInputProvider) Next() bool {
if i.position >= i.Total() {
return false
}
i.position++
return true
}
//Value returns a map of inputs for keywords
func (i *MainInputProvider) Value() map[string][]byte {
retval := make(map[string][]byte)
if i.Config.InputMode == "clusterbomb" {
retval = i.clusterbombValue()
}
if i.Config.InputMode == "pitchfork" {
retval = i.pitchforkValue()
}
return retval
}
//Reset resets all the inputproviders and counters
func (i *MainInputProvider) Reset() {
for _, p := range i.Providers {
p.ResetPosition()
}
i.position = 0
i.msbIterator = 0
}
//pitchforkValue returns a map of keyword:value pairs including all inputs.
//This mode will iterate through wordlists in lockstep.
func (i *MainInputProvider) pitchforkValue() map[string][]byte {
values := make(map[string][]byte)
for _, p := range i.Providers {
if !p.Next() {
// Loop to beginning if the inputprovider has been exhausted
p.ResetPosition()
}
values[p.Keyword()] = p.Value()
p.IncrementPosition()
}
return values
}
//clusterbombValue returns map of keyword:value pairs including all inputs.
//this mode will iterate through all possible combinations.
func (i *MainInputProvider) clusterbombValue() map[string][]byte {
values := make(map[string][]byte)
// Should we signal the next InputProvider in the slice to increment
signalNext := false
first := true
for index, p := range i.Providers {
if signalNext {
p.IncrementPosition()
signalNext = false
}
if !p.Next() {
// No more inputs in this inputprovider
if index == i.msbIterator {
// Reset all previous wordlists and increment the msb counter
i.msbIterator += 1
i.clusterbombIteratorReset()
// Start again
return i.clusterbombValue()
}
p.ResetPosition()
signalNext = true
}
values[p.Keyword()] = p.Value()
if first {
p.IncrementPosition()
first = false
}
}
return values
}
func (i *MainInputProvider) clusterbombIteratorReset() {
for index, p := range i.Providers {
if index < i.msbIterator {
p.ResetPosition()
}
if index == i.msbIterator {
p.IncrementPosition()
}
}
}
//Total returns the amount of input combinations available
func (i *MainInputProvider) Total() int {
count := 0
if i.Config.InputMode == "pitchfork" {
for _, p := range i.Providers {
if p.Total() > count {
count = p.Total()
}
}
}
if i.Config.InputMode == "clusterbomb" {
count = 1
for _, p := range i.Providers {
count = count * p.Total()
}
}
return count
}
ffuf-1.1.0/pkg/input/wordlist.go 0000664 0000000 0000000 00000007075 13706265214 0016574 0 ustar 00root root 0000000 0000000 package input
import (
"bufio"
"os"
"regexp"
"strings"
"github.com/ffuf/ffuf/pkg/ffuf"
)
type WordlistInput struct {
config *ffuf.Config
data [][]byte
position int
keyword string
}
func NewWordlistInput(keyword string, value string, conf *ffuf.Config) (*WordlistInput, error) {
var wl WordlistInput
wl.keyword = keyword
wl.config = conf
wl.position = 0
var valid bool
var err error
// stdin?
if value == "-" {
// yes
valid = true
} else {
// no
valid, err = wl.validFile(value)
}
if err != nil {
return &wl, err
}
if valid {
err = wl.readFile(value)
}
return &wl, err
}
//Position will return the current position in the input list
func (w *WordlistInput) Position() int {
return w.position
}
//ResetPosition resets the position back to beginning of the wordlist.
func (w *WordlistInput) ResetPosition() {
w.position = 0
}
//Keyword returns the keyword assigned to this InternalInputProvider
func (w *WordlistInput) Keyword() string {
return w.keyword
}
//Next will increment the cursor position, and return a boolean telling if there's words left in the list
func (w *WordlistInput) Next() bool {
if w.position >= len(w.data) {
return false
}
return true
}
//IncrementPosition will increment the current position in the inputprovider data slice
func (w *WordlistInput) IncrementPosition() {
w.position += 1
}
//Value returns the value from wordlist at current cursor position
func (w *WordlistInput) Value() []byte {
return w.data[w.position]
}
//Total returns the size of wordlist
func (w *WordlistInput) Total() int {
return len(w.data)
}
//validFile checks that the wordlist file exists and can be read
func (w *WordlistInput) validFile(path string) (bool, error) {
_, err := os.Stat(path)
if err != nil {
return false, err
}
f, err := os.Open(path)
if err != nil {
return false, err
}
f.Close()
return true, nil
}
//readFile reads the file line by line to a byte slice
func (w *WordlistInput) readFile(path string) error {
var file *os.File
var err error
if path == "-" {
file = os.Stdin
} else {
file, err = os.Open(path)
if err != nil {
return err
}
}
defer file.Close()
var data [][]byte
var ok bool
reader := bufio.NewScanner(file)
re := regexp.MustCompile(`(?i)%ext%`)
for reader.Scan() {
if w.config.DirSearchCompat && len(w.config.Extensions) > 0 {
text := []byte(reader.Text())
if re.Match(text) {
for _, ext := range w.config.Extensions {
contnt := re.ReplaceAll(text, []byte(ext))
data = append(data, []byte(contnt))
}
} else {
text := reader.Text()
if w.config.IgnoreWordlistComments {
text, ok = stripComments(text)
if !ok {
continue
}
}
data = append(data, []byte(text))
}
} else {
text := reader.Text()
if w.config.IgnoreWordlistComments {
text, ok = stripComments(text)
if !ok {
continue
}
}
data = append(data, []byte(text))
if w.keyword == "FUZZ" && len(w.config.Extensions) > 0 {
for _, ext := range w.config.Extensions {
data = append(data, []byte(text+ext))
}
}
}
}
w.data = data
return reader.Err()
}
// stripComments removes all kind of comments from the word
func stripComments(text string) (string, bool) {
// If the line starts with a # ignoring any space on the left,
// return blank.
if strings.HasPrefix(strings.TrimLeft(text, " "), "#") {
return "", false
}
// If the line has # later after a space, that's a comment.
// Only send the word upto space to the routine.
index := strings.Index(text, " #")
if index == -1 {
return text, true
}
return text[:index], true
}
ffuf-1.1.0/pkg/output/ 0000775 0000000 0000000 00000000000 13706265214 0014566 5 ustar 00root root 0000000 0000000 ffuf-1.1.0/pkg/output/const.go 0000664 0000000 0000000 00000000372 13706265214 0016245 0 ustar 00root root 0000000 0000000 // +build !windows
package output
const (
TERMINAL_CLEAR_LINE = "\r\x1b[2K"
ANSI_CLEAR = "\x1b[0m"
ANSI_RED = "\x1b[31m"
ANSI_GREEN = "\x1b[32m"
ANSI_BLUE = "\x1b[34m"
ANSI_YELLOW = "\x1b[33m"
)
ffuf-1.1.0/pkg/output/const_windows.go 0000664 0000000 0000000 00000000315 13706265214 0020014 0 ustar 00root root 0000000 0000000 // +build windows
package output
const (
TERMINAL_CLEAR_LINE = "\r\r"
ANSI_CLEAR = ""
ANSI_RED = ""
ANSI_GREEN = ""
ANSI_BLUE = ""
ANSI_YELLOW = ""
)
ffuf-1.1.0/pkg/output/file_csv.go 0000664 0000000 0000000 00000003067 13706265214 0016715 0 ustar 00root root 0000000 0000000 package output
import (
"encoding/base64"
"encoding/csv"
"os"
"strconv"
"github.com/ffuf/ffuf/pkg/ffuf"
)
var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "resultfile"}
func writeCSV(config *ffuf.Config, res []Result, encode bool) error {
header := make([]string, 0)
f, err := os.Create(config.OutputFile)
if err != nil {
return err
}
defer f.Close()
w := csv.NewWriter(f)
defer w.Flush()
for _, inputprovider := range config.InputProviders {
header = append(header, inputprovider.Keyword)
}
for _, item := range staticheaders {
header = append(header, item)
}
if err := w.Write(header); err != nil {
return err
}
for _, r := range res {
if encode {
inputs := make(map[string][]byte, 0)
for k, v := range r.Input {
inputs[k] = []byte(base64encode(v))
}
r.Input = inputs
}
err := w.Write(toCSV(r))
if err != nil {
return err
}
}
return nil
}
func base64encode(in []byte) string {
return base64.StdEncoding.EncodeToString(in)
}
func toCSV(r Result) []string {
res := make([]string, 0)
for _, v := range r.Input {
res = append(res, string(v))
}
res = append(res, r.Url)
res = append(res, r.RedirectLocation)
res = append(res, strconv.Itoa(r.Position))
res = append(res, strconv.FormatInt(r.StatusCode, 10))
res = append(res, strconv.FormatInt(r.ContentLength, 10))
res = append(res, strconv.FormatInt(r.ContentWords, 10))
res = append(res, strconv.FormatInt(r.ContentLines, 10))
res = append(res, r.ResultFile)
return res
}
ffuf-1.1.0/pkg/output/file_html.go 0000664 0000000 0000000 00000011703 13706265214 0017062 0 ustar 00root root 0000000 0000000 package output
import (
"html/template"
"os"
"time"
"github.com/ffuf/ffuf/pkg/ffuf"
)
type htmlFileOutput struct {
CommandLine string
Time string
Keys []string
Results []Result
}
const (
htmlTemplate = `