Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Implemented rate limit for shortscan cmd. #23

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 30 additions & 0 deletions pkg/shortscan/shortscan.go
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,9 @@ var statusCache map[string]map[int]struct{}
var distanceCache map[string]map[int]distances
var checksumRegex *regexp.Regexp

// requestDelay is the minimum interval between successive HTTP requests,
// computed in Run from the Rate argument (requests per second). A value of
// zero — the default — means no rate limiting is applied.
var requestDelay time.Duration

// Command-line arguments and help
type arguments struct {
Urls []string `arg:"positional,required" help:"url to scan (multiple URLs can be provided; a file containing URLs can be specified with an «at» prefix, for example: @urls.txt)" placeholder:"URL"`
Expand All @@ -148,6 +151,7 @@ type arguments struct {
FullUrl bool `arg:"-F" help:"display the full URL for confirmed files rather than just the filename" default:"false"`
NoRecurse bool `arg:"-n" help:"don't detect and recurse into subdirectories (disabled when autocomplete is disabled)" default:"false"`
Stabilise bool `arg:"-s" help:"attempt to get coherent autocomplete results from an unstable server (generates more requests)" default:"false"`
Rate uint `arg:"-r" help:"maximum requests per second" default:"0"`
Patience int `arg:"-p" help:"patience level when determining vulnerability (0 = patient; 1 = very patient)" placeholder:"LEVEL" default:"0"`
Characters string `arg:"-C" help:"filename characters to enumerate" default:"JFKGOTMYVHSPCANDXLRWEBQUIZ8549176320-_()&'!#$%@^{}~"`
Autocomplete string `arg:"-a" help:"autocomplete detection mode (auto = autoselect; method = HTTP method magic; status = HTTP status; distance = Levenshtein distance; none = disable)" placeholder:"mode" default:"auto"`
Expand All @@ -170,8 +174,25 @@ func pathEscape(url string) string {
return strings.Replace(nurl.QueryEscape(url), "+", "%20", -1)
}

// lastRequestTime records when delayRequest last released a request; it is
// the reference point used to space out subsequent requests.
//
// NOTE(review): this variable is read and updated without synchronisation.
// If fetch is ever called from multiple goroutines the limiter will race and
// under-throttle — confirm the call path is single-threaded or guard this
// with a mutex (or replace with golang.org/x/time/rate.Limiter).
var lastRequestTime time.Time

// delayRequest blocks until at least requestDelay has elapsed since the
// previous request, enforcing the configured rate limit. When rate limiting
// is disabled (requestDelay == 0, the default) it returns immediately, so
// unthrottled scans pay no sleep/log overhead per request.
func delayRequest() {
	// Fast path: no rate limit configured. Also avoids emitting the
	// Info-level metadata log line on every request of an unthrottled scan.
	if requestDelay <= 0 {
		return
	}

	// Sleep for whatever remains of the minimum inter-request interval;
	// time.Sleep returns immediately for non-positive durations.
	time.Sleep(time.Until(lastRequestTime.Add(requestDelay)))

	log.WithFields(log.Fields{"lastRequestTime": lastRequestTime, "requestDelay": requestDelay}).Info("Rate limit metadata")

	// Record the release time so the next request is measured from now.
	lastRequestTime = time.Now()
}

// fetch requests the given URL and returns an HTTP response object, handling retries gracefully
func fetch(hc *http.Client, st *httpStats, method string, url string) (*http.Response, error) {
// Sleep until we are within rate limit constraints
delayRequest()

// Create a request object
req, err := http.NewRequest(method, url, nil)
Expand Down Expand Up @@ -1047,6 +1068,15 @@ func Run() {
p.Fail("output must be one of: human, json")
}

// A rate limit of 0 (the default) disables rate limiting, leaving the delay at zero
delay := args.Rate
if args.Rate != 0 {
delay = 1_000_000 / args.Rate
}

// Initialize the calculated delay between requests, according to rate limit
requestDelay = time.Duration(delay) * time.Microsecond

// Build the list of URLs to scan
var urls []string
for _, url := range args.Urls {
Expand Down