Mirror of https://github.com/projectdiscovery/nuclei.git (synced 2026-02-02 00:33:10 +08:00)
* added logs for debug
* fixes
* removed logs
* using cache item
* implemented multiple tests
* fixed some unit tests
* implemented test for skipping
* added multiple tests together
* added mark failed
* fix on tests
* better test implementation + concurrent
* fix: fixes on concurrent tests
* removed parallel and 1 unit test
  DOCS: by default, the go test command runs tests for different packages in parallel; the default number of parallel packages is the number of CPUs available (see go help build)
* fixes on goroutine
* increasing parallelism of once.Do
* bumping Go to 1.19 for atomic types support
* removing redundant check + fixing test concurrency on create

Co-authored-by: Mzack9999 <mzack9999@protonmail.com>
Co-authored-by: mzack <marco.rivoli.nvh@gmail.com>
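The DOCS note in the commit message refers to package-level parallelism: go test builds and runs the test binaries of different packages in parallel (the -p build flag, which defaults to the number of available CPUs), while tests inside a single package run sequentially unless they opt in with t.Parallel(). The sketch below is illustrative only and not part of this commit; the package clause, test name, and loop bound are invented for the example, and it only shows the grouped-parallel pattern that keeps assertions after concurrent subtests safe.

package hosterrorscache_test

import (
	"sync/atomic"
	"testing"
)

func TestParallelIncrements(t *testing.T) {
	var counter atomic.Int32 // struct-based atomic types need Go 1.19, as noted in the commit message
	t.Run("group", func(t *testing.T) {
		for i := 0; i < 8; i++ {
			t.Run("add", func(t *testing.T) {
				t.Parallel() // subtests in this group run concurrently with each other
				counter.Add(1)
			})
		}
	})
	// t.Run("group", ...) does not return until its parallel subtests have
	// finished, so the counter is final here.
	if got := counter.Load(); got != 8 {
		t.Errorf("expected 8 increments, got %d", got)
	}
}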
@@ -5,9 +5,10 @@ import (
 	"net/url"
 	"regexp"
 	"strings"
+	"sync"
 	"sync/atomic"
 
 	"github.com/bluele/gcache"
 
 	"github.com/projectdiscovery/gologger"
 )
@@ -31,6 +32,11 @@ type Cache struct {
 	failedTargets gcache.Cache
 }
 
+type cacheItem struct {
+	errors atomic.Int32
+	sync.Once
+}
+
 const DefaultMaxHostsCount = 10000
 
 // New returns a new host max errors cache
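The added cacheItem type is what the commit message calls "using cache item": instead of a bare int, each host now maps to a struct holding an atomic.Int32 failure counter (the struct-based atomic types are why Go is bumped to 1.19) and an embedded sync.Once that is used later to emit the skip message only once. A minimal standalone sketch of how those two pieces behave; this is an illustration of the pattern, not nuclei code, and the item name is invented:

package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

// item has the same shape as cacheItem: a counter that can be updated
// without a mutex, plus a Once that runs a function at most one time.
type item struct {
	errors atomic.Int32
	sync.Once
}

func main() {
	it := &item{}
	it.errors.Store(1)            // first failure, as MarkFailed does for a new host
	it.errors.Add(1)              // any later failure
	fmt.Println(it.errors.Load()) // 2

	it.Do(func() { fmt.Println("logged once") }) // runs
	it.Do(func() { fmt.Println("logged once") }) // skipped: the Once has already fired
}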
@@ -87,19 +93,17 @@ func (c *Cache) Check(value string) bool {
 		return false
 	}
 
-	numberOfErrors, err := c.failedTargets.GetIFPresent(finalValue)
+	existingCacheItem, err := c.failedTargets.GetIFPresent(finalValue)
 	if err != nil {
 		return false
 	}
-	numberOfErrorsValue := numberOfErrors.(int)
+	existingCacheItemValue := existingCacheItem.(*cacheItem)
 
-	if numberOfErrors == -1 {
-		return true
-	}
-	if numberOfErrorsValue >= c.MaxHostError {
-		_ = c.failedTargets.Set(finalValue, -1)
+	if existingCacheItemValue.errors.Load() >= int32(c.MaxHostError) {
 		if c.verbose {
-			gologger.Verbose().Msgf("Skipping %s as previously unresponsive %d times", finalValue, numberOfErrorsValue)
+			existingCacheItemValue.Do(func() {
+				gologger.Verbose().Msgf("Skipping %s as previously unresponsive %d times", finalValue, existingCacheItemValue.errors.Load())
+			})
 		}
 		return true
 	}
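In the rewritten Check, the threshold test is a pure read (errors.Load() compared against MaxHostError) and the verbose skip message goes through the item's Do, so concurrent callers no longer race the way the old code could when it overwrote the cache entry with the -1 sentinel. A minimal sketch of that read path under concurrency, simplified to a single shared item instead of a gcache lookup and a counter instead of gologger:

package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

type item struct {
	errors atomic.Int32
	sync.Once
}

func main() {
	const maxHostError = 3
	shared := &item{}
	shared.errors.Store(5) // pretend MarkFailed already counted 5 failures

	var logged atomic.Int32 // stands in for the gologger.Verbose() call
	var wg sync.WaitGroup
	for i := 0; i < 50; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			// the Check read path: load the counter, compare, log at most once
			if shared.errors.Load() >= int32(maxHostError) {
				shared.Do(func() { logged.Add(1) })
			}
		}()
	}
	wg.Wait()
	fmt.Println("skip message emitted", logged.Load(), "time(s)") // always 1
}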
@@ -112,26 +116,23 @@ func (c *Cache) MarkFailed(value string, err error) {
 		return
 	}
 	finalValue := c.normalizeCacheValue(value)
-	if !c.failedTargets.Has(finalValue) {
-		_ = c.failedTargets.Set(finalValue, 1)
+	existingCacheItem, err := c.failedTargets.GetIFPresent(finalValue)
+	if err != nil || existingCacheItem == nil {
+		newItem := &cacheItem{errors: atomic.Int32{}}
+		newItem.errors.Store(1)
+		_ = c.failedTargets.Set(finalValue, newItem)
 		return
 	}
-
-	numberOfErrors, err := c.failedTargets.GetIFPresent(finalValue)
-	if err != nil || numberOfErrors == nil {
-		_ = c.failedTargets.Set(finalValue, 1)
-		return
-	}
-	numberOfErrorsValue := numberOfErrors.(int)
-
-	_ = c.failedTargets.Set(finalValue, numberOfErrorsValue+1)
+	existingCacheItemValue := existingCacheItem.(*cacheItem)
+	existingCacheItemValue.errors.Add(1)
+	_ = c.failedTargets.Set(finalValue, existingCacheItemValue)
 }
 
-var checkErrorRegexp = regexp.MustCompile(`(no address found for host|Client\.Timeout exceeded while awaiting headers|could not resolve host|connection refused)`)
+var reCheckError = regexp.MustCompile(`(no address found for host|Client\.Timeout exceeded while awaiting headers|could not resolve host|connection refused)`)
 
 // checkError checks if an error represents a type that should be
 // added to the host skipping table.
 func (c *Cache) checkError(err error) bool {
 	errString := err.Error()
-	return checkErrorRegexp.MatchString(errString)
+	return reCheckError.MatchString(errString)
 }
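MarkFailed now creates the cacheItem on the first qualifying failure and atomically increments it afterwards, and only errors matching reCheckError are counted at all: checkError gates the whole path on a few network failure signatures. The regular expression from the diff can be exercised directly; the pattern below is copied from the change, while the sample error strings are invented for illustration:

package main

import (
	"fmt"
	"regexp"
)

// the exact pattern used by checkError in the diff above
var reCheckError = regexp.MustCompile(`(no address found for host|Client\.Timeout exceeded while awaiting headers|could not resolve host|connection refused)`)

func main() {
	samples := []string{
		"dial tcp 10.0.0.1:443: connect: connection refused", // counted as a host error
		"could not resolve host: nonexistent.example",        // counted as a host error
		"unexpected status code 500",                         // ignored by MarkFailed
	}
	for _, s := range samples {
		fmt.Printf("%-55s -> %v\n", s, reCheckError.MatchString(s))
	}
}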