re-order function def
ocervell committed Nov 16, 2023
1 parent 579eb6c commit 91e631e
Showing 6 changed files with 73 additions and 80 deletions.
2 changes: 0 additions & 2 deletions secator/tasks/ffuf.py
@@ -82,5 +82,3 @@ class ffuf(HttpFuzzer):
    def on_item(self, item):
        item.method = self.get_opt_value(METHOD) or 'GET'
        return item

# TODO: write custom item_loader to pick up Progress items too
4 changes: 0 additions & 4 deletions secator/tasks/gau.py
@@ -41,7 +41,3 @@ class gau(HttpCrawler):
    proxy_socks5 = True
    proxy_http = True
    profile = 'io'

    # @staticmethod
    # def validate_item(self, item):
    #     return item['url'] == 'response'
93 changes: 46 additions & 47 deletions secator/tasks/grype.py
@@ -7,52 +7,6 @@
from secator.tasks._categories import VulnCode


def grype_item_loader(self, line):
    """Load vulnerability dicts from grype line output."""
    split = [i for i in line.split(' ') if i]
    if not len(split) in [5, 6] or split[0] == 'NAME':
        return None
    version_fixed = None
    if len(split) == 5:  # no version fixed
        product, version, product_type, vuln_id, severity = tuple(split)
    elif len(split) == 6:
        product, version, version_fixed, product_type, vuln_id, severity = tuple(split)
    extra_data = {
        'lang': product_type,
        'product': product,
        'version': version,
    }
    if version_fixed:
        extra_data['version_fixed'] = version_fixed
    data = {
        'id': vuln_id,
        'name': vuln_id,
        'matched_at': self.input,
        'confidence': 'medium',
        'severity': severity.lower(),
        'provider': 'grype',
        'cvss_score': -1,
        'tags': [],
    }
    if vuln_id.startswith('GHSA'):
        data['provider'] = 'github.com'
        data['references'] = [f'https://github.com/advisories/{vuln_id}']
        data['tags'].extend(['cve', 'ghsa'])
        vuln = VulnCode.lookup_ghsa(vuln_id)
        if vuln:
            data.update(vuln)
            data['severity'] = data['severity'] or severity.lower()
        extra_data['ghsa_id'] = vuln_id
    elif vuln_id.startswith('CVE'):
        vuln = VulnCode.lookup_cve(vuln_id)
        if vuln:
            vuln['tags'].append('cve')
            data.update(vuln)
            data['severity'] = data['severity'] or severity.lower()
    data['extra_data'] = extra_data
    return data


@task()
class grype(VulnCode):
"""Vulnerability scanner for container images and filesystems."""
@@ -76,4 +30,49 @@ class grype(VulnCode):
    install_cmd = (
        'curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sudo sh -s -- -b /usr/local/bin'
    )
    item_loaders = [grype_item_loader]

    @staticmethod
    def item_loader(self, line):
        """Load vulnerability dicts from grype line output."""
        split = [i for i in line.split(' ') if i]
        if not len(split) in [5, 6] or split[0] == 'NAME':
            return None
        version_fixed = None
        if len(split) == 5:  # no version fixed
            product, version, product_type, vuln_id, severity = tuple(split)
        elif len(split) == 6:
            product, version, version_fixed, product_type, vuln_id, severity = tuple(split)
        extra_data = {
            'lang': product_type,
            'product': product,
            'version': version,
        }
        if version_fixed:
            extra_data['version_fixed'] = version_fixed
        data = {
            'id': vuln_id,
            'name': vuln_id,
            'matched_at': self.input,
            'confidence': 'medium',
            'severity': severity.lower(),
            'provider': 'grype',
            'cvss_score': -1,
            'tags': [],
        }
        if vuln_id.startswith('GHSA'):
            data['provider'] = 'github.com'
            data['references'] = [f'https://github.com/advisories/{vuln_id}']
            data['tags'].extend(['cve', 'ghsa'])
            vuln = VulnCode.lookup_ghsa(vuln_id)
            if vuln:
                data.update(vuln)
                data['severity'] = data['severity'] or severity.lower()
            extra_data['ghsa_id'] = vuln_id
        elif vuln_id.startswith('CVE'):
            vuln = VulnCode.lookup_cve(vuln_id)
            if vuln:
                vuln['tags'].append('cve')
                data.update(vuln)
                data['severity'] = data['severity'] or severity.lower()
        data['extra_data'] = extra_data
        return data
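
Note: the loader above parses grype's default table output (NAME, INSTALLED, FIXED-IN, TYPE, VULNERABILITY, SEVERITY). A minimal standalone sketch of the whitespace split it relies on (the sample row below is illustrative, not taken from this commit):

# Illustrative only: a made-up grype table row, split the same way as item_loader above.
line = 'urllib3    1.26.4    1.26.5    python    GHSA-xxxx-xxxx-xxxx    Medium'
split = [i for i in line.split(' ') if i]
assert len(split) == 6  # a FIXED-IN value is present, so the 6-field unpack applies
product, version, version_fixed, product_type, vuln_id, severity = tuple(split)
print(product, version_fixed, vuln_id, severity.lower())
# -> urllib3 1.26.5 GHSA-xxxx-xxxx-xxxx medium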
26 changes: 13 additions & 13 deletions secator/tasks/httpx.py
@@ -63,19 +63,6 @@ class httpx(Http):
    proxy_http = True
    profile = 'cpu'

    @staticmethod
    def on_item_pre_convert(self, item):
        for k, v in item.items():
            if k == 'time':
                response_time = float(''.join(ch for ch in v if not ch.isalpha()))
                if v[-2:] == 'ms':
                    response_time = response_time / 1000
                item[k] = response_time
            elif k == URL:
                item[k] = sanitize_url(v)
        item[URL] = item.get('final_url') or item[URL]
        return item

    @staticmethod
    def on_init(self):
        debug_resp = self.get_opt_value('debug_resp')
@@ -90,6 +77,19 @@ def on_init(self):
            os.makedirs(self.output_screenshot_path, exist_ok=True)
            self.cmd += f' -sr -srd {output_path}'

    @staticmethod
    def on_item_pre_convert(self, item):
        for k, v in item.items():
            if k == 'time':
                response_time = float(''.join(ch for ch in v if not ch.isalpha()))
                if v[-2:] == 'ms':
                    response_time = response_time / 1000
                item[k] = response_time
            elif k == URL:
                item[k] = sanitize_url(v)
        item[URL] = item.get('final_url') or item[URL]
        return item

    @staticmethod
    def on_end(self):
        if DEFAULT_STORE_HTTP_RESPONSES:
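Note: the 'time' handling in on_item_pre_convert above normalizes httpx duration strings to seconds. A minimal standalone sketch of the same conversion (sample values are illustrative):

# Sketch of the response-time normalization performed in on_item_pre_convert above.
def to_seconds(value: str) -> float:
    seconds = float(''.join(ch for ch in value if not ch.isalpha()))
    if value[-2:] == 'ms':
        seconds = seconds / 1000
    return seconds

print(to_seconds('512ms'))  # 0.512
print(to_seconds('1.2s'))   # 1.2
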
10 changes: 5 additions & 5 deletions secator/tasks/katana.py
@@ -113,11 +113,6 @@ def on_init(self):
            os.makedirs(self.output_response_path, exist_ok=True)
            self.cmd += f' -sr -srd {output_path}'

    @staticmethod
    def on_end(self):
        if DEFAULT_STORE_HTTP_RESPONSES and os.path.exists(self.output_response_path + '/index.txt'):
            os.remove(self.output_response_path + '/index.txt')

    @staticmethod
    def on_item(self, item):
        if not isinstance(item, Url):
@@ -131,3 +126,8 @@ def on_item(self, item):
            fout.writelines('\n')
            fout.writelines(first_line)
        return item

    @staticmethod
    def on_end(self):
        if DEFAULT_STORE_HTTP_RESPONSES and os.path.exists(self.output_response_path + '/index.txt'):
            os.remove(self.output_response_path + '/index.txt')
18 changes: 9 additions & 9 deletions secator/tasks/wpscan.py
@@ -74,15 +74,6 @@ class wpscan(VulnHttp):
    ignore_return_code = True
    profile = 'io'

    @staticmethod
    def on_init(self):
        output_path = self.get_opt_value('output_path')
        if not output_path:
            timestr = get_file_timestamp()
            output_path = f'{DATA_FOLDER}/wpscan_{timestr}.json'
        self.output_path = output_path
        self.cmd += f' -o {self.output_path}'

    def yielder(self):
        prev = self.print_item_count
        self.print_item_count = False
@@ -177,3 +168,12 @@ def yielder(self):
                )

        self.print_item_count = prev

    @staticmethod
    def on_init(self):
        output_path = self.get_opt_value('output_path')
        if not output_path:
            timestr = get_file_timestamp()
            output_path = f'{DATA_FOLDER}/wpscan_{timestr}.json'
        self.output_path = output_path
        self.cmd += f' -o {self.output_path}'
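
Note: when no output_path option is passed, on_init above writes wpscan's JSON report to a timestamped file and appends it to the command with -o. A rough sketch of the resulting flag (the folder and timestamp format are assumptions; the real values come from secator's DATA_FOLDER and get_file_timestamp):

# Sketch only: approximates the default -o path built in on_init above.
import time

DATA_FOLDER = '/tmp/.secator'                   # assumed stand-in for secator's data folder
timestr = time.strftime('%Y_%m_%d-%H_%M_%S')    # assumed timestamp format
output_path = f'{DATA_FOLDER}/wpscan_{timestr}.json'
cmd = f'wpscan --url https://example.com -o {output_path}'
print(cmd)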
