From afa8bf594c22edd11bd51eebaed351d70902dc12 Mon Sep 17 00:00:00 2001 From: sebastien Date: Thu, 15 Feb 2024 17:41:23 +0100 Subject: [PATCH 1/4] Updated VERSION (apivoid) --- engines/apivoid/Dockerfile | 2 +- engines/apivoid/VERSION | 2 +- engines/apivoid/__init__.py | 2 +- engines/apivoid/apivoid.json.sample | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/engines/apivoid/Dockerfile b/engines/apivoid/Dockerfile index 91bba8eb..cc0c955b 100644 --- a/engines/apivoid/Dockerfile +++ b/engines/apivoid/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="APIVoid\ \(Patrowl engine\)" Version="1.4.32" +LABEL Name="APIVoid\ \(Patrowl engine\)" Version="1.4.33" # Create the target repo RUN mkdir -p /opt/patrowl-engines/apivoid diff --git a/engines/apivoid/VERSION b/engines/apivoid/VERSION index 00bbe72a..9baec2fd 100644 --- a/engines/apivoid/VERSION +++ b/engines/apivoid/VERSION @@ -1 +1 @@ -1.4.32 +1.4.33 diff --git a/engines/apivoid/__init__.py b/engines/apivoid/__init__.py index dc091835..4f8b61ff 100644 --- a/engines/apivoid/__init__.py +++ b/engines/apivoid/__init__.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- __title__ = 'patrowl_engine_apivoid' -__version__ = '1.4.32' +__version__ = '1.4.33' __author__ = 'Nicolas MATTIOCCO' __license__ = 'AGPLv3' __copyright__ = 'Copyright (C) 2020-2023 Nicolas Mattiocco - @MaKyOtOx' diff --git a/engines/apivoid/apivoid.json.sample b/engines/apivoid/apivoid.json.sample index b7a7b4c7..aa0852a3 100755 --- a/engines/apivoid/apivoid.json.sample +++ b/engines/apivoid/apivoid.json.sample @@ -1,6 +1,6 @@ { "name": "APIVOID", - "version": "1.4.32", + "version": "1.4.33", "description": "APIVoid reputation API", "allowed_asset_types": ["domain", "fqdn", "ip", "url", "ip-subnet"], "apikeys": [ From 5317b3b76376f5530eb13cf108f9327a587b93fa Mon Sep 17 00:00:00 2001 From: sebastien Date: Thu, 15 Feb 2024 17:42:52 +0100 Subject: [PATCH 2/4] Apply black repo wide --- engines/apivoid/engine-apivoid.py | 404 +++--- 
engines/arachni/engine-arachni.py | 553 ++++---- engines/burp/engine-burp.py | 457 ++++--- engines/censys/engine-censys.py | 1581 +++++++++++++++-------- engines/certstream/engine-certstream.py | 392 ++++-- engines/cortex/engine-cortex.py | 493 ++++--- engines/cybelangel/engine-cybelangel.py | 310 +++-- engines/droopescan/engine-droopescan.py | 624 +++++---- engines/eyewitness/engine-eyewitness.py | 511 +++++--- engines/nessus/engine-nessus.py | 766 ++++++----- engines/nmap/engine-nmap.py | 1 + engines/openvas/engine-openvas-noexe.py | 317 +++-- engines/openvas/engine-openvas-omp.py | 402 ++++-- engines/openvas/engine-openvas.py | 806 +++++++----- engines/owl_dns/engine-owl_dns.py | 10 +- engines/sslscan/engine-sslscan.py | 382 ++++-- engines/urlvoid/engine-urlvoid.py | 255 ++-- engines/wpscan/engine-wpscan.py | 696 ++++++---- 18 files changed, 5540 insertions(+), 3420 deletions(-) diff --git a/engines/apivoid/engine-apivoid.py b/engines/apivoid/engine-apivoid.py index dff35a53..8df7feb1 100755 --- a/engines/apivoid/engine-apivoid.py +++ b/engines/apivoid/engine-apivoid.py @@ -1,7 +1,6 @@ #!/usr/bin/python3 # -*- coding: utf-8 -*- """APIVoid PatrOwl engine application.""" - import os import sys import json @@ -23,7 +22,7 @@ APP_DEBUG = False APP_HOST = "0.0.0.0" APP_PORT = 5022 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 25)) +APP_MAXSCANS = int(os.environ.get("APP_MAXSCANS", 25)) APP_ENGINE_NAME = "apivoid" APP_BASE_DIR = os.path.dirname(os.path.realpath(__file__)) VERSION = "1.4.32" @@ -33,7 +32,7 @@ base_dir=APP_BASE_DIR, name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS, - version=VERSION + version=VERSION, ) this = sys.modules[__name__] @@ -55,147 +54,164 @@ def handle_invalid_usage(error): return response -@app.route('/') +@app.route("/") def default(): """Route by default.""" return engine.default() -@app.route('/engines/apivoid/') +@app.route("/engines/apivoid/") def index(): """Return index page.""" return engine.index() 
-@app.route('/engines/apivoid/liveness') +@app.route("/engines/apivoid/liveness") def liveness(): """Return liveness page.""" return engine.liveness() -@app.route('/engines/apivoid/readiness') +@app.route("/engines/apivoid/readiness") def readiness(): """Return readiness page.""" return engine.readiness() -@app.route('/engines/apivoid/test') +@app.route("/engines/apivoid/test") def test(): """Return test page.""" return engine.test() -@app.route('/engines/apivoid/info') +@app.route("/engines/apivoid/info") def info(): """Get info on running engine.""" return engine.info() -@app.route('/engines/apivoid/clean') +@app.route("/engines/apivoid/clean") def clean(): """Clean all scans.""" return engine.clean() -@app.route('/engines/apivoid/clean/') +@app.route("/engines/apivoid/clean/") def clean_scan(scan_id): """Clean scan identified by id.""" return engine.clean_scan(scan_id) -@app.route('/engines/apivoid/status') +@app.route("/engines/apivoid/status") def status(): res = {"page": "status"} if len(engine.scans) == APP_MAXSCANS * 2: - engine.scanner['status'] = "BUSY" + engine.scanner["status"] = "BUSY" else: - engine.scanner['status'] = "READY" + engine.scanner["status"] = "READY" scans = [] for scan_id in engine.scans.keys(): status_scan(scan_id) - scans.append({scan_id: { - "status": engine.scans[scan_id]['status'], - "started_at": engine.scans[scan_id]['started_at'], - "assets": engine.scans[scan_id]['assets'] - }}) - - res.update({ - "nb_scans": len(engine.scans), - "status": engine.scanner['status'], - "scanner": engine.scanner, - "scans": scans}) + scans.append( + { + scan_id: { + "status": engine.scans[scan_id]["status"], + "started_at": engine.scans[scan_id]["started_at"], + "assets": engine.scans[scan_id]["assets"], + } + } + ) + + res.update( + { + "nb_scans": len(engine.scans), + "status": engine.scanner["status"], + "scanner": engine.scanner, + "scans": scans, + } + ) return jsonify(res) -@app.route('/engines/apivoid/status/') 
+@app.route("/engines/apivoid/status/") def status_scan(scan_id): """Get status on scan identified by id.""" if scan_id not in engine.scans.keys(): - return jsonify({ - "status": "ERROR", - "details": "scan_id '{}' not found".format(scan_id) - }) + return jsonify( + {"status": "ERROR", "details": "scan_id '{}' not found".format(scan_id)} + ) all_threads_finished = True - if 'futures' in engine.scans[scan_id]: - for f in engine.scans[scan_id]['futures']: + if "futures" in engine.scans[scan_id]: + for f in engine.scans[scan_id]["futures"]: if not f.done(): - engine.scans[scan_id]['status'] = "SCANNING" + engine.scans[scan_id]["status"] = "SCANNING" all_threads_finished = False break else: - engine.scans[scan_id]['futures'].remove(f) + engine.scans[scan_id]["futures"].remove(f) try: - if all_threads_finished and len(engine.scans[scan_id]['threads']) == 0 and len(engine.scans[scan_id]['futures']) == 0: - engine.scans[scan_id]['status'] = "FINISHED" - engine.scans[scan_id]['finished_at'] = int(time.time() * 1000) + if ( + all_threads_finished + and len(engine.scans[scan_id]["threads"]) == 0 + and len(engine.scans[scan_id]["futures"]) == 0 + ): + engine.scans[scan_id]["status"] = "FINISHED" + engine.scans[scan_id]["finished_at"] = int(time.time() * 1000) except Exception: pass - return jsonify({"status": engine.scans[scan_id]['status']}) + return jsonify({"status": engine.scans[scan_id]["status"]}) -@app.route('/engines/apivoid/stopscans') +@app.route("/engines/apivoid/stopscans") def stop(): """Stop all scans.""" return engine.stop() -@app.route('/engines/apivoid/stop/') +@app.route("/engines/apivoid/stop/") def stop_scan(scan_id): """Stop scan identified by id.""" return engine.stop_scan(scan_id) -@app.route('/engines/apivoid/getreport/') +@app.route("/engines/apivoid/getreport/") def getreport(scan_id): if not scan_id.isdecimal(): - return jsonify({"status": "error", "reason": "scan_id must be numeric digits only"}) + return jsonify( + {"status": "error", "reason": 
"scan_id must be numeric digits only"} + ) filepath = f"{APP_BASE_DIR}/results/apivoid_{scan_id}.json" if not os.path.exists(filepath): - return jsonify({"status": "error", "reason": f"report file for scan_id '{scan_id}' not found"}) + return jsonify( + { + "status": "error", + "reason": f"report file for scan_id '{scan_id}' not found", + } + ) return send_from_directory(f"{APP_BASE_DIR}/results/", "apivoid_{scan_id}.json") def _loadconfig(): - conf_file = APP_BASE_DIR + '/apivoid.json' + conf_file = APP_BASE_DIR + "/apivoid.json" if os.path.exists(conf_file): json_data = open(conf_file) engine.scanner = json.load(json_data) try: - this.keys = os.environ.get('APIVOID_APIKEY', engine.scanner['apikeys'][0]) - engine.scanner['status'] = "READY" + this.keys = os.environ.get("APIVOID_APIKEY", engine.scanner["apikeys"][0]) + engine.scanner["status"] = "READY" except Exception: this.keys = "" - engine.scanner['status'] = "ERROR" + engine.scanner["status"] = "ERROR" app.logger.error("Error: No API KEY available") return {"status": "error", "reason": "No API KEY available"} @@ -204,7 +220,7 @@ def _loadconfig(): return {"status": "error", "reason": "config file not found"} -@app.route('/engines/apivoid/reloadconfig', methods=['GET']) +@app.route("/engines/apivoid/reloadconfig", methods=["GET"]) def reloadconfig(): res = {"page": "reloadconfig"} _loadconfig() @@ -212,64 +228,60 @@ def reloadconfig(): return jsonify(res) -@app.route('/engines/apivoid/startscan', methods=['POST']) +@app.route("/engines/apivoid/startscan", methods=["POST"]) def start_scan(): res = {"page": "startscan"} # Check the scanner is ready to start a new scan if len(engine.scans) == APP_MAXSCANS: - res.update({ - "status": "error", - "reason": f"Scan refused: max concurrent active scans reached ({APP_MAXSCANS})" - }) + res.update( + { + "status": "error", + "reason": f"Scan refused: max concurrent active scans reached ({APP_MAXSCANS})", + } + ) return jsonify(res) status() # 
print(engine.scanner['status']) - if engine.scanner['status'] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": f"Bad scanner status {engine.scanner['status']}" - }}) + if engine.scanner["status"] != "READY": + res.update( + { + "status": "refused", + "details": {"reason": f"Bad scanner status {engine.scanner['status']}"}, + } + ) return jsonify(res) data = json.loads(request.data) # print(data) - if 'assets' not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "no asset specified" - }}) + if "assets" not in data.keys(): + res.update({"status": "refused", "details": {"reason": "no asset specified"}}) return jsonify(res) - if 'scan_id' not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan_id missing" - }}) + if "scan_id" not in data.keys(): + res.update({"status": "refused", "details": {"reason": "scan_id missing"}}) return jsonify(res) assets = [] for asset in data["assets"]: # Value if "value" not in asset.keys() or not asset["value"]: - res.update({ - "status": "error", - "reason": "asset value missing" - }) + res.update({"status": "error", "reason": "asset value missing"}) return jsonify(res) # Supported datatypes if asset["datatype"] not in engine.scanner["allowed_asset_types"]: - res.update({ - "status": "error", - "reason": "asset '{}' has unsupported datatype '{}'".format(asset["value"], asset["datatype"]) - }) + res.update( + { + "status": "error", + "reason": "asset '{}' has unsupported datatype '{}'".format( + asset["value"], asset["datatype"] + ), + } + ) return jsonify(res) - + if asset["datatype"] == "ip-subnet": for ip in get_ips_from_subnet(asset["value"]): assets.append(ip) @@ -278,7 +290,7 @@ def start_scan(): if asset["datatype"] == "url": parsed_uri = urlparse(asset["value"]) asset["value"] = parsed_uri.netloc - + # Check the netloc type if is_valid_ip(asset["value"]): asset["datatype"] == "ip" @@ -287,52 +299,52 @@ def start_scan(): 
assets.append(asset["value"]) - scan_id = str(data['scan_id']) + scan_id = str(data["scan_id"]) - if data['scan_id'] in engine.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": f"scan '{data['scan_id']}' already launched", + if data["scan_id"] in engine.scans.keys(): + res.update( + { + "status": "refused", + "details": { + "reason": f"scan '{data['scan_id']}' already launched", + }, } - }) + ) return jsonify(res) scan = { - 'assets': assets, - 'threads': [], - 'futures': [], - 'options': data['options'], - 'scan_id': scan_id, - 'status': "STARTED", - 'started_at': int(time.time() * 1000), - 'findings': {} + "assets": assets, + "threads": [], + "futures": [], + "options": data["options"], + "scan_id": scan_id, + "status": "STARTED", + "started_at": int(time.time() * 1000), + "findings": {}, } engine.scans.update({scan_id: scan}) - if 'ip_reputation' in scan['options'].keys() and data['options']['ip_reputation']: + if "ip_reputation" in scan["options"].keys() and data["options"]["ip_reputation"]: for asset in data["assets"]: if asset["datatype"] == "ip": th = this.pool.submit(_scan_ip_reputation, scan_id, asset["value"]) - engine.scans[scan_id]['futures'].append(th) + engine.scans[scan_id]["futures"].append(th) elif asset["datatype"] == "ip-subnet": for ip in get_ips_from_subnet(asset["value"]): th = this.pool.submit(_scan_ip_reputation, scan_id, ip) - engine.scans[scan_id]['futures'].append(th) + engine.scans[scan_id]["futures"].append(th) - if 'domain_reputation' in scan['options'].keys() and data['options']['domain_reputation']: + if ( + "domain_reputation" in scan["options"].keys() + and data["options"]["domain_reputation"] + ): for asset in data["assets"]: if asset["datatype"] in ["domain", "fqdn"]: th = this.pool.submit(_scan_domain_reputation, scan_id, asset["value"]) - engine.scans[scan_id]['futures'].append(th) + engine.scans[scan_id]["futures"].append(th) - res.update({ - "status": "accepted", - "details": { - "scan_id": 
scan['scan_id'] - } - }) + res.update({"status": "accepted", "details": {"scan_id": scan["scan_id"]}}) return jsonify(res) @@ -341,9 +353,15 @@ def _scan_ip_reputation(scan_id, asset): if asset not in engine.scans[scan_id]["findings"]: engine.scans[scan_id]["findings"][asset] = {} try: - engine.scans[scan_id]["findings"][asset]['ip_reputation'] = get_report_ip_reputation(scan_id, asset, apikey) + engine.scans[scan_id]["findings"][asset]["ip_reputation"] = ( + get_report_ip_reputation(scan_id, asset, apikey) + ) except Exception as ex: - app.logger.error("_scan_ip_reputation failed: {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__()))) + app.logger.error( + "_scan_ip_reputation failed: {}".format( + re.sub(r"/" + apikey + "/", r"/***/", ex.__str__()) + ) + ) return False return True @@ -354,9 +372,15 @@ def _scan_domain_reputation(scan_id, asset): if asset not in engine.scans[scan_id]["findings"]: engine.scans[scan_id]["findings"][asset] = {} try: - engine.scans[scan_id]["findings"][asset]['domain_reputation'] = get_report_domain_reputation(scan_id, asset, apikey) + engine.scans[scan_id]["findings"][asset]["domain_reputation"] = ( + get_report_domain_reputation(scan_id, asset, apikey) + ) except Exception as ex: - app.logger.error("_scan_domain_reputation failed: {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__()))) + app.logger.error( + "_scan_domain_reputation failed: {}".format( + re.sub(r"/" + apikey + "/", r"/***/", ex.__str__()) + ) + ) return False return True @@ -372,13 +396,19 @@ def check_limit(): def get_report_ip_reputation(scan_id, asset, apikey): """Get APIvoid ip reputation report.""" check_limit() - scan_url = f"https://endpoint.apivoid.com/iprep/v1/pay-as-you-go/?key={apikey}&ip={asset}" + scan_url = ( + f"https://endpoint.apivoid.com/iprep/v1/pay-as-you-go/?key={apikey}&ip={asset}" + ) try: response = requests.get(scan_url) # print(response.content) except Exception as ex: - app.logger.error("get_report_ip_reputation failed: 
{}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__()))) + app.logger.error( + "get_report_ip_reputation failed: {}".format( + re.sub(r"/" + apikey + "/", r"/***/", ex.__str__()) + ) + ) return [] return response.content @@ -393,7 +423,11 @@ def get_report_domain_reputation(scan_id, asset, apikey): response = requests.get(scan_url) # print(response.content) except Exception as ex: - app.logger.error("get_report_domain_reputation failed: {}".format(re.sub(r'/' + apikey + '/', r'/***/', ex.__str__()))) + app.logger.error( + "get_report_domain_reputation failed: {}".format( + re.sub(r"/" + apikey + "/", r"/***/", ex.__str__()) + ) + ) return [] return response.content @@ -403,21 +437,15 @@ def _parse_results(scan_id): status = {"status": "success"} issues = [] summary = {} - nb_vulns = { - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - } + nb_vulns = {"info": 0, "low": 0, "medium": 0, "high": 0, "critical": 0} ts = int(time.time() * 1000) for asset in engine.scans[scan_id]["findings"]: - - if 'ip_reputation' in engine.scans[scan_id]["findings"][asset].keys(): - res = json.loads(engine.scans[scan_id]["findings"][asset]['ip_reputation']) - - if 'data' in res: + + if "ip_reputation" in engine.scans[scan_id]["findings"][asset].keys(): + res = json.loads(engine.scans[scan_id]["findings"][asset]["ip_reputation"]) + + if "data" in res: severity = "info" report_summary = "" try: @@ -427,34 +455,34 @@ def _parse_results(scan_id): severity = "high" elif risk_score >= 70: severity = "medium" - + report_summary = f" (detect:{detections}, risk:{risk_score})" except Exception: pass - - nb_vulns['info'] += 1 - issues.append({ - "issue_id": len(issues) + 1, - "severity": severity, "confidence": "certain", - "target": { - "addr": [asset], - "protocol": "domain" - }, - "title": "IP Reputation Check"+report_summary, - "description": f"IP Reputation Check for '{asset}'\n\nSee raw_data", - "solution": "n/a", - "metadata": { - "tags": ["ip", "reputation"] - }, 
- "type": "ip_reputation", - "raw": res['data'], - "timestamp": ts - }) - - if 'domain_reputation' in engine.scans[scan_id]["findings"][asset].keys(): - res = json.loads(engine.scans[scan_id]["findings"][asset]['domain_reputation']) - - if 'data' in res: + + nb_vulns["info"] += 1 + issues.append( + { + "issue_id": len(issues) + 1, + "severity": severity, + "confidence": "certain", + "target": {"addr": [asset], "protocol": "domain"}, + "title": "IP Reputation Check" + report_summary, + "description": f"IP Reputation Check for '{asset}'\n\nSee raw_data", + "solution": "n/a", + "metadata": {"tags": ["ip", "reputation"]}, + "type": "ip_reputation", + "raw": res["data"], + "timestamp": ts, + } + ) + + if "domain_reputation" in engine.scans[scan_id]["findings"][asset].keys(): + res = json.loads( + engine.scans[scan_id]["findings"][asset]["domain_reputation"] + ) + + if "data" in res: severity = "info" report_summary = "" try: @@ -464,29 +492,27 @@ def _parse_results(scan_id): severity = "high" elif risk_score >= 70: severity = "medium" - + report_summary = f" (detect:{detections}, risk:{risk_score})" except Exception: pass - - nb_vulns['info'] += 1 - issues.append({ - "issue_id": len(issues) + 1, - "severity": severity, "confidence": "certain", - "target": { - "addr": [asset], - "protocol": "domain" - }, - "title": "Domain Reputation Check"+report_summary, - "description": f"Domain Reputation Check for '{asset}'\n\nSee raw_data", - "solution": "n/a", - "metadata": { - "tags": ["domain", "reputation"] - }, - "type": "ip_reputation", - "raw": res['data'], - "timestamp": ts - }) + + nb_vulns["info"] += 1 + issues.append( + { + "issue_id": len(issues) + 1, + "severity": severity, + "confidence": "certain", + "target": {"addr": [asset], "protocol": "domain"}, + "title": "Domain Reputation Check" + report_summary, + "description": f"Domain Reputation Check for '{asset}'\n\nSee raw_data", + "solution": "n/a", + "metadata": {"tags": ["domain", "reputation"]}, + "type": 
"ip_reputation", + "raw": res["data"], + "timestamp": ts, + } + ) summary = { "nb_issues": len(issues), @@ -496,13 +522,13 @@ def _parse_results(scan_id): "nb_high": nb_vulns["high"], "nb_critical": nb_vulns["critical"], "engine_name": "apivoid", - "engine_version": engine.scanner["version"] + "engine_version": engine.scanner["version"], } return status, issues, summary -@app.route('/engines/apivoid/getfindings/', methods=['GET']) +@app.route("/engines/apivoid/getfindings/", methods=["GET"]) def getfindings(scan_id): res = {"page": "getfindings", "scan_id": scan_id} @@ -513,29 +539,31 @@ def getfindings(scan_id): # check if the scan is finished status_scan(scan_id) - if engine.scans[scan_id]['status'] != "FINISHED": - res.update({ - "status": "error", - "reason": f"scan_id '{scan_id}' not finished (status={engine.scans[scan_id]['status']})" - }) + if engine.scans[scan_id]["status"] != "FINISHED": + res.update( + { + "status": "error", + "reason": f"scan_id '{scan_id}' not finished (status={engine.scans[scan_id]['status']})", + } + ) return jsonify(res) status, issues, summary = _parse_results(scan_id) scan = { "scan_id": scan_id, - "assets": engine.scans[scan_id]['assets'], - "options": engine.scans[scan_id]['options'], - "started_at": engine.scans[scan_id]['started_at'], - "finished_at": engine.scans[scan_id]['finished_at'] + "assets": engine.scans[scan_id]["assets"], + "options": engine.scans[scan_id]["options"], + "started_at": engine.scans[scan_id]["started_at"], + "finished_at": engine.scans[scan_id]["finished_at"], } scan.update(status) - + res_data = {"scan": scan, "summary": summary, "issues": issues} - + # Store the findings in a file - with open(f"{APP_BASE_DIR}/results/apivoid_{scan_id}.json", 'w') as report_file: + with open(f"{APP_BASE_DIR}/results/apivoid_{scan_id}.json", "w") as report_file: json.dump(res_data, report_file, default=_json_serial) # # Remove the scan from the active scan list @@ -573,8 +601,8 @@ def get_ips_from_subnet(subnet): def 
_json_serial(obj): """ - JSON serializer for objects not serializable by default json code - Used for datetime serialization when the results are written in file + JSON serializer for objects not serializable by default json code + Used for datetime serialization when the results are written in file """ if isinstance(obj, datetime.datetime) or isinstance(obj, datetime.date): serial = obj.isoformat() @@ -590,5 +618,5 @@ def main(): _loadconfig() -if __name__ == '__main__': +if __name__ == "__main__": engine.run_app(app_debug=APP_DEBUG, app_host=APP_HOST, app_port=APP_PORT) diff --git a/engines/arachni/engine-arachni.py b/engines/arachni/engine-arachni.py index 5a8ddadd..2c1c75ef 100644 --- a/engines/arachni/engine-arachni.py +++ b/engines/arachni/engine-arachni.py @@ -1,5 +1,6 @@ #!/usr/bin/python3 # -*- coding: utf-8 -*- + import os import sys import requests @@ -19,34 +20,34 @@ APP_DEBUG = False APP_HOST = "0.0.0.0" APP_PORT = 5005 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 5)) +APP_MAXSCANS = int(os.environ.get("APP_MAXSCANS", 5)) BASE_DIR = os.path.dirname(os.path.realpath(__file__)) this = sys.modules[__name__] this.proc = None -this.scanner = {} # Scanner info -this.scans = {} # Active scan list +this.scanner = {} # Scanner info +this.scans = {} # Active scan list requests.packages.urllib3.disable_warnings() # logging.basicConfig(level=logging.DEBUG) -if __name__ != '__main__': - gunicorn_logger = logging.getLogger('gunicorn.error') +if __name__ != "__main__": + gunicorn_logger = logging.getLogger("gunicorn.error") app.logger.handlers = gunicorn_logger.handlers app.logger.setLevel(gunicorn_logger.level) -@app.route('/') +@app.route("/") def default(): - return redirect(url_for('index')) + return redirect(url_for("index")) -@app.route('/engines/arachni/') +@app.route("/engines/arachni/") def index(): return jsonify({"page": "index"}) -@app.route('/engines/arachni/clean') +@app.route("/engines/arachni/clean") def clean(): res = {"page": "clean"} 
this.scans.clear() @@ -55,13 +56,15 @@ def clean(): return jsonify(res) -@app.route('/engines/arachni/clean/') +@app.route("/engines/arachni/clean/") def clean_scan(scan_id): res = {"page": "clean_scan"} res.update({"scan_id": scan_id}) if scan_id not in this.scans.keys(): - res.update({"status": "ERROR", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "ERROR", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) this.scans.pop(scan_id) @@ -70,95 +73,111 @@ def clean_scan(scan_id): def _loadconfig(): - conf_file = BASE_DIR+'/arachni.json' + conf_file = BASE_DIR + "/arachni.json" if os.path.exists(conf_file): json_data = open(conf_file) this.scanner = json.load(json_data) - this.scanner['auth'] = (this.scanner['username'], this.scanner['password']) - this.scanner['status'] = 'unknown' + this.scanner["auth"] = (this.scanner["username"], this.scanner["password"]) + this.scanner["status"] = "unknown" else: app.logger.error("Error: config file '{}' not found".format(conf_file)) - this.scanner['status'] = 'ERROR' + this.scanner["status"] = "ERROR" return { "status": "ERROR", "reason": "config file '{}' not found".format(conf_file), - "details": {"filename": conf_file} + "details": {"filename": conf_file}, } - version_filename = BASE_DIR+'/VERSION' + version_filename = BASE_DIR + "/VERSION" if os.path.exists(version_filename): version_file = open(version_filename, "r") - this.scanner["version"] = version_file.read().rstrip('\n') + this.scanner["version"] = version_file.read().rstrip("\n") version_file.close() # check if an instance is running, then kill and restart it - if hasattr(this.proc, 'pid') and psutil.pid_exists(this.proc.pid): + if hasattr(this.proc, "pid") and psutil.pid_exists(this.proc.pid): app.logger.info(" * Terminate PID {}".format(this.proc.pid)) psutil.Process(this.proc.pid).terminate() time.sleep(5) - cmd = this.scanner['path'] + "/bin/arachni_rest_server " \ - + "--address " + 
this.scanner['listening_host'] \ - + " --port " + this.scanner['listening_port'] \ - + " --authentication-username " + this.scanner['username'] \ - + " --authentication-password " + this.scanner['password'] \ - + " --reroute-to-logfile " + BASE_DIR + "/logs" - this.proc = subprocess.Popen(cmd, shell=True, stdout=open("/dev/null", "w"), stderr=open("/dev/null", "w")) - this.scanner['status'] = 'READY' - app.logger.info(" * Arachni REST API server successfully started on http://{}:{}/" - .format(this.scanner['listening_host'], this.scanner['listening_port'])) + cmd = ( + this.scanner["path"] + + "/bin/arachni_rest_server " + + "--address " + + this.scanner["listening_host"] + + " --port " + + this.scanner["listening_port"] + + " --authentication-username " + + this.scanner["username"] + + " --authentication-password " + + this.scanner["password"] + + " --reroute-to-logfile " + + BASE_DIR + + "/logs" + ) + this.proc = subprocess.Popen( + cmd, shell=True, stdout=open("/dev/null", "w"), stderr=open("/dev/null", "w") + ) + this.scanner["status"] = "READY" + app.logger.info( + " * Arachni REST API server successfully started on http://{}:{}/".format( + this.scanner["listening_host"], this.scanner["listening_port"] + ) + ) # print(" * Arachni REST API server successfully started on http://{}:{}/" # .format(this.scanner['listening_host'], this.scanner['listening_port'])) return {"status": "READY"} -@app.route('/engines/arachni/reloadconfig') +@app.route("/engines/arachni/reloadconfig") def reloadconfig(): res = {"page": "reloadconfig"} res.update(_loadconfig()) - res.update({ - "config": this.scanner, - "details": {"pid": this.proc.pid} - }) + res.update({"config": this.scanner, "details": {"pid": this.proc.pid}}) return jsonify(res) -@app.route('/engines/arachni/info') +@app.route("/engines/arachni/info") def info(): res = {"page": "info"} - #todo check archni_status + # todo check archni_status - url = str(this.scanner['api_url']) + "/scans" + url = 
str(this.scanner["api_url"]) + "/scans" try: - r = requests.get(url=url, verify=False, auth=this.scanner['auth']) + r = requests.get(url=url, verify=False, auth=this.scanner["auth"]) if r.status_code == 200: - res.update({ - "status": "READY", - # "details": { "engine_config": this.scanner } - "engine_config": this.scanner - }) + res.update( + { + "status": "READY", + # "details": { "engine_config": this.scanner } + "engine_config": this.scanner, + } + ) else: - res.update({"status": "ERROR", "details": { - "engine_config": this.scanner}}) + res.update({"status": "ERROR", "details": {"engine_config": this.scanner}}) except Exception: - res.update({"status": "ERROR", "details": "connexion error to the API {}".format(url)}) + res.update( + {"status": "ERROR", "details": "connexion error to the API {}".format(url)} + ) return jsonify(res) -''' +""" # Function 'status()' # - display current status of the scanner: READY, ERROR # - display the last 10 scans status: SCANNING, DONE, ERROR + scan_id + timestamp -''' -@app.route('/engines/arachni/status') +""" + + +@app.route("/engines/arachni/status") def status(): res = {"page": "status"} # display the status of the scanner - this.scanner['status'] = json.loads(info().get_data().decode("utf-8"))['status'] - res.update({"status": this.scanner['status']}) + this.scanner["status"] = json.loads(info().get_data().decode("utf-8"))["status"] + res.update({"status": this.scanner["status"]}) # display info on the scanner res.update({"scanner": this.scanner}) @@ -178,9 +197,18 @@ def _is_scan_finished(scan_id): return True try: - url = this.scanner['api_url'] + "/scans/" + str(this.scans[scan_id]['arachni_scan_id']) + "/summary" - r = requests.get(url=url, verify=False, auth=this.scanner['auth']) - if r.status_code == 200 and r.json()["status"] == "done" and r.json()["busy"] is False: + url = ( + this.scanner["api_url"] + + "/scans/" + + str(this.scans[scan_id]["arachni_scan_id"]) + + "/summary" + ) + r = requests.get(url=url, 
verify=False, auth=this.scanner["auth"]) + if ( + r.status_code == 200 + and r.json()["status"] == "done" + and r.json()["busy"] is False + ): this.scans[scan_id]["status"] = "FINISHED" this.scans[scan_id]["finished_at"] = datetime.datetime.now() return True @@ -191,24 +219,33 @@ def _is_scan_finished(scan_id): return False -''' +""" # Function 'scan_status(scan_id=86a5f993-30c2-47b7-a401-c4ae7e2a1e57)' # - call the API to check status # - display current status of the scan: SCANNING, DONE, ERROR -''' -@app.route('/engines/arachni/status/') +""" + + +@app.route("/engines/arachni/status/") def scan_status(scan_id): res = {"page": "scan_status"} if scan_id not in this.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) # check id the scan is finished or not resp = None try: - url = this.scanner['api_url'] + "/scans/" + this.scans[scan_id]['arachni_scan_id'] + "/summary" - r = requests.get(url=url, verify=False, auth=this.scanner['auth']) + url = ( + this.scanner["api_url"] + + "/scans/" + + this.scans[scan_id]["arachni_scan_id"] + + "/summary" + ) + r = requests.get(url=url, verify=False, auth=this.scanner["auth"]) resp = r.json() if r.status_code == 200: if resp["status"] == "done" and resp["busy"] is False: @@ -218,105 +255,129 @@ def scan_status(scan_id): this.scans[scan_id]["status"] = str(resp["status"]).upper() except Exception: this.scans[scan_id]["status"] = "ERROR" - res.update({"status": "ERROR", "reason": "API error"}) + res.update({"status": "ERROR", "reason": "API error"}) # return the scan parameters and the status - res.update({ - "scan": this.scans[scan_id], - "stats": resp["statistics"], - "status": this.scans[scan_id]["status"] - }) + res.update( + { + "scan": this.scans[scan_id], + "stats": resp["statistics"], + "status": this.scans[scan_id]["status"], + } + ) return jsonify(res) 
-@app.route('/engines/arachni/startscan', methods=['POST']) +@app.route("/engines/arachni/startscan", methods=["POST"]) def start(): res = {"page": "startscan"} # check the scanner is ready to start a new scan if len(this.scans) == APP_MAXSCANS: - res.update({ - "status": "ERROR", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "ERROR", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) return jsonify(res) scan = {} data = json.loads(request.data.decode("utf-8")) - if 'assets' not in data.keys() or 'scan_id' not in data.keys(): # or not 'base_url' in data['options'].keys(): - res.update({ - "status": "ERROR", - "reason": "arg error, something is missing (ex: 'assets', 'scan_id')" #, 'options/base_url')" - }) + if ( + "assets" not in data.keys() or "scan_id" not in data.keys() + ): # or not 'base_url' in data['options'].keys(): + res.update( + { + "status": "ERROR", + "reason": "arg error, something is missing (ex: 'assets', 'scan_id')", # , 'options/base_url')" + } + ) return jsonify(res) # scan["scan_id"] = data["scan_id"] - scan["scan_id"] = str(data['scan_id']) - scan_id = str(data['scan_id']) + scan["scan_id"] = str(data["scan_id"]) + scan_id = str(data["scan_id"]) if data["scan_id"] in this.scans.keys(): - res.update({"status": "ERROR", "reason": "scan already started (scan_id={})".format(data["scan_id"])}) + res.update( + { + "status": "ERROR", + "reason": "scan already started (scan_id={})".format(data["scan_id"]), + } + ) return jsonify(res) # Initialize the scan parameters - asset = data['assets'][0] + asset = data["assets"][0] if asset["datatype"] not in this.scanner["allowed_asset_types"]: - return jsonify({ - "status": "refused", - "details": { - "reason": "datatype '{}' not supported for the asset {}.".format(asset["datatype"], asset["value"]) - }}) + return jsonify( + { + "status": "refused", + "details": { + "reason": 
"datatype '{}' not supported for the asset {}.".format( + asset["datatype"], asset["value"] + ) + }, + } + ) - scan["asset_url"] = list(data['assets'])[0]['value'] # only take the 1st + scan["asset_url"] = list(data["assets"])[0]["value"] # only take the 1st scan["target_host"] = urlparse(scan["asset_url"]).netloc scan["target_protocol"] = urlparse(scan["asset_url"]).scheme - if 'ports' in data['options'].keys(): + if "ports" in data["options"].keys(): # get the 1st in list - scan["target_port"] = str(list(data['options']['ports'])[0]) + scan["target_port"] = str(list(data["options"]["ports"])[0]) elif urlparse(scan["asset_url"]).port: scan["target_port"] = urlparse(scan["asset_url"]).port - elif scan["target_protocol"] == 'http': + elif scan["target_protocol"] == "http": scan["target_port"] = 80 - elif scan["target_protocol"] == 'https': + elif scan["target_protocol"] == "https": scan["target_port"] = 443 scan["started_at"] = datetime.datetime.now() scan["options"] = {} - if 'http' in data['options'].keys(): - scan["options"].update({"http": data['options']['http']}) - if 'browser_cluster' in data['options'].keys(): - scan["options"].update({"browser_cluster": data['options']['browser_cluster']}) - if 'scope' in data['options'].keys(): - scan["options"].update({"scope": data['options']['scope']}) - if 'checks' in data['options'].keys(): - scan["options"].update({"checks": list(data['options']['checks'])}) - if 'audit' in data['options'].keys(): - scan["options"].update({"audit": data['options']['audit']}) - if 'no_fingerprinting' in data['options'].keys(): - scan["options"].update({"no_fingerprinting": data['options']['no_fingerprinting']}) - if 'input' in data['options'].keys(): - scan["options"].update({"input": data['options']['input']}) - - url = this.scanner['api_url'] + "/scans" - post_data = { - "url": scan["asset_url"] - } + if "http" in data["options"].keys(): + scan["options"].update({"http": data["options"]["http"]}) + if "browser_cluster" in 
data["options"].keys(): + scan["options"].update({"browser_cluster": data["options"]["browser_cluster"]}) + if "scope" in data["options"].keys(): + scan["options"].update({"scope": data["options"]["scope"]}) + if "checks" in data["options"].keys(): + scan["options"].update({"checks": list(data["options"]["checks"])}) + if "audit" in data["options"].keys(): + scan["options"].update({"audit": data["options"]["audit"]}) + if "no_fingerprinting" in data["options"].keys(): + scan["options"].update( + {"no_fingerprinting": data["options"]["no_fingerprinting"]} + ) + if "input" in data["options"].keys(): + scan["options"].update({"input": data["options"]["input"]}) + + url = this.scanner["api_url"] + "/scans" + post_data = {"url": scan["asset_url"]} post_data.update(scan["options"]) # Start the scan r = None try: - r = requests.post(url=url, data=json.dumps(post_data), verify=False, auth=this.scanner['auth']) + r = requests.post( + url=url, data=json.dumps(post_data), verify=False, auth=this.scanner["auth"] + ) if r.status_code == 200: res.update({"status": "accepted"}) scan["status"] = "SCANNING" - scan["arachni_scan_id"] = r.json()['id'] + scan["arachni_scan_id"] = r.json()["id"] res.update({"details": r.text}) else: - res.update({"status": "ERROR", "reason": "something wrong with the API invokation"}) + res.update( + {"status": "ERROR", "reason": "something wrong with the API invokation"} + ) scan["status"] = "ERROR" scan["finished_at"] = datetime.datetime.now() except Exception: @@ -338,17 +399,26 @@ def start(): genresults bydefaut stop/delete the scan in the arachni context """ -@app.route('/engines/arachni/stop/') + + +@app.route("/engines/arachni/stop/") def stop_scan(scan_id): res = {"page": "stop"} if scan_id not in this.scans.keys(): - res.update({"status": "ERROR", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "ERROR", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) try: - url = 
this.scanner['api_url'] + "/scans/" + this.scans[scan_id]['arachni_scan_id'] + "/pause" - r = requests.put(url=url, verify=False, auth=this.scanner['auth']) + url = ( + this.scanner["api_url"] + + "/scans/" + + this.scans[scan_id]["arachni_scan_id"] + + "/pause" + ) + r = requests.put(url=url, verify=False, auth=this.scanner["auth"]) if r.status_code == 200: this.scans[scan_id]["status"] = "STOPPED" this.scans[scan_id]["finished_at"] = datetime.datetime.now() @@ -358,13 +428,13 @@ def stop_scan(scan_id): res.update({"status": "success", "details": "scan successfully stopped"}) except Exception: this.scans[scan_id]["status"] = "ERROR" - res.update({"status": "ERROR", "reason": "API error"}) + res.update({"status": "ERROR", "reason": "API error"}) return jsonify(res) # Stop all scans -@app.route('/engines/arachni/stopscans', methods=['GET']) +@app.route("/engines/arachni/stopscans", methods=["GET"]) def stop(): res = {"page": "stopscans"} @@ -376,7 +446,7 @@ def stop(): return jsonify(res) -''' +""" # outputs: {"page": "report", "scan_id": 1212, @@ -395,43 +465,59 @@ def stop(): "type": 'ssl_protocols', "timestamp": 143545645775 }]} -''' +""" -@app.route('/engines/arachni/getfindings/') +@app.route("/engines/arachni/getfindings/") def getfindings(scan_id): res = {"page": "getfindings", "scan_id": scan_id} if not _is_scan_finished(scan_id): - res.update({"status": "ERROR", "reason": "scan '{}' not finished".format(scan_id)}) + res.update( + {"status": "ERROR", "reason": "scan '{}' not finished".format(scan_id)} + ) return jsonify(res) scan = this.scans[scan_id] app_url = scan["asset_url"] - host = scan['target_host'] - port = scan['target_port'] - protocol = scan['target_protocol'] - url = this.scanner['api_url'] + "/scans/" + str(scan['arachni_scan_id']) + "/report.json" + host = scan["target_host"] + port = scan["target_port"] + protocol = scan["target_protocol"] + url = ( + this.scanner["api_url"] + + "/scans/" + + str(scan["arachni_scan_id"]) + + "/report.json" + 
) try: - r = requests.get(url=url, verify=False, auth=this.scanner['auth']) + r = requests.get(url=url, verify=False, auth=this.scanner["auth"]) if r.status_code != 200: - res.update({"status": "ERROR", "reason": "something wrong with the API invokation"}) + res.update( + {"status": "ERROR", "reason": "something wrong with the API invokation"} + ) return jsonify(res) except Exception: - res.update({"status": "ERROR", "reason": "something wrong with the API invokation"}) + res.update( + {"status": "ERROR", "reason": "something wrong with the API invokation"} + ) return jsonify(res) scan_results = r.json() issues, summary = _parse_report( - results=scan_results, asset_name=app_url, - asset_host=host, asset_port=port, asset_protocol=protocol + results=scan_results, + asset_name=app_url, + asset_host=host, + asset_port=port, + asset_protocol=protocol, ) # Definitely delete the scan in the arachni context try: - url = this.scanner['api_url'] + "/scans/" + this.scans[scan_id]['arachni_scan_id'] - r = requests.delete(url=url, verify=False, auth=this.scanner['auth']) + url = ( + this.scanner["api_url"] + "/scans/" + this.scans[scan_id]["arachni_scan_id"] + ) + r = requests.delete(url=url, verify=False, auth=this.scanner["auth"]) if r.status_code == 200: this.scans[scan_id]["status"] = "FINISHED" else: @@ -440,12 +526,14 @@ def getfindings(scan_id): this.scans[scan_id]["status"] = "ERROR" # Store the findings in a file - with open(BASE_DIR+"/results/arachni_"+str(scan_id)+".json", 'w') as report_file: - json.dump({ - "scan": scan, - "summary": summary, - "issues": issues - }, report_file, default=_json_serial) + with open( + BASE_DIR + "/results/arachni_" + str(scan_id) + ".json", "w" + ) as report_file: + json.dump( + {"scan": scan, "summary": summary, "issues": issues}, + report_file, + default=_json_serial, + ) # remove the scan from the active scan list clean_scan(scan_id) @@ -456,8 +544,8 @@ def getfindings(scan_id): def _json_serial(obj): """ - JSON serializer for 
objects not serializable by default json code - Used for datetime serialization when the results are written in file + JSON serializer for objects not serializable by default json code + Used for datetime serialization when the results are written in file """ if isinstance(obj, datetime.datetime): @@ -477,7 +565,7 @@ def _parse_report(results, asset_name, asset_host, asset_port, asset_protocol): "high": 0, } - ts = int(time.time() * 1000) # timestamp + ts = int(time.time() * 1000) # timestamp # Sitemap sitemap = results["sitemap"] @@ -485,80 +573,84 @@ def _parse_report(results, asset_name, asset_host, asset_port, asset_protocol): sitemap_str = "" for url in sorted(sitemap.keys()): if sitemap[url] == 200: - sitemap_str = "".join((sitemap_str, str(url)+"\n")) - - sitemap_hash = hashlib.sha1(str(sitemap_str).encode('utf-8')).hexdigest()[:6] - - nb_vulns['info'] += 1 - issues.append({ - "issue_id": len(issues)+1, - "severity": "info", "confidence": "certain", - "target": { - "addr": [asset_name, asset_host], - "port_id": asset_port, - "port_type": 'tcp', - "protocol": asset_protocol + sitemap_str = "".join((sitemap_str, str(url) + "\n")) + + sitemap_hash = hashlib.sha1(str(sitemap_str).encode("utf-8")).hexdigest()[:6] + + nb_vulns["info"] += 1 + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "info", + "confidence": "certain", + "target": { + "addr": [asset_name, asset_host], + "port_id": asset_port, + "port_type": "tcp", + "protocol": asset_protocol, }, - "title": "Sitemap {} (#URL: {}, HASH: {})".format( - results["options"]["url"], nb_urls, sitemap_hash - ), - "description": "Sitemap: \n\n{}".format(sitemap_str), - "solution": "n/a", - "metadata": { - "tags": ["sitemap"] - }, - "type": "sitemap", - "raw": sitemap, - "timestamp": ts - }) + "title": "Sitemap {} (#URL: {}, HASH: {})".format( + results["options"]["url"], nb_urls, sitemap_hash + ), + "description": "Sitemap: \n\n{}".format(sitemap_str), + "solution": "n/a", + "metadata": {"tags": 
["sitemap"]}, + "type": "sitemap", + "raw": sitemap, + "timestamp": ts, + } + ) # Loop for issues found by the scanner for issue in results["issues"]: # reword 'informational' -> 'info' - if issue['severity'] == "informational": - issue['severity'] = "info" - nb_vulns[issue['severity']] += 1 + if issue["severity"] == "informational": + issue["severity"] = "info" + nb_vulns[issue["severity"]] += 1 confidence = "" - if issue['trusted']: + if issue["trusted"]: confidence = "certain" else: confidence = "firm" vuln_refs = {} - if 'cwe' in issue.keys(): - vuln_refs = {"CWE": ', '.join([str(issue['cwe'])])} - - issues.append({ - "issue_id": len(issues)+1, - "severity": issue['severity'], - "confidence": confidence, - "target": { - "addr": [asset_name, asset_host], - "port_id": asset_port, - "port_type": 'tcp', - "protocol": asset_protocol + if "cwe" in issue.keys(): + vuln_refs = {"CWE": ", ".join([str(issue["cwe"])])} + + issues.append( + { + "issue_id": len(issues) + 1, + "severity": issue["severity"], + "confidence": confidence, + "target": { + "addr": [asset_name, asset_host], + "port_id": asset_port, + "port_type": "tcp", + "protocol": asset_protocol, }, - "title": "{} ({} [{}])".format( - issue['name'], - # str(issue['vector']['method']).upper(), # GET, POST, PUT, .. - urlparse(issue['vector']['url']).path, # /index.php - issue['vector']['affected_input_name']), # query - "description": "{}\\n\\nRequest: {}\\n\\nResponse: {}".format( - issue['description'], - issue['request']['headers_string'], - issue['response']['headers_string'] + "title": "{} ({} [{}])".format( + issue["name"], + # str(issue['vector']['method']).upper(), # GET, POST, PUT, .. 
+ urlparse(issue["vector"]["url"]).path, # /index.php + issue["vector"]["affected_input_name"], + ), # query + "description": "{}\\n\\nRequest: {}\\n\\nResponse: {}".format( + issue["description"], + issue["request"]["headers_string"], + issue["response"]["headers_string"], ), - "solution": issue['remedy_guidance'], - "metadata": { - "tags": issue['tags'], - "vuln_refs": vuln_refs, - "links": list(issue['references'].values()) - }, - "type": issue['check']['shortname'], - "raw": issue, - "timestamp": ts - }) + "solution": issue["remedy_guidance"], + "metadata": { + "tags": issue["tags"], + "vuln_refs": vuln_refs, + "links": list(issue["references"].values()), + }, + "type": issue["check"]["shortname"], + "raw": issue, + "timestamp": ts, + } + ) summary = { "nb_issues": len(issues), @@ -568,28 +660,33 @@ def _parse_report(results, asset_name, asset_host, asset_port, asset_protocol): "nb_high": nb_vulns["high"], "delta_time": results["delta_time"], "engine_name": "arachni", - "engine_version": results["version"] + "engine_version": results["version"], } return issues, summary -@app.route('/engines/arachni/getreport/') +@app.route("/engines/arachni/getreport/") def getreport(scan_id): - filepath = BASE_DIR+"/results/arachni_"+scan_id+".json" + filepath = BASE_DIR + "/results/arachni_" + scan_id + ".json" if not os.path.exists(filepath): - return jsonify({"status": "ERROR", "reason": "report file for scan_id '{}' not found".format(scan_id)}) - - #@todo + return jsonify( + { + "status": "ERROR", + "reason": "report file for scan_id '{}' not found".format(scan_id), + } + ) + + # @todo # return send_file(filepath, # mimetype='application/json', # attachment_filename='arachni_'+str(scan_id)+".json", # as_attachment=True) - return send_from_directory(BASE_DIR+"/results/", "arachni_"+scan_id+".json") + return send_from_directory(BASE_DIR + "/results/", "arachni_" + scan_id + ".json") -@app.route('/engines/arachni/test') +@app.route("/engines/arachni/test") def test(): if not 
APP_DEBUG: return jsonify({"page": "test"}) @@ -600,9 +697,13 @@ def test(): for arg in rule.arguments: options[arg] = "[{0}]".format(arg) - methods = ','.join(rule.methods) + methods = ",".join(rule.methods) url = url_for(rule.endpoint, **options) - res += urlparse.unquote("{0:50s} {1:20s} {2}
".format(rule.endpoint, methods, url)) + res += urlparse.unquote( + "{0:50s} {1:20s} {2}
".format( + rule.endpoint, methods, url + ) + ) return res @@ -614,29 +715,35 @@ def page_not_found(e): @app.before_first_request def main(): - if not os.path.exists(BASE_DIR+"/results"): - os.makedirs(BASE_DIR+"/results") - if not os.path.exists(BASE_DIR+"/logs"): - os.makedirs(BASE_DIR+"/logs") + if not os.path.exists(BASE_DIR + "/results"): + os.makedirs(BASE_DIR + "/results") + if not os.path.exists(BASE_DIR + "/logs"): + os.makedirs(BASE_DIR + "/logs") _loadconfig() -if __name__ == '__main__': +if __name__ == "__main__": parser = optparse.OptionParser() parser.add_option( - "-H", "--host", + "-H", + "--host", help="Hostname of the Flask app [default %s]" % APP_HOST, - default=APP_HOST) + default=APP_HOST, + ) parser.add_option( - "-P", "--port", + "-P", + "--port", help="Port for the Flask app [default %s]" % APP_PORT, - default=APP_PORT) + default=APP_PORT, + ) parser.add_option( - "-d", "--debug", + "-d", + "--debug", action="store_true", dest="debug", help=optparse.SUPPRESS_HELP, - default=APP_DEBUG) + default=APP_DEBUG, + ) options, _ = parser.parse_args() app.run(debug=options.debug, host=options.host, port=int(options.port)) diff --git a/engines/burp/engine-burp.py b/engines/burp/engine-burp.py index 95ea959e..1ab00518 100644 --- a/engines/burp/engine-burp.py +++ b/engines/burp/engine-burp.py @@ -1,5 +1,6 @@ #!/usr/bin/python3 # -*- coding: utf-8 -*- + import json, os, subprocess, sys, requests, urlparse, datetime, time from flask import Flask, request, jsonify, redirect, url_for @@ -19,27 +20,27 @@ requests.packages.urllib3.disable_warnings() -@app.route('/') +@app.route("/") def default(): - return redirect(url_for('test')) + return redirect(url_for("test")) -@app.route('/engines/burp/') +@app.route("/engines/burp/") def index(): return jsonify({"page": "index"}) -@app.route('/engines/burp/clean') +@app.route("/engines/burp/clean") def clean(): - res = { "page": "clean" } + res = {"page": "clean"} this.scans = {} loadconfig() return jsonify(res) 
-@app.route('/engines/burp/clean/') +@app.route("/engines/burp/clean/") def clean_scan(scan_id): - res = { "page": "clean_scan" } + res = {"page": "clean_scan"} res.update({"scan_id": scan_id}) for scan in this.scans: if str(scan["scan_id"]) == str(scan_id): @@ -50,68 +51,75 @@ def clean_scan(scan_id): return jsonify(res) -@app.route('/engines/burp/_get_issues') +@app.route("/engines/burp/_get_issues") def _get_issues(): res = {"page": "_get_issues"} - url = this.scanurl + '/burp/scanner/issues' - if request.args.get('url_prefix'): - url += '?urlPrefix=' + request.args.get('url_prefix') + url = this.scanurl + "/burp/scanner/issues" + if request.args.get("url_prefix"): + url += "?urlPrefix=" + request.args.get("url_prefix") - headers = {'content-type': 'application/json'} + headers = {"content-type": "application/json"} r = requests.get(url, headers=headers) res.update(json.loads(r.text)) return jsonify(res) -@app.route('/engines/burp/_addto_sitemap') -def _addto_sitemap(base_url = None): +@app.route("/engines/burp/_addto_sitemap") +def _addto_sitemap(base_url=None): res = {"page": "_addto_sitemap"} - if request.args.get('base_url'): - base_url = request.args.get('base_url') + if request.args.get("base_url"): + base_url = request.args.get("base_url") else: if not base_url: - res.update({'status': 'error', 'reason': 'missing base_url parameter'}) + res.update({"status": "error", "reason": "missing base_url parameter"}) return jsonify(res) r = None try: - r = requests.get(base_url, verify=False, - proxies={'http': 'http://localhost:8080', - 'https': 'http://localhost:8080'}) + r = requests.get( + base_url, + verify=False, + proxies={"http": "http://localhost:8080", "https": "http://localhost:8080"}, + ) except requests.exceptions.RequestException as e: print(e) - res.update({'status': 'error', 'reason': 'unable to access the local proxies on port TCP/8080'}) + res.update( + { + "status": "error", + "reason": "unable to access the local proxies on port TCP/8080", + } + 
) return jsonify(res) if r.status_code != 200: - res.update({'base url': base_url, 'status_code': r.status_code}) - res.update({'status': 'error', 'reason': 'base url not available'}) + res.update({"base url": base_url, "status_code": r.status_code}) + res.update({"status": "error", "reason": "base url not available"}) return jsonify(res) -@app.route('/engines/burp/_get_sitemap') +@app.route("/engines/burp/_get_sitemap") def _get_sitemap(): res = {"page": "_get_sitemap"} - url = this.scanurl + '/burp/target/sitemap' - if request.args.get('url_prefix'): - url += '?urlPrefix=' + request.args.get('url_prefix') + url = this.scanurl + "/burp/target/sitemap" + if request.args.get("url_prefix"): + url += "?urlPrefix=" + request.args.get("url_prefix") - headers = {'content-type': 'application/json'} + headers = {"content-type": "application/json"} r = requests.get(url, headers=headers) res.update(json.loads(r.text)) return jsonify(res) -@app.route('/engines/burp/_do_spider') -def _do_spider(base_url = None): +@app.route("/engines/burp/_do_spider") +def _do_spider(base_url=None): # Note: The baseUrl should be in scope for the Spider to run res = {"page": "_do_spider"} - url = this.scanurl + '/burp/spider' + url = this.scanurl + "/burp/spider" if not base_url: - base_url = request.args.get('base_url') + base_url = request.args.get("base_url") if not base_url: res.update({"status": "error", "reason": "'base_url' parameter not set"}) @@ -119,146 +127,146 @@ def _do_spider(base_url = None): payload = {"baseUrl": base_url} r = requests.post(url, data=payload) - res.update({'url': r.url, 'status_code': r.status_code}) - #todo: manage errors (if status_code != 200) + res.update({"url": r.url, "status_code": r.status_code}) + # todo: manage errors (if status_code != 200) return jsonify(res) -@app.route('/engines/burp/_get_spiderstatus') +@app.route("/engines/burp/_get_spiderstatus") def _get_spiderstatus(): - res = { "page": "_get_spiderstatus" } + res = {"page": 
"_get_spiderstatus"} res.update({"status": "error", "reason": "not implemented yet"}) - #@Todo: get the current spider scan + # @Todo: get the current spider scan return jsonify(res) -@app.route('/engines/burp/_get_scope') +@app.route("/engines/burp/_get_scope") def _get_scope(): res = {"page": "_get_scope"} - url = this.scanurl + '/burp/target/scope' - if not request.args.get('url'): + url = this.scanurl + "/burp/target/scope" + if not request.args.get("url"): res.update({"status": "error", "reason": "'url' parameter not set"}) return jsonify(res) - url += '?url=' + request.args.get('url') - headers = {'content-type': 'application/json', 'accept': 'application/json'} + url += "?url=" + request.args.get("url") + headers = {"content-type": "application/json", "accept": "application/json"} r = requests.get(url, headers=headers) res.update(json.loads(r.text)) return jsonify(res) -@app.route('/engines/burp/_addto_scope') -def _addto_scope(_url = None): +@app.route("/engines/burp/_addto_scope") +def _addto_scope(_url=None): res = {"page": "_addto_scope"} - url = this.scanurl + '/burp/target/scope' - if not (request.args.get('url') or _url): + url = this.scanurl + "/burp/target/scope" + if not (request.args.get("url") or _url): res.update({"status": "error", "reason": "'url' parameter not set"}) return jsonify(res) if _url: - url += '?url=' + str(_url) + url += "?url=" + str(_url) else: - url += '?url=' + str(request.args.get('url')) + url += "?url=" + str(request.args.get("url")) r = requests.put(url) - res.update({'url': r.url, 'status_code': r.status_code}) - #todo: manage errors (if status_code != 200) + res.update({"url": r.url, "status_code": r.status_code}) + # todo: manage errors (if status_code != 200) return jsonify(res) -@app.route('/engines/burp/_rm_scope') +@app.route("/engines/burp/_rm_scope") def _rm_scope(): res = {"page": "_rm_scope"} - url = this.scanurl + '/burp/target/scope' - if not request.args.get('url'): + url = this.scanurl + "/burp/target/scope" 
+ if not request.args.get("url"): res.update({"status": "error", "reason": "'url' parameter not set"}) return jsonify(res) else: - url += '?url=' + request.args.get('url') + url += "?url=" + request.args.get("url") r = requests.delete(url) - res.update({'url': r.url, 'status_code': r.status_code}) - #todo: manage errors (if status_code != 200) + res.update({"url": r.url, "status_code": r.status_code}) + # todo: manage errors (if status_code != 200) return jsonify(res) -@app.route('/engines/burp/_scan_status') +@app.route("/engines/burp/_scan_status") def _scan_status(): res = {"page": "_scan_status"} - url = this.scanurl + '/burp/scanner/status' - #@todo: catch 'ConnectionError' (wait for engine fully started) before sending request + url = this.scanurl + "/burp/scanner/status" + # @todo: catch 'ConnectionError' (wait for engine fully started) before sending request - headers = {'content-type': 'application/json', 'accept': 'application/json'} + headers = {"content-type": "application/json", "accept": "application/json"} r = requests.get(url, headers=headers) res.update(json.loads(r.text)) return jsonify(res) -@app.route('/engines/burp/_addto_scanqueue') -def _addto_scanqueue(base_url = None): +@app.route("/engines/burp/_addto_scanqueue") +def _addto_scanqueue(base_url=None): # Note: The baseUrl should be in scope for the Active Scanner to run res = {"page": "_addto_scanqueue"} - url = this.scanurl + '/burp/scanner/scans/active' - if not (request.args.get('base_url') or base_url): + url = this.scanurl + "/burp/scanner/scans/active" + if not (request.args.get("base_url") or base_url): print("_addto_scanqueue(): 'base_url' parameter not set") res.update({"status": "error", "reason": "'base_url' parameter not set"}) return jsonify(res) - if request.args.get('base_url'): - base_url = request.args.get('base_url') + if request.args.get("base_url"): + base_url = request.args.get("base_url") payload = {"baseUrl": base_url} - url += '?baseUrl=' + base_url + url += 
"?baseUrl=" + base_url - _addto_scope(base_url) #@Todo check before... and check returncode - headers = {'content-type': 'application/json', 'accept': '*/*'} + _addto_scope(base_url) # @Todo check before... and check returncode + headers = {"content-type": "application/json", "accept": "*/*"} r = requests.post(url, data=payload, headers=headers) - res.update({'url': r.url, 'status_code': r.status_code}) - #todo: manage errors (if status_code != 200) + res.update({"url": r.url, "status_code": r.status_code}) + # todo: manage errors (if status_code != 200) return jsonify(res) -@app.route('/engines/burp/_del_fullscanqueue') +@app.route("/engines/burp/_del_fullscanqueue") def _del_fullscanqueue(): res = {"page": "_del_fullscanqueue"} - url = this.scanurl + '/burp/scanner/scans/active' + url = this.scanurl + "/burp/scanner/scans/active" r = requests.delete(url) - res.update({'url': r.url, 'status_code': r.status_code}) - #todo: manage errors (if status_code != 200) + res.update({"url": r.url, "status_code": r.status_code}) + # todo: manage errors (if status_code != 200) return jsonify(res) -@app.route('/engines/burp/_get_scanqueue') +@app.route("/engines/burp/_get_scanqueue") def _get_scanqueue(): res = {"page": "_get_scanqueue"} - url = this.scanurl + '/burp/scanner/scans/queue' + url = this.scanurl + "/burp/scanner/scans/queue" - if request.args.get('base_url'): - url += "?base_url={}".format(request.args.get('base_url')) + if request.args.get("base_url"): + url += "?base_url={}".format(request.args.get("base_url")) # useless till it's not implemented ;) - headers = {'content-type': 'application/json', 'accept': 'application/json'} + headers = {"content-type": "application/json", "accept": "application/json"} r = requests.get(url, headers=headers) - #todo: manage errors (if status_code != 200) + # todo: manage errors (if status_code != 200) - res.update({'url': r.url, 'status_code': r.status_code}) + res.update({"url": r.url, "status_code": r.status_code}) r = 
json.loads(r.text) - #print(r['urls_queued']) - res.update({'urls_queued': r['urls_queued']}) + # print(r['urls_queued']) + res.update({"urls_queued": r["urls_queued"]}) return jsonify(res) def loadconfig(): - conf_file = BASE_DIR+'/burp.json' + conf_file = BASE_DIR + "/burp.json" if os.path.exists(conf_file): json_data = open(conf_file) else: @@ -267,97 +275,128 @@ def loadconfig(): this.scanner = json.load(json_data) # check if an instance is running, then kill and restart it - if hasattr(this.proc, 'pid') and not this.proc.poll(): + if hasattr(this.proc, "pid") and not this.proc.poll(): print(" * Terminate PID {}".format(this.proc.pid)) this.proc.terminate() # delete and create tmp project file - if os.path.exists(BASE_DIR+'/'+this.scanner['project_file']): - os.remove(BASE_DIR+'/'+this.scanner['project_file']) - #f = open(BASE_DIR+'/'+this.scanner.project_file, 'w') + if os.path.exists(BASE_DIR + "/" + this.scanner["project_file"]): + os.remove(BASE_DIR + "/" + this.scanner["project_file"]) + # f = open(BASE_DIR+'/'+this.scanner.project_file, 'w') - if os.path.exists(BASE_DIR+'/'+this.scanner['path']): - cmd = "java -jar {}".format(BASE_DIR+'/'+this.scanner['path']) + if os.path.exists(BASE_DIR + "/" + this.scanner["path"]): + cmd = "java -jar {}".format(BASE_DIR + "/" + this.scanner["path"]) else: return {"status": "error", "reason": "jar file not found"} - # check launching options - if this.scanner['java_opts']: - cmd += this.scanner['java_opts'] - if this.scanner['server_port'] or this.scanner['server_host']: # mandatory options - cmd += " --server.port={}".format(this.scanner['server_port']) - this.scanurl = "http://{}:{}".format(this.scanner['server_host'], this.scanner['server_port']) + # check launching options + if this.scanner["java_opts"]: + cmd += this.scanner["java_opts"] + if this.scanner["server_port"] or this.scanner["server_host"]: # mandatory options + cmd += " --server.port={}".format(this.scanner["server_port"]) + this.scanurl = 
"http://{}:{}".format( + this.scanner["server_host"], this.scanner["server_port"] + ) else: - return {"status": "error", "reason": "'server_port' and/or 'server_host' option is missing"} - if this.scanner['project_file']: # file is created if not exists - cmd += " --project-file={}".format(BASE_DIR+'/'+this.scanner['project_file']) - if this.scanner['config_file']: # file is created if not exists - cmd += " --config-file={}".format(this.scanner['config_file']) - - this.proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - - print(" * Burp REST API engine succesfully started on http://{}:{}/" - .format(this.scanner['server_host'], this.scanner['server_port'])) + return { + "status": "error", + "reason": "'server_port' and/or 'server_host' option is missing", + } + if this.scanner["project_file"]: # file is created if not exists + cmd += " --project-file={}".format( + BASE_DIR + "/" + this.scanner["project_file"] + ) + if this.scanner["config_file"]: # file is created if not exists + cmd += " --config-file={}".format(this.scanner["config_file"]) + + this.proc = subprocess.Popen( + cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + + print( + " * Burp REST API engine succesfully started on http://{}:{}/".format( + this.scanner["server_host"], this.scanner["server_port"] + ) + ) return {"status": "READY"} -@app.route('/engines/burp/reloadconfig') + +@app.route("/engines/burp/reloadconfig") def reloadconfig(): res = {"page": "reloadconfig"} - status = loadconfig()['status'] - res.update({ - "status": status, - "config": this.scanner, - "details" : { - "pid": this.proc.pid}})#, - # "args": this.proc.args}}) + status = loadconfig()["status"] + res.update( + {"status": status, "config": this.scanner, "details": {"pid": this.proc.pid}} + ) # , + # "args": this.proc.args}}) return jsonify(res) -@app.route('/engines/burp/startscan', methods=['POST']) -def start(base_url = None, spider = False): + 
+@app.route("/engines/burp/startscan", methods=["POST"]) +def start(base_url=None, spider=False): res = {"page": "startscan"} scan = {} data = json.loads(request.data) - if not 'assets' in data.keys() or not'scan_id' in data.keys() or not 'base_url' in data['options']: - res.update({ - "status": "error", - "reason": "arg error, something is missing (ex: 'assets', 'scan_id', 'options/base_url')" - }) - return jsonify(res) + if ( + not "assets" in data.keys() + or not "scan_id" in data.keys() + or not "base_url" in data["options"] + ): + res.update( + { + "status": "error", + "reason": "arg error, something is missing (ex: 'assets', 'scan_id', 'options/base_url')", + } + ) + return jsonify(res) scan["scan_id"] = data["scan_id"] if data["scan_id"] in this.scans.keys(): - res.update({ "status": "error", "reason": "scan already started (scan_id={})".format(data["scan_id"])}) + res.update( + { + "status": "error", + "reason": "scan already started (scan_id={})".format(data["scan_id"]), + } + ) return jsonify(res) # Initialize the scan parameters - if not 'ports' in data['options'].keys(): + if not "ports" in data["options"].keys(): scan["target_port"] = "443" else: - scan["target_port"] = str(list(data['options']['ports'])[0]) # get the 1st in list - - scan["target_host"] = str(list(data['assets'])[0]) # get the 1st in list - scan["base_url"] = str(data['options']['protocol']) + "://" +\ - scan["target_host"]+":"+scan["target_port"]+str(data['options']['base_url']) + scan["target_port"] = str( + list(data["options"]["ports"])[0] + ) # get the 1st in list + + scan["target_host"] = str(list(data["assets"])[0]) # get the 1st in list + scan["base_url"] = ( + str(data["options"]["protocol"]) + + "://" + + scan["target_host"] + + ":" + + scan["target_port"] + + str(data["options"]["base_url"]) + ) scan["started_at"] = datetime.datetime.now() # if not request.args.get('base_url'): # res.update({"status": "error", "reason": "'base_url' parameter not set"}) # return jsonify(res) 
- #base_url = request.args.get('base_url') + # base_url = request.args.get('base_url') # Send it to spider if gently asked - if 'spider' in data['options'].keys() and data['options']['spider']: + if "spider" in data["options"].keys() and data["options"]["spider"]: # Add the base_url to scope _addto_scope(scan["base_url"]) print("[STARTSCAN] _addto_scope({})".format(scan["base_url"])) _do_spider(scan["base_url"]) print("[STARTSCAN] _do_spider({})".format(scan["base_url"])) - #/!\ oh wait ... spidering takes times ...!!!!! + # /!\ oh wait ... spidering takes times ...!!!!! # Send a request to the proxy in order to add it to the current sitemap _addto_sitemap(scan["base_url"]) @@ -365,57 +404,66 @@ def start(base_url = None, spider = False): # Send to active scan queue _addto_scanqueue(scan["base_url"]) - #@TODO: manage errors + # @TODO: manage errors # Prepare data returned this.scans.update({scan["scan_id"]: scan}) - res.update({ - "status": "accepted", - "scan": scan, - "details" : {"base_url": scan["base_url"]}}) + res.update( + {"status": "accepted", "scan": scan, "details": {"base_url": scan["base_url"]}} + ) return jsonify(res) + # Deletes the full scan queue map from memory -@app.route('/engines/burp/stopscans') +@app.route("/engines/burp/stopscans") def stop(): - res = { "page": "stopscans" } - url = this.scanurl + '/burp/scanner/scans/active' - #@todo: catch 'ConnectionError' (wait for engine fully started) before sending request + res = {"page": "stopscans"} + url = this.scanurl + "/burp/scanner/scans/active" + # @todo: catch 'ConnectionError' (wait for engine fully started) before sending request - headers = {'content-type': 'application/json', 'accept': 'application/json'} + headers = {"content-type": "application/json", "accept": "application/json"} r = requests.delete(url, headers=headers) if r.status_code != 200: - res.update({"status": "error", "reason": "undefined", "details": json.loads(r.text)}) + res.update( + {"status": "error", "reason": 
"undefined", "details": json.loads(r.text)} + ) else: res.update({"status": "success", "details": "Scan queue successfully deleted"}) return jsonify(res) -@app.route('/engines/burp/reset') + +@app.route("/engines/burp/reset") def reset(): - res = { "page": "reset" } - url = this.scanurl + '/burp/reset' - #@todo: catch 'ConnectionError' (wait for engine fully started) before sending request + res = {"page": "reset"} + url = this.scanurl + "/burp/reset" + # @todo: catch 'ConnectionError' (wait for engine fully started) before sending request - headers = {'content-type': 'application/json', 'accept': 'application/json'} + headers = {"content-type": "application/json", "accept": "application/json"} r = requests.get(url, headers=headers) if r.status_code != 200: - res.update({"status": "error", "reason": "undefined", "details": json.loads(r.text)}) + res.update( + {"status": "error", "reason": "undefined", "details": json.loads(r.text)} + ) return jsonify(res) -@app.route('/engines/burp/status') + +@app.route("/engines/burp/status") def status(): - res = { "page": "status" } - if hasattr(this.proc, 'pid') and not this.proc.poll(): - res.update({ - "status": "READY", # the rest api is alive - "details": { - "pid" : this.proc.pid#, - #"args": this.proc.args } - }}) + res = {"page": "status"} + if hasattr(this.proc, "pid") and not this.proc.poll(): + res.update( + { + "status": "READY", # the rest api is alive + "details": { + "pid": this.proc.pid # , + # "args": this.proc.args } + }, + } + ) else: - res.update({ "status": "ERROR" }) + res.update({"status": "ERROR"}) # display info on the scanner res.update({"scanner": this.scanner}) @@ -447,12 +495,14 @@ def _is_scan_finished(scan_id): return False -@app.route('/engines/burp/status/') +@app.route("/engines/burp/status/") def scan_status(scan_id): - res = { "page": "scan_status" } + res = {"page": "scan_status"} if not scan_id in this.scans.keys(): - res.update({ "status": "error", "reason": "scan_id '{}' not 
found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) # @todo: check id the scan is finished or not @@ -465,53 +515,60 @@ def scan_status(scan_id): return jsonify(res) -@app.route('/engines/burp/info') +@app.route("/engines/burp/info") def info(): - res = { "page": "info", "engine_config": this.scanner} - - if hasattr(this.proc, 'pid') and not this.proc.poll(): - res.update({ - "status": "running", - "details": { - "pid" : this.proc.pid}})#, - #"args": this.proc.args }}) + res = {"page": "info", "engine_config": this.scanner} + + if hasattr(this.proc, "pid") and not this.proc.poll(): + res.update({"status": "running", "details": {"pid": this.proc.pid}}) # , + # "args": this.proc.args }}) else: res.update({"status": "idle"}) return jsonify(res) -@app.route('/engines/burp/genreport/') +@app.route("/engines/burp/genreport/") def genreport(scan_id): res = {"page": "report", "scan_id": scan_id} if not _is_scan_finished(scan_id): - res.update({ "status": "error", "reason": "scan '{}' not finished".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan '{}' not finished".format(scan_id)} + ) return jsonify(res) scan = this.scans[scan_id] - url = this.scanurl + '/burp/scanner/issues?urlPrefix=' + scan['base_url'] - headers = {'content-type': 'application/json', 'accept': 'application/json'} + url = this.scanurl + "/burp/scanner/issues?urlPrefix=" + scan["base_url"] + headers = {"content-type": "application/json", "accept": "application/json"} r = requests.get(url, headers=headers) if r.status_code != 200: - res.update({"status": "error", "reason": "undefined", "details": json.loads(r.text)}) + res.update( + {"status": "error", "reason": "undefined", "details": json.loads(r.text)} + ) else: - res.update({"status": "success", "details": "genreport error: issues not available"}) + res.update( + {"status": "success", "details": "genreport error: issues not available"} + ) 
print(json.loads(r.text)) - issues = _parse_report(results=json.loads(r.text)['issues'], asset_name=scan['target_host'], asset_port=scan['target_port']) + issues = _parse_report( + results=json.loads(r.text)["issues"], + asset_name=scan["target_host"], + asset_port=scan["target_port"], + ) + # @Todo: generate report - #@Todo: generate report + # @Todo: store report in file (archive) - #@Todo: store report in file (archive) - - #@Todo: clean Burp state + # @Todo: clean Burp state return jsonify(res) -''' + +""" { "issues": [ { @@ -571,7 +628,9 @@ def genreport(scan_id): } ] } -''' +""" + + def _parse_report(results, asset_name, asset_port): # Findings categories: @@ -579,18 +638,18 @@ def _parse_report(results, asset_name, asset_port): ts = int(time.time() * 1000) for result in results: - print result['confidence'] - result['severity'].replace('Informational', 'info') - result['severity'].replace('Low', 'low') - result['severity'].replace('Medium', 'medium') - result['severity'].replace('High', 'high') + print(result["confidence"]) + result["severity"].replace("Informational", "info") + result["severity"].replace("Low", "low") + result["severity"].replace("Medium", "medium") + result["severity"].replace("High", "high") return issues -@app.route('/engines/burp/test') +@app.route("/engines/burp/test") def test(): - #if not APP_DEBUG: + # if not APP_DEBUG: # return jsonify({"page": "test"}) res = "

Test Page (DEBUG):

" @@ -600,9 +659,13 @@ def test(): for arg in rule.arguments: options[arg] = "[{0}]".format(arg) - methods = ','.join(rule.methods) + methods = ",".join(rule.methods) url = url_for(rule.endpoint, **options) - res += urlparse.unquote("{0:50s} {1:20s} {2}
".format(rule.endpoint, methods, url)) + res += urlparse.unquote( + "{0:50s} {1:20s} {2}
".format( + rule.endpoint, methods, url + ) + ) return res @@ -612,11 +675,11 @@ def page_not_found(e): return jsonify({"page": "not found"}) -if __name__ == '__main__': +if __name__ == "__main__": if os.getuid() != 0: print("Error: Start the engine using root privileges !") sys.exit(-1) - if not os.path.exists(BASE_DIR+"/results"): - os.makedirs(BASE_DIR+"/results") + if not os.path.exists(BASE_DIR + "/results"): + os.makedirs(BASE_DIR + "/results") loadconfig() app.run(debug=APP_DEBUG, host=APP_HOST, port=APP_PORT) diff --git a/engines/censys/engine-censys.py b/engines/censys/engine-censys.py index 93086aab..eb938c93 100644 --- a/engines/censys/engine-censys.py +++ b/engines/censys/engine-censys.py @@ -1,5 +1,6 @@ #!/usr/bin/python3 # -*- coding: utf-8 -*- + import censys import time, OpenSSL, json, os, sys, requests, queue, threading, ssl, socket, hashlib, signal, optparse from urllib.parse import urlparse @@ -17,26 +18,27 @@ BASE_DIR = os.path.dirname(os.path.realpath(__file__)) this = sys.modules[__name__] -this.scanner = {} # config of the engine -this.scans = {} # var where we stock informations about scans -this.STOPPED = [] # var where we stock the scans stopped when stopping the scans -this.queries = [] # queries queue to censys api -this.certificates = [] # where we stock the instances of a connection to censys api -this.keys = [] # where we stock the keys used to connect to censys api -this.requestor = [] # wherer we stock the threads demon which query censys api - -@app.route('/') +this.scanner = {} # config of the engine +this.scans = {} # var where we stock informations about scans +this.STOPPED = [] # var where we stock the scans stopped when stopping the scans +this.queries = [] # queries queue to censys api +this.certificates = [] # where we stock the instances of a connection to censys api +this.keys = [] # where we stock the keys used to connect to censys api +this.requestor = [] # wherer we stock the threads demon which query censys api + + 
+@app.route("/") def default(): - return redirect(url_for('index')) + return redirect(url_for("index")) -@app.route('/engines/censys/') +@app.route("/engines/censys/") def index(): return jsonify({"page": "index"}) def _loadconfig(): - conf_file = BASE_DIR+'/censys.json' + conf_file = BASE_DIR + "/censys.json" if os.path.exists(conf_file): json_data = open(conf_file) @@ -45,8 +47,12 @@ def _loadconfig(): del this.scanner["keys"] id_resq = 0 for key in this.keys: - this.requestor.append( threading.Thread(target=_requestor_d, args=(id_resq,) ) ) - this.certificates.append( censys.certificates.CensysCertificates(key["uid"], key["secret"]) ) + this.requestor.append( + threading.Thread(target=_requestor_d, args=(id_resq,)) + ) + this.certificates.append( + censys.certificates.CensysCertificates(key["uid"], key["secret"]) + ) id_resq += 1 for resq in this.requestor: resq._Thread__stop() @@ -56,10 +62,14 @@ def _loadconfig(): for resq in this.requestor: resq.start() else: - return {"status": "error", "reason": "config file not found", "detail": {"filename" : conf_file}} + return { + "status": "error", + "reason": "config file not found", + "detail": {"filename": conf_file}, + } -@app.route('/engines/censys/reloadconfig', methods=['GET']) +@app.route("/engines/censys/reloadconfig", methods=["GET"]) def reloadconfig(): res = {"page": "reloadconfig"} @@ -70,40 +80,52 @@ def reloadconfig(): return jsonify(res) -@app.route('/engines/censys/startscan', methods=["POST"]) +@app.route("/engines/censys/startscan", methods=["POST"]) def start_scan(): res = {"page": "startscan"} if len(this.queries) == MAX_QUERIES: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached" - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached", + } + ) return jsonify(res) data = json.loads(request.data) - if not 'assets' in data.keys() or not 'options' in data.keys() or not 'keyword' in 
data['options'].keys() or len(data['options']['keyword']) == 0: - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something is missing ('options'.'keyword' ? 'assets' ?)" - }}) + if ( + not "assets" in data.keys() + or not "options" in data.keys() + or not "keyword" in data["options"].keys() + or len(data["options"]["keyword"]) == 0 + ): + res.update( + { + "status": "refused", + "details": { + "reason": "arg error, something is missing ('options'.'keyword' ? 'assets' ?)" + }, + } + ) return jsonify(res) - if str(data['scan_id']) in this.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' already launched".format(data['scan_id']) - }}) + if str(data["scan_id"]) in this.scans.keys(): + res.update( + { + "status": "refused", + "details": { + "reason": "scan '{}' already launched".format(data["scan_id"]) + }, + } + ) return jsonify(res) - _put_queries({ - "search": data['options']['keyword'], - "scan_id": str(data['scan_id']) - }) - this.scans[str(data['scan_id'])] = { - "keyword":{}, - "issues":[], + _put_queries( + {"search": data["options"]["keyword"], "scan_id": str(data["scan_id"])} + ) + this.scans[str(data["scan_id"])] = { + "keyword": {}, + "issues": [], "options": [], "up_cert": {}, "known_CA": [], @@ -118,67 +140,81 @@ def start_scan(): "analized_certificate": [], "host_self_signed": [], "alt_name_on_not_trusted_host": [], - "ca_not_trusted": {} + "ca_not_trusted": {}, }, "summary": { "engine_name": this.scanner["name"], "nb_issues": 0, "engine_version": this.scanner["version"], - "nb_info":0, "nb_high":0, "nb_medium":0, "nb_low":0 + "nb_info": 0, + "nb_high": 0, + "nb_medium": 0, + "nb_low": 0, }, - "totalLeft": 0 + "totalLeft": 0, } - if 'options' in data.keys(): - this.scans[str(data['scan_id'])]['options'] = data['options'] - - this.scans[str(data['scan_id'])]['assets'] = data['assets'] - - for keyword in data['options']['keyword']: - 
this.scans[str(data['scan_id'])]["keyword"][keyword]={"left": 0, "begin": False} - res.update({ - "status": "accepted", - "details" : { - "scan_id": str(data['scan_id']) - }}) + if "options" in data.keys(): + this.scans[str(data["scan_id"])]["options"] = data["options"] + + this.scans[str(data["scan_id"])]["assets"] = data["assets"] + + for keyword in data["options"]["keyword"]: + this.scans[str(data["scan_id"])]["keyword"][keyword] = { + "left": 0, + "begin": False, + } + res.update({"status": "accepted", "details": {"scan_id": str(data["scan_id"])}}) return jsonify(res) def _put_queries(dic): - this.queries.insert(0,dic) + this.queries.insert(0, dic) def _remove_scan(query): return not query["scan_id"] in this.STOPPED -@app.route('/engines/censys/stop/') +@app.route("/engines/censys/stop/") def stop_scan(scan_id): scan_id = str(scan_id) res = {"page": "stop"} if not scan_id in this.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) scan_status(scan_id) - if this.scans[scan_id]['status'] not in ["SCANNING"]: - res.update({"status": "error", "reason": "scan '{}' is not running (status={})".format(scan_id, this.scans[scan_id]['status'])}) + if this.scans[scan_id]["status"] not in ["SCANNING"]: + res.update( + { + "status": "error", + "reason": "scan '{}' is not running (status={})".format( + scan_id, this.scans[scan_id]["status"] + ), + } + ) return jsonify(res) this.STOPPED.append(scan_id) this.queries = filter(_remove_scan, this.queries) - this.scans[scan_id]['status'] = 'STOPPED' - this.scans[scan_id]['finished_at'] = int(time.time() * 1000) + this.scans[scan_id]["status"] = "STOPPED" + this.scans[scan_id]["finished_at"] = int(time.time() * 1000) this.STOPPED.remove(scan_id) clean_scan(scan_id) return jsonify(res) -@app.route('/engines/censys/clean') +@app.route("/engines/censys/clean") def clean(): 
message_error = "Some scan are not STOPPED or finished can't clean them :" res = {"page": "clean"} clean_error = False for scan in this.scans.keys(): - if this.scans[scan]['status'] != 'FINISHED' and this.scans[scan]['status'] != 'STOPPED': + if ( + this.scans[scan]["status"] != "FINISHED" + and this.scans[scan]["status"] != "STOPPED" + ): clean_error = True else: clean_scan(scan) @@ -188,18 +224,30 @@ def clean(): return jsonify(res) -@app.route('/engines/censys/clean/') +@app.route("/engines/censys/clean/") def clean_scan(scan_id): res = {"page": "clean_scan"} scan_id = str(scan_id) res.update({"scan_id": scan_id}) if not scan_id in this.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) - if this.scans[scan_id]['status'] != 'FINISHED' and this.scans[scan_id]['status'] != 'STOPPED': - res.update({"status": "error", "reason": "CAN'T CLEAN '{}' because not FINISHED or STOPPED".format(scan_id)}) + if ( + this.scans[scan_id]["status"] != "FINISHED" + and this.scans[scan_id]["status"] != "STOPPED" + ): + res.update( + { + "status": "error", + "reason": "CAN'T CLEAN '{}' because not FINISHED or STOPPED".format( + scan_id + ), + } + ) return jsonify(res) this.scans.pop(scan_id) @@ -208,113 +256,142 @@ def clean_scan(scan_id): return jsonify(res) -@app.route('/engines/censys/getreport/') +@app.route("/engines/censys/getreport/") def getreport(scan_id): scan_id = str(scan_id) - filepath = BASE_DIR+"/results/censys_{}.json".format(scan_id) + filepath = BASE_DIR + "/results/censys_{}.json".format(scan_id) if not os.path.exists(filepath): - return jsonify({"status": "error", "reason": "report file for scan_id '{}' not found".format(scan_id)}) - return send_from_directory(BASE_DIR+"/results/", "censys_{}.json".format(scan_id)) + return jsonify( + { + "status": "error", + "reason": "report file for scan_id '{}' not 
found".format(scan_id), + } + ) + return send_from_directory(BASE_DIR + "/results/", "censys_{}.json".format(scan_id)) -@app.route('/engines/censys/status/') +@app.route("/engines/censys/status/") def scan_status(scan_id): scan_id = str(scan_id) if not scan_id in this.scans.keys(): - return jsonify({ - "status": "ERROR", - "details": "scan_id '{}' not found".format(scan_id) - }) + return jsonify( + {"status": "ERROR", "details": "scan_id '{}' not found".format(scan_id)} + ) finish = False - if not this.scans[scan_id]['status'] == 'STOPPED': + if not this.scans[scan_id]["status"] == "STOPPED": for keyword in this.scans[scan_id]["keyword"].keys(): - if this.scans[scan_id]["keyword"][keyword]['begin'] and this.scans[scan_id]["keyword"][keyword]['left'] == 0: + if ( + this.scans[scan_id]["keyword"][keyword]["begin"] + and this.scans[scan_id]["keyword"][keyword]["left"] == 0 + ): finish = True if finish: - this.scans[scan_id]['status'] = "FINISHED" + this.scans[scan_id]["status"] = "FINISHED" else: - this.scans[scan_id]['status'] = "SCANNING" + this.scans[scan_id]["status"] = "SCANNING" - return jsonify({"scan_id": scan_id, "status": this.scans[scan_id]['status'],"detail": "'{}' certificates to proceed".format(this.scans[scan_id]['totalLeft'])}) + return jsonify( + { + "scan_id": scan_id, + "status": this.scans[scan_id]["status"], + "detail": "'{}' certificates to proceed".format( + this.scans[scan_id]["totalLeft"] + ), + } + ) -@app.route('/engines/censys/status') +@app.route("/engines/censys/status") def status(): res = {"page": "status"} scans = [] for scan_id in this.scans.keys(): scan_status(scan_id) - scans.append({ - "scan_id": scan_id, - "status": this.scans[scan_id]['status'], - "detail": "'{}' certificates to proceed".format(this.scans[scan_id]['totalLeft']) - }) + scans.append( + { + "scan_id": scan_id, + "status": this.scans[scan_id]["status"], + "detail": "'{}' certificates to proceed".format( + this.scans[scan_id]["totalLeft"] + ), + } + ) if APP_MAXSCANS 
<= len(this.scans): - res.update({"status":"BUSY"}) + res.update({"status": "BUSY"}) else: - res.update({"status":"READY"}) - res.update({ - "scanner": this.scanner, - "scans": scans - }) + res.update({"status": "READY"}) + res.update({"scanner": this.scanner, "scans": scans}) return jsonify(res) -@app.route('/engines/censys/debug') +@app.route("/engines/censys/debug") def debug(): return jsonify(this.scans) -@app.route('/engines/censys/info') +@app.route("/engines/censys/info") def info(): return jsonify({"page": "info", "engine_config": this.scanner}) -@app.route('/engines/censys/getfindings/') +@app.route("/engines/censys/getfindings/") def getfindings(scan_id): scan_id = str(scan_id) res = {"page": "getfindings", "scan_id": scan_id} if not scan_id in this.scans.keys(): - res.update({"status": "ERROR", - "details": "scan_id '{}' not found".format(scan_id) - }) + res.update( + {"status": "ERROR", "details": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) status() - if this.scans[scan_id]['status'] != "FINISHED" and this.scans[scan_id]['status'] != "STOPPED": - res.update({ - "status": "ERROR", - "details": "'{}' not finished or STOPPED (status='{}')".format(scan_id,this.scans[scan_id]['status']) - }) + if ( + this.scans[scan_id]["status"] != "FINISHED" + and this.scans[scan_id]["status"] != "STOPPED" + ): + res.update( + { + "status": "ERROR", + "details": "'{}' not finished or STOPPED (status='{}')".format( + scan_id, this.scans[scan_id]["status"] + ), + } + ) return jsonify(res) scan = { - "scan_id": scan_id, - "keyword": this.scans[scan_id]["keyword"].keys(), - "options": this.scans[scan_id]['options'], - "status":this.scans[scan_id]['status'], - "started_at":this.scans[scan_id]['started_at'], - "finished_at":this.scans[scan_id]['finished_at'] - } - if not os.path.exists(BASE_DIR+"/results"): - os.makedirs(BASE_DIR+"/results") + "scan_id": scan_id, + "keyword": this.scans[scan_id]["keyword"].keys(), + "options": 
this.scans[scan_id]["options"], + "status": this.scans[scan_id]["status"], + "started_at": this.scans[scan_id]["started_at"], + "finished_at": this.scans[scan_id]["finished_at"], + } + if not os.path.exists(BASE_DIR + "/results"): + os.makedirs(BASE_DIR + "/results") _create_issues(scan_id) - - with open(BASE_DIR+"/results/censys_"+scan_id+".json","w") as report_file: - json.dump({"scan": scan, - "summary":this.scans[scan_id]["summary"], - "issues":this.scans[scan_id]["issues"]}, - report_file, default=_json_serial) - - res.update({ - "scan": scan, - "summary":this.scans[scan_id]["summary"], - "issues":this.scans[scan_id]["issues"], - "status": "success" - }) + with open(BASE_DIR + "/results/censys_" + scan_id + ".json", "w") as report_file: + json.dump( + { + "scan": scan, + "summary": this.scans[scan_id]["summary"], + "issues": this.scans[scan_id]["issues"], + }, + report_file, + default=_json_serial, + ) + + res.update( + { + "scan": scan, + "summary": this.scans[scan_id]["summary"], + "issues": this.scans[scan_id]["issues"], + "status": "success", + } + ) clean_scan(scan_id) return jsonify(res) @@ -326,281 +403,403 @@ def _create_issues(scan_id): links = [] if len(this.scans[scan_id]["gather"]["certificate_expired"]) > 0: - this.scans[scan_id]["gather"]["certificate_expired"].sort(lambda x,y : cmp(x['description'], y['description'])) + this.scans[scan_id]["gather"]["certificate_expired"].sort( + lambda x, y: cmp(x["description"], y["description"]) + ) for issues in this.scans[scan_id]["gather"]["certificate_expired"]: description = description + issues["description"] + "\n" target.append(issues["target"]) links.append(issues["links"]) - if "options" in this.scans[scan_id] and "verbose" in this.scans[scan_id]['options'] and this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Certificate is expired", - "solution" : "Renew certificate", - "issue_id": 
this.scans[scan_id]["summary"]["nb_issues"], - "type": "certificate_expired", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "high", - "confidence": "certain", - "metadata": {"tags": ["certificate","expired"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_high"]+=1 + if ( + "options" in this.scans[scan_id] + and "verbose" in this.scans[scan_id]["options"] + and this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Certificate is expired", + "solution": "Renew certificate", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "certificate_expired", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "high", + "confidence": "certain", + "metadata": { + "tags": ["certificate", "expired"], + "links": links, + }, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_high"] += 1 description = "" links = [] - if not "options" in this.scans[scan_id] or not "verbose" in this.scans[scan_id]['options'] or not this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Several certificates are expired", - "solution" : "Renew certificates", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "certificate_expired", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "high", - "confidence": "certain", - "metadata": {"tags": ["certificate","expired"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_high"]+=1 + if ( + not "options" in this.scans[scan_id] + or not "verbose" in this.scans[scan_id]["options"] + or not this.scans[scan_id]["options"]["verbose"] + ): + 
this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Several certificates are expired", + "solution": "Renew certificates", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "certificate_expired", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "high", + "confidence": "certain", + "metadata": {"tags": ["certificate", "expired"], "links": links}, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_high"] += 1 description = "" target = [] links = [] if len(this.scans[scan_id]["gather"]["certificate_expired_in_two_weeks"]) > 0: - this.scans[scan_id]["gather"]["certificate_expired_in_two_weeks"].sort(lambda x,y : cmp(x['description'], y['description'])) + this.scans[scan_id]["gather"]["certificate_expired_in_two_weeks"].sort( + lambda x, y: cmp(x["description"], y["description"]) + ) for issues in this.scans[scan_id]["gather"]["certificate_expired_in_two_weeks"]: description += issues["description"] target.append(issues["target"]) links.append(issues["links"]) - if "options" in this.scans[scan_id] and "verbose" in this.scans[scan_id]['options'] and this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Certificates expire in two weeks", - "solution" : "Change certificate before two weeks", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "certificate_expired_in_two_weeks", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "high", - "confidence": "certain", - "metadata": {"tags": ["certificate","expire"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_info"]+=1 + if ( + "options" in this.scans[scan_id] + and "verbose" in this.scans[scan_id]["options"] + and 
this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Certificates expire in two weeks", + "solution": "Change certificate before two weeks", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "certificate_expired_in_two_weeks", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "high", + "confidence": "certain", + "metadata": {"tags": ["certificate", "expire"], "links": links}, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_info"] += 1 description = "" links = [] - if not "options" in this.scans[scan_id] or not "verbose" in this.scans[scan_id]['options'] or not this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Several certificates expire in two weeks", - "solution" : "Change certificate before two weeks", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "certificate_expired_in_two_weeks", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "high", - "confidence": "certain", - "metadata": {"tags": ["certificate","expire"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_high"]+=1 + if ( + not "options" in this.scans[scan_id] + or not "verbose" in this.scans[scan_id]["options"] + or not this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Several certificates expire in two weeks", + "solution": "Change certificate before two weeks", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "certificate_expired_in_two_weeks", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "high", + "confidence": "certain", + 
"metadata": {"tags": ["certificate", "expire"], "links": links}, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_high"] += 1 description = "" target = [] links = [] if len(this.scans[scan_id]["gather"]["fail_load_crl"]) > 0: - this.scans[scan_id]["gather"]["fail_load_crl"].sort(lambda x,y : cmp(x['description'], y['description'])) + this.scans[scan_id]["gather"]["fail_load_crl"].sort( + lambda x, y: cmp(x["description"], y["description"]) + ) for issues in this.scans[scan_id]["gather"]["fail_load_crl"]: description += issues["description"] target.append(issues["target"]) links.append(issues["links"]) - if "options" in this.scans[scan_id] and "verbose" in this.scans[scan_id]['options'] and this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Unable to load/reach revokation list", - "solution" : "N/A", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "fail_load_crl", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "info", - "confidence": "certain", - "metadata": {"tags": ["certificate","revokation"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_info"]+=1 + if ( + "options" in this.scans[scan_id] + and "verbose" in this.scans[scan_id]["options"] + and this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Unable to load/reach revokation list", + "solution": "N/A", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "fail_load_crl", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "info", + "confidence": "certain", + "metadata": { + "tags": ["certificate", "revokation"], + "links": links, + }, + "description": description, + "hash": 
hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_info"] += 1 description = "" links = [] - if not "options" in this.scans[scan_id] or not "verbose" in this.scans[scan_id]['options'] or not this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Unable to load/reach revokation lists", - "solution" : "N/A", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "fail_load_crl", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "info", - "confidence": "certain", - "metadata": {"tags": ["certificate","revokation"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_info"]+=1 + if ( + not "options" in this.scans[scan_id] + or not "verbose" in this.scans[scan_id]["options"] + or not this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Unable to load/reach revokation lists", + "solution": "N/A", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "fail_load_crl", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "info", + "confidence": "certain", + "metadata": {"tags": ["certificate", "revokation"], "links": links}, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_info"] += 1 description = "" target = [] links = [] if len(this.scans[scan_id]["gather"]["certificate_in_crl"]) > 0: - this.scans[scan_id]["gather"]["certificate_in_crl"].sort(lambda x,y : cmp(x['description'], y['description'])) + this.scans[scan_id]["gather"]["certificate_in_crl"].sort( + lambda x, y: cmp(x["description"], y["description"]) + ) for issues in this.scans[scan_id]["gather"]["certificate_in_crl"]: description += issues["description"] 
target.append(issues["target"]) links.append(issues["links"]) - if "options" in this.scans[scan_id] and "verbose" in this.scans[scan_id]['options'] and this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Certificate is revoked", - "solution" : "Change the certificate", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "certificate_in_crl", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "high", - "confidence": "certain", - "metadata": {"tags": ["certificate","revoked"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_high"]+=1 + if ( + "options" in this.scans[scan_id] + and "verbose" in this.scans[scan_id]["options"] + and this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Certificate is revoked", + "solution": "Change the certificate", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "certificate_in_crl", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "high", + "confidence": "certain", + "metadata": { + "tags": ["certificate", "revoked"], + "links": links, + }, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_high"] += 1 description = "" links = [] - if not "options" in this.scans[scan_id] or not "verbose" in this.scans[scan_id]['options'] or not this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Several certificates are revoked", - "solution" : "Change the certificate", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "certificate_in_crl", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "high", - 
"confidence": "certain", - "metadata": {"tags": ["certificate","revoked"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_high"]+=1 + if ( + not "options" in this.scans[scan_id] + or not "verbose" in this.scans[scan_id]["options"] + or not this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Several certificates are revoked", + "solution": "Change the certificate", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "certificate_in_crl", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "high", + "confidence": "certain", + "metadata": {"tags": ["certificate", "revoked"], "links": links}, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_high"] += 1 description = "" target = [] links = [] if len(this.scans[scan_id]["gather"]["host_self_signed"]) > 0: - this.scans[scan_id]["gather"]["host_self_signed"].sort(lambda x,y : cmp(x['description'], y['description'])) + this.scans[scan_id]["gather"]["host_self_signed"].sort( + lambda x, y: cmp(x["description"], y["description"]) + ) for issues in this.scans[scan_id]["gather"]["host_self_signed"]: description += issues["description"] target.append(issues["target"]) links.append(issues["links"]) - if "options" in this.scans[scan_id] and "verbose" in this.scans[scan_id]['options'] and this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Self-signed certificates", - "solution" : "Consider signing the certificate using a trusted CA", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "host_self_signed", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "medium", - "confidence": "certain", - "metadata": {"tags": 
["certificate","self-signed"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_medium"]+=1 + if ( + "options" in this.scans[scan_id] + and "verbose" in this.scans[scan_id]["options"] + and this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Self-signed certificates", + "solution": "Consider signing the certificate using a trusted CA", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "host_self_signed", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "medium", + "confidence": "certain", + "metadata": { + "tags": ["certificate", "self-signed"], + "links": links, + }, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_medium"] += 1 description = "" links = [] - if not "options" in this.scans[scan_id] or not "verbose" in this.scans[scan_id]['options'] or not this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Self-signed certificates", - "solution" : "Consider signing the certificates using a trusted CA", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "host_self_signed", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "medium", - "confidence": "certain", - "metadata": {"tags": ["certificate","self-signed"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_medium"]+=1 + if ( + not "options" in this.scans[scan_id] + or not "verbose" in this.scans[scan_id]["options"] + or not this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Self-signed certificates", 
+ "solution": "Consider signing the certificates using a trusted CA", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "host_self_signed", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "medium", + "confidence": "certain", + "metadata": { + "tags": ["certificate", "self-signed"], + "links": links, + }, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_medium"] += 1 description = "" links = [] if len(this.scans[scan_id]["gather"]["analized_certificate"]) > 0: - this.scans[scan_id]["gather"]["analized_certificate"].sort(lambda x,y : cmp(x['description'], y['description'])) + this.scans[scan_id]["gather"]["analized_certificate"].sort( + lambda x, y: cmp(x["description"], y["description"]) + ) for issues in this.scans[scan_id]["gather"]["analized_certificate"]: description += issues["description"] links.append(issues["links"]) - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "List of analized certificate", - "solution" : "n/a", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "ananlized_certificate", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "info", - "confidence": "certain", - "metadata": {"tags": ["certificate","analized"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_info"]+=1 + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "List of analized certificate", + "solution": "n/a", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "ananlized_certificate", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "info", + "confidence": "certain", + "metadata": {"tags": ["certificate", "analized"], "links": links}, + "description": description, + "hash": 
hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_info"] += 1 description = "" target = [] links = [] if len(this.scans[scan_id]["gather"]["alt_name_on_not_trusted_host"]) > 0: - this.scans[scan_id]["gather"]["alt_name_on_not_trusted_host"].sort(lambda x,y : cmp(x['description'], y['description'])) + this.scans[scan_id]["gather"]["alt_name_on_not_trusted_host"].sort( + lambda x, y: cmp(x["description"], y["description"]) + ) for issues in this.scans[scan_id]["gather"]["alt_name_on_not_trusted_host"]: description += issues["description"] target.append(issues["target"]) links.append(issues["links"]) - if "options" in this.scans[scan_id] and "verbose" in this.scans[scan_id]['options'] and this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Certificate CN or alternative names not trusted", - "solution" : "Verify if the CN or alternative names are trusted", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "alt_name_on_not_trusted_host", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "medium", - "confidence": "certain", - "metadata": {"tags": ["certificate","alt-name","trust"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_medium"]+=1 + if ( + "options" in this.scans[scan_id] + and "verbose" in this.scans[scan_id]["options"] + and this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Certificate CN or alternative names not trusted", + "solution": "Verify if the CN or alternative names are trusted", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "alt_name_on_not_trusted_host", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "medium", + "confidence": "certain", + "metadata": { + 
"tags": ["certificate", "alt-name", "trust"], + "links": links, + }, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_medium"] += 1 description = "" links = [] - if not "options" in this.scans[scan_id] or not "verbose" in this.scans[scan_id]['options'] or not this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Certificate CN or alternative names not trusted", - "solution" : "Verify if the CN or alternative names are trusted", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "alt_name_on_not_trusted_host", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "medium", - "confidence": "certain", - "metadata": {"tags": ["certificate","alt-name","trust"], "links": links }, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_medium"]+=1 + if ( + not "options" in this.scans[scan_id] + or not "verbose" in this.scans[scan_id]["options"] + or not this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Certificate CN or alternative names not trusted", + "solution": "Verify if the CN or alternative names are trusted", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "alt_name_on_not_trusted_host", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "medium", + "confidence": "certain", + "metadata": { + "tags": ["certificate", "alt-name", "trust"], + "links": links, + }, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_medium"] += 1 description = "" target = [] @@ -608,51 +807,79 @@ def _create_issues(scan_id): if len(this.scans[scan_id]["gather"]["ca_not_trusted"]) > 0: for issues in 
this.scans[scan_id]["gather"]["ca_not_trusted"].keys(): - target.append(this.scans[scan_id]["gather"]["ca_not_trusted"][issues]["target"]) - description += this.scans[scan_id]["gather"]["ca_not_trusted"][issues]["description"] - links.append(this.scans[scan_id]["gather"]["ca_not_trusted"][issues]["links"]) + target.append( + this.scans[scan_id]["gather"]["ca_not_trusted"][issues]["target"] + ) + description += this.scans[scan_id]["gather"]["ca_not_trusted"][issues][ + "description" + ] + links.append( + this.scans[scan_id]["gather"]["ca_not_trusted"][issues]["links"] + ) for ch in this.scans[scan_id]["gather"]["ca_not_trusted"][issues]["chains"]: description += "Certificate with chain : " for c in ch: - description = description + " - " + c["serial"] + " : " + c["subject"] + "\n" - - if "options" in this.scans[scan_id] and "verbose" in this.scans[scan_id]['options'] and this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Certificate signed by an unknown CA", - "solution" : "Check if the certificate should be trusted (add the CA in the trusted list ?)", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "ca_not_trusted", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "medium", - "confidence": "certain", - "metadata": {"tags": ["certificate","certification authority","trust"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_high"]+=1 + description = ( + description + " - " + c["serial"] + " : " + c["subject"] + "\n" + ) + + if ( + "options" in this.scans[scan_id] + and "verbose" in this.scans[scan_id]["options"] + and this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Certificate signed by an unknown CA", + "solution": "Check if the certificate should 
be trusted (add the CA in the trusted list ?)", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "ca_not_trusted", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "medium", + "confidence": "certain", + "metadata": { + "tags": ["certificate", "certification authority", "trust"], + "links": links, + }, + "description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_high"] += 1 description = "" links = [] - if not "options" in this.scans[scan_id] or not "verbose" in this.scans[scan_id]['options'] or not this.scans[scan_id]['options']['verbose']: - this.scans[scan_id]["summary"]["nb_issues"]+=1 - this.scans[scan_id]['issues'].append({"title": "Certificates signed by an unknown CA", - "solution" : "Check if the certificate should be trusted (add the CA in the trusted list ?)", - "issue_id": this.scans[scan_id]["summary"]["nb_issues"], - "type": "ca_not_trusted", - "target": {"addr": this.scans[scan_id]["assets"]}, - "severity": "medium", - "confidence": "certain", - "metadata": {"tags": ["certificate","certification authority","trust"], "links": links}, - "description": description, - "hash": hashlib.sha256(description).hexdigest() - }) - this.scans[scan_id]["summary"]["nb_high"]+=1 + if ( + not "options" in this.scans[scan_id] + or not "verbose" in this.scans[scan_id]["options"] + or not this.scans[scan_id]["options"]["verbose"] + ): + this.scans[scan_id]["summary"]["nb_issues"] += 1 + this.scans[scan_id]["issues"].append( + { + "title": "Certificates signed by an unknown CA", + "solution": "Check if the certificate should be trusted (add the CA in the trusted list ?)", + "issue_id": this.scans[scan_id]["summary"]["nb_issues"], + "type": "ca_not_trusted", + "target": {"addr": this.scans[scan_id]["assets"]}, + "severity": "medium", + "confidence": "certain", + "metadata": { + "tags": ["certificate", "certification authority", "trust"], + "links": links, + }, + 
"description": description, + "hash": hashlib.sha256(description).hexdigest(), + } + ) + this.scans[scan_id]["summary"]["nb_high"] += 1 def _json_serial(obj): - if isinstance(obj, datetime) or isinstance(obj,date): + if isinstance(obj, datetime) or isinstance(obj, date): serial = obj.isoformat() return serial raise TypeError("Type not serialzable ({})".format(obj)) @@ -664,54 +891,120 @@ def _requestor_d(key): action = this.queries.pop() try: if "search" in action.keys(): - if not action['scan_id'] in this.STOPPED: - this.scans[action['scan_id']]['started_at'] = int(time.time() * 1000) - for keyword in action['search']: - _search_cert(keyword,action['scan_id'], key) + if not action["scan_id"] in this.STOPPED: + this.scans[action["scan_id"]]["started_at"] = int( + time.time() * 1000 + ) + for keyword in action["search"]: + _search_cert(keyword, action["scan_id"], key) time.sleep(2.5) if "view" in action.keys(): - if not action['scan_id'] in this.STOPPED: - views = _get_view_cert(action['view'], key) + if not action["scan_id"] in this.STOPPED: + views = _get_view_cert(action["view"], key) ignore = False - if "options" in this.scans[action['scan_id']] and "ignore_changed_certificate" in this.scans[action['scan_id']]['options'] and this.scans[action['scan_id']]['options']['ignore_changed_certificate']: - ignore = _ignore_changed_certificate(views, action['scan_id']) + if ( + "options" in this.scans[action["scan_id"]] + and "ignore_changed_certificate" + in this.scans[action["scan_id"]]["options"] + and this.scans[action["scan_id"]]["options"][ + "ignore_changed_certificate" + ] + ): + ignore = _ignore_changed_certificate( + views, action["scan_id"] + ) if not ignore: - this.scans[action['scan_id']]["gather"]["ananlized_certificate"].append({ - "links": "https://censys.io/certificates/{}".format(action['view']), - "description": "Ananlized certificate '{}'\n\n".format(views["parsed"]["subject_dn"]) - }) - - if "options" in this.scans[action['scan_id']] and 
"do_scan_valid" in this.scans[action['scan_id']]['options'] and this.scans[action['scan_id']]['options']['do_scan_valid'] and not ignore: - _view_valid(views,action['view'],action['scan_id'],action['keyword']) - - if "options" in this.scans[action['scan_id']] and "do_scan_trusted" in this.scans[action['scan_id']]['options'] and this.scans[action['scan_id']]['options']['do_scan_trusted'] and not ignore: - _view_trusted(views,action['scan_id'],action['keyword']) - - if "options" in this.scans[action['scan_id']] and "do_scan_self_signed" in this.scans[action['scan_id']]['options'] and this.scans[action['scan_id']]['options']['do_scan_self_signed'] and not ignore: - _is_self_signed(views,action['scan_id'],action['keyword']) - - if "options" in this.scans[action['scan_id']] and "do_scan_ca_trusted" in this.scans[action['scan_id']]['options'] and this.scans[action['scan_id']]['options']['do_scan_ca_trusted'] and not ignore: - _ca_trusted(views,action['scan_id'],action['keyword'],key,chain=[]) + this.scans[action["scan_id"]]["gather"][ + "ananlized_certificate" + ].append( + { + "links": "https://censys.io/certificates/{}".format( + action["view"] + ), + "description": "Ananlized certificate '{}'\n\n".format( + views["parsed"]["subject_dn"] + ), + } + ) + + if ( + "options" in this.scans[action["scan_id"]] + and "do_scan_valid" + in this.scans[action["scan_id"]]["options"] + and this.scans[action["scan_id"]]["options"][ + "do_scan_valid" + ] + and not ignore + ): + _view_valid( + views, + action["view"], + action["scan_id"], + action["keyword"], + ) + + if ( + "options" in this.scans[action["scan_id"]] + and "do_scan_trusted" + in this.scans[action["scan_id"]]["options"] + and this.scans[action["scan_id"]]["options"][ + "do_scan_trusted" + ] + and not ignore + ): + _view_trusted(views, action["scan_id"], action["keyword"]) + + if ( + "options" in this.scans[action["scan_id"]] + and "do_scan_self_signed" + in this.scans[action["scan_id"]]["options"] + and 
this.scans[action["scan_id"]]["options"][ + "do_scan_self_signed" + ] + and not ignore + ): + _is_self_signed(views, action["scan_id"], action["keyword"]) + + if ( + "options" in this.scans[action["scan_id"]] + and "do_scan_ca_trusted" + in this.scans[action["scan_id"]]["options"] + and this.scans[action["scan_id"]]["options"][ + "do_scan_ca_trusted" + ] + and not ignore + ): + _ca_trusted( + views, + action["scan_id"], + action["keyword"], + key, + chain=[], + ) time.sleep(2.5) with this.lock: - this.scans[action['scan_id']]["keyword"][action['keyword']]['left']-=1 - this.scans[action['scan_id']]['totalLeft']-=1 - if this.scans[action['scan_id']]['totalLeft'] == 0: - this.scans[action['scan_id']]['finished_at'] = int(time.time() * 1000) + this.scans[action["scan_id"]]["keyword"][action["keyword"]][ + "left" + ] -= 1 + this.scans[action["scan_id"]]["totalLeft"] -= 1 + if this.scans[action["scan_id"]]["totalLeft"] == 0: + this.scans[action["scan_id"]]["finished_at"] = int( + time.time() * 1000 + ) except Exception: print(sys.exc_info()) else: time.sleep(1) -def _search_cert(keyword,scan_id, key): +def _search_cert(keyword, scan_id, key): while True: # While Rate Limit Exceeded we wait and try again try: cert = this.certificates[key].search(keyword) - #time.sleep(1) + # time.sleep(1) break except censys.base.CensysRateLimitExceededException: time.sleep(1) @@ -724,13 +1017,19 @@ def _search_cert(keyword,scan_id, key): # for all certificates try: for c in cert: - if this.scans[scan_id]['totalLeft'] == MAX_QUERIES: - break; - _put_queries({"view": c["parsed.fingerprint_sha256"],"scan_id":scan_id,"keyword": keyword}) + if this.scans[scan_id]["totalLeft"] == MAX_QUERIES: + break + _put_queries( + { + "view": c["parsed.fingerprint_sha256"], + "scan_id": scan_id, + "keyword": keyword, + } + ) with this.lock: - this.scans[scan_id]['keyword'][keyword]['left']+=1 - this.scans[scan_id]['totalLeft']+=1 - this.scans[scan_id]['keyword'][keyword]['begin']=True + 
this.scans[scan_id]["keyword"][keyword]["left"] += 1 + this.scans[scan_id]["totalLeft"] += 1 + this.scans[scan_id]["keyword"][keyword]["begin"] = True except Exception: pass return True @@ -738,11 +1037,13 @@ def _search_cert(keyword,scan_id, key): def _get_view_cert(cert_sha, key): while True: - # While Rate Limit Exceeded we wait and try again + # While Rate Limit Exceeded we wait and try again try: - views = this.certificates[key].view(cert_sha) # get the certificates by censys api + views = this.certificates[key].view( + cert_sha + ) # get the certificates by censys api break - except censys.base.CensysRateLimitExceededException: # fail Rate limit wait + except censys.base.CensysRateLimitExceededException: # fail Rate limit wait time.sleep(1) except censys.base.CensysNotFoundException: return False @@ -753,111 +1054,236 @@ def _get_view_cert(cert_sha, key): def _ignore_changed_certificate(views, scan_id): - if not "options" in this.scans[scan_id] or not "changed_certificate_port_test" in this.scans[scan_id]['options']: + if ( + not "options" in this.scans[scan_id] + or not "changed_certificate_port_test" in this.scans[scan_id]["options"] + ): port = [443] else: - port = this.scans[scan_id]['options']['changed_certificate_port_test'] + port = this.scans[scan_id]["options"]["changed_certificate_port_test"] try: url = views["parsed"]["subject"]["common_name"][0] - if url in this.scans[scan_id]['unreachable_host']: + if url in this.scans[scan_id]["unreachable_host"]: return False - if not _still_exist(url, views["parsed"]["serial_number"],port, scan_id): + if not _still_exist(url, views["parsed"]["serial_number"], port, scan_id): return True except Exception: - this.scans[scan_id]['unreachable_host'].append(url) + this.scans[scan_id]["unreachable_host"].append(url) return False -def _view_valid(views,cert_sha,scan_id,keyword): +def _view_valid(views, cert_sha, scan_id, keyword): - datetstart = 
datetime.strptime(views["parsed"]["validity"]["start"],"%Y-%m-%dT%H:%M:%SZ") #make datetime on validity start - datetend = datetime.strptime(views["parsed"]["validity"]["end"],"%Y-%m-%dT%H:%M:%SZ") #make datetime on validity end + datetstart = datetime.strptime( + views["parsed"]["validity"]["start"], "%Y-%m-%dT%H:%M:%SZ" + ) # make datetime on validity start + datetend = datetime.strptime( + views["parsed"]["validity"]["end"], "%Y-%m-%dT%H:%M:%SZ" + ) # make datetime on validity end two_weeks_later = datetime.now() + timedelta(days=15) try: - if datetend < datetime.today() or datetstart > datetime.today(): # see if certificates is outdated - if views["parsed"]["subject"]["common_name"][0] in this.scans[scan_id]['unreachable_host']: - this.scans[scan_id]["gather"]["certificate_expired"].append({ - "target": {"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"], "keyword":keyword}, - "links": "https://censys.io/certificates/{}".format(cert_sha), - "description": "Unreachable certificate expired '{}', we were unable to reach the certificate at '{}'\nValid from '{}' to '{}'\n\n".format(views["parsed"]["subject_dn"], views["parsed"]["subject"]["common_name"][0],datetstart, datetend) - }) + if ( + datetend < datetime.today() or datetstart > datetime.today() + ): # see if certificates is outdated + if ( + views["parsed"]["subject"]["common_name"][0] + in this.scans[scan_id]["unreachable_host"] + ): + this.scans[scan_id]["gather"]["certificate_expired"].append( + { + "target": { + "serial": views["parsed"]["serial_number"], + "subject": views["parsed"]["subject_dn"], + "keyword": keyword, + }, + "links": "https://censys.io/certificates/{}".format(cert_sha), + "description": "Unreachable certificate expired '{}', we were unable to reach the certificate at '{}'\nValid from '{}' to '{}'\n\n".format( + views["parsed"]["subject_dn"], + views["parsed"]["subject"]["common_name"][0], + datetstart, + datetend, + ), + } + ) else: - 
this.scans[scan_id]["gather"]["certificate_expired"].append({ - "target": {"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"], "keyword":keyword}, - "links": "https://censys.io/certificates/{}".format(cert_sha), - "description": "Certificate expired '{}', certificate on '{}:{}'\nValid from '{}' to '{}'\n\n".format(views["parsed"]["subject_dn"],views["parsed"]["subject"]["common_name"][0], this.scans[scan_id]['up_cert'][views["parsed"]["subject"]["common_name"][0]]['port'],datetstart, datetend) - }) - else: - if datetend < two_weeks_later: # see if certificates is outdated - if views["parsed"]["subject"]["common_name"][0] in this.scans[scan_id]['unreachable_host']: - this.scans[scan_id]["gather"]["certificate_expired_in_two_weeks"].append({ - "target": {"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"], "keyword":keyword}, + this.scans[scan_id]["gather"]["certificate_expired"].append( + { + "target": { + "serial": views["parsed"]["serial_number"], + "subject": views["parsed"]["subject_dn"], + "keyword": keyword, + }, "links": "https://censys.io/certificates/{}".format(cert_sha), - "description": "Unreachable certificate will expired in two weeks'{}' we were unable to reach the certificate at '{}'\nEnd date : '{}'\n\n".format(views["parsed"]["subject_dn"],views["parsed"]["subject"]["common_name"][0],datetend) - }) + "description": "Certificate expired '{}', certificate on '{}:{}'\nValid from '{}' to '{}'\n\n".format( + views["parsed"]["subject_dn"], + views["parsed"]["subject"]["common_name"][0], + this.scans[scan_id]["up_cert"][ + views["parsed"]["subject"]["common_name"][0] + ]["port"], + datetstart, + datetend, + ), + } + ) + else: + if datetend < two_weeks_later: # see if certificates is outdated + if ( + views["parsed"]["subject"]["common_name"][0] + in this.scans[scan_id]["unreachable_host"] + ): + this.scans[scan_id]["gather"][ + "certificate_expired_in_two_weeks" + ].append( + { + "target": 
{ + "serial": views["parsed"]["serial_number"], + "subject": views["parsed"]["subject_dn"], + "keyword": keyword, + }, + "links": "https://censys.io/certificates/{}".format( + cert_sha + ), + "description": "Unreachable certificate will expired in two weeks'{}' we were unable to reach the certificate at '{}'\nEnd date : '{}'\n\n".format( + views["parsed"]["subject_dn"], + views["parsed"]["subject"]["common_name"][0], + datetend, + ), + } + ) else: - this.scans[scan_id]["gather"]["certificate_expired_in_two_weeks"].append({ - "target": {"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"], "keyword":keyword}, - "links": "https://censys.io/certificates/{}".format(cert_sha), - "description": "Certificate will expired in two weeks'{}', certificate on '{}:{}'\nEnd date : '{}'\n\n".format(views["parsed"]["subject_dn"],views["parsed"]["subject"]["common_name"][0], this.scans[scan_id]['up_cert'][views["parsed"]["subject"]["common_name"][0]]['port'],datetend) - }) + this.scans[scan_id]["gather"][ + "certificate_expired_in_two_weeks" + ].append( + { + "target": { + "serial": views["parsed"]["serial_number"], + "subject": views["parsed"]["subject_dn"], + "keyword": keyword, + }, + "links": "https://censys.io/certificates/{}".format( + cert_sha + ), + "description": "Certificate will expired in two weeks'{}', certificate on '{}:{}'\nEnd date : '{}'\n\n".format( + views["parsed"]["subject_dn"], + views["parsed"]["subject"]["common_name"][0], + this.scans[scan_id]["up_cert"][ + views["parsed"]["subject"]["common_name"][0] + ]["port"], + datetend, + ), + } + ) crl_description = "" crl_fail = False - for crl in views["parsed"]["extensions"]["crl_distribution_points"]: # for all crl see if certificates is in it + for crl in views["parsed"]["extensions"][ + "crl_distribution_points" + ]: # for all crl see if certificates is in it - if not ( crl in this.scans[scan_id]["revoked"].keys()): # new crl point we had it with his list in a "revoke" dict for 
later search - this.scans[scan_id]["revoked"][crl]=[] # create structure of revoke + if not ( + crl in this.scans[scan_id]["revoked"].keys() + ): # new crl point we had it with his list in a "revoke" dict for later search + this.scans[scan_id]["revoked"][crl] = [] # create structure of revoke try: - html = requests.get(crl, timeout=2) # get the crl list on the crl point + html = requests.get( + crl, timeout=2 + ) # get the crl list on the crl point try: - crl_object = OpenSSL.crypto.load_crl(OpenSSL.crypto.FILETYPE_ASN1, html.content) # we create a crl_object to help us - - revoked_objects = crl_object.get_revoked() # we get the revoked cert - - for rvk in revoked_objects: # for all revoked certificates we add it on our revoke dict - this.scans[scan_id]["revoked"][crl].append(rvk.get_serial()) # add it to the list - except OpenSSL.crypto.Error: # in case wrong format + crl_object = OpenSSL.crypto.load_crl( + OpenSSL.crypto.FILETYPE_ASN1, html.content + ) # we create a crl_object to help us + + revoked_objects = ( + crl_object.get_revoked() + ) # we get the revoked cert + + for ( + rvk + ) in ( + revoked_objects + ): # for all revoked certificates we add it on our revoke dict + this.scans[scan_id]["revoked"][crl].append( + rvk.get_serial() + ) # add it to the list + except OpenSSL.crypto.Error: # in case wrong format try: - crl_object = OpenSSL.crypto.load_crl(OpenSSL.crypto.FILETYPE_PEM, html.content) + crl_object = OpenSSL.crypto.load_crl( + OpenSSL.crypto.FILETYPE_PEM, html.content + ) revoked_objects = crl_object.get_revoked() for rvk in revoked_objects: - this.scans[scan_id]["revoked"][crl].append(rvk.get_serial()) + this.scans[scan_id]["revoked"][crl].append( + rvk.get_serial() + ) except OpenSSL.crypto.Error: crl_fail = True - crl_description = crl_description + "Crl file '{}' unknow format\n".format(crl) - except TypeError: # in case crl list empty + crl_description = ( + crl_description + + "Crl file '{}' unknow format\n".format(crl) + ) + except TypeError: 
# in case crl list empty crl_fail = True - crl_description = crl_description + "Crl file '{}' unknow format\n".format(crl) - except Exception: # in case can't reach url of crl point + crl_description = ( + crl_description + + "Crl file '{}' unknow format\n".format(crl) + ) + except Exception: # in case can't reach url of crl point crl_fail = True - crl_description = crl_description + "Crl file '{}' unable to reach file\n".format(crl) + crl_description = ( + crl_description + + "Crl file '{}' unable to reach file\n".format(crl) + ) if crl_fail: - this.scans[scan_id]["gather"]["fail_load_crl"].append({ - "target": {"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"], "keyword":keyword}, - "links": "https://censys.io/certificates/{}".format(cert_sha), - "description": crl_description + "\n" - }) - - if views["parsed"]["serial_number"] in this.scans[scan_id]["revoked"][crl]: # search our revoke dict if our certificates - if views["parsed"]["subject"]["common_name"][0] in this.scans[scan_id]['unreachable_host']: - this.scans[scan_id]["gather"]["certificate_in_crl"].append({ - "target": {"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"], "keyword":keyword}, + this.scans[scan_id]["gather"]["fail_load_crl"].append( + { + "target": { + "serial": views["parsed"]["serial_number"], + "subject": views["parsed"]["subject_dn"], + "keyword": keyword, + }, "links": "https://censys.io/certificates/{}".format(cert_sha), - "description": "Unreachable host in crl :\n" + crl_description + "\n" - }) + "description": crl_description + "\n", + } + ) + + if ( + views["parsed"]["serial_number"] in this.scans[scan_id]["revoked"][crl] + ): # search our revoke dict if our certificates + if ( + views["parsed"]["subject"]["common_name"][0] + in this.scans[scan_id]["unreachable_host"] + ): + this.scans[scan_id]["gather"]["certificate_in_crl"].append( + { + "target": { + "serial": views["parsed"]["serial_number"], + "subject": 
views["parsed"]["subject_dn"], + "keyword": keyword, + }, + "links": "https://censys.io/certificates/{}".format(cert_sha), + "description": "Unreachable host in crl :\n" + + crl_description + + "\n", + } + ) else: - this.scans[scan_id]["gather"]["certificate_in_crl"].append({ - "target": {"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"], "keyword":keyword}, - "links": "https://censys.io/certificates/{}".format(cert_sha), - "description": crl_description + "\n" - }) + this.scans[scan_id]["gather"]["certificate_in_crl"].append( + { + "target": { + "serial": views["parsed"]["serial_number"], + "subject": views["parsed"]["subject_dn"], + "keyword": keyword, + }, + "links": "https://censys.io/certificates/{}".format(cert_sha), + "description": crl_description + "\n", + } + ) except KeyError: pass @@ -866,8 +1292,8 @@ def _view_valid(views,cert_sha,scan_id,keyword): def _still_exist(url, serial, port, scan_id): - if url in this.scans[scan_id]['up_cert'].keys(): - return this.scans[scan_id]['up_cert'][url]['serial'] == serial + if url in this.scans[scan_id]["up_cert"].keys(): + return this.scans[scan_id]["up_cert"][url]["serial"] == serial for p in port: try: @@ -879,85 +1305,179 @@ def _still_exist(url, serial, port, scan_id): cert = wrap_socket.getpeercert(True) try: - cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_ASN1, cert) - except OpenSSL.crypto.Error: # in case wrong format - cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert) + cert = OpenSSL.crypto.load_certificate( + OpenSSL.crypto.FILETYPE_ASN1, cert + ) + except OpenSSL.crypto.Error: # in case wrong format + cert = OpenSSL.crypto.load_certificate( + OpenSSL.crypto.FILETYPE_PEM, cert + ) new_serial = cert.get_serial_number() - this.scans[scan_id]['up_cert'][url] = {'serial': new_serial, 'port': p} + this.scans[scan_id]["up_cert"][url] = {"serial": new_serial, "port": p} - break; + break except Exception: pass - #print(sys.exc_info()) + # 
print(sys.exc_info()) return new_serial == int(serial) -def _view_trusted(views,scan_id,keyword): +def _view_trusted(views, scan_id, keyword): description = "" try: for sb in views["parsed"]["names"]: - if not sb in this.scans[scan_id]['assets'] or (not "extended_trusted_host" in this.scans[scan_id]['options'] and not sb in this.scans[scan_id]['options']['extended_trusted_host']): - description += sb+"\n" + if not sb in this.scans[scan_id]["assets"] or ( + not "extended_trusted_host" in this.scans[scan_id]["options"] + and not sb in this.scans[scan_id]["options"]["extended_trusted_host"] + ): + description += sb + "\n" if description != "": - this.scans[scan_id]["gather"]["alt_name_on_not_trusted_host"].append({ - "target": {"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"], "keyword":keyword}, - "links": "https://censys.io/certificates/{}".format(views["parsed"]["fingerprint_sha256"]), - "description": "Certificate with altenative name on not trusted host '{}'\n Altenative name : '{}'\n\n".format(views["parsed"]["subject_dn"],description) - }) + this.scans[scan_id]["gather"]["alt_name_on_not_trusted_host"].append( + { + "target": { + "serial": views["parsed"]["serial_number"], + "subject": views["parsed"]["subject_dn"], + "keyword": keyword, + }, + "links": "https://censys.io/certificates/{}".format( + views["parsed"]["fingerprint_sha256"] + ), + "description": "Certificate with altenative name on not trusted host '{}'\n Altenative name : '{}'\n\n".format( + views["parsed"]["subject_dn"], description + ), + } + ) except KeyError: pass -def _is_self_signed(views,scan_id,keyword): - if "self-signed" in views["tags"] and (not "trusted_ca_certificate" in this.scans[scan_id]["options"].keys() or not views["parsed"]["serial_number"] in this.scans[scan_id]["options"]["trusted_self_signed"]): - if views["parsed"]["subject"]["common_name"][0] in this.scans[scan_id]['unreachable_host']: - 
this.scans[scan_id]["gather"]["host_self_signed"].append({ - "target": {"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"], "keyword":keyword}, - "links": "https://censys.io/certificates/{}".format(views["parsed"]["fingerprint_sha256"]), - "description": "Unreachable certificate is self-signed '{}', we were unable to reach the certificate at '{}'\n\n".format(views["parsed"]["subject_dn"],views["parsed"]["subject"]["common_name"][0]) - }) +def _is_self_signed(views, scan_id, keyword): + if "self-signed" in views["tags"] and ( + not "trusted_ca_certificate" in this.scans[scan_id]["options"].keys() + or not views["parsed"]["serial_number"] + in this.scans[scan_id]["options"]["trusted_self_signed"] + ): + if ( + views["parsed"]["subject"]["common_name"][0] + in this.scans[scan_id]["unreachable_host"] + ): + this.scans[scan_id]["gather"]["host_self_signed"].append( + { + "target": { + "serial": views["parsed"]["serial_number"], + "subject": views["parsed"]["subject_dn"], + "keyword": keyword, + }, + "links": "https://censys.io/certificates/{}".format( + views["parsed"]["fingerprint_sha256"] + ), + "description": "Unreachable certificate is self-signed '{}', we were unable to reach the certificate at '{}'\n\n".format( + views["parsed"]["subject_dn"], + views["parsed"]["subject"]["common_name"][0], + ), + } + ) else: - this.scans[scan_id]["gather"]["host_self_signed"].append({ - "target": {"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"], "keyword":keyword}, - "links": "https://censys.io/certificates/{}".format(views["parsed"]["fingerprint_sha256"]), - "description": "Certificate is self-signed '{}', certificate on '{}:{}\n\n".format(views["parsed"]["subject_dn"],views["parsed"]["subject"]["common_name"][0], this.scans[scan_id]['up_cert'][views["parsed"]["subject"]["common_name"][0]]['port']) - }) + this.scans[scan_id]["gather"]["host_self_signed"].append( + { + "target": { + "serial": 
views["parsed"]["serial_number"], + "subject": views["parsed"]["subject_dn"], + "keyword": keyword, + }, + "links": "https://censys.io/certificates/{}".format( + views["parsed"]["fingerprint_sha256"] + ), + "description": "Certificate is self-signed '{}', certificate on '{}:{}\n\n".format( + views["parsed"]["subject_dn"], + views["parsed"]["subject"]["common_name"][0], + this.scans[scan_id]["up_cert"][ + views["parsed"]["subject"]["common_name"][0] + ]["port"], + ), + } + ) return True else: return False -def _ca_trusted(views,scan_id,keyword,key,chain=[]): - chain.append({"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"]}) - if "self-signed" in views["tags"] or "root" in views["tags"] or ((not "basic_constaintd" in views["parsed"]["extensions"] or not "is_ca" in views["parsed"]["extensions"]["basic_constraints"] or views["parsed"]["extensions"]["basic_constraints"]["is_ca"] == True) and "trusted" in views["tags"]): - if not "trusted_ca_certificate" in this.scans[scan_id]["options"].keys() or not views["parsed"]["serial_number"] in this.scans[scan_id]["options"]["trusted_ca_certificate"]: +def _ca_trusted(views, scan_id, keyword, key, chain=[]): + chain.append( + { + "serial": views["parsed"]["serial_number"], + "subject": views["parsed"]["subject_dn"], + } + ) + if ( + "self-signed" in views["tags"] + or "root" in views["tags"] + or ( + ( + not "basic_constaintd" in views["parsed"]["extensions"] + or not "is_ca" in views["parsed"]["extensions"]["basic_constraints"] + or views["parsed"]["extensions"]["basic_constraints"]["is_ca"] == True + ) + and "trusted" in views["tags"] + ) + ): + if ( + not "trusted_ca_certificate" in this.scans[scan_id]["options"].keys() + or not views["parsed"]["serial_number"] + in this.scans[scan_id]["options"]["trusted_ca_certificate"] + ): if not views["parsed"]["serial_number"] in this.scans[scan_id]["known_CA"]: this.scans[scan_id]["known_CA"].append(views["parsed"]["serial_number"]) - 
this.scans[scan_id]["gather"]["ca_not_trusted"][views["parsed"]["serial_number"]] = { - "target": {"serial": views["parsed"]["serial_number"], "subject": views["parsed"]["subject_dn"], "keyword":keyword}, - "links": "https://censys.io/certificates/{}".format(views["parsed"]["fingerprint_sha256"]), - "description": "Certificate signed by an unknown CA '{}'\n\n".format(views["parsed"]["subject_dn"]), - "chains": [chain] + this.scans[scan_id]["gather"]["ca_not_trusted"][ + views["parsed"]["serial_number"] + ] = { + "target": { + "serial": views["parsed"]["serial_number"], + "subject": views["parsed"]["subject_dn"], + "keyword": keyword, + }, + "links": "https://censys.io/certificates/{}".format( + views["parsed"]["fingerprint_sha256"] + ), + "description": "Certificate signed by an unknown CA '{}'\n\n".format( + views["parsed"]["subject_dn"] + ), + "chains": [chain], } return True else: - this.scans[scan_id]["gather"]["ca_not_trusted"][views["parsed"]["serial_number"]]["chains"].append(chain) + this.scans[scan_id]["gather"]["ca_not_trusted"][ + views["parsed"]["serial_number"] + ]["chains"].append(chain) else: try: - html = requests.get(views["parsed"]["extensions"]["authority_info_access"]["issuer_urls"][0], timeout=2) + html = requests.get( + views["parsed"]["extensions"]["authority_info_access"]["issuer_urls"][ + 0 + ], + timeout=2, + ) the_certificate = hashlib.sha256(html.content).hexdigest() - crl_object = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_ASN1, html.content) + crl_object = OpenSSL.crypto.load_certificate( + OpenSSL.crypto.FILETYPE_ASN1, html.content + ) except Exception: - if not "extensions" in views["parsed"].keys() or not "authority_key_id" in views["parsed"]["extensions"].keys(): + if ( + not "extensions" in views["parsed"].keys() + or not "authority_key_id" in views["parsed"]["extensions"].keys() + ): return False while True: try: - cert = this.certificates[key].search("parsed.extensions.subject_key_id:" + 
views["parsed"]["extensions"]["authority_key_id"]) + cert = this.certificates[key].search( + "parsed.extensions.subject_key_id:" + + views["parsed"]["extensions"]["authority_key_id"] + ) time.sleep(2.5) break except censys.base.CensysRateLimitExceededException: @@ -971,71 +1491,100 @@ def _ca_trusted(views,scan_id,keyword,key,chain=[]): for ct in cert: if i == 0: the_certificate = ct["parsed.fingerprint_sha256"] - i+=1 + i += 1 else: return False while True: try: - views2 = this.certificates[key].view(the_certificate)#c["parsed.fingerprint_sha256"]) # get the certificates by censys api - #time.sleep(2) + views2 = this.certificates[key].view( + the_certificate + ) # c["parsed.fingerprint_sha256"]) # get the certificates by censys api + # time.sleep(2) break - except censys.base.CensysRateLimitExceededException: # fail Rate limit wait + except censys.base.CensysRateLimitExceededException: # fail Rate limit wait time.sleep(1) except censys.base.CensysNotFoundException: return False - except : + except: time.sleep(1) print(sys.exc_info()) - _ca_trusted(views2,scan_id,keyword,key,chain=chain) + _ca_trusted(views2, scan_id, keyword, key, chain=chain) return False -@app.route('/engines/censys/test') +@app.route("/engines/censys/test") def test(): if not APP_DEBUG: return jsonify({"page": "test"}) res = "

Test Page (DEBUG):

" import urllib + for rule in app.url_map.iter_rules(): options = {} for arg in rule.arguments: options[arg] = "[{0}]".format(arg) - methods = ','.join(rule.methods) + methods = ",".join(rule.methods) url = url_for(rule.endpoint, **options) - res += urlparse.unquote("{0:50s} {1:20s} {2}
".format(rule.endpoint, methods, url)) + res += urlparse.unquote( + "{0:50s} {1:20s} {2}
".format( + rule.endpoint, methods, url + ) + ) return res + @app.errorhandler(404) def page_not_found(e): return jsonify({"page": "not found"}) + def _exit_thread(signum, frame): print("\nClean Thread then exit ...") for resq in this.requestor: resq._Thread__stop() sys.exit(1) + @app.before_first_request def main(): - if not os.path.exists(BASE_DIR+"/results"): - os.makedirs(BASE_DIR+"/results") + if not os.path.exists(BASE_DIR + "/results"): + os.makedirs(BASE_DIR + "/results") _loadconfig() -if __name__ == '__main__': +if __name__ == "__main__": signal.signal(signal.SIGINT, _exit_thread) - #context = ('../../certificat/engine-censys.crt','../../certificat/engine-censys.key') - #app.run(debug=APP_DEBUG, host=APP_HOST, port=APP_PORT, threaded=True) #, ssl_context=context) + # context = ('../../certificat/engine-censys.crt','../../certificat/engine-censys.key') + # app.run(debug=APP_DEBUG, host=APP_HOST, port=APP_PORT, threaded=True) #, ssl_context=context) parser = optparse.OptionParser() - parser.add_option("-H", "--host", help="Hostname of the Flask app [default %s]" % APP_HOST, default=APP_HOST) - parser.add_option("-P", "--port", help="Port for the Flask app [default %s]" % APP_PORT, default=APP_PORT) - parser.add_option("-d", "--debug", action="store_true", dest="debug", help=optparse.SUPPRESS_HELP, default=APP_DEBUG) + parser.add_option( + "-H", + "--host", + help="Hostname of the Flask app [default %s]" % APP_HOST, + default=APP_HOST, + ) + parser.add_option( + "-P", + "--port", + help="Port for the Flask app [default %s]" % APP_PORT, + default=APP_PORT, + ) + parser.add_option( + "-d", + "--debug", + action="store_true", + dest="debug", + help=optparse.SUPPRESS_HELP, + default=APP_DEBUG, + ) options, _ = parser.parse_args() - app.run(debug=options.debug, host=options.host, port=int(options.port), threaded=True) + app.run( + debug=options.debug, host=options.host, port=int(options.port), threaded=True + ) diff --git a/engines/certstream/engine-certstream.py 
b/engines/certstream/engine-certstream.py index 78834b1c..63bbdc71 100755 --- a/engines/certstream/engine-certstream.py +++ b/engines/certstream/engine-certstream.py @@ -3,7 +3,7 @@ """ CertStream PatrOwl engine application. -Copyright (C) 2021 Nicolas Mattiocco - @MaKyOtOx +Copyright (C) 2024 Nicolas Mattiocco - @MaKyOtOx Licensed under the AGPLv3 License Written by Nicolas BEGUIER (nicolas.beguier@adevinta.com) """ @@ -28,7 +28,9 @@ from utils.confparser import ConfParser import gethost except ModuleNotFoundError: - LOG.warning("[WARNING] You have to 'git clone https://github.com/AssuranceMaladieSec/CertStreamMonitor.git'") + LOG.warning( + "[WARNING] You have to 'git clone https://github.com/AssuranceMaladieSec/CertStreamMonitor.git'" + ) # Own library imports from PatrowlEnginesUtils.PatrowlEngine import _json_serial @@ -42,7 +44,7 @@ APP_DEBUG = False APP_HOST = "0.0.0.0" APP_PORT = 5017 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 5)) +APP_MAXSCANS = int(os.environ.get("APP_MAXSCANS", 5)) APP_ENGINE_NAME = "certstream" APP_BASE_DIR = dirname(realpath(__file__)) CREATED_CERT_CVSS = 5 @@ -56,7 +58,7 @@ base_dir=APP_BASE_DIR, name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS, - version=VERSION + version=VERSION, ) this = modules[__name__] @@ -102,10 +104,11 @@ def in_whitelist(domain): if domain in whitelist: return True for white in whitelist: - if domain.endswith("."+white): + if domain.endswith("." 
+ white): return True return False + @app.errorhandler(404) def page_not_found(e): """Page not found.""" @@ -173,8 +176,17 @@ def status(): """Get status on engine and all scans.""" CertStreamMonitorFile = engine.scanner["options"]["CertStreamMonitorFile"]["value"] if not exists(CertStreamMonitorFile): - LOG.error("Error: CertStreamMonitorFile not found : {}".format(CertStreamMonitorFile)) - return jsonify({"status": "error", "reason": "CertStreamMonitorFile not found : {}".format(CertStreamMonitorFile)}) + LOG.error( + "Error: CertStreamMonitorFile not found : {}".format(CertStreamMonitorFile) + ) + return jsonify( + { + "status": "error", + "reason": "CertStreamMonitorFile not found : {}".format( + CertStreamMonitorFile + ), + } + ) try: CONF = ConfParser(CertStreamMonitorFile) @@ -182,12 +194,34 @@ def status(): engine.scanner["options"]["TABLEname"] = CONF.TABLEname engine.scanner["options"]["SearchKeywords"] = CONF.SearchKeywords except Exception: - LOG.error("Error: Cannot read CertStreamMonitorFile : {}".format(CertStreamMonitorFile)) - return jsonify({"status": "error", "reason": "Cannot read CertStreamMonitorFile : {}".format(CertStreamMonitorFile)}) + LOG.error( + "Error: Cannot read CertStreamMonitorFile : {}".format( + CertStreamMonitorFile + ) + ) + return jsonify( + { + "status": "error", + "reason": "Cannot read CertStreamMonitorFile : {}".format( + CertStreamMonitorFile + ), + } + ) if not exists(engine.scanner["options"]["DBFile"]): - LOG.error("Error: sqlite file not found : {}".format(engine.scanner["options"]["DBFile"])) - return jsonify({"status": "error", "reason": "sqlite file not found : {}".format(engine.scanner["options"]["DBFile"])}) + LOG.error( + "Error: sqlite file not found : {}".format( + engine.scanner["options"]["DBFile"] + ) + ) + return jsonify( + { + "status": "error", + "reason": "sqlite file not found : {}".format( + engine.scanner["options"]["DBFile"] + ), + } + ) return engine.getstatus() @@ -197,7 +231,9 @@ def 
status_scan(scan_id): """Get status on scan identified by id.""" res = {"page": "status", "status": "UNKNOWN"} if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) if engine.scans[scan_id]["status"] == "ERROR": @@ -210,7 +246,9 @@ def status_scan(scan_id): try: _scan_urls(scan_id) except Exception as e: - res.update({"status": "error", "reason": "scan_urls did not worked ! ({})".format(e)}) + res.update( + {"status": "error", "reason": "scan_urls did not worked ! ({})".format(e)} + ) return jsonify(res) return jsonify(res) @@ -235,8 +273,8 @@ def getreport(scan_id): def _loadconfig(): - conf_file = APP_BASE_DIR+"/certstream.json" - if len(argv) > 1 and exists(APP_BASE_DIR+"/"+argv[1]): + conf_file = APP_BASE_DIR + "/certstream.json" + if len(argv) > 1 and exists(APP_BASE_DIR + "/" + argv[1]): conf_file = APP_BASE_DIR + "/" + argv[1] if exists(conf_file): json_data = open(conf_file) @@ -246,30 +284,44 @@ def _loadconfig(): LOG.error("Error: config file '{}' not found".format(conf_file)) return {"status": "error", "reason": "config file not found"} - version_filename = APP_BASE_DIR+'/VERSION' + version_filename = APP_BASE_DIR + "/VERSION" if os.path.exists(version_filename): version_file = open(version_filename, "r") - engine.version = version_file.read().rstrip('\n') + engine.version = version_file.read().rstrip("\n") version_file.close() if "options" not in engine.scanner: LOG.error("Error: You have to specify options") return {"status": "error", "reason": "You have to specify options"} - engine.scanner["options"]["Whitelist"]["present"] = "Whitelist" in engine.scanner["options"] and exists(engine.scanner["options"]["Whitelist"]["value"]) + engine.scanner["options"]["Whitelist"]["present"] = "Whitelist" in engine.scanner[ + "options" + ] and 
exists(engine.scanner["options"]["Whitelist"]["value"]) - with open(engine.scanner["options"]["Whitelist"]["value"], "r", encoding="UTF-8") as whitelist_file: + with open( + engine.scanner["options"]["Whitelist"]["value"], "r", encoding="UTF-8" + ) as whitelist_file: whitelist = whitelist_file.read() engine.scanner["options"]["Whitelist"]["list"] = whitelist.split("\n")[:-1] if "CertStreamMonitorFile" not in engine.scanner["options"]: LOG.error("Error: You have to specify CertStreamMonitorFile in options") - return {"status": "error", "reason": "You have to specify CertStreamMonitorFile in options"} + return { + "status": "error", + "reason": "You have to specify CertStreamMonitorFile in options", + } CertStreamMonitorFile = engine.scanner["options"]["CertStreamMonitorFile"]["value"] if not exists(CertStreamMonitorFile): - LOG.error("Error: CertStreamMonitorFile not found : {}".format(CertStreamMonitorFile)) - return {"status": "error", "reason": "CertStreamMonitorFile not found : {}".format(CertStreamMonitorFile)} + LOG.error( + "Error: CertStreamMonitorFile not found : {}".format(CertStreamMonitorFile) + ) + return { + "status": "error", + "reason": "CertStreamMonitorFile not found : {}".format( + CertStreamMonitorFile + ), + } LOG.info("[OK] CertStreamMonitorFile") @@ -280,12 +332,30 @@ def _loadconfig(): engine.scanner["options"]["TABLEname"] = CONF.TABLEname engine.scanner["options"]["SearchKeywords"] = CONF.SearchKeywords except Exception: - LOG.error("Error: Cannot read CertStreamMonitorFile : {}".format(CertStreamMonitorFile)) - return {"status": "error", "reason": "Cannot read CertStreamMonitorFile : {}".format(CertStreamMonitorFile)} + LOG.error( + "Error: Cannot read CertStreamMonitorFile : {}".format( + CertStreamMonitorFile + ) + ) + return { + "status": "error", + "reason": "Cannot read CertStreamMonitorFile : {}".format( + CertStreamMonitorFile + ), + } if not exists(engine.scanner["options"]["DBFile"]): - LOG.error("Error: sqlite file not found : 
{}".format(engine.scanner["options"]["DBFile"])) - return {"status": "error", "reason": "sqlite file not found : {}".format(engine.scanner["options"]["DBFile"])} + LOG.error( + "Error: sqlite file not found : {}".format( + engine.scanner["options"]["DBFile"] + ) + ) + return { + "status": "error", + "reason": "sqlite file not found : {}".format( + engine.scanner["options"]["DBFile"] + ), + } @app.route("/engines/certstream/reloadconfig", methods=["GET"]) @@ -302,47 +372,61 @@ def start_scan(): # Check the scanner is ready to start a new scan if len(engine.scans) == APP_MAXSCANS: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) return jsonify(res) status() if engine.scanner["status"] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": "scanner not ready", - "status": engine.scanner["status"] - }}) + res.update( + { + "status": "refused", + "details": { + "reason": "scanner not ready", + "status": engine.scanner["status"], + }, + } + ) return jsonify(res) data = loads(request.data.decode("utf-8")) if "assets" not in data.keys() or "scan_id" not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something is missing ('assets' ?)" - }}) + res.update( + { + "status": "refused", + "details": {"reason": "arg error, something is missing ('assets' ?)"}, + } + ) return jsonify(res) assets = [] for asset in data["assets"]: # Value if "value" not in asset.keys() or not asset["value"]: - res.update({ - "status": "error", - "reason": "arg error, something is missing ('asset.value')" - }) + res.update( + { + "status": "error", + "reason": "arg error, something is missing ('asset.value')", + } + ) return jsonify(res) # Supported datatypes if asset["datatype"] not in 
engine.scanner["allowed_asset_types"]: - res.update({ - "status": "error", - "reason": "arg error, bad value for '{}' datatype (not supported)".format(asset["value"]) - }) + res.update( + { + "status": "error", + "reason": "arg error, bad value for '{}' datatype (not supported)".format( + asset["value"] + ), + } + ) return jsonify(res) if asset["datatype"] == "url": @@ -354,35 +438,43 @@ def start_scan(): scan_id = str(data["scan_id"]) if data["scan_id"] in engine.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' is probably already launched".format(data["scan_id"]), + res.update( + { + "status": "refused", + "details": { + "reason": "scan '{}' is probably already launched".format( + data["scan_id"] + ), + }, } - }) + ) return jsonify(res) # Default Value if not "options" in data: data["options"] = {"since": 3600} scan = { - "assets": assets, - "threads": [], - "options": data["options"], - "scan_id": scan_id, - "status": "STARTED", - "lock": False, - "started_at": int(time() * 1000), - "findings": {} + "assets": assets, + "threads": [], + "options": data["options"], + "scan_id": scan_id, + "status": "STARTED", + "lock": False, + "started_at": int(time() * 1000), + "findings": {}, } options = get_options(data) if options["since"] == 0: - res.update({ - "status": "refused", - "details": { - "reason": "You need to specify a valid since options in seconds"}}) + res.update( + { + "status": "refused", + "details": { + "reason": "You need to specify a valid since options in seconds" + }, + } + ) return jsonify(res) engine.scanner["options"]["since"] = options["since"] @@ -392,12 +484,7 @@ def start_scan(): thread.start() engine.scans[scan_id]["threads"].append(thread) - res.update({ - "status": "accepted", - "details": { - "scan_id": scan["scan_id"] - } - }) + res.update({"status": "accepted", "details": {"scan_id": scan["scan_id"]}}) return jsonify(res) @@ -427,7 +514,9 @@ def _scan_urls(scan_id): if asset not in 
engine.scans[scan_id]["findings"]: engine.scans[scan_id]["findings"][asset] = {} try: - engine.scans[scan_id]["findings"][asset]["issues"] = get_report(asset, scan_id) + engine.scans[scan_id]["findings"][asset]["issues"] = get_report( + asset, scan_id + ) except Exception as e: LOG.error("_scan_urls: API Connexion error (quota?): {}".format(e)) return False @@ -444,13 +533,19 @@ def get_report(asset, scan_id): if not isfile("results/certstream_report_{scan_id}.txt".format(scan_id=scan_id)): gethost.SINCE = engine.scanner["options"]["since"] conn = gethost.create_connection(engine.scanner["options"]["DBFile"]) - result = gethost.parse_and_display_all_hostnames(engine.scanner["options"]["TABLEname"], conn) - result_file = open("results/certstream_report_{scan_id}.txt".format(scan_id=scan_id), "w") + result = gethost.parse_and_display_all_hostnames( + engine.scanner["options"]["TABLEname"], conn + ) + result_file = open( + "results/certstream_report_{scan_id}.txt".format(scan_id=scan_id), "w" + ) result_file.write(dumps(result)) result_file.close() try: - result_file = open("results/certstream_report_{scan_id}.txt".format(scan_id=scan_id), "r") + result_file = open( + "results/certstream_report_{scan_id}.txt".format(scan_id=scan_id), "r" + ) result = loads(result_file.read()) result_file.close() except Exception: @@ -467,12 +562,7 @@ def _parse_results(scan_id): issues = [] summary = {} - nb_vulns = { - "info": 0, - "low": 0, - "medium": 0, - "high": 0 - } + nb_vulns = {"info": 0, "low": 0, "medium": 0, "high": 0} timestamp = int(time() * 1000) for asset in engine.scans[scan_id]["findings"]: @@ -484,57 +574,82 @@ def _parse_results(scan_id): if in_whitelist(domain): continue cvss_local = CREATED_CERT_CVSS - description_local = "Domain: {} has been created\nIssuer: {}\nFingerprint {}\n".format( - domain, - report[domain]["issuer"], - report[domain]["fingerprint"]) - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(cvss_local), "confidence": 
"certain", - "target": {"addr": [domain], "protocol": "http", "parent": asset}, - "title": "Domain '{}' has been identified in certstream".format(domain), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": cvss_local}}, - "type": "certstream_report", - "timestamp": timestamp, - "description": description_local, - }) - nb_vulns[get_criticity(cvss_local)] += 1 - - if report[domain]["still_investing"] is not None: - cvss_local = UP_DOMAIN_CVSS - description_local += "Last time up: {}\n".format(report[domain]["still_investing"]) - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(cvss_local), "confidence": "certain", - "target": {"addr": [domain], "protocol": "http", "parent": asset}, - "title": "Domain '{}' is reacheable".format(domain), + description_local = ( + "Domain: {} has been created\nIssuer: {}\nFingerprint {}\n".format( + domain, report[domain]["issuer"], report[domain]["fingerprint"] + ) + ) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(cvss_local), + "confidence": "certain", + "target": { + "addr": [domain], + "protocol": "http", + "parent": asset, + }, + "title": "Domain '{}' has been identified in certstream".format( + domain + ), "solution": "n/a", "metadata": {"risk": {"cvss_base_score": cvss_local}}, "type": "certstream_report", "timestamp": timestamp, "description": description_local, - }) + } + ) + nb_vulns[get_criticity(cvss_local)] += 1 + + if report[domain]["still_investing"] is not None: + cvss_local = UP_DOMAIN_CVSS + description_local += "Last time up: {}\n".format( + report[domain]["still_investing"] + ) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(cvss_local), + "confidence": "certain", + "target": { + "addr": [domain], + "protocol": "http", + "parent": asset, + }, + "title": "Domain '{}' is reacheable".format(domain), + "solution": "n/a", + "metadata": {"risk": {"cvss_base_score": cvss_local}}, + "type": "certstream_report", + 
"timestamp": timestamp, + "description": description_local, + } + ) nb_vulns[get_criticity(cvss_local)] += 1 cvss_max = max(cvss_local, cvss_max) description += description_local if cvss_max > PARENT_ASSET_CREATE_FINDING_CEIL: - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(PARENT_ASSET_CREATE_FINDING_CVSS), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http"}, - "title": "[{}] Some domain has been identified in certstream".format(timestamp), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": PARENT_ASSET_CREATE_FINDING_CVSS}}, - "type": "certstream_report", - "timestamp": timestamp, - "description": description, - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(PARENT_ASSET_CREATE_FINDING_CVSS), + "confidence": "certain", + "target": {"addr": [asset], "protocol": "http"}, + "title": "[{}] Some domain has been identified in certstream".format( + timestamp + ), + "solution": "n/a", + "metadata": { + "risk": {"cvss_base_score": PARENT_ASSET_CREATE_FINDING_CVSS} + }, + "type": "certstream_report", + "timestamp": timestamp, + "description": description, + } + ) nb_vulns[get_criticity(PARENT_ASSET_CREATE_FINDING_CVSS)] += 1 - summary = { "nb_issues": len(issues), "nb_info": nb_vulns["info"], @@ -542,7 +657,7 @@ def _parse_results(scan_id): "nb_medium": nb_vulns["medium"], "nb_high": nb_vulns["high"], "engine_name": "certstream", - "engine_version": engine.scanner["version"] + "engine_version": engine.scanner["version"], } return issues, summary @@ -554,13 +669,22 @@ def getfindings(scan_id): # check if the scan_id exists if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) # check if the scan is finished status() if engine.scans[scan_id]["status"] != "FINISHED": - res.update({"status": "error", 
"reason": "scan_id '{}' not finished (status={})".format(scan_id, engine.scans[scan_id]["status"])}) + res.update( + { + "status": "error", + "reason": "scan_id '{}' not finished (status={})".format( + scan_id, engine.scans[scan_id]["status"] + ), + } + ) return jsonify(res) issues, summary = _parse_results(scan_id) @@ -571,29 +695,31 @@ def getfindings(scan_id): "options": engine.scans[scan_id]["options"], "status": engine.scans[scan_id]["status"], "started_at": engine.scans[scan_id]["started_at"], - "finished_at": engine.scans[scan_id]["finished_at"] + "finished_at": engine.scans[scan_id]["finished_at"], } # Store the findings in a file - with open(APP_BASE_DIR+"/results/certstream_"+scan_id+".json", "w") as rf: - dump({ - "scan": scan, - "summary": summary, - "issues": issues - }, rf, default=_json_serial) + with open(APP_BASE_DIR + "/results/certstream_" + scan_id + ".json", "w") as rf: + dump( + {"scan": scan, "summary": summary, "issues": issues}, + rf, + default=_json_serial, + ) # remove the scan from the active scan list clean_scan(scan_id) - res.update({"scan": scan, "summary": summary, "issues": issues, "status": "success"}) + res.update( + {"scan": scan, "summary": summary, "issues": issues, "status": "success"} + ) return jsonify(res) @app.before_first_request def main(): """First function called.""" - if not exists(APP_BASE_DIR+"/results"): - os.makedirs(APP_BASE_DIR+"/results") + if not exists(APP_BASE_DIR + "/results"): + os.makedirs(APP_BASE_DIR + "/results") _loadconfig() LOG.debug("Run engine") diff --git a/engines/cortex/engine-cortex.py b/engines/cortex/engine-cortex.py index f2fe65d2..12c4c473 100644 --- a/engines/cortex/engine-cortex.py +++ b/engines/cortex/engine-cortex.py @@ -1,5 +1,6 @@ #!/usr/bin/python3 # -*- coding: utf-8 -*- + import os, sys, json, time, datetime, threading, hashlib, optparse from urllib.parse import urlparse from flask import Flask, request, jsonify, redirect, url_for, send_from_directory @@ -9,27 +10,27 @@ APP_DEBUG 
= False APP_HOST = "0.0.0.0" APP_PORT = 5009 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 100)) +APP_MAXSCANS = int(os.environ.get("APP_MAXSCANS", 100)) BASE_DIR = os.path.dirname(os.path.realpath(__file__)) this = sys.modules[__name__] -this.scanner = {} # Scanner config -this.scans = {} # Scans list -this.api = None # Cortex API instance +this.scanner = {} # Scanner config +this.scans = {} # Scans list +this.api = None # Cortex API instance -@app.route('/') +@app.route("/") def default(): - return redirect(url_for('index')) + return redirect(url_for("index")) -@app.route('/engines/cortex/') +@app.route("/engines/cortex/") def index(): return jsonify({"page": "index"}) def _loadconfig(): - conf_file = BASE_DIR+'/cortex.json' + conf_file = BASE_DIR + "/cortex.json" if os.path.exists(conf_file): json_data = open(conf_file) this.scanner = json.load(json_data) @@ -38,16 +39,17 @@ def _loadconfig(): this.scanner["api_url"], this.scanner["api_key"], proxies=this.scanner["proxies"], - cert=False) + cert=False, + ) this.scanner["status"] = "READY" - version_filename = BASE_DIR+'/VERSION' + version_filename = BASE_DIR + "/VERSION" if os.path.exists(version_filename): version_file = open(version_filename, "r") - this.scanner["version"] = version_file.read().rstrip('\n') + this.scanner["version"] = version_file.read().rstrip("\n") version_file.close() - + _refresh_analyzers() else: this.scanner["status"] = "ERROR" @@ -61,12 +63,12 @@ def _refresh_analyzers(): this.scanner["analyzers"] = analyzers except CortexException as ex: this.scanner["status"] = "ERROR" - print('[ERROR]: Failed to list analyzers ({})'.format(ex.message)) + print("[ERROR]: Failed to list analyzers ({})".format(ex.message)) return False return True -@app.route('/engines/cortex/reloadconfig') +@app.route("/engines/cortex/reloadconfig") def reloadconfig(): res = {"page": "reloadconfig"} _loadconfig() @@ -74,87 +76,99 @@ def reloadconfig(): return jsonify(res) -@app.route('/engines/cortex/startscan', 
methods=['POST']) +@app.route("/engines/cortex/startscan", methods=["POST"]) def start_scan(): """ List available Cortex Analyzers (refresh). Ensure each scans comply with the analyzer (ready, datatype, ...) """ - #@todo: validate parameters and options format + # @todo: validate parameters and options format res = {"page": "startscan"} # check the scanner is ready to start a new scan if len(this.scans) == APP_MAXSCANS: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) return jsonify(res) status() - if this.scanner['status'] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": "scanner not ready", - "status": this.scanner['status'] - }}) + if this.scanner["status"] != "READY": + res.update( + { + "status": "refused", + "details": { + "reason": "scanner not ready", + "status": this.scanner["status"], + }, + } + ) return jsonify(res) data = json.loads(request.data) # Assets - if 'assets' not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something is missing ('assets' ?)" - }}) + if "assets" not in data.keys(): + res.update( + { + "status": "refused", + "details": {"reason": "arg error, something is missing ('assets' ?)"}, + } + ) return jsonify(res) # Scan ID - scan_id = str(data['scan_id']) - if data['scan_id'] in this.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' already launched".format(data['scan_id']), - }}) + scan_id = str(data["scan_id"]) + if data["scan_id"] in this.scans.keys(): + res.update( + { + "status": "refused", + "details": { + "reason": "scan '{}' already launched".format(data["scan_id"]), + }, + } + ) return jsonify(res) # Analyzers availability _refresh_analyzers() if not _refresh_analyzers(): - res.update({ - 
"status": "refused", - "details": { - "reason": "Scan refused: having troubles with the Cortex analyzers" - }}) + res.update( + { + "status": "refused", + "details": { + "reason": "Scan refused: having troubles with the Cortex analyzers" + }, + } + ) return jsonify(res) scan = { - 'assets': data['assets'], - 'threads': [], - 'jobs': [], - 'options': data['options'], - 'scan_id': scan_id, - 'status': "STARTED", - 'started_at': int(time.time() * 1000), - 'findings': [] + "assets": data["assets"], + "threads": [], + "jobs": [], + "options": data["options"], + "scan_id": scan_id, + "status": "STARTED", + "started_at": int(time.time() * 1000), + "findings": [], } this.scans.update({scan_id: scan}) - for asset in data['assets']: - th = threading.Thread(target=_start_analyzes, args=(scan_id, asset["value"], asset["datatype"])) + for asset in data["assets"]: + th = threading.Thread( + target=_start_analyzes, args=(scan_id, asset["value"], asset["datatype"]) + ) th.start() - this.scans[scan_id]['threads'].append(th) + this.scans[scan_id]["threads"].append(th) - res.update({ - "status": "accepted", - "details": { - "scan_id": scan['scan_id'] - }}) + res.update({"status": "accepted", "details": {"scan_id": scan["scan_id"]}}) return jsonify(res) @@ -163,20 +177,32 @@ def _start_analyzes(scan_id, asset, datatype): analyzers = [] # Analyzers selected one by one - if "use_analyzers" in this.scans[scan_id]["options"] and this.scans[scan_id]["options"]["use_analyzers"]: + if ( + "use_analyzers" in this.scans[scan_id]["options"] + and this.scans[scan_id]["options"]["use_analyzers"] + ): preselected_analyzers = this.scans[scan_id]["options"]["use_analyzers"] for analyzer in this.scanner["analyzers"]: # Check availability and datatype - if analyzer["analyzerDefinitionId"] in preselected_analyzers and datatype in analyzer["dataTypeList"]: + if ( + analyzer["analyzerDefinitionId"] in preselected_analyzers + and datatype in analyzer["dataTypeList"] + ): analyzers.append(analyzer["id"]) - 
if "all_datatype_analyzers" in this.scans[scan_id]["options"] and this.scans[scan_id]["options"]["all_datatype_analyzers"]: + if ( + "all_datatype_analyzers" in this.scans[scan_id]["options"] + and this.scans[scan_id]["options"]["all_datatype_analyzers"] + ): for analyzer in this.scanner["analyzers"]: if datatype in analyzer["dataTypeList"]: analyzers.append(analyzer["id"]) - if "meta_analyzers" in this.scans[scan_id]["options"] and this.scans[scan_id]["options"]["meta_analyzers"]: + if ( + "meta_analyzers" in this.scans[scan_id]["options"] + and this.scans[scan_id]["options"]["meta_analyzers"] + ): for ma in this.scans[scan_id]["options"]["meta_analyzers"]: if ma in this.scanner["meta_analyzers"].keys(): # valid meta-analyzer @@ -188,39 +214,48 @@ def _start_analyzes(scan_id, asset, datatype): resp = this.api.run_analyzer(analyzer, datatype, 1, asset) this.scans[scan_id]["jobs"].append(resp["id"]) except CortexException as ex: - print('[ERROR]: Failed to run analyzer: {}'.format(ex.message)) + print("[ERROR]: Failed to run analyzer: {}".format(ex.message)) return False return True -@app.route('/engines/cortex/stop/') +@app.route("/engines/cortex/stop/") def stop_scan(scan_id): res = {"page": "stop"} if scan_id not in this.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) scan_status(scan_id) - if this.scans[scan_id]['status'] != "SCANNING": - res.update({"status": "error", "reason": "scan '{}' is not running (status={})".format(scan_id, this.scans[scan_id]['status'])}) + if this.scans[scan_id]["status"] != "SCANNING": + res.update( + { + "status": "error", + "reason": "scan '{}' is not running (status={})".format( + scan_id, this.scans[scan_id]["status"] + ), + } + ) return jsonify(res) - for job_id in this.scans[scan_id]['jobs']: + for job_id in this.scans[scan_id]["jobs"]: _clean_job(job_id) - for t in 
this.scans[scan_id]['threads']: + for t in this.scans[scan_id]["threads"]: t._Thread__stop() - this.scans[scan_id]['status'] = "STOPPED" - this.scans[scan_id]['finished_at'] = int(time.time() * 1000) + this.scans[scan_id]["status"] = "STOPPED" + this.scans[scan_id]["finished_at"] = int(time.time() * 1000) res.update({"status": "success"}) return jsonify(res) # Stop all scans -@app.route('/engines/cortex/stopscans', methods=['GET']) +@app.route("/engines/cortex/stopscans", methods=["GET"]) def stop(): res = {"page": "stopscans"} @@ -232,7 +267,7 @@ def stop(): return jsonify(res) -@app.route('/engines/cortex/clean') +@app.route("/engines/cortex/clean") def clean(): res = {"page": "clean"} this.scans.clear() @@ -241,13 +276,15 @@ def clean(): return jsonify(res) -@app.route('/engines/cortex/clean/') +@app.route("/engines/cortex/clean/") def clean_scan(scan_id): res = {"page": "clean_scan"} res.update({"scan_id": scan_id}) if scan_id not in this.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) for job in this.scans[scan_id]["jobs"]: @@ -262,75 +299,81 @@ def _clean_job(job_id): try: this.api.delete_job(job_id) except CortexException as ex: - print('[ERROR]: Failed to get job report: {}'.format(ex.message)) + print("[ERROR]: Failed to get job report: {}".format(ex.message)) return True -@app.route('/engines/cortex/status/') +@app.route("/engines/cortex/status/") def scan_status(scan_id): if scan_id not in this.scans.keys(): - return jsonify({ - "status": "ERROR", - "details": "scan_id '{}' not found".format(scan_id)}) + return jsonify( + {"status": "ERROR", "details": "scan_id '{}' not found".format(scan_id)} + ) - for job_id in this.scans[scan_id]['jobs']: + for job_id in this.scans[scan_id]["jobs"]: try: r = this.api.get_job_report(job_id) if r["status"] in ["Success", "Failure"]: - 
this.scans[scan_id]["findings"] = this.scans[scan_id]["findings"] + _parse_results(scan_id, r) + this.scans[scan_id]["findings"] = this.scans[scan_id][ + "findings" + ] + _parse_results(scan_id, r) this.scans[scan_id]["jobs"].remove(job_id) except CortexException as ex: - print('[ERROR]: Failed to get job report: {}'.format(ex.message)) + print("[ERROR]: Failed to get job report: {}".format(ex.message)) all_threads_finished = False - for t in this.scans[scan_id]['threads']: + for t in this.scans[scan_id]["threads"]: if t.isAlive(): - this.scans[scan_id]['status'] = "SCANNING" + this.scans[scan_id]["status"] = "SCANNING" all_threads_finished = False break else: all_threads_finished = True - if all_threads_finished and len(this.scans[scan_id]['jobs']) == 0 and this.scans[scan_id]['status'] == "SCANNING": - this.scans[scan_id]['status'] = "FINISHED" - this.scans[scan_id]['finished_at'] = int(time.time() * 1000) + if ( + all_threads_finished + and len(this.scans[scan_id]["jobs"]) == 0 + and this.scans[scan_id]["status"] == "SCANNING" + ): + this.scans[scan_id]["status"] = "FINISHED" + this.scans[scan_id]["finished_at"] = int(time.time() * 1000) - return jsonify({"status": this.scans[scan_id]['status']}) + return jsonify({"status": this.scans[scan_id]["status"]}) -@app.route('/engines/cortex/status') +@app.route("/engines/cortex/status") def status(): res = {"page": "status"} if len(this.scans) == APP_MAXSCANS: - this.scanner['status'] = "BUSY" + this.scanner["status"] = "BUSY" else: - this.scanner['status'] = "READY" + this.scanner["status"] = "READY" scans = [] for scan_id in this.scans.keys(): scan_status(scan_id) - scans.append({scan_id: { - "status": this.scans[scan_id]['status'], - "started_at": this.scans[scan_id]['started_at'], - "assets": this.scans[scan_id]['assets'] - }}) + scans.append( + { + scan_id: { + "status": this.scans[scan_id]["status"], + "started_at": this.scans[scan_id]["started_at"], + "assets": this.scans[scan_id]["assets"], + } + } + ) - 
res.update({ - "nb_scans": len(this.scans), - "status": this.scanner['status'], - "scans": scans}) + res.update( + {"nb_scans": len(this.scans), "status": this.scanner["status"], "scans": scans} + ) return jsonify(res) -@app.route('/engines/cortex/info') +@app.route("/engines/cortex/info") def info(): status() - return jsonify({ - "page": "info", - "engine_config": this.scanner - }) + return jsonify({"page": "info", "engine_config": this.scanner}) def _parse_results(scan_id, results): @@ -340,55 +383,59 @@ def _parse_results(scan_id, results): # if failure: return an issue if results["status"] == "Failure": - if "display_failures" in this.scans[scan_id]["options"].keys() and this.scans[scan_id]["options"]["display_failures"] is True: - issues.append({ - "issue_id": len(issues)+1, - "severity": "info", "confidence": "certain", + if ( + "display_failures" in this.scans[scan_id]["options"].keys() + and this.scans[scan_id]["options"]["display_failures"] is True + ): + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "info", + "confidence": "certain", "target": { "addr": [results["data"]], - "protocol": results["dataType"]}, + "protocol": results["dataType"], + }, "title": "Failure in '{}' analyze".format(results["analyzerName"]), "solution": "n/a", - "metadata": {"tags": [ - "cortex", - results["dataType"], - results["analyzerName"] - ] + "metadata": { + "tags": ["cortex", results["dataType"], results["analyzerName"]] }, "type": "cortex_report", "timestamp": ts, - "description": "Error message: {}".format(results["report"]["full"]["errorMessage"]) + "description": "Error message: {}".format( + results["report"]["full"]["errorMessage"] + ), } ) return issues # Artifact issue if asked (get_artifacts=True option) - if "get_artifacts" in this.scans[scan_id]["options"].keys() and this.scans[scan_id]["options"]["get_artifacts"]: + if ( + "get_artifacts" in this.scans[scan_id]["options"].keys() + and this.scans[scan_id]["options"]["get_artifacts"] + ): 
description = "Following artefacts have been found during the analyze:\n" for artefact in results["report"]["artifacts"]: description += "\n{} ({})".format(artefact["data"], artefact["dataType"]) - issue_hash = hashlib.sha1(str(description).encode('utf-8')).hexdigest()[:6] - - issues.append({ - "issue_id": len(issues)+1, - "severity": "info", "confidence": "certain", - "target": { - "addr": [results["data"]], - "protocol": results["dataType"]}, + issue_hash = hashlib.sha1(str(description).encode("utf-8")).hexdigest()[:6] + + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "info", + "confidence": "certain", + "target": {"addr": [results["data"]], "protocol": results["dataType"]}, "title": "Artefacts from '{}' analyzer (HASH: {})".format( - results["analyzerName"], - issue_hash + results["analyzerName"], issue_hash ), "solution": "n/a", - "metadata": {"tags": [ - "cortex", - results["dataType"], - results["analyzerName"] - ] + "metadata": { + "tags": ["cortex", results["dataType"], results["analyzerName"]] }, "type": "cortex_report", "timestamp": ts, - "description": description + "description": description, } ) @@ -396,72 +443,86 @@ def _parse_results(scan_id, results): if "taxonomies" in results["report"]["summary"].keys(): for taxo in results["report"]["summary"]["taxonomies"]: severity = "info" - if taxo["level"] == "info": severity = "info" - elif taxo["level"] == "safe": severity = "info" - elif taxo["level"] == "suspicious": severity = "medium" - elif taxo["level"] == "malicious": severity = "high" - - issues.append({ - "issue_id": len(issues)+1, - "severity": severity, "confidence": "certain", + if taxo["level"] == "info": + severity = "info" + elif taxo["level"] == "safe": + severity = "info" + elif taxo["level"] == "suspicious": + severity = "medium" + elif taxo["level"] == "malicious": + severity = "high" + + issues.append( + { + "issue_id": len(issues) + 1, + "severity": severity, + "confidence": "certain", "target": { "addr": 
[results["data"]], - "protocol": results["dataType"]}, - "title": "{}: {}={}".format(taxo["namespace"], taxo["predicate"], taxo["value"]), + "protocol": results["dataType"], + }, + "title": "{}: {}={}".format( + taxo["namespace"], taxo["predicate"], taxo["value"] + ), "solution": "n/a", - "metadata": {"tags": [ - "cortex", - results["dataType"], - results["analyzerName"] - ] + "metadata": { + "tags": ["cortex", results["dataType"], results["analyzerName"]] }, "type": "cortex_report", "timestamp": ts, "description": "Analyzer '{}' stated following taxo:\n{}={}".format( - taxo["namespace"], taxo["predicate"], taxo["value"]) + taxo["namespace"], taxo["predicate"], taxo["value"] + ), } ) # Full report description = json.dumps(results["report"]["full"], indent=4) - description_hash = hashlib.sha1(str(description).encode('utf-8')).hexdigest()[:6] - issues.append({ - "issue_id": len(issues)+1, - "severity": "info", "confidence": "certain", - "target": { - "addr": [results["data"]], - "protocol": results["dataType"]}, + description_hash = hashlib.sha1(str(description).encode("utf-8")).hexdigest()[:6] + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "info", + "confidence": "certain", + "target": {"addr": [results["data"]], "protocol": results["dataType"]}, "title": "{} full results (HASH: {})".format( - results["analyzerName"], description_hash), + results["analyzerName"], description_hash + ), "solution": "n/a", - "metadata": {"tags": [ - "cortex", - results["dataType"], - results["analyzerName"] - ] + "metadata": { + "tags": ["cortex", results["dataType"], results["analyzerName"]] }, "type": "cortex_report", "timestamp": ts, - "description": description + "description": description, } ) return issues -@app.route('/engines/cortex/getfindings/') +@app.route("/engines/cortex/getfindings/") def getfindings(scan_id): res = {"page": "getfindings", "scan_id": scan_id} # check if the scan_id exists if scan_id not in this.scans.keys(): - res.update({"status": 
"error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) # check if the scan is finished status() - if this.scans[scan_id]['status'] != "FINISHED": - res.update({"status": "error", "reason": "scan_id '{}' not finished (status={})".format(scan_id, this.scans[scan_id]['status'])}) + if this.scans[scan_id]["status"] != "FINISHED": + res.update( + { + "status": "error", + "reason": "scan_id '{}' not finished (status={})".format( + scan_id, this.scans[scan_id]["status"] + ), + } + ) return jsonify(res) findings = this.scans[scan_id]["findings"] @@ -474,42 +535,44 @@ def getfindings(scan_id): "nb_high": 0, "nb_critical": 0, "engine_name": "cortex", - "engine_version": this.scanner["version"] + "engine_version": this.scanner["version"], } # Update summary with severity counts for finding in findings: sev = finding["severity"] - summary.update({"nb_"+sev: summary["nb_"+sev] + 1}) + summary.update({"nb_" + sev: summary["nb_" + sev] + 1}) scan = { "scan_id": scan_id, - "assets": this.scans[scan_id]['assets'], - "options": this.scans[scan_id]['options'], - "status": this.scans[scan_id]['status'], - "started_at": this.scans[scan_id]['started_at'], - "finished_at": this.scans[scan_id]['finished_at'] + "assets": this.scans[scan_id]["assets"], + "options": this.scans[scan_id]["options"], + "status": this.scans[scan_id]["status"], + "started_at": this.scans[scan_id]["started_at"], + "finished_at": this.scans[scan_id]["finished_at"], } # store the findings in a file - with open(BASE_DIR+"/results/cortex_"+scan_id+".json", 'w') as report_file: - json.dump({ - "scan": scan, - "summary": summary, - "issues": findings - }, report_file, default=_json_serial) + with open(BASE_DIR + "/results/cortex_" + scan_id + ".json", "w") as report_file: + json.dump( + {"scan": scan, "summary": summary, "issues": findings}, + report_file, + default=_json_serial, + ) # remove the scan 
from the active scan list clean_scan(scan_id) - res.update({"scan": scan, "summary": summary, "issues": findings, "status": "success"}) + res.update( + {"scan": scan, "summary": summary, "issues": findings, "status": "success"} + ) return jsonify(res) def _json_serial(obj): """ - JSON serializer for objects not serializable by default json code - Used for datetime serialization when the results are written in file + JSON serializer for objects not serializable by default json code + Used for datetime serialization when the results are written in file """ if isinstance(obj, datetime.datetime) or isinstance(obj, datetime.date): @@ -518,17 +581,22 @@ def _json_serial(obj): raise TypeError("Type not serializable ({})".format(obj)) -@app.route('/engines/cortex/getreport/') +@app.route("/engines/cortex/getreport/") def getreport(scan_id): - filepath = BASE_DIR+"/results/cortex_"+scan_id+".json" + filepath = BASE_DIR + "/results/cortex_" + scan_id + ".json" if not os.path.exists(filepath): - return jsonify({"status": "error", "reason": "report file for scan_id '{}' not found".format(scan_id)}) + return jsonify( + { + "status": "error", + "reason": "report file for scan_id '{}' not found".format(scan_id), + } + ) - return send_from_directory(BASE_DIR+"/results/", "cortex_"+scan_id+".json") + return send_from_directory(BASE_DIR + "/results/", "cortex_" + scan_id + ".json") -@app.route('/engines/cortex/test') +@app.route("/engines/cortex/test") def test(): if not APP_DEBUG: return jsonify({"page": "test"}) @@ -539,9 +607,13 @@ def test(): for arg in rule.arguments: options[arg] = "[{0}]".format(arg) - methods = ','.join(rule.methods) + methods = ",".join(rule.methods) url = url_for(rule.endpoint, **options) - res += urlparse.unquote("{0:50s} {1:20s} {2}
".format(rule.endpoint, methods, url)) + res += urlparse.unquote( + "{0:50s} {1:20s} {2}
".format( + rule.endpoint, methods, url + ) + ) return res @@ -553,16 +625,35 @@ def page_not_found(e): @app.before_first_request def main(): - if not os.path.exists(BASE_DIR+"/results"): - os.makedirs(BASE_DIR+"/results") + if not os.path.exists(BASE_DIR + "/results"): + os.makedirs(BASE_DIR + "/results") _loadconfig() -if __name__ == '__main__': +if __name__ == "__main__": parser = optparse.OptionParser() - parser.add_option("-H", "--host", help="Hostname of the Flask app [default %s]" % APP_HOST, default=APP_HOST) - parser.add_option("-P", "--port", help="Port for the Flask app [default %s]" % APP_PORT, default=APP_PORT) - parser.add_option("-d", "--debug", action="store_true", dest="debug", help=optparse.SUPPRESS_HELP, default=APP_DEBUG) + parser.add_option( + "-H", + "--host", + help="Hostname of the Flask app [default %s]" % APP_HOST, + default=APP_HOST, + ) + parser.add_option( + "-P", + "--port", + help="Port for the Flask app [default %s]" % APP_PORT, + default=APP_PORT, + ) + parser.add_option( + "-d", + "--debug", + action="store_true", + dest="debug", + help=optparse.SUPPRESS_HELP, + default=APP_DEBUG, + ) options, _ = parser.parse_args() - app.run(debug=options.debug, host=options.host, port=int(options.port), threaded=True) + app.run( + debug=options.debug, host=options.host, port=int(options.port), threaded=True + ) diff --git a/engines/cybelangel/engine-cybelangel.py b/engines/cybelangel/engine-cybelangel.py index 946dac02..a0c576dc 100755 --- a/engines/cybelangel/engine-cybelangel.py +++ b/engines/cybelangel/engine-cybelangel.py @@ -3,7 +3,7 @@ """ CybelAngel PatrOwl engine application -Copyright (C) 2021 Nicolas Mattiocco - @MaKyOtOx +Copyright (C) 2024 Nicolas Mattiocco - @MaKyOtOx Licensed under the AGPLv3 License Written by Fabien Martinez (fabien.martinez@adevinta.com) """ @@ -27,20 +27,20 @@ APP_DEBUG = False APP_HOST = "0.0.0.0" APP_PORT = 5024 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 5)) +APP_MAXSCANS = 
int(os.environ.get("APP_MAXSCANS", 5)) APP_ENGINE_NAME = "cybelangel" APP_BASE_DIR = Path(__file__).parent VERSION = "1.0.0" logging.basicConfig(level=(logging.DEBUG if APP_DEBUG else logging.INFO)) -LOGGER = logging.getLogger('cybelangel') +LOGGER = logging.getLogger("cybelangel") engine = PatrowlEngine( app=app, base_dir=str(APP_BASE_DIR.absolute()), name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS, - version=VERSION + version=VERSION, ) this = sys.modules[__name__] @@ -61,61 +61,61 @@ def handle_invalid_usage(error): return response -@app.route('/') +@app.route("/") def default(): """Route by default.""" return engine.default() -@app.route('/engines/cybelangel/') +@app.route("/engines/cybelangel/") def index(): """Return index page.""" return engine.index() -@app.route('/engines/cybelangel/liveness') +@app.route("/engines/cybelangel/liveness") def liveness(): """Return liveness page.""" return engine.liveness() -@app.route('/engines/cybelangel/readiness') +@app.route("/engines/cybelangel/readiness") def readiness(): """Return readiness page.""" return engine.readiness() -@app.route('/engines/cybelangel/test') +@app.route("/engines/cybelangel/test") def test(): """Return test page.""" return engine.test() -@app.route('/engines/cybelangel/info') +@app.route("/engines/cybelangel/info") def info(): """Get info on running engine.""" return engine.info() -@app.route('/engines/cybelangel/clean') +@app.route("/engines/cybelangel/clean") def clean(): """Clean all scans.""" return engine.clean() -@app.route('/engines/cybelangel/clean/') +@app.route("/engines/cybelangel/clean/") def clean_scan(scan_id): """Clean scan identified by id.""" return engine.clean_scan(scan_id) -@app.route('/engines/cybelangel/status') +@app.route("/engines/cybelangel/status") def status(): """Get status on engine and all scans.""" return engine.getstatus() -@app.route('/engines/cybelangel/status/') +@app.route("/engines/cybelangel/status/") def status_scan(scan_id): """Get status on scan identified 
by id.""" res = {"page": "status", "status": "UNKNOWN"} @@ -127,31 +127,31 @@ def status_scan(scan_id): res.update({"status": "error", "reason": "todo"}) return jsonify(res) - res.update({'status': engine.scans[scan_id]['status']}) + res.update({"status": engine.scans[scan_id]["status"]}) return jsonify(res) -@app.route('/engines/cybelangel/stopscans') +@app.route("/engines/cybelangel/stopscans") def stop(): """Stop all scans.""" return engine.stop() -@app.route('/engines/cybelangel/stop/') +@app.route("/engines/cybelangel/stop/") def stop_scan(scan_id): """Stop scan identified by id.""" return engine.stop_scan(scan_id) -@app.route('/engines/cybelangel/getreport/') +@app.route("/engines/cybelangel/getreport/") def getreport(scan_id): """Get report on finished scans.""" return engine.getreport(scan_id) def _loadconfig(): - conf_file = APP_BASE_DIR / 'cybelangel.json' + conf_file = APP_BASE_DIR / "cybelangel.json" try: json_data = conf_file.read_text() except FileNotFoundError: @@ -164,29 +164,38 @@ def _loadconfig(): try: engine.scanner = json.loads(json_data) except Exception as e: - LOGGER.error(f'Unable to convert config file to json: {e}') - return {"status": "error", "reason": "unable to convert config file to json"} + LOGGER.error(f"Unable to convert config file to json: {e}") + return { + "status": "error", + "reason": "unable to convert config file to json", + } else: - engine.scanner['status'] = 'READY' - if 'options' not in engine.scanner: + engine.scanner["status"] = "READY" + if "options" not in engine.scanner: LOGGER.error("Unable to find options in config file") - return {"status": "error", "reason": "you have to specify options in your config file"} - for key, value in engine.scanner['options'].items(): + return { + "status": "error", + "reason": "you have to specify options in your config file", + } + for key, value in engine.scanner["options"].items(): if not isinstance(value, dict) or not "type" in value.keys(): LOGGER.error(f"Bad format for 
options! ({key})") return {"status": "error", "reason": f"bad format for options ({key})!"} - if value['type'] == 'required' and len(value['value']) == 0: - LOGGER.error(f'Required option empty / not found: {key}') - return {"status": "error", "reason": f"you have to specify {key} in options"} - - version_filename = APP_BASE_DIR+'/VERSION' + if value["type"] == "required" and len(value["value"]) == 0: + LOGGER.error(f"Required option empty / not found: {key}") + return { + "status": "error", + "reason": f"you have to specify {key} in options", + } + + version_filename = APP_BASE_DIR + "/VERSION" if os.path.exists(version_filename): version_file = open(version_filename, "r") - engine.version = version_file.read().rstrip('\n') + engine.version = version_file.read().rstrip("\n") version_file.close() -@app.route('/engines/cybelangel/reloadconfig', methods=['GET']) +@app.route("/engines/cybelangel/reloadconfig", methods=["GET"]) def reloadconfig(): res = {"page": "reloadconfig"} _loadconfig() @@ -194,126 +203,145 @@ def reloadconfig(): return jsonify(res) -@app.route('/engines/cybelangel/startscan', methods=['POST']) +@app.route("/engines/cybelangel/startscan", methods=["POST"]) def start_scan(): res = {"page": "startscan"} # Check the scanner is ready to start a new scan if len(engine.scans) == APP_MAXSCANS: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) return jsonify(res) status() - if engine.scanner['status'] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": "scanner not ready", - "status": engine.scanner['status'] - }}) + if engine.scanner["status"] != "READY": + res.update( + { + "status": "refused", + "details": { + "reason": "scanner not ready", + "status": engine.scanner["status"], + }, + } + ) return 
jsonify(res) data = json.loads(request.data) - if 'assets' not in data.keys() or 'scan_id' not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something is missing ('assets' ?)" - }}) + if "assets" not in data.keys() or "scan_id" not in data.keys(): + res.update( + { + "status": "refused", + "details": {"reason": "arg error, something is missing ('assets' ?)"}, + } + ) return jsonify(res) asset_groups = [] for asset in data["assets"]: # Value if "value" not in asset.keys() or not asset["value"]: - res.update({ - "status": "error", - "reason": "arg error, something is missing ('asset.value')" - }) + res.update( + { + "status": "error", + "reason": "arg error, something is missing ('asset.value')", + } + ) return jsonify(res) # Supported datatypes if asset["datatype"] not in engine.scanner["allowed_asset_types"]: - res.update({ - "status": "error", - "reason": "arg error, bad value for '{}' datatype (not supported)".format(asset["value"]) - }) + res.update( + { + "status": "error", + "reason": "arg error, bad value for '{}' datatype (not supported)".format( + asset["value"] + ), + } + ) return jsonify(res) asset_groups.append(asset["value"]) - scan_id = str(data['scan_id']) + scan_id = str(data["scan_id"]) - if data['scan_id'] in engine.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' already launched".format(data['scan_id']), + if data["scan_id"] in engine.scans.keys(): + res.update( + { + "status": "refused", + "details": { + "reason": "scan '{}' already launched".format(data["scan_id"]), + }, } - }) + ) return jsonify(res) scan = { - 'assets': asset_groups, - 'threads': [], - 'options': data['options'], - 'scan_id': scan_id, - 'status': "STARTED", - 'started_at': int(time.time() * 1000), - 'findings': [] + "assets": asset_groups, + "threads": [], + "options": data["options"], + "scan_id": scan_id, + "status": "STARTED", + "started_at": int(time.time() * 1000), + "findings": [], } 
engine.scans.update({scan_id: scan}) thread = threading.Thread(target=_scan_malicious_websites, args=(scan_id,)) thread.start() - engine.scans[scan_id]['threads'].append(thread) + engine.scans[scan_id]["threads"].append(thread) - res.update({ - "status": "accepted", - "details": { - "scan_id": scan['scan_id'] - } - }) + res.update({"status": "accepted", "details": {"scan_id": scan["scan_id"]}}) return jsonify(res) def _scan_malicious_websites(scan_id): cybelangel_manager = CybelAngel( - engine.scanner['options']['api_client_id']['value'], - engine.scanner['options']['api_client_secret']['value'] + engine.scanner["options"]["api_client_id"]["value"], + engine.scanner["options"]["api_client_secret"]["value"], ) reports = cybelangel_manager.process() error = None if reports is False: - LOGGER.error('Unable to get reports from cybelangel!') - engine.scans[scan_id]['status'] = 'ERROR' + LOGGER.error("Unable to get reports from cybelangel!") + engine.scans[scan_id]["status"] = "ERROR" return False - engine.scans[scan_id]['findings'] = [] + engine.scans[scan_id]["findings"] = [] if not error: for report in reports: LOGGER.info(f'Checking report threat {report["threat"]}') - if not report['keywords'][0]['rule'].lower() in engine.scans[scan_id]['assets']: - LOGGER.error(f'Unable to fin asset group for {report["threat"]}: {report["keywords"][0]["rule"]} found but no match') + if ( + not report["keywords"][0]["rule"].lower() + in engine.scans[scan_id]["assets"] + ): + LOGGER.error( + f'Unable to fin asset group for {report["threat"]}: {report["keywords"][0]["rule"]} found but no match' + ) continue - engine.scans[scan_id]['findings'].append({ - 'domain': report['threat'], - 'asset_group': report['keywords'][0]['rule'].lower() - }) - if not cybelangel_manager.resolve_report(report['id']): + engine.scans[scan_id]["findings"].append( + { + "domain": report["threat"], + "asset_group": report["keywords"][0]["rule"].lower(), + } + ) + if not 
cybelangel_manager.resolve_report(report["id"]): LOGGER.error(f'Unable to resolve report {report["threat"]}') - engine.scans[scan_id]['status'] = 'FINISHED' - engine.scans[scan_id]['finished_at'] = int(time.time() * 1000) + engine.scans[scan_id]["status"] = "FINISHED" + engine.scans[scan_id]["finished_at"] = int(time.time() * 1000) return True -@app.route('/engines/cybelangel/getreport/') + +@app.route("/engines/cybelangel/getreport/") def get_report(scan_id): """Get report.""" result = dict() - result_file = APP_BASE_DIR / 'results' / f'cybelangel_{scan_id}.json' + result_file = APP_BASE_DIR / "results" / f"cybelangel_{scan_id}.json" try: result = json.loads(result_file.read_text()) result_file.close() @@ -322,32 +350,34 @@ def get_report(scan_id): return result + def _parse_results(scan_id): issues = [] summary = {} - nb_vulns = { - "info": 0, - "low": 0, - "medium": 0, - "high": 0 - } + nb_vulns = {"info": 0, "low": 0, "medium": 0, "high": 0} timestamp = int(time.time() * 1000) for finding in engine.scans[scan_id]["findings"]: - nb_vulns['medium'] += 1 - issues.append({ - "issue_id": len(issues)+1, - "severity": "medium", - "confidence": "certain", - "target": {"addr": [finding['domain']], "protocol": "http", "parent": finding['asset_group']}, - "title": f"[CybelAngel] New asset found on: {finding['domain']}", - "solution": "n/a", - "metadata": {"risk": {"criticity": "medium"}}, - "type": "cybelangel_report", - "timestamp": timestamp, - "description": f"Domain {finding['domain']} found as a malicious domain name by Cybel Angel", - }) + nb_vulns["medium"] += 1 + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "medium", + "confidence": "certain", + "target": { + "addr": [finding["domain"]], + "protocol": "http", + "parent": finding["asset_group"], + }, + "title": f"[CybelAngel] New asset found on: {finding['domain']}", + "solution": "n/a", + "metadata": {"risk": {"criticity": "medium"}}, + "type": "cybelangel_report", + "timestamp": timestamp, + 
"description": f"Domain {finding['domain']} found as a malicious domain name by Cybel Angel", + } + ) summary = { "nb_issues": len(issues), @@ -356,50 +386,62 @@ def _parse_results(scan_id): "nb_medium": nb_vulns["medium"], "nb_high": nb_vulns["high"], "engine_name": "cybelangel", - "engine_version": engine.scanner["version"] + "engine_version": engine.scanner["version"], } return issues, summary -@app.route('/engines/cybelangel/getfindings/', methods=['GET']) +@app.route("/engines/cybelangel/getfindings/", methods=["GET"]) def getfindings(scan_id): res = {"page": "getfindings", "scan_id": scan_id} # check if the scan_id exists if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) # check if the scan is finished - if engine.scans[scan_id]['status'] != "FINISHED": - res.update({"status": "error", "reason": "scan_id '{}' not finished (status={})".format(scan_id, engine.scans[scan_id]['status'])}) + if engine.scans[scan_id]["status"] != "FINISHED": + res.update( + { + "status": "error", + "reason": "scan_id '{}' not finished (status={})".format( + scan_id, engine.scans[scan_id]["status"] + ), + } + ) return jsonify(res) issues, summary = _parse_results(scan_id) scan = { "scan_id": scan_id, - "assets": engine.scans[scan_id]['assets'], - "options": engine.scans[scan_id]['options'], - "status": engine.scans[scan_id]['status'], - "started_at": engine.scans[scan_id]['started_at'], - "finished_at": engine.scans[scan_id]['finished_at'] + "assets": engine.scans[scan_id]["assets"], + "options": engine.scans[scan_id]["options"], + "status": engine.scans[scan_id]["status"], + "started_at": engine.scans[scan_id]["started_at"], + "finished_at": engine.scans[scan_id]["finished_at"], } # Store the findings in a file - report_file = APP_BASE_DIR / 'results' / f'cybelangel_{scan_id}.json' + report_file = 
APP_BASE_DIR / "results" / f"cybelangel_{scan_id}.json" try: - report_file.write_text(json.dumps({ - "scan": scan, - "summary": summary, - "issues": issues - }, default=_json_serial)) + report_file.write_text( + json.dumps( + {"scan": scan, "summary": summary, "issues": issues}, + default=_json_serial, + ) + ) except Exception as e: - LOGGER.error(f'Unable to write in {report_file.absolute()}: {e}') + LOGGER.error(f"Unable to write in {report_file.absolute()}: {e}") res.update({"status": "error", "reason": "unable to write in report file"}) else: - res.update({"scan": scan, "summary": summary, "issues": issues, "status": "success"}) + res.update( + {"scan": scan, "summary": summary, "issues": issues, "status": "success"} + ) finally: # remove the scan from the active scan list clean_scan(scan_id) @@ -409,11 +451,11 @@ def getfindings(scan_id): @app.before_first_request def main(): """First function called.""" - result_path = APP_BASE_DIR / 'results' + result_path = APP_BASE_DIR / "results" if not result_path.exists: result_path.mkdir(parents=True) _loadconfig() -if __name__ == '__main__': +if __name__ == "__main__": engine.run_app(app_debug=APP_DEBUG, app_host=APP_HOST, app_port=APP_PORT) diff --git a/engines/droopescan/engine-droopescan.py b/engines/droopescan/engine-droopescan.py index 2c5b1dff..21d8c7dd 100644 --- a/engines/droopescan/engine-droopescan.py +++ b/engines/droopescan/engine-droopescan.py @@ -13,6 +13,7 @@ from copy import deepcopy from shlex import quote, split from flask import Flask, request, jsonify, url_for, send_file + try: from patrowlhears4py.api import PatrowlHearsApi except ModuleNotFoundError: @@ -23,11 +24,12 @@ from PatrowlEnginesUtils.PatrowlEngine import _json_serial from PatrowlEnginesUtils.PatrowlEngine import PatrowlEngine from PatrowlEnginesUtils.PatrowlEngineExceptions import PatrowlEngineExceptions + app = Flask(__name__) -APP_DEBUG = os.environ.get('DEBUG', '').lower() in ['true', '1', 'yes', 'y', 'on'] +APP_DEBUG = 
os.environ.get("DEBUG", "").lower() in ["true", "1", "yes", "y", "on"] APP_HOST = "0.0.0.0" APP_PORT = 5021 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 25)) +APP_MAXSCANS = int(os.environ.get("APP_MAXSCANS", 25)) APP_ENGINE_NAME = "patrowl-droopescan" VERSION = "1.4.30" @@ -44,7 +46,7 @@ base_dir=BASE_DIR, name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS, - version=VERSION + version=VERSION, ) @@ -56,64 +58,66 @@ def handle_invalid_usage(error): return response -@app.route('/') +@app.route("/") def default(): """Route by default.""" return engine.default() -@app.route('/engines/droopescan/') +@app.route("/engines/droopescan/") def index(): """Return index page.""" return engine.index() -@app.route('/engines/droopescan/liveness') +@app.route("/engines/droopescan/liveness") def liveness(): """Return liveness page.""" return engine.liveness() -@app.route('/engines/droopescan/readiness') +@app.route("/engines/droopescan/readiness") def readiness(): """Return readiness page.""" return engine.readiness() -@app.route('/engines/droopescan/info') +@app.route("/engines/droopescan/info") def info(): """Get info on running engine.""" scans = {} for scan in this.scans.keys(): scan_status(scan) - scans.update({scan: { - "status": this.scans[scan]["status"], - "options": this.scans[scan]["options"], - "nb_findings": this.scans[scan]["nb_findings"], - }}) - - res = { - "page": "info", - "engine_config": this.scanner, - "scans": scans - } + scans.update( + { + scan: { + "status": this.scans[scan]["status"], + "options": this.scans[scan]["options"], + "nb_findings": this.scans[scan]["nb_findings"], + } + } + ) + + res = {"page": "info", "engine_config": this.scanner, "scans": scans} return jsonify(res) -@app.route('/engines/droopescan/clean') +@app.route("/engines/droopescan/clean") def clean(): """Clean all scans.""" return engine.clean() -@app.route('/engines/droopescan/clean/') +@app.route("/engines/droopescan/clean/") def clean_scan(scan_id): """Clean scan identified by 
id.""" res = {"page": "clean_scan"} res.update({"scan_id": scan_id}) if scan_id not in this.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) this.scans.pop(scan_id) @@ -121,26 +125,26 @@ def clean_scan(scan_id): return jsonify(res) -@app.route('/engines/droopescan/status') +@app.route("/engines/droopescan/status") def status(): """Get status on engine and all scans.""" res = {"page": "status"} if len(this.scans) == APP_MAXSCANS: - this.scanner['status'] = "BUSY" + this.scanner["status"] = "BUSY" else: - this.scanner['status'] = "READY" + this.scanner["status"] = "READY" - if not os.path.exists(BASE_DIR+'/droopescan.json'): - this.scanner['status'] = "ERROR" - res.update({ - "status": "error", - "reason": "Config file droopescan.json not found"}) + if not os.path.exists(BASE_DIR + "/droopescan.json"): + this.scanner["status"] = "ERROR" + res.update( + {"status": "error", "reason": "Config file droopescan.json not found"} + ) app.logger.error("Config file droopescan.json not found") -# if not os.path.isfile(this.scanner['path']): -# this.scanner['status'] = "ERROR" + # if not os.path.isfile(this.scanner['path']): + # this.scanner['status'] = "ERROR" - res.update({"status": this.scanner['status']}) + res.update({"status": this.scanner["status"]}) # display info on the scanner res.update({"scanner": this.scanner}) @@ -149,57 +153,67 @@ def status(): scans = {} for scan in this.scans.keys(): scan_status(scan) - scans.update({scan: { - "status": this.scans[scan]["status"], - "options": this.scans[scan]["options"], - "nb_findings": this.scans[scan]["nb_findings"], - }}) + scans.update( + { + scan: { + "status": this.scans[scan]["status"], + "options": this.scans[scan]["options"], + "nb_findings": this.scans[scan]["nb_findings"], + } + } + ) res.update({"scans": scans}) return jsonify(res) 
-@app.route('/engines/droopescan/getreport/') +@app.route("/engines/droopescan/getreport/") def getreport(scan_id): """Get report on finished scans.""" if scan_id not in this.scans.keys(): - return jsonify({"status": "ERROR", "reason": "scan_id '{}' not found".format(scan_id)}) + return jsonify( + {"status": "ERROR", "reason": "scan_id '{}' not found".format(scan_id)} + ) # remove the scan from the active scan list clean_scan(scan_id) - filepath = BASE_DIR+"/results/droopescan-"+scan_id+".json" + filepath = BASE_DIR + "/results/droopescan-" + scan_id + ".json" if not os.path.exists(filepath): - return jsonify({"status": "ERROR", - "reason": "report file for scan_id '{}' not found".format(scan_id)}) + return jsonify( + { + "status": "ERROR", + "reason": "report file for scan_id '{}' not found".format(scan_id), + } + ) return send_file( filepath, - mimetype='application/json', - download_name='droopescan-'+str(scan_id)+".json", - as_attachment=True + mimetype="application/json", + download_name="droopescan-" + str(scan_id) + ".json", + as_attachment=True, ) def loadconfig(): """Load engine configuration.""" - conf_file = BASE_DIR+'/droopescan.json' + conf_file = BASE_DIR + "/droopescan.json" if os.path.exists(conf_file): json_data = open(conf_file) this.scanner = json.load(json_data) - this.scanner['status'] = "READY" + this.scanner["status"] = "READY" - version_filename = BASE_DIR+'/VERSION' + version_filename = BASE_DIR + "/VERSION" if os.path.exists(version_filename): version_file = open(version_filename, "r") - engine.version = version_file.read().rstrip('\n') + engine.version = version_file.read().rstrip("\n") version_file.close() return {"status": "OK", "reason": "config file loaded."} - this.scanner['status'] = "ERROR" + this.scanner["status"] = "ERROR" return {"status": "ERROR", "reason": "config file not found."} -@app.route('/engines/droopescan/reloadconfig') +@app.route("/engines/droopescan/reloadconfig") def reloadconfig(): """Reload engine 
configuration.""" res = {"page": "reloadconfig"} @@ -214,7 +228,7 @@ def page_not_found(e): return jsonify({"page": "not found"}) -@app.route('/engines/droopescan/test') +@app.route("/engines/droopescan/test") def test(): """Return test page.""" res = "

Test Page (DEBUG):

" @@ -223,21 +237,26 @@ def test(): for arg in rule.arguments: options[arg] = "[{0}]".format(arg) - methods = ','.join(rule.methods) + methods = ",".join(rule.methods) url = url_for(rule.endpoint, **options) - res += urllib.url2pathname("{0:50s} {1:20s} {2}
".format( - rule.endpoint, methods, url)) + res += urllib.url2pathname( + "{0:50s} {1:20s} {2}
".format( + rule.endpoint, methods, url + ) + ) return res -@app.route('/engines/droopescan/status/', methods=['GET']) +@app.route("/engines/droopescan/status/", methods=["GET"]) def scan_status(scan_id): """Get status on scan identified by id.""" res = {"page": "scan_status", "status": "UNKNOWN"} if scan_id not in this.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) proc = this.scans[scan_id]["proc"] @@ -254,14 +273,16 @@ def scan_status(scan_id): res.update({"status": "FINISHED"}) this.scans[scan_id]["status"] = "FINISHED" - elif psutil.pid_exists(proc.pid) and \ - psutil.Process(proc.pid).status() in ["sleeping", "running"]: - res.update({ - "status": "SCANNING", - "info": { - "pid": proc.pid, - "cmd": this.scans[scan_id]["proc_cmd"]} - }) + elif psutil.pid_exists(proc.pid) and psutil.Process(proc.pid).status() in [ + "sleeping", + "running", + ]: + res.update( + { + "status": "SCANNING", + "info": {"pid": proc.pid, "cmd": this.scans[scan_id]["proc_cmd"]}, + } + ) elif psutil.pid_exists(proc.pid) and psutil.Process(proc.pid).status() == "zombie": res.update({"status": "FINISHED"}) @@ -269,20 +290,31 @@ def scan_status(scan_id): psutil.Process(proc.pid).terminate() # return the scan parameters and the status - #res.update({ + # res.update({ # "scan": this.scans[scan_id], # #"status": this.scans[scan_id]["status"] - #}) + # }) return jsonify(res) -def _add_issue(scan_id, target, timestamp, title, desc, type, - severity="info", confidence="certain", - vuln_refs=None, links=None, tags=None, risk=None): +def _add_issue( + scan_id, + target, + timestamp, + title, + desc, + type, + severity="info", + confidence="certain", + vuln_refs=None, + links=None, + tags=None, + risk=None, +): """Add findings to results.""" this.scans[scan_id]["nb_findings"] = this.scans[scan_id]["nb_findings"] + 1 - if (vuln_refs is 
None and links is None and tags is None and risk is None): + if vuln_refs is None and links is None and tags is None and risk is None: issue = { "issue_id": this.scans[scan_id]["nb_findings"], "severity": severity, @@ -292,7 +324,7 @@ def _add_issue(scan_id, target, timestamp, title, desc, type, "description": desc, "solution": "n/a", "type": type, - "timestamp": timestamp + "timestamp": timestamp, } else: risk = {} @@ -312,14 +344,14 @@ def _add_issue(scan_id, target, timestamp, title, desc, type, "vuln_refs": vuln_refs, "risk": risk, "links": links, - "tags": tags - } + "tags": tags, + }, } return issue # Stop all scans -@app.route('/engines/droopescan/stopscans') +@app.route("/engines/droopescan/stopscans") def stop(): """Stop all scans.""" res = {"page": "stopscans"} @@ -332,91 +364,109 @@ def stop(): return jsonify(res) -@app.route('/engines/droopescan/stop/') +@app.route("/engines/droopescan/stop/") def stop_scan(scan_id): """Stop scan identified by id.""" res = {"page": "stopscan"} if scan_id not in this.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) proc = this.scans[scan_id]["proc"] - if hasattr(proc, 'pid'): + if hasattr(proc, "pid"): if psutil.pid_exists(proc.pid): psutil.Process(proc.pid).terminate() - res.update({"status": "TERMINATED", - "details": { - "pid": proc.pid, - "cmd": this.scans[scan_id]["proc_cmd"], - "scan_id": scan_id}}) + res.update( + { + "status": "TERMINATED", + "details": { + "pid": proc.pid, + "cmd": this.scans[scan_id]["proc_cmd"], + "scan_id": scan_id, + }, + } + ) return jsonify(res) ########################## -@app.route('/engines/droopescan/startscan', methods=['POST']) +@app.route("/engines/droopescan/startscan", methods=["POST"]) def start(): """Start a scan.""" res = {"page": "startscan"} # check the scanner is ready to start a new scan if len(this.scans) == 
APP_MAXSCANS: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) return jsonify(res) # update scanner status status() - if this.scanner['status'] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": "scanner not ready", - "status": this.scanner['status']}}) + if this.scanner["status"] != "READY": + res.update( + { + "status": "refused", + "details": { + "reason": "scanner not ready", + "status": this.scanner["status"], + }, + } + ) return jsonify(res) # Load scan parameters data = json.loads(request.data.decode("UTF-8")) - if 'assets' not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something is missing ('assets' ?)"}}) + if "assets" not in data.keys(): + res.update( + { + "status": "refused", + "details": {"reason": "arg error, something is missing ('assets' ?)"}, + } + ) return jsonify(res) - scan_id = str(data['scan_id']) - if data['scan_id'] in this.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' already launched".format(data['scan_id'])}}) + scan_id = str(data["scan_id"]) + if data["scan_id"] in this.scans.keys(): + res.update( + { + "status": "refused", + "details": { + "reason": "scan '{}' already launched".format(data["scan_id"]) + }, + } + ) return jsonify(res) scan = { - 'assets': data['assets'], - 'threads': [], - 'proc': None, - 'options': data['options'], - 'cms': "", - 'hears_api': {}, - 'scan_id': scan_id, - 'status': "STARTED", - 'started_at': int(time.time() * 1000), - 'nb_findings': 0 + "assets": data["assets"], + "threads": [], + "proc": None, + "options": data["options"], + "cms": "", + "hears_api": {}, + "scan_id": scan_id, + "status": "STARTED", + "started_at": int(time.time() * 1000), + "nb_findings": 0, } 
this.scans.update({scan_id: scan}) thread = threading.Thread(target=_scan_thread, args=(scan_id,)) thread.start() - this.scans[scan_id]['threads'].append(thread) + this.scans[scan_id]["threads"].append(thread) - res.update({ - "status": "accepted", - "details": {"scan_id": scan['scan_id']} - }) + res.update({"status": "accepted", "details": {"scan_id": scan["scan_id"]}}) return jsonify(res) @@ -425,26 +475,34 @@ def _scan_thread(scan_id): """Attribute scan to a thread and launch it.""" hosts = [] - for asset in this.scans[scan_id]['assets']: + for asset in this.scans[scan_id]["assets"]: if asset["datatype"] not in this.scanner["allowed_asset_types"]: - return jsonify({ - "status": "refused", - "details": { - "reason": "datatype '{}' not supported for the asset {}.".format( - asset["datatype"], asset["value"])}}) - # commentaire = ''' To delete, somtimes we scan app + return jsonify( + { + "status": "refused", + "details": { + "reason": "datatype '{}' not supported for the asset {}.".format( + asset["datatype"], asset["value"] + ) + }, + } + ) + # commentaire = ''' To delete, somtimes we scan app # like https://example.com/app1name/ only and nor https://example.com''' else: # extract the net location from urls if needed - if asset["datatype"] == 'url': - hosts.append("{uri.netloc}".format( - uri=urllib.parse.urlparse(quote(asset["value"]))).strip()) - app.logger.debug('Adding URL {} to hosts'.format(asset["value"])) + if asset["datatype"] == "url": + hosts.append( + "{uri.netloc}".format( + uri=urllib.parse.urlparse(quote(asset["value"])) + ).strip() + ) + app.logger.debug("Adding URL {} to hosts".format(asset["value"])) else: hosts.append(quote(asset["value"]).strip()) - app.logger.debug('Adding asset {} to hosts'.format(asset["value"])) + app.logger.debug("Adding asset {} to hosts".format(asset["value"])) - app.logger.debug('Hosts set : %s', hosts) + app.logger.debug("Hosts set : %s", hosts) # Update status this.scans[scan_id]["status"] = "SCANNING" @@ -453,18 
+511,20 @@ def _scan_thread(scan_id): hosts = list(set(hosts)) # Write hosts in a file (cleaner,doesn't break with shell arguments limit (1000+ hosts)) - hosts_filename = BASE_DIR+"/tmp/engine_droopescan_hosts_file_scan_id_{}.tmp".format(scan_id) - with open(hosts_filename, 'w') as hosts_file: + hosts_filename = ( + BASE_DIR + "/tmp/engine_droopescan_hosts_file_scan_id_{}.tmp".format(scan_id) + ) + with open(hosts_filename, "w") as hosts_file: for item in hosts: hosts_file.write("{}\n".format(quote(item))) # Sanitize args : - th_options = this.scans[scan_id]['options'] - app.logger.debug('options: %s', th_options) + th_options = this.scans[scan_id]["options"] + app.logger.debug("options: %s", th_options) # Log file path - log_path = BASE_DIR+"/results/droopescan-" + scan_id + ".json" + log_path = BASE_DIR + "/results/droopescan-" + scan_id + ".json" # Error log file path - error_log_path = BASE_DIR+"/logs/droopescan-error-" + scan_id + ".log" + error_log_path = BASE_DIR + "/logs/droopescan-error-" + scan_id + ".log" # Base command cmd = "droopescan " @@ -485,8 +545,8 @@ def _scan_thread(scan_id): return False elif opt_key == "host_file_path": if os.path.isfile(th_options.get(opt_key)): - with open(th_options.get(opt_key), 'r') as file_host: - with open(hosts_filename, 'a') as hosts_file: + with open(th_options.get(opt_key), "r") as file_host: + with open(hosts_filename, "a") as hosts_file: for line in file_host: hosts_file.write(quote(line)) elif opt_key == "hears_api": @@ -498,16 +558,15 @@ def _scan_thread(scan_id): return False cmd += " -U " + hosts_filename cmd += " --output json " - app.logger.debug('cmd: %s', cmd) + app.logger.debug("cmd: %s", cmd) cmd_sec = split(cmd) this.scans[scan_id]["proc_cmd"] = "not set!!" 
with open(error_log_path, "w") as stderr: - this.scans[scan_id]["proc"] = subprocess.Popen(cmd_sec, - shell=False, - stdout=open(log_path, "w"), - stderr=stderr) + this.scans[scan_id]["proc"] = subprocess.Popen( + cmd_sec, shell=False, stdout=open(log_path, "w"), stderr=stderr + ) this.scans[scan_id]["proc_cmd"] = cmd return True @@ -520,9 +579,15 @@ def _get_hears_findings(scan_id, t_vendor=None, t_product=None, t_product_versio hears_token = this.scans[scan_id]["hears_api"]["token"] # Retrieve data api = PatrowlHearsApi(url=hears_url, auth_token=hears_token) - json_data = api.search_vulns(cveid=None, monitored=None, search=None, - vendor_name=t_vendor, product_name=t_product, - product_version=t_product_version, cpe=None) + json_data = api.search_vulns( + cveid=None, + monitored=None, + search=None, + vendor_name=t_vendor, + product_name=t_product, + product_version=t_product_version, + cpe=None, + ) if json_data["count"] == 0: return None # Handle JSON data @@ -534,7 +599,7 @@ def _get_hears_findings(scan_id, t_vendor=None, t_product=None, t_product_versio # Get CPE if cpe == "": for cpe_version in vln["vulnerable_products"]: - if t_product_version+":*" in cpe_version: + if t_product_version + ":*" in cpe_version: cpe = cpe_version app.logger.debug(cpe) pass @@ -580,20 +645,24 @@ def _parse_report(filename, scan_id): """Parse the Droopescan report.""" res = [] target = {} - app.logger.debug('Opening results file for scan %s', str(scan_id) + " : " + str(filename)) + app.logger.debug( + "Opening results file for scan %s", str(scan_id) + " : " + str(filename) + ) if os.path.isfile(filename): # TODO Catch Exception for open() function - with open(filename, 'r') as file_desc: - app.logger.debug('Opened file named {} in mode {}'.format(file_desc.name, - file_desc.mode)) + with open(filename, "r") as file_desc: + app.logger.debug( + "Opened file named {} in mode {}".format(file_desc.name, file_desc.mode) + ) try: json_data = json.load(file_desc) except ValueError: - 
app.logger.debug('Error happened - DecodeJSONError : {}'.format( - sys.exc_info()[0])) + app.logger.debug( + "Error happened - DecodeJSONError : {}".format(sys.exc_info()[0]) + ) return {"status": "error", "reason": "Decoding JSON failed"} except Exception: - app.logger.debug('Error happened - {}'.format(sys.exc_info()[0])) + app.logger.debug("Error happened - {}".format(sys.exc_info()[0])) return {"status": "error", "reason": "An error occurred"} timestamp = this.scans[scan_id]["started_at"] @@ -602,7 +671,7 @@ def _parse_report(filename, scan_id): addr_list = [] addr_list.append(str(json_data["host"])) # addr_type = "url" - #addr_list.append("https://"+str(json_data["host"])) + # addr_list.append("https://"+str(json_data["host"])) target = { "addr": addr_list, @@ -611,54 +680,97 @@ def _parse_report(filename, scan_id): cms_name = str(json_data["cms_name"]).capitalize() # Check for plugins - if "plugins" in json_data.keys() and json_data["plugins"]["is_empty"] is False: - #has_plugins = True + if ( + "plugins" in json_data.keys() + and json_data["plugins"]["is_empty"] is False + ): + # has_plugins = True for fd_elt in json_data["plugins"]["finds"]: plg_name = fd_elt["name"] - app.logger.debug('{} - Plugin {} is installed'.format(cms_name, plg_name)) + app.logger.debug( + "{} - Plugin {} is installed".format(cms_name, plg_name) + ) desc = "" - if hasattr(fd_elt, 'imu'): - desc = 'The scan detected that the plugin {} is installed on this CMS \ - ({}).'.format(plg_name, fd_elt["imu"]["description"]) + if hasattr(fd_elt, "imu"): + desc = "The scan detected that the plugin {} is installed on this CMS \ + ({}).".format( + plg_name, fd_elt["imu"]["description"] + ) else: - desc = 'The scan detected that the plugin {} is installed on this CMS \ - .'.format(plg_name) + desc = "The scan detected that the plugin {} is installed on this CMS \ + .".format( + plg_name + ) # Add plugin found to findings - res.append(deepcopy(_add_issue(scan_id, target, timestamp, - '{} - Plugin 
{} is installed'.format(cms_name, plg_name), - desc[0], type='intalled_plugin'))) + res.append( + deepcopy( + _add_issue( + scan_id, + target, + timestamp, + "{} - Plugin {} is installed".format( + cms_name, plg_name + ), + desc[0], + type="intalled_plugin", + ) + ) + ) # Check for themes - #has_themes = False - if "themes" in json_data.keys() and json_data["themes"]["is_empty"] is False: - #has_themes = True + # has_themes = False + if ( + "themes" in json_data.keys() + and json_data["themes"]["is_empty"] is False + ): + # has_themes = True for fd_elt in json_data["themes"]["finds"]: thm_name = fd_elt["name"] thm_url = fd_elt["url"] - app.logger.debug('Theme {} is installed'.format(thm_name)) + app.logger.debug("Theme {} is installed".format(thm_name)) # Add theme found to findings - res.append(deepcopy( - _add_issue(scan_id, target, timestamp, - '{} - Theme {} is installed'.format(cms_name, thm_name), - 'The scan detected that the theme {} is installed on \ - {}.'.format(thm_name, thm_url), type='intalled_theme'))) + res.append( + deepcopy( + _add_issue( + scan_id, + target, + timestamp, + "{} - Theme {} is installed".format(cms_name, thm_name), + "The scan detected that the theme {} is installed on \ + {}.".format( + thm_name, thm_url + ), + type="intalled_theme", + ) + ) + ) # Check for interesting URLs - #has_urls = False + # has_urls = False if json_data["interesting urls"]["is_empty"] is False: - #has_urls = True + # has_urls = True for fd_elt in json_data["interesting urls"]["finds"]: url_name = fd_elt["url"] url_desc = fd_elt["description"] - app.logger.debug('Found interesting url: {}'.format(url_name)) + app.logger.debug("Found interesting url: {}".format(url_name)) # Add intesresting url found to findings - res.append(deepcopy( - _add_issue(scan_id, target, timestamp, - '{} - Interesting url {} found'.format(cms_name, url_name), - 'An interesting URL was found: {} - "{}"'.format( - url_name, url_desc), - type='interesting_url'))) + res.append( + 
deepcopy( + _add_issue( + scan_id, + target, + timestamp, + "{} - Interesting url {} found".format( + cms_name, url_name + ), + 'An interesting URL was found: {} - "{}"'.format( + url_name, url_desc + ), + type="interesting_url", + ) + ) + ) # TODO Check host availability - #if False: + # if False: # res.append(deepcopy(_add_issue(scan_id, target, ts, # "Host is up", # "The scan detected that the host was up", @@ -667,33 +779,48 @@ def _parse_report(filename, scan_id): if json_data["version"]["is_empty"] is False: version_list = json_data["version"]["finds"] for ver in version_list: - app.logger.debug('Version {} is possibly installed'.format(ver)) + app.logger.debug("Version {} is possibly installed".format(ver)) # Get vulns from hears - #app.logger.debug("Login is {}".format(this.scans[scan_id]["hears_api"])) - if "hears_api" in this.scans[scan_id] and "url" in this.scans[scan_id]["hears_api"]: + # app.logger.debug("Login is {}".format(this.scans[scan_id]["hears_api"])) + if ( + "hears_api" in this.scans[scan_id] + and "url" in this.scans[scan_id]["hears_api"] + ): try: - t_vuln_refs, t_cvss_score, t_desc = _get_hears_findings(scan_id, - cms_name, - cms_name, - ver) + t_vuln_refs, t_cvss_score, t_desc = _get_hears_findings( + scan_id, cms_name, cms_name, ver + ) except Exception: - app.logger.debug("Error while loading Hears API, \ - skipping vulnerability checking") + app.logger.debug( + "Error while loading Hears API, \ + skipping vulnerability checking" + ) t_vuln_refs, t_cvss_score, t_desc = None, 0.0, "" pass else: app.logger.debug("Skipping vulnerability checking") t_vuln_refs, t_cvss_score, t_desc = None, 0.0, "" # Add version found to findings - res.append(deepcopy( - _add_issue(scan_id, target, timestamp, - '{} - Version {} is possibly installed'.format(cms_name, ver), - 'The scan detected that the version {} \ - is possibly installed.\n{}'.format(ver, t_desc), - type='intalled_version', - confidence='low', - vuln_refs=t_vuln_refs, - 
severity=_get_cvss_severity(t_cvss_score)))) + res.append( + deepcopy( + _add_issue( + scan_id, + target, + timestamp, + "{} - Version {} is possibly installed".format( + cms_name, ver + ), + "The scan detected that the version {} \ + is possibly installed.\n{}".format( + ver, t_desc + ), + type="intalled_version", + confidence="low", + vuln_refs=t_vuln_refs, + severity=_get_cvss_severity(t_cvss_score), + ) + ) + ) # Remove credentials this.scans[scan_id]["hears_api"] = {} # Return results @@ -703,12 +830,14 @@ def _parse_report(filename, scan_id): ########################### -@app.route('/engines/droopescan/getfindings/') +@app.route("/engines/droopescan/getfindings/") def getfindings(scan_id): """Retrieve findings from scan results.""" res = {"page": "getfindings", "scan_id": scan_id} if scan_id not in this.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) proc = this.scans[scan_id]["proc"] @@ -716,9 +845,11 @@ def getfindings(scan_id): # check if the scan is finished status() - if (hasattr(proc, 'pid') and - psutil.pid_exists(proc.pid) and - psutil.Process(proc.pid).status() in ["sleeping", "running"]): + if ( + hasattr(proc, "pid") + and psutil.pid_exists(proc.pid) + and psutil.Process(proc.pid).status() in ["sleeping", "running"] + ): res.update({"status": "error", "reason": "Scan in progress"}) return jsonify(res) @@ -729,9 +860,7 @@ def getfindings(scan_id): return jsonify(res) issues = _parse_report(report_filename, scan_id) - scan = { - "scan_id": scan_id - } + scan = {"scan_id": scan_id} summary = { "nb_issues": len(issues), "nb_info": len(issues), @@ -740,25 +869,29 @@ def getfindings(scan_id): "nb_high": 0, "nb_critical": 0, "engine_name": "droopescan", - "engine_version": this.scanner['version'] + "engine_version": this.scanner["version"], } # Store the findings in a file - with 
open(BASE_DIR+"/results/droopescan_"+scan_id+".json", 'w') as report_file: - json.dump({"scan": scan, "summary": summary, "issues": issues}, - report_file, default=_json_serial) + with open( + BASE_DIR + "/results/droopescan_" + scan_id + ".json", "w" + ) as report_file: + json.dump( + {"scan": scan, "summary": summary, "issues": issues}, + report_file, + default=_json_serial, + ) # Delete the tmp hosts file (used with -iL argument upon launching Droopescan) - hosts_filename = BASE_DIR+"/tmp/engine_droopescan_hosts_file_scan_id_{}.tmp".format(scan_id) + hosts_filename = ( + BASE_DIR + "/tmp/engine_droopescan_hosts_file_scan_id_{}.tmp".format(scan_id) + ) if os.path.exists(hosts_filename): os.remove(hosts_filename) - res.update({ - "scan": scan, - "summary": summary, - "issues": issues, - "status": "success" - }) + res.update( + {"scan": scan, "summary": summary, "issues": issues, "status": "success"} + ) return jsonify(res) @@ -766,23 +899,34 @@ def getfindings(scan_id): @app.before_first_request def main(): """First function called.""" - if not os.path.exists(BASE_DIR+"/results"): - os.makedirs(BASE_DIR+"/results") - if not os.path.exists(BASE_DIR+"/logs"): - os.makedirs(BASE_DIR+"/logs") - if not os.path.exists(BASE_DIR+"/tmp"): - os.makedirs(BASE_DIR+"/tmp") + if not os.path.exists(BASE_DIR + "/results"): + os.makedirs(BASE_DIR + "/results") + if not os.path.exists(BASE_DIR + "/logs"): + os.makedirs(BASE_DIR + "/logs") + if not os.path.exists(BASE_DIR + "/tmp"): + os.makedirs(BASE_DIR + "/tmp") loadconfig() -if __name__ == '__main__': +if __name__ == "__main__": parser = optparse.OptionParser() - parser.add_option("-H", "--host", help="Hostname of the Flask app [default %s]" % - APP_HOST, default=APP_HOST) - parser.add_option("-P", "--port", help="Port for the Flask app [default %s]" % - APP_PORT, default=APP_PORT) - parser.add_option("-d", "--debug", action="store_true", - dest="debug", help=optparse.SUPPRESS_HELP) + parser.add_option( + "-H", + "--host", + 
help="Hostname of the Flask app [default %s]" % APP_HOST, + default=APP_HOST, + ) + parser.add_option( + "-P", + "--port", + help="Port for the Flask app [default %s]" % APP_PORT, + default=APP_PORT, + ) + parser.add_option( + "-d", "--debug", action="store_true", dest="debug", help=optparse.SUPPRESS_HELP + ) options, _ = parser.parse_args() - app.run(debug=options.debug, host=options.host, port=int(options.port), threaded=True) + app.run( + debug=options.debug, host=options.host, port=int(options.port), threaded=True + ) diff --git a/engines/eyewitness/engine-eyewitness.py b/engines/eyewitness/engine-eyewitness.py index d79c6ac0..078c8d1b 100755 --- a/engines/eyewitness/engine-eyewitness.py +++ b/engines/eyewitness/engine-eyewitness.py @@ -3,7 +3,7 @@ """ EyeWitness PatrOwl engine application. -Copyright (C) 2021 Nicolas Mattiocco - @MaKyOtOx +Copyright (C) 2024 Nicolas Mattiocco - @MaKyOtOx Licensed under the AGPLv3 License Written by Nicolas BEGUIER (nicolas.beguier@adevinta.com) """ @@ -34,7 +34,7 @@ APP_DEBUG = False APP_HOST = "0.0.0.0" APP_PORT = 5018 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 25)) +APP_MAXSCANS = int(os.environ.get("APP_MAXSCANS", 25)) APP_ENGINE_NAME = "eyewitness" APP_BASE_DIR = dirname(realpath(__file__)) COMPARE_CEIL = 25 @@ -46,7 +46,7 @@ base_dir=APP_BASE_DIR, name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS, - version=VERSION + version=VERSION, ) @@ -75,8 +75,8 @@ def get_criticity(score): def eyewitness_cmd(list_url, asset_id, scan_id, extra_opts): """Return the screenshot path.""" - if 'extra_opts' in extra_opts: - extra_opts = extra_opts['extra_opts'].split(' ') + if "extra_opts" in extra_opts: + extra_opts = extra_opts["extra_opts"].split(" ") else: extra_opts = [] result = dict() @@ -90,7 +90,20 @@ def eyewitness_cmd(list_url, asset_id, scan_id, extra_opts): for url in list_url: screenshot_base_path = asset_base_path + "/" + str(count) try: - 
check_output(["{}/EyeWitness.py".format(ENGINE.scanner["options"]["EyeWitnessDirectory"]["value"]), "--single", url, "--web", "-d", screenshot_base_path, "--no-prompt"] + extra_opts) + check_output( + [ + "{}/EyeWitness.py".format( + ENGINE.scanner["options"]["EyeWitnessDirectory"]["value"] + ), + "--single", + url, + "--web", + "-d", + screenshot_base_path, + "--no-prompt", + ] + + extra_opts + ) except Exception as err_msg: LOG.warning(err_msg) continue @@ -98,20 +111,55 @@ def eyewitness_cmd(list_url, asset_id, scan_id, extra_opts): # Retry screenshot capture if previous fail if not screenshot_files: try: - check_output(["{}/EyeWitness.py".format(ENGINE.scanner["options"]["EyeWitnessDirectory"]["value"]), "--single", url, "--web", "-d", screenshot_base_path, "--no-prompt"] + extra_opts) + check_output( + [ + "{}/EyeWitness.py".format( + ENGINE.scanner["options"]["EyeWitnessDirectory"]["value"] + ), + "--single", + url, + "--web", + "-d", + screenshot_base_path, + "--no-prompt", + ] + + extra_opts + ) except Exception as err_msg: LOG.warning(err_msg) continue if not screenshot_files: continue - result_url = "{repo_url}/{scan_id}/{asset_id}/{count}/screens/{screenshot}".format(repo_url=ENGINE.scanner["options"]["ScreenshotsURL"]["value"], scan_id=scan_id, asset_id=asset_id, count=count, screenshot=screenshot_files[0]) - report_url = "{repo_url}/{scan_id}/{asset_id}/{count}/report.html".format(repo_url=ENGINE.scanner["options"]["ScreenshotsURL"]["value"], scan_id=scan_id, asset_id=asset_id, count=count) - report_sources_path = "{base_path}/{asset_id}/{count}/source/".format(base_path=base_path, asset_id=asset_id, count=count) - result.update({url: { - "path": "{}/screens/{}".format(screenshot_base_path, screenshot_files[0]), - "url": result_url, - "report": report_url, - "report_sources_path": report_sources_path}}) + result_url = ( + "{repo_url}/{scan_id}/{asset_id}/{count}/screens/{screenshot}".format( + 
repo_url=ENGINE.scanner["options"]["ScreenshotsURL"]["value"], + scan_id=scan_id, + asset_id=asset_id, + count=count, + screenshot=screenshot_files[0], + ) + ) + report_url = "{repo_url}/{scan_id}/{asset_id}/{count}/report.html".format( + repo_url=ENGINE.scanner["options"]["ScreenshotsURL"]["value"], + scan_id=scan_id, + asset_id=asset_id, + count=count, + ) + report_sources_path = "{base_path}/{asset_id}/{count}/source/".format( + base_path=base_path, asset_id=asset_id, count=count + ) + result.update( + { + url: { + "path": "{}/screens/{}".format( + screenshot_base_path, screenshot_files[0] + ), + "url": result_url, + "report": report_url, + "report_sources_path": report_sources_path, + } + } + ) count += 1 return result @@ -120,8 +168,10 @@ def get_last_screenshot(current_path, asset_id, scan_id): """Return the path and the URL of the last screenshot taken.""" last_scan_id = 0 last_scan_path = current_path - last_scan_url = '' - for root, _, files in os.walk(ENGINE.scanner["options"]["ScreenshotsDirectory"]["value"]): + last_scan_url = "" + for root, _, files in os.walk( + ENGINE.scanner["options"]["ScreenshotsDirectory"]["value"] + ): if current_path.split("/")[-1] in files: _scan_id = int(root.split("/")[4]) # Get the latest scan_id valid @@ -129,7 +179,15 @@ def get_last_screenshot(current_path, asset_id, scan_id): last_scan_id = _scan_id last_scan_path = "{}/{}".format(root, current_path.split("/")[-1]) - last_scan_url = "{repo_url}/{scan_id}/{asset_id}/{count}/screens/{screenshot}".format(repo_url=ENGINE.scanner["options"]["ScreenshotsURL"]["value"], scan_id=last_scan_id, asset_id=asset_id, count=last_scan_path.split("/")[6], screenshot=last_scan_path.split("/")[-1]) + last_scan_url = ( + "{repo_url}/{scan_id}/{asset_id}/{count}/screens/{screenshot}".format( + repo_url=ENGINE.scanner["options"]["ScreenshotsURL"]["value"], + scan_id=last_scan_id, + asset_id=asset_id, + count=last_scan_path.split("/")[6], + screenshot=last_scan_path.split("/")[-1], + ) + ) 
return last_scan_path, last_scan_url @@ -137,7 +195,17 @@ def get_last_screenshot(current_path, asset_id, scan_id): def diff_screenshot(screenshot1, screenshot2): """Return the percentage of differences between 2 screenshots.""" try: - output = check_output([ENGINE.scanner["options"]["ImageMagickComparePath"]["value"], "-metric", "RMSE", screenshot1, screenshot2, "NULL:"], stderr=STDOUT) + output = check_output( + [ + ENGINE.scanner["options"]["ImageMagickComparePath"]["value"], + "-metric", + "RMSE", + screenshot1, + screenshot2, + "NULL:", + ], + stderr=STDOUT, + ) except CalledProcessError as err_msg: output = err_msg.output except ValueError: @@ -154,12 +222,14 @@ def diff_screenshot(screenshot1, screenshot2): def is_forsale(report_sources_path): """Return True if domain is for sale.""" - if not exists(report_sources_path) or \ - not os.listdir(report_sources_path) or \ - "RulesForSale" not in ENGINE.scanner["options"]: + if ( + not exists(report_sources_path) + or not os.listdir(report_sources_path) + or "RulesForSale" not in ENGINE.scanner["options"] + ): return False report_file_path = report_sources_path + os.listdir(report_sources_path)[0] - report_file = open(report_file_path, 'r') + report_file = open(report_file_path, "r") report_content = report_file.read() report_file.close() for rule_name in ENGINE.scanner["options"]["RulesForSale"]: @@ -238,17 +308,38 @@ def status(): eyewitness_directory = ENGINE.scanner["options"]["EyeWitnessDirectory"]["value"] if not exists(eyewitness_directory): LOG.error("Error: EyeWitnessDirectory not found : %s", eyewitness_directory) - return jsonify({"status": "error", "reason": "EyeWitnessDirectory not found : {}".format(eyewitness_directory)}) + return jsonify( + { + "status": "error", + "reason": "EyeWitnessDirectory not found : {}".format( + eyewitness_directory + ), + } + ) screenshots_directory = ENGINE.scanner["options"]["ScreenshotsDirectory"]["value"] if not exists(screenshots_directory): LOG.error("Error: 
ScreenshotsDirectory not found : %s", screenshots_directory) - return {"status": "error", "reason": "ScreenshotsDirectory not found : {}".format(screenshots_directory)} + return { + "status": "error", + "reason": "ScreenshotsDirectory not found : {}".format( + screenshots_directory + ), + } - imagemagick_compare_path = ENGINE.scanner["options"]["ImageMagickComparePath"]["value"] + imagemagick_compare_path = ENGINE.scanner["options"]["ImageMagickComparePath"][ + "value" + ] if not exists(imagemagick_compare_path): - LOG.error("Error: ImageMagickComparePath not found : %s", imagemagick_compare_path) - return {"status": "error", "reason": "ImageMagickComparePath not found : {}".format(imagemagick_compare_path)} + LOG.error( + "Error: ImageMagickComparePath not found : %s", imagemagick_compare_path + ) + return { + "status": "error", + "reason": "ImageMagickComparePath not found : {}".format( + imagemagick_compare_path + ), + } return ENGINE.getstatus() @@ -258,7 +349,9 @@ def status_scan(scan_id): """Get status on scan identified by id.""" res = {"page": "status", "status": "UNKNOWN"} if scan_id not in ENGINE.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) LOG.warning(res) return jsonify(res) @@ -297,7 +390,7 @@ def getreport(scan_id): def _loadconfig(): """Load config during startup.""" - conf_file = APP_BASE_DIR+"/eyewitness.json" + conf_file = APP_BASE_DIR + "/eyewitness.json" if exists(conf_file): json_data = open(conf_file) ENGINE.scanner = load(json_data) @@ -308,37 +401,66 @@ def _loadconfig(): if "EyeWitnessDirectory" not in ENGINE.scanner["options"]: LOG.error("Error: You have to specify EyeWitnessDirectory in options") - return {"status": "error", "reason": "You have to specify EyeWitnessDirectory in options"} + return { + "status": "error", + "reason": "You have to specify EyeWitnessDirectory in options", + } 
eyewitness_directory = ENGINE.scanner["options"]["EyeWitnessDirectory"]["value"] if not exists(eyewitness_directory): LOG.error("Error: EyeWitnessDirectory not found : %s", eyewitness_directory) - return {"status": "error", "reason": "EyeWitnessDirectory not found : {}".format(eyewitness_directory)} + return { + "status": "error", + "reason": "EyeWitnessDirectory not found : {}".format(eyewitness_directory), + } LOG.warning("[OK] EyeWitnessDirectory") if "ScreenshotsURL" not in ENGINE.scanner["options"]: LOG.error("Error: You have to specify ScreenshotsURL in options") - return {"status": "error", "reason": "You have to specify ScreenshotsURL in options"} + return { + "status": "error", + "reason": "You have to specify ScreenshotsURL in options", + } if "ScreenshotsDirectory" not in ENGINE.scanner["options"]: LOG.error("Error: You have to specify ScreenshotsDirectory in options") - return {"status": "error", "reason": "You have to specify ScreenshotsDirectory in options"} + return { + "status": "error", + "reason": "You have to specify ScreenshotsDirectory in options", + } screenshots_directory = ENGINE.scanner["options"]["ScreenshotsDirectory"]["value"] if not exists(screenshots_directory): LOG.error("Error: ScreenshotsDirectory not found : %s", screenshots_directory) - return {"status": "error", "reason": "ScreenshotsDirectory not found : {}".format(screenshots_directory)} + return { + "status": "error", + "reason": "ScreenshotsDirectory not found : {}".format( + screenshots_directory + ), + } LOG.warning("[OK] ScreenshotsDirectory") if "ImageMagickComparePath" not in ENGINE.scanner["options"]: LOG.error("Error: You have to specify ImageMagickComparePath in options") - return {"status": "error", "reason": "You have to specify ImageMagickComparePath in options"} - imagemagick_compare_path = ENGINE.scanner["options"]["ImageMagickComparePath"]["value"] + return { + "status": "error", + "reason": "You have to specify ImageMagickComparePath in options", + } + 
imagemagick_compare_path = ENGINE.scanner["options"]["ImageMagickComparePath"][ + "value" + ] if not exists(imagemagick_compare_path): - LOG.error("Error: ImageMagickComparePath not found : %s", imagemagick_compare_path) - return {"status": "error", "reason": "ImageMagickComparePath not found : {}".format(imagemagick_compare_path)} + LOG.error( + "Error: ImageMagickComparePath not found : %s", imagemagick_compare_path + ) + return { + "status": "error", + "reason": "ImageMagickComparePath not found : {}".format( + imagemagick_compare_path + ), + } LOG.warning("[OK] ImageMagickComparePath") @@ -348,10 +470,10 @@ def _loadconfig(): ENGINE.scanner["options"]["RulesForSale"] LOG.warning("[OK] RulesForSale") - version_filename = APP_BASE_DIR+'/VERSION' + version_filename = APP_BASE_DIR + "/VERSION" if os.path.exists(version_filename): version_file = open(version_filename, "r") - ENGINE.version = version_file.read().rstrip('\n') + ENGINE.version = version_file.read().rstrip("\n") version_file.close() @@ -372,31 +494,39 @@ def start_scan(): # Check the scanner is ready to start a new scan if len(ENGINE.scans) == APP_MAXSCANS: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) LOG.warning(res) return jsonify(res) status() if ENGINE.scanner["status"] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": "scanner not ready", - "status": ENGINE.scanner["status"] - }}) + res.update( + { + "status": "refused", + "details": { + "reason": "scanner not ready", + "status": ENGINE.scanner["status"], + }, + } + ) LOG.warning(res) return jsonify(res) data = loads(request.data.decode("utf-8")) if "assets" not in data.keys() or "scan_id" not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something 
is missing ('assets' ?)" - }}) + res.update( + { + "status": "refused", + "details": {"reason": "arg error, something is missing ('assets' ?)"}, + } + ) LOG.warning(res) return jsonify(res) @@ -404,19 +534,25 @@ def start_scan(): for asset in data["assets"]: # Value if "value" not in asset.keys() or not asset["value"]: - res.update({ - "status": "error", - "reason": "arg error, something is missing ('asset.value')" - }) + res.update( + { + "status": "error", + "reason": "arg error, something is missing ('asset.value')", + } + ) LOG.warning(res) return jsonify(res) # Supported datatypes if asset["datatype"] not in ENGINE.scanner["allowed_asset_types"]: - res.update({ - "status": "error", - "reason": "arg error, bad value for '{}' datatype (not supported)".format(asset["value"]) - }) + res.update( + { + "status": "error", + "reason": "arg error, bad value for '{}' datatype (not supported)".format( + asset["value"] + ), + } + ) LOG.warning(res) return jsonify(res) @@ -430,25 +566,29 @@ def start_scan(): scan_id = str(data["scan_id"]) if data["scan_id"] in ENGINE.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' is probably already launched".format(data["scan_id"]), + res.update( + { + "status": "refused", + "details": { + "reason": "scan '{}' is probably already launched".format( + data["scan_id"] + ), + }, } - }) + ) LOG.warning(res) return jsonify(res) scan = { - "assets": assets, - "assets_data": data["assets"], - "threads": [], - "options": get_options(data), - "scan_id": scan_id, - "status": "STARTED", - "lock": False, - "started_at": int(time() * 1000), - "findings": {} + "assets": assets, + "assets_data": data["assets"], + "threads": [], + "options": get_options(data), + "scan_id": scan_id, + "status": "STARTED", + "lock": False, + "started_at": int(time() * 1000), + "findings": {}, } ENGINE.scans.update({scan_id: scan}) @@ -456,12 +596,7 @@ def start_scan(): thread.start() ENGINE.scans[scan_id]["threads"].append(thread) 
- res.update({ - "status": "accepted", - "details": { - "scan_id": scan["scan_id"] - } - }) + res.update({"status": "accepted", "details": {"scan_id": scan["scan_id"]}}) LOG.warning(res) return jsonify(res) @@ -484,40 +619,60 @@ def _scan_urls(scan_id): if asset not in ENGINE.scans[scan_id]["findings"]: ENGINE.scans[scan_id]["findings"][asset] = {} try: - asset_data = next((x for x in ENGINE.scans[scan_id]["assets_data"] if x["value"] == asset), None) + asset_data = next( + ( + x + for x in ENGINE.scans[scan_id]["assets_data"] + if x["value"] == asset + ), + None, + ) urls = list() if asset.startswith("http://"): - urls.append("http://"+asset) + urls.append("http://" + asset) elif asset.startswith("https://"): - urls.append("https://"+asset) + urls.append("https://" + asset) else: # Check both - urls.append("http://"+asset) - urls.append("https://"+asset) + urls.append("http://" + asset) + urls.append("https://" + asset) - LOG.warning("[%s/%s] Screenshoting %s...", i+1, len(assets), asset) - result = eyewitness_cmd(urls, asset_data["id"], scan_id, ENGINE.scans[scan_id]['options']) - LOG.warning("[%s/%s] Screenshot result: %s", i+1, len(assets), result) + LOG.warning("[%s/%s] Screenshoting %s...", i + 1, len(assets), asset) + result = eyewitness_cmd( + urls, asset_data["id"], scan_id, ENGINE.scans[scan_id]["options"] + ) + LOG.warning("[%s/%s] Screenshot result: %s", i + 1, len(assets), result) # Get differences with the last screenshot for url in result: - last_screenshot_path, last_screenshot_url = get_last_screenshot(result[url]["path"], asset_data["id"], scan_id) + last_screenshot_path, last_screenshot_url = get_last_screenshot( + result[url]["path"], asset_data["id"], scan_id + ) diff = diff_screenshot(result[url]["path"], last_screenshot_path) - LOG.warning("[%s/%s] Screenshot diff: %s percent", i+1, len(assets), diff) - result[url].update({ - "previous_diff": diff, - "last_screenshot_path": last_screenshot_path, - "last_screenshot_url": last_screenshot_url}) 
+ LOG.warning( + "[%s/%s] Screenshot diff: %s percent", i + 1, len(assets), diff + ) + result[url].update( + { + "previous_diff": diff, + "last_screenshot_path": last_screenshot_path, + "last_screenshot_url": last_screenshot_url, + } + ) # Get the difference between the current screenshots current_diff = None if len(result) == 2: - current_diff = diff_screenshot(result[urls[0]]["path"], result[urls[1]]["path"]) + current_diff = diff_screenshot( + result[urls[0]]["path"], result[urls[1]]["path"] + ) result["current_diff"] = current_diff ENGINE.scans[scan_id]["findings"][asset]["issues"] = result except Exception as err_msg: - LOG.error("_scan_urls: API Connexion error for asset %s: %s", asset, err_msg) + LOG.error( + "_scan_urls: API Connexion error for asset %s: %s", asset, err_msg + ) return False LOG.warning("lock off") @@ -533,12 +688,7 @@ def _parse_results(scan_id): issues = [] summary = {} - nb_vulns = { - "info": 0, - "low": 0, - "medium": 0, - "high": 0 - } + nb_vulns = {"info": 0, "low": 0, "medium": 0, "high": 0} timestamp = datetime.now() for asset in ENGINE.scans[scan_id]["findings"]: @@ -557,49 +707,89 @@ def _parse_results(scan_id): report_urls.append(asset_issues[url]["report"]) # Create an issue if the screenshot differs from last time previous_diff = asset_issues[url]["previous_diff"] - is_for_sale = is_forsale(asset_issues[url]["report_sources_path"]) or is_for_sale + is_for_sale = ( + is_forsale(asset_issues[url]["report_sources_path"]) or is_for_sale + ) if previous_diff is None: nb_vulns[get_criticity(cvss_max)] += 1 - issues.append({ - "issue_id": len(issues)+1, - "severity": "medium", "confidence": "certain", - "target": {"addr": [asset], "protocol": "http"}, - "title": "[{}] Screenshot differs from last time".format(timestamp), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": 0}, "links": [asset_issues[url]["url"], asset_issues[url]["last_screenshot_url"]]}, - "type": "eyewitness_screenshot_diff", - "timestamp": timestamp, - 
"description": "Too much differences, Domain for sale: {}.".format(is_for_sale) - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "medium", + "confidence": "certain", + "target": {"addr": [asset], "protocol": "http"}, + "title": "[{}] Screenshot differs from last time".format( + timestamp + ), + "solution": "n/a", + "metadata": { + "risk": {"cvss_base_score": 0}, + "links": [ + asset_issues[url]["url"], + asset_issues[url]["last_screenshot_url"], + ], + }, + "type": "eyewitness_screenshot_diff", + "timestamp": timestamp, + "description": "Too much differences, Domain for sale: {}.".format( + is_for_sale + ), + } + ) elif previous_diff >= COMPARE_CEIL: nb_vulns[get_criticity(cvss_max)] += 1 - issues.append({ - "issue_id": len(issues)+1, - "severity": "medium", "confidence": "certain", - "target": {"addr": [asset], "protocol": "http"}, - "title": "[{}] Screenshot differs from last time".format(timestamp), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": 0}, "links": [asset_issues[url]["url"], asset_issues[url]["last_screenshot_url"]]}, - "type": "eyewitness_screenshot_diff", - "timestamp": timestamp, - "description": "The difference is about {}%, Domain for sale: {}.".format(previous_diff, is_for_sale) - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "medium", + "confidence": "certain", + "target": {"addr": [asset], "protocol": "http"}, + "title": "[{}] Screenshot differs from last time".format( + timestamp + ), + "solution": "n/a", + "metadata": { + "risk": {"cvss_base_score": 0}, + "links": [ + asset_issues[url]["url"], + asset_issues[url]["last_screenshot_url"], + ], + }, + "type": "eyewitness_screenshot_diff", + "timestamp": timestamp, + "description": "The difference is about {}%, Domain for sale: {}.".format( + previous_diff, is_for_sale + ), + } + ) current_diff = "These screeshots are different" if asset_issues["current_diff"] is not None: - current_diff = "The diffence between these screenshots is 
{}%".format(asset_issues["current_diff"]) + current_diff = "The diffence between these screenshots is {}%".format( + asset_issues["current_diff"] + ) nb_vulns[get_criticity(cvss_max)] += 1 - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(cvss_max), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http"}, - "title": "[{}] Some domain has been screenshoted by eyewitness".format(timestamp), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": cvss_max}, "links": report_urls}, - "type": "eyewitness_screenshot", - "timestamp": timestamp, - "description": "Screenshots: {}, Current Diff: {}, Domain for sale: {}".format(screenshot_urls, current_diff, is_for_sale) - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(cvss_max), + "confidence": "certain", + "target": {"addr": [asset], "protocol": "http"}, + "title": "[{}] Some domain has been screenshoted by eyewitness".format( + timestamp + ), + "solution": "n/a", + "metadata": { + "risk": {"cvss_base_score": cvss_max}, + "links": report_urls, + }, + "type": "eyewitness_screenshot", + "timestamp": timestamp, + "description": "Screenshots: {}, Current Diff: {}, Domain for sale: {}".format( + screenshot_urls, current_diff, is_for_sale + ), + } + ) summary = { "nb_issues": len(issues), @@ -608,7 +798,7 @@ def _parse_results(scan_id): "nb_medium": nb_vulns["medium"], "nb_high": nb_vulns["high"], "engine_name": "eyewitness", - "engine_version": ENGINE.scanner["version"] + "engine_version": ENGINE.scanner["version"], } return issues, summary @@ -621,14 +811,23 @@ def getfindings(scan_id): # check if the scan_id exists if scan_id not in ENGINE.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) LOG.warning(res) return jsonify(res) # check if the scan is finished status() if ENGINE.scans[scan_id]["status"] != 
"FINISHED": - res.update({"status": "error", "reason": "scan_id '{}' not finished (status={})".format(scan_id, ENGINE.scans[scan_id]["status"])}) + res.update( + { + "status": "error", + "reason": "scan_id '{}' not finished (status={})".format( + scan_id, ENGINE.scans[scan_id]["status"] + ), + } + ) LOG.warning(res) return jsonify(res) @@ -640,29 +839,33 @@ def getfindings(scan_id): "options": ENGINE.scans[scan_id]["options"], "status": ENGINE.scans[scan_id]["status"], "started_at": ENGINE.scans[scan_id]["started_at"], - "finished_at": ENGINE.scans[scan_id]["finished_at"] + "finished_at": ENGINE.scans[scan_id]["finished_at"], } # Store the findings in a file - with open(APP_BASE_DIR+"/results/eyewitness_"+scan_id+".json", "w") as report_file: - dump({ - "scan": scan, - "summary": summary, - "issues": issues - }, report_file, default=_json_serial) + with open( + APP_BASE_DIR + "/results/eyewitness_" + scan_id + ".json", "w" + ) as report_file: + dump( + {"scan": scan, "summary": summary, "issues": issues}, + report_file, + default=_json_serial, + ) # remove the scan from the active scan list clean_scan(scan_id) - res.update({"scan": scan, "summary": summary, "issues": issues, "status": "success"}) + res.update( + {"scan": scan, "summary": summary, "issues": issues, "status": "success"} + ) return jsonify(res) @app.before_first_request def main(): """First function called.""" - if not exists(APP_BASE_DIR+"/results"): - os.makedirs(APP_BASE_DIR+"/results") + if not exists(APP_BASE_DIR + "/results"): + os.makedirs(APP_BASE_DIR + "/results") _loadconfig() LOG.warning("Run engine") diff --git a/engines/nessus/engine-nessus.py b/engines/nessus/engine-nessus.py index f7091867..0a289ff2 100644 --- a/engines/nessus/engine-nessus.py +++ b/engines/nessus/engine-nessus.py @@ -21,82 +21,95 @@ app = Flask(__name__) -APP_DEBUG = os.environ.get('APP_DEBUG', '').lower() in ['true', '1', 'on', 'yes', 'y'] +APP_DEBUG = os.environ.get("APP_DEBUG", "").lower() in ["true", "1", "on", 
"yes", "y"] APP_HOST = "0.0.0.0" APP_PORT = 5002 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 20)) +APP_MAXSCANS = int(os.environ.get("APP_MAXSCANS", 20)) BASE_DIR = os.path.dirname(os.path.realpath(__file__)) -UPLOAD_FOLDER = BASE_DIR + '/tmp' -POLICY_FOLDER = BASE_DIR + '/etc' +UPLOAD_FOLDER = BASE_DIR + "/tmp" +POLICY_FOLDER = BASE_DIR + "/etc" this = sys.modules[__name__] -table = TinyDB('db.json').table('table') +table = TinyDB("db.json").table("table") this.nessscan = None this.scanner = {} this.scans = {} -if __name__ != '__main__': - gunicorn_logger = logging.getLogger('gunicorn.error') +if __name__ != "__main__": + gunicorn_logger = logging.getLogger("gunicorn.error") app.logger.handlers = gunicorn_logger.handlers app.logger.setLevel(gunicorn_logger.level) -@app.route('/') +@app.route("/") def default(): - return redirect(url_for('index')) + return redirect(url_for("index")) -@app.route('/engines/nessus/') +@app.route("/engines/nessus/") def index(): return jsonify({"page": "index"}) def _loadconfig(): - conf_file = BASE_DIR+'/nessus.json' + conf_file = BASE_DIR + "/nessus.json" if os.path.exists(conf_file): json_data = open(conf_file) this.scanner = json.load(json_data) - version_filename = BASE_DIR+'/VERSION' + version_filename = BASE_DIR + "/VERSION" if os.path.exists(version_filename): version_file = open(version_filename, "r") - this.scanner["version"] = version_file.read().rstrip('\n') + this.scanner["version"] = version_file.read().rstrip("\n") version_file.close() - os.environ['NO_PROXY'] = this.scanner["server_host"] + os.environ["NO_PROXY"] = this.scanner["server_host"] else: app.logger.debug("Error: config file '{}' not found".format(conf_file)) return {"status": "error", "details": {"reason": "config file not found"}} try: # Check authentication methods (login/pass vs. 
api) - if 'access_key' in this.scanner.keys() and 'secret_key' in this.scanner.keys(): + if "access_key" in this.scanner.keys() and "secret_key" in this.scanner.keys(): this.nessscan = ness6rest.Scanner( - url="https://{}:{}".format(this.scanner['server_host'], this.scanner['server_port']), - api_akey=this.scanner['access_key'], - api_skey=this.scanner['secret_key'], - insecure=True) - elif 'server_username' in this.scanner.keys() and 'server_password' in this.scanner.keys(): + url="https://{}:{}".format( + this.scanner["server_host"], this.scanner["server_port"] + ), + api_akey=this.scanner["access_key"], + api_skey=this.scanner["secret_key"], + insecure=True, + ) + elif ( + "server_username" in this.scanner.keys() + and "server_password" in this.scanner.keys() + ): this.nessscan = ness6rest.Scanner( - url="https://{}:{}".format(this.scanner['server_host'], this.scanner['server_port']), - login=this.scanner['server_username'], - password=this.scanner['server_password'], - insecure=True) - if this.nessscan.res['scanners'][0]['status'] == "on": + url="https://{}:{}".format( + this.scanner["server_host"], this.scanner["server_port"] + ), + login=this.scanner["server_username"], + password=this.scanner["server_password"], + insecure=True, + ) + if this.nessscan.res["scanners"][0]["status"] == "on": return {"status": "success"} else: return { "status": "error", - "details": {"reason": "connection error to Nessus instance (bad credz? not available ?)"} + "details": { + "reason": "connection error to Nessus instance (bad credz? not available ?)" + }, } except Exception: return { "status": "error", - "details": {"reason": "connection error to Nessus instance (bad credz? not available ?)"} + "details": { + "reason": "connection error to Nessus instance (bad credz? 
not available ?)" + }, } -@app.route('/engines/nessus/reloadconfig') +@app.route("/engines/nessus/reloadconfig") def reloadconfig(): res = {"page": "reloadconfig"} _loadconfig() @@ -104,7 +117,7 @@ def reloadconfig(): return jsonify(res) -@app.route('/engines/nessus/_upload_policy', methods=['POST']) +@app.route("/engines/nessus/_upload_policy", methods=["POST"]) def _upload_policy(): res = {"page": "_upload_policy"} @@ -113,18 +126,18 @@ def _upload_policy(): return jsonify(res) -@app.route('/engines/nessus/_get_scanlist') +@app.route("/engines/nessus/_get_scanlist") def _get_scanlist(): - res = { "page": "_get_scanlist"} + res = {"page": "_get_scanlist"} this.nessscan.action(action="scans", method="GET") - scan_list = [scan for scan in this.nessscan.res['scans']] + scan_list = [scan for scan in this.nessscan.res["scans"]] res.update({"status": "success", "details": {"scan_list": scan_list}}) return jsonify(res) -@app.route('/engines/nessus/getfindings/') +@app.route("/engines/nessus/getfindings/") def getfindings(scan_id): res = {"page": "getfindings", "scan_id": scan_id} scan_id = str(scan_id) @@ -132,54 +145,66 @@ def getfindings(scan_id): item = table.search(Query().scan_id == scan_id) if not item: - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) time.sleep(3) # Check the scan status scan_status(scan_id) - if item[0]['status'] not in ['FINISHED', 'STOPPED']: - res.update({"status": "error", "reason": "scan_id '{}' not finished".format(scan_id)}) + if item[0]["status"] not in ["FINISHED", "STOPPED"]: + res.update( + {"status": "error", "reason": "scan_id '{}' not finished".format(scan_id)} + ) return jsonify(res) nessscan_id = str(item[0]["nessscan_id"]) - if item[0]['nessus_scan_hid'] is not None: - this.nessscan.action(action="scans/"+nessscan_id+"?history_id="+item[0]['nessus_scan_hid'], method="GET") + if 
item[0]["nessus_scan_hid"] is not None: + this.nessscan.action( + action="scans/" + nessscan_id + "?history_id=" + item[0]["nessus_scan_hid"], + method="GET", + ) else: - this.nessscan.action(action="scans/"+nessscan_id, method="GET") + this.nessscan.action(action="scans/" + nessscan_id, method="GET") ###### report_content = this.nessscan.download_scan( - export_format='nessus', - history_id=item[0]['nessus_scan_hid'], - scan_id=nessscan_id) + export_format="nessus", + history_id=item[0]["nessus_scan_hid"], + scan_id=nessscan_id, + ) report_filename = "{}/reports/nessus_{}_{}.nessus".format( - BASE_DIR, scan_id, int(time.time())) - with open(report_filename, 'wb') as w: + BASE_DIR, scan_id, int(time.time()) + ) + with open(report_filename, "wb") as w: w.write(report_content) - nessus_prefix = "https://{}:{}".format(this.scanner['server_host'], this.scanner['server_port']) + nessus_prefix = "https://{}:{}".format( + this.scanner["server_host"], this.scanner["server_port"] + ) # Check if FQDN shoud be resolved (default=false) resolve_fqdn = False - if 'identifybyfqdn' in item[0]['options'].keys() and item[0]['options']['identifybyfqdn'] is True: + if ( + "identifybyfqdn" in item[0]["options"].keys() + and item[0]["options"]["identifybyfqdn"] is True + ): resolve_fqdn = True - block_summary, block_issues = parse_report(report_filename, nessus_prefix, resolve_fqdn) + block_summary, block_issues = parse_report( + report_filename, nessus_prefix, resolve_fqdn + ) ###### # Store the findings in a file - with open(BASE_DIR+"/results/nessus_"+scan_id+".json", 'w') as report_file: - json.dump({ - "scan": item[0], - "summary": block_summary, - "issues": block_issues - }, report_file, default=_json_serial) - - res.update({ - "status": "success", - "summary": block_summary, - "issues": block_issues - }) + with open(BASE_DIR + "/results/nessus_" + scan_id + ".json", "w") as report_file: + json.dump( + {"scan": item[0], "summary": block_summary, "issues": block_issues}, + 
report_file, + default=_json_serial, + ) + + res.update({"status": "success", "summary": block_summary, "issues": block_issues}) # Remove the scan from the active scan list clean_scan(scan_id) @@ -198,18 +223,20 @@ def _json_serial(obj): raise TypeError("Type not serializable") -@app.route('/engines/nessus/getreport/') +@app.route("/engines/nessus/getreport/") def getreport(scan_id): scan_id = str(scan_id) - filepath = BASE_DIR+"/results/nessus_"+scan_id+".json" + filepath = BASE_DIR + "/results/nessus_" + scan_id + ".json" if not os.path.exists(filepath): - return jsonify({ - "status": "error", - "reason": "report file for scan_id '{}' not found".format(scan_id)} + return jsonify( + { + "status": "error", + "reason": "report file for scan_id '{}' not found".format(scan_id), + } ) - return send_from_directory(BASE_DIR+"/results/", "nessus_"+scan_id+".json") + return send_from_directory(BASE_DIR + "/results/", "nessus_" + scan_id + ".json") def allowed_file(filename): @@ -217,20 +244,20 @@ def allowed_file(filename): return True -@app.route('/engines/nessus/_get_custom_policy', methods=['POST']) +@app.route("/engines/nessus/_get_custom_policy", methods=["POST"]) def _get_custom_policy(): res = {"page": "_get_custom_policy"} # check if the post request has the file part - if 'file' not in request.files: + if "file" not in request.files: res.update({"status": "error", "reason": "No file uploaded"}) return jsonify(res) - file = request.files['file'] - if file.filename == '': + file = request.files["file"] + if file.filename == "": res.update({"status": "error", "reason": "No file selected"}) return jsonify(res) if file and allowed_file(file.filename): - filename = secure_filename(file.filename) + '_' + str(int(time.time())) - file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) + filename = secure_filename(file.filename) + "_" + str(int(time.time())) + file.save(os.path.join(app.config["UPLOAD_FOLDER"], filename)) # remainder: purge the custom policy file? 
res.update({"status": "success", "details": {"filename": filename}}) @@ -239,93 +266,108 @@ def _get_custom_policy(): return jsonify(res) -@app.route('/engines/nessus/_get_local_policy', methods=['GET']) +@app.route("/engines/nessus/_get_local_policy", methods=["GET"]) def _get_local_policy(policy=None): res = {"page": "_get_local_policy"} - if not policy and not request.args.get('policy'): + if not policy and not request.args.get("policy"): res.update({"status": "error", "reason": "'policy' arg is missing"}) return jsonify(res) if not policy: - policy = request.args.get('policy') - policy_filename = POLICY_FOLDER + '/' + policy + policy = request.args.get("policy") + policy_filename = POLICY_FOLDER + "/" + policy if not os.path.exists(policy_filename): - res.update({ - "status": "error", - "reason": "policy file not found", - "details": {"filename": policy_filename, "name": policy}}) + res.update( + { + "status": "error", + "reason": "policy file not found", + "details": {"filename": policy_filename, "name": policy}, + } + ) else: - res.update({ - "status": "success", - "details": {"filename": policy_filename, "name": policy}}) + res.update( + { + "status": "success", + "details": {"filename": policy_filename, "name": policy}, + } + ) return jsonify(res) def _get_credentials(credz): format_credentials = [] for cred in credz: - if 'type' not in cred.keys(): + if "type" not in cred.keys(): continue - elif cred['type'] == "windows_password": - if 'username' in cred.keys() and 'password' in cred.keys(): + elif cred["type"] == "windows_password": + if "username" in cred.keys() and "password" in cred.keys(): win_domain = "" - if 'domain' in cred.keys(): - win_domain = cred['domain'] - format_credentials.append(credentials.WindowsPassword( - username=cred['username'], - password=cred['password'], - domain=win_domain - )) - elif cred['type'] == "ssh_password": - if 'username' in cred.keys() and 'password' in cred.keys(): - 
format_credentials.append(credentials.SshPassword( - username=cred['username'], - password=cred['password'] - )) + if "domain" in cred.keys(): + win_domain = cred["domain"] + format_credentials.append( + credentials.WindowsPassword( + username=cred["username"], + password=cred["password"], + domain=win_domain, + ) + ) + elif cred["type"] == "ssh_password": + if "username" in cred.keys() and "password" in cred.keys(): + format_credentials.append( + credentials.SshPassword( + username=cred["username"], password=cred["password"] + ) + ) # elif cred['type'] == "ssh_publickey": # pass # @Todo return format_credentials -@app.route('/engines/nessus/startscan', methods=['POST']) +@app.route("/engines/nessus/startscan", methods=["POST"]) def start_scan(): # @todo: validate parameters and options format res = {"page": "startscan"} - # check the scanner is ready to start a new scan - if table.count(Query().status == 'SCANNING') == APP_MAXSCANS: - res.update({ - "status": "error", - "details": { - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) + if table.count(Query().status == "SCANNING") == APP_MAXSCANS: + res.update( + { + "status": "error", + "details": { + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ) + }, } - }) + ) return jsonify(res) status() - if this.scanner['status'] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": "scanner not ready", - "status": this.scanner['status'] - }}) + if this.scanner["status"] != "READY": + res.update( + { + "status": "refused", + "details": { + "reason": "scanner not ready", + "status": this.scanner["status"], + }, + } + ) return jsonify(res) scan = {} # Parse the args in POST post_args = json.loads(request.data) - scan_id = str(post_args['scan_id']) + scan_id = str(post_args["scan_id"]) # Check assets allowed_assets = [] - for asset in post_args['assets']: + for asset in post_args["assets"]: if asset["datatype"] in 
this.scanner["allowed_asset_types"]: # extract the net location from urls - if asset["datatype"] == 'url': + if asset["datatype"] == "url": asset["value"] = "{uri.netloc}".format(uri=urlparse(asset["value"])) allowed_assets.append(asset["value"].strip()) assets = ",".join(allowed_assets) @@ -334,32 +376,41 @@ def start_scan(): nessus_scan_hid = None # Check action - if 'action' not in post_args['options'].keys(): - res.update({ - "status": "error", - "details": {"reason": "Missing action ('scan', 'getreports', ...)"} - }) + if "action" not in post_args["options"].keys(): + res.update( + { + "status": "error", + "details": {"reason": "Missing action ('scan', 'getreports', ...)"}, + } + ) return jsonify(res) - if post_args['options']['action'] == 'getreports': + if post_args["options"]["action"] == "getreports": # Search form policies - scan_name = post_args['options']['name'] + scan_name = post_args["options"]["name"] if not this.nessscan.scan_exists(scan_name): - res.update({ - "status": "error", - "details": {"reason": "Scan '{}' does not exist.".format(scan_name)} - }) + res.update( + { + "status": "error", + "details": { + "reason": "Scan '{}' does not exist.".format(scan_name) + }, + } + ) return jsonify(res) # Get scan details this.nessscan.scan_details(scan_name) - nessus_scan_uuid = this.nessscan.res['info']['uuid'] - if 'getlastcompletereport' not in post_args['options'].keys() or post_args['options']['getlastcompletereport'] is True: - for nessus_scan in this.nessscan.res['history'][::-1]: - if nessus_scan['status'] == 'completed': - nessus_scan_hid = str(nessus_scan['history_id']) - nessus_scan_uuid = nessus_scan['uuid'] + nessus_scan_uuid = this.nessscan.res["info"]["uuid"] + if ( + "getlastcompletereport" not in post_args["options"].keys() + or post_args["options"]["getlastcompletereport"] is True + ): + for nessus_scan in this.nessscan.res["history"][::-1]: + if nessus_scan["status"] == "completed": + nessus_scan_hid = str(nessus_scan["history_id"]) + 
nessus_scan_uuid = nessus_scan["uuid"] break # Update the scan info @@ -379,29 +430,29 @@ def start_scan(): # } # }) with transaction(table) as tr: - tr.insert({ - "scan_id": scan_id, - "scan_name": scan_name, - "nessscan_id": this.nessscan.res["info"]["object_id"], - "nessus_scan_hid": nessus_scan_hid, - "nessscan_uuid": nessus_scan_uuid, - "options": post_args['options'], - "policy_name": this.nessscan.res['info']['policy'], - "assets": post_args['assets'], - "status": "STARTED", - "started_at": int(time.time() * 1000), - "findings": {} - }) - - if post_args['options']['action'] == 'scan': + tr.insert( + { + "scan_id": scan_id, + "scan_name": scan_name, + "nessscan_id": this.nessscan.res["info"]["object_id"], + "nessus_scan_hid": nessus_scan_hid, + "nessscan_uuid": nessus_scan_uuid, + "options": post_args["options"], + "policy_name": this.nessscan.res["info"]["policy"], + "assets": post_args["assets"], + "status": "STARTED", + "started_at": int(time.time() * 1000), + "findings": {}, + } + ) + + if post_args["options"]["action"] == "scan": # Check scan policy - if 'policy' not in post_args['options'].keys(): - res.update({ - "status": "error", - "reason": "Missing policy name"}) + if "policy" not in post_args["options"].keys(): + res.update({"status": "error", "reason": "Missing policy name"}) return jsonify(res) - policy_name = post_args['options']['policy'].split(".nessus")[0] + policy_name = post_args["options"]["policy"].split(".nessus")[0] # Check the policy is already uploaded to the scanner # @Todo @@ -409,22 +460,23 @@ def start_scan(): try: this.nessscan.policy_set(name=policy_name) except SystemExit: - res.update({ - "status": "error", - "reason": "Bad policy name: {}".format(policy_name)}) + res.update( + {"status": "error", "reason": "Bad policy name: {}".format(policy_name)} + ) return jsonify(res) this.nessscan.policy_set(name=policy_name) this.nessscan.action( - action="policies/" + str(this.nessscan.policy_id), - method="put") + action="policies/" 
+ str(this.nessscan.policy_id), method="put" + ) # Add credentials (if any) - if 'credentials' in post_args['options'].keys(): - credz = _get_credentials(post_args['options']['credentials']) + if "credentials" in post_args["options"].keys(): + credz = _get_credentials(post_args["options"]["credentials"]) this.nessscan.policy_copy( existing_policy_name=policy_name, - new_policy_name=policy_name + "-" + scan_id) + new_policy_name=policy_name + "-" + scan_id, + ) this.nessscan.policy_add_creds(credz) # Create the scan @@ -455,24 +507,26 @@ def start_scan(): # }) with transaction(table) as tr: - tr.insert({ + tr.insert( + { "scan_id": scan_id, "scan_name": nessscan_name, "nessscan_id": nessscan_id, "nessus_scan_hid": nessus_scan_hid, - "nessscan_uuid": this.nessscan.res['scan_uuid'], - "options": post_args['options'], + "nessscan_uuid": this.nessscan.res["scan_uuid"], + "options": post_args["options"], "policy_name": policy_name, - "assets": post_args['assets'], + "assets": post_args["assets"], "status": "STARTED", "started_at": int(time.time() * 1000), - "findings": {} - }) + "findings": {}, + } + ) res.update({"status": "accepted", "details": scan}) return jsonify(res) -@app.route('/engines/nessus/stop/', methods=['GET']) +@app.route("/engines/nessus/stop/", methods=["GET"]) def stop_scan(scan_id): res = {"page": "stopscan"} scan_id = str(scan_id) @@ -481,28 +535,30 @@ def stop_scan(scan_id): # todo: use this.scans and nessus_scan_id if not item: - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) - if item[0]['options']['action'] == 'scan': + if item[0]["options"]["action"] == "scan": this.nessscan.action( - action="scans/"+str(item[0]["nessscan_id"])+"/stop", - method="POST") + action="scans/" + str(item[0]["nessscan_id"]) + "/stop", method="POST" + ) if this.nessscan.res != {}: - res.update({"status": "error", "reason": 
this.nessscan.res['error']}) + res.update({"status": "error", "reason": this.nessscan.res["error"]}) return jsonify(res) with transaction(table) as tr: - tr.update({ - "status": "STOPPED", - "finished_at": int(time.time() * 1000)}, - where('scan_id') == scan_id) + tr.update( + {"status": "STOPPED", "finished_at": int(time.time() * 1000)}, + where("scan_id") == scan_id, + ) res.update({"status": "success", "scan": item[0]}) return jsonify(res) -@app.route('/engines/nessus/stopscans', methods=['GET']) +@app.route("/engines/nessus/stopscans", methods=["GET"]) def stop(): res = {"page": "stopscans"} @@ -510,12 +566,11 @@ def stop(): scan_id = item.scan_id clean_scan(scan_id) - res.update({"status": "success", "details": { - "timestamp": int(time.time())}}) + res.update({"status": "success", "details": {"timestamp": int(time.time())}}) return jsonify(res) -@app.route('/engines/nessus/clean', methods=['GET']) +@app.route("/engines/nessus/clean", methods=["GET"]) def clean(): res = {"page": "clean"} table.truncate() @@ -523,7 +578,7 @@ def clean(): return jsonify(res) -@app.route('/engines/nessus/clean/', methods=['GET']) +@app.route("/engines/nessus/clean/", methods=["GET"]) def clean_scan(scan_id): res = {"page": "clean_scan"} scan_id = str(scan_id) @@ -532,135 +587,162 @@ def clean_scan(scan_id): item = table.search(Query().scan_id == scan_id) if not item: - res.update({ - "status": "error", - "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) - #this.scans.pop(scan_id) + # this.scans.pop(scan_id) with transaction(table) as tr: - tr.remove(where('scan_id') == scan_id) + tr.remove(where("scan_id") == scan_id) res.update({"status": "removed"}) return jsonify(res) -@app.route('/engines/nessus/status', methods=['GET']) +@app.route("/engines/nessus/status", methods=["GET"]) def status(): - res = {'page': 'status', "scans": this.scans} + res = {"page": "status", 
"scans": this.scans} - if table.count(Query().status == 'SCANNING') == APP_MAXSCANS: - this.scanner['status'] = "BUSY" - res.update({ - "status": "BUSY", - "reason": "Max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + if table.count(Query().status == "SCANNING") == APP_MAXSCANS: + this.scanner["status"] = "BUSY" + res.update( + { + "status": "BUSY", + "reason": "Max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) return jsonify(res) # Check if the remote service is available try: scan = {} - if 'access_key' in this.scanner.keys() and 'secret_key' in this.scanner.keys(): + if "access_key" in this.scanner.keys() and "secret_key" in this.scanner.keys(): scan = ness6rest.Scanner( - url="https://{}:{}".format(this.scanner['server_host'], this.scanner['server_port']), - api_akey=this.scanner['access_key'], - api_skey=this.scanner['secret_key'], - insecure=True) - elif 'server_username' in this.scanner.keys() and 'server_password' in this.scanner.keys(): + url="https://{}:{}".format( + this.scanner["server_host"], this.scanner["server_port"] + ), + api_akey=this.scanner["access_key"], + api_skey=this.scanner["secret_key"], + insecure=True, + ) + elif ( + "server_username" in this.scanner.keys() + and "server_password" in this.scanner.keys() + ): scan = ness6rest.Scanner( - url="https://{}:{}".format(this.scanner['server_host'], this.scanner['server_port']), - login=this.scanner['server_username'], - password=this.scanner['server_password'], - insecure=True) - if 'status' in scan.res.keys(): - this.scanner['status'] = "READY" - res.update({ - 'status': 'READY', - 'details': { - 'server_host': this.scanner['server_host'], - 'server_port': this.scanner['server_port'], - 'status': scan.res['status'] + url="https://{}:{}".format( + this.scanner["server_host"], this.scanner["server_port"] + ), + login=this.scanner["server_username"], + password=this.scanner["server_password"], + insecure=True, + ) + if "status" in scan.res.keys(): + 
this.scanner["status"] = "READY" + res.update( + { + "status": "READY", + "details": { + "server_host": this.scanner["server_host"], + "server_port": this.scanner["server_port"], + "status": scan.res["status"], + }, } - }) - elif scan.res['scanners'][0]['status'] == "on": - this.scanner['status'] = "READY" - res.update({ - 'status': 'READY', - 'details': { - 'server_host': this.scanner['server_host'], - 'server_port': this.scanner['server_port'], - 'engine_version': scan.res['scanners'][0]['engine_version'], - 'engine_build': scan.res['scanners'][0]['engine_build'], - 'scan_count': scan.res['scanners'][0]['scan_count'] + ) + elif scan.res["scanners"][0]["status"] == "on": + this.scanner["status"] = "READY" + res.update( + { + "status": "READY", + "details": { + "server_host": this.scanner["server_host"], + "server_port": this.scanner["server_port"], + "engine_version": scan.res["scanners"][0]["engine_version"], + "engine_build": scan.res["scanners"][0]["engine_build"], + "scan_count": scan.res["scanners"][0]["scan_count"], + }, } - }) + ) else: - this.scanner['status'] = "ERROR" - res.update({'status': 'ERROR', 'details': {'reason': 'Nessus engine not available'}}) + this.scanner["status"] = "ERROR" + res.update( + { + "status": "ERROR", + "details": {"reason": "Nessus engine not available"}, + } + ) except Exception: - this.scanner['status'] = "ERROR" - res.update({'status': 'ERROR', 'details': {'reason': 'Nessus engine not available'}}) + this.scanner["status"] = "ERROR" + res.update( + {"status": "ERROR", "details": {"reason": "Nessus engine not available"}} + ) return jsonify(res) -@app.route('/engines/nessus/status/', methods=['GET']) +@app.route("/engines/nessus/status/", methods=["GET"]) def scan_status(scan_id): scan_id = str(scan_id) item = table.search(Query().scan_id == scan_id) if not item: - return jsonify({ - "status": "ERROR", - "details": {"reason": "scan_id '{}' not found".format(scan_id)}}) + return jsonify( + { + "status": "ERROR", + "details": 
{"reason": "scan_id '{}' not found".format(scan_id)}, + } + ) # @todo: directly access to the right entry with transaction(table) as tr: try: this.nessscan.action( - action="scans/"+str(item[0]["nessscan_id"]), - method="GET") + action="scans/" + str(item[0]["nessscan_id"]), method="GET" + ) scan_status = this.nessscan.res - nessus_scan_status = 'unknown' - - if 'info' in scan_status.keys(): - nessus_scan_status = this.nessscan.res['info']['status'] - elif 'status' in scan_status.keys(): - nessus_scan_status = scan_status['status'] - - if item[0]['nessus_scan_hid'] is not None: - tr.update({'status': 'FINISHED'}, where('scan_id') == scan_id) - elif nessus_scan_status == 'completed': - tr.update({'status': 'FINISHED'}, where('scan_id') == scan_id) - elif nessus_scan_status in ['running', 'loading']: - tr.update({'status': 'SCANNING'}, where('scan_id') == scan_id) - elif nessus_scan_status == 'canceled': - tr.update({'status': 'STOPPED'}, where('scan_id') == scan_id) + nessus_scan_status = "unknown" + + if "info" in scan_status.keys(): + nessus_scan_status = this.nessscan.res["info"]["status"] + elif "status" in scan_status.keys(): + nessus_scan_status = scan_status["status"] + + if item[0]["nessus_scan_hid"] is not None: + tr.update({"status": "FINISHED"}, where("scan_id") == scan_id) + elif nessus_scan_status == "completed": + tr.update({"status": "FINISHED"}, where("scan_id") == scan_id) + elif nessus_scan_status in ["running", "loading"]: + tr.update({"status": "SCANNING"}, where("scan_id") == scan_id) + elif nessus_scan_status == "canceled": + tr.update({"status": "STOPPED"}, where("scan_id") == scan_id) else: - tr.update({'status': nessus_scan_status.upper()}, where('scan_id') == scan_id) + tr.update( + {"status": nessus_scan_status.upper()}, where("scan_id") == scan_id + ) except Exception: - tr.update({'status': 'ERROR'}, where('scan_id') == scan_id) + tr.update({"status": "ERROR"}, where("scan_id") == scan_id) - return jsonify({ - "status": 
item[0]['status'], - "scan": item[0]}) + return jsonify({"status": item[0]["status"], "scan": item[0]}) def _without_keys(d, keys): return {x: d[x] for x in d if x not in keys} -@app.route('/engines/nessus/info') +@app.route("/engines/nessus/info") def info(): secret_fields = ["access_key", "secret_key", "server_password"] - return jsonify({ - "page": "info", - "engine_config": _without_keys(this.scanner, secret_fields)}) + return jsonify( + {"page": "info", "engine_config": _without_keys(this.scanner, secret_fields)} + ) -@app.route('/engines/nessus/genreport', methods=['GET']) +@app.route("/engines/nessus/genreport", methods=["GET"]) def genreport(scan_id=None, report_format="html"): res = {"page": "genreport"} scan_id = str(scan_id) @@ -668,17 +750,25 @@ def genreport(scan_id=None, report_format="html"): item = table.search(Query().scan_id == scan_id) if not item: - return jsonify({ - "status": "ERROR", - "details": {"reason": "scan_id '{}' not found".format(scan_id)}}) + return jsonify( + { + "status": "ERROR", + "details": {"reason": "scan_id '{}' not found".format(scan_id)}, + } + ) this.nessscan.action(action="scans", method="GET") scan_status = None - for scan in this.nessscan.res['scans']: - if scan['id'] == int(scan_id): + for scan in this.nessscan.res["scans"]: + if scan["id"] == int(scan_id): scan_status = "found" if not scan_status: - res.update({"status": "error", "details": {"reason": "'scan_id={}' not found".format(scan_id)}}) + res.update( + { + "status": "error", + "details": {"reason": "'scan_id={}' not found".format(scan_id)}, + } + ) return jsonify(res) post_data = {"format": report_format} @@ -686,35 +776,43 @@ def genreport(scan_id=None, report_format="html"): post_data.update({"chapters": "vuln_by_host"}) ness_export_url = "scans/{}/export".format(scan_id) - if item[0]['nessus_scan_hid'] is not None: - ness_export_url += "?history_id=" + item[0]['nessus_scan_hid'] + if item[0]["nessus_scan_hid"] is not None: + ness_export_url += 
"?history_id=" + item[0]["nessus_scan_hid"] this.nessscan.action(action=ness_export_url, method="POST", extra=post_data) - res.update({"status": "success", "details": { - "timestamp": int(time.time()), - "scan_id": scan_id, - "format": report_format, - "token": this.nessscan.res['token'], - "file": this.nessscan.res['file'], - "url": "https://{}:{}/scans/exports/{}/download".format( - this.scanner['server_host'], - this.scanner['server_port'], - this.nessscan.res['token']) - }}) + res.update( + { + "status": "success", + "details": { + "timestamp": int(time.time()), + "scan_id": scan_id, + "format": report_format, + "token": this.nessscan.res["token"], + "file": this.nessscan.res["file"], + "url": "https://{}:{}/scans/exports/{}/download".format( + this.scanner["server_host"], + this.scanner["server_port"], + this.nessscan.res["token"], + ), + }, + } + ) return jsonify(res) -@app.route('/engines/nessus/getrawreports/', methods=['GET']) +@app.route("/engines/nessus/getrawreports/", methods=["GET"]) def getrawreports(scan_id=None, report_format="html"): - REPORT_FORMATS = ['html', 'csv', 'nessus'] # 'db' format not supported + REPORT_FORMATS = ["html", "csv", "nessus"] # 'db' format not supported res = {"page": "getreport"} scan_id = str(scan_id) item = table.search(Query().scan_id == scan_id) if not scan_id and not request.args.get("scan_id"): - res.update({"status": "error", "details": {"reason": "'scan_id' arg is missing"}}) + res.update( + {"status": "error", "details": {"reason": "'scan_id' arg is missing"}} + ) return jsonify(res) if not scan_id and request.args.get("scan_id"): @@ -725,12 +823,17 @@ def getrawreports(scan_id=None, report_format="html"): this.nessscan.action(action="scans", method="GET") scan_status = None - for scan in this.nessscan.res['scans']: - if scan['id'] == int(scan_id): + for scan in this.nessscan.res["scans"]: + if scan["id"] == int(scan_id): scan_status = "found" break if not scan_status: - res.update({"status": "error", "details": 
{"reason": "'scan_id={}' not found".format(scan_id)}}) + res.update( + { + "status": "error", + "details": {"reason": "'scan_id={}' not found".format(scan_id)}, + } + ) return jsonify(res) post_data = {"format": report_format} @@ -738,44 +841,57 @@ def getrawreports(scan_id=None, report_format="html"): post_data.update({"chapters": "vuln_by_host"}) ness_export_url = "scans/{}/export".format(scan_id) - if item[0]['nessus_scan_hid'] is not None: - ness_export_url += "?history_id=" + item[0]['nessus_scan_hid'] + if item[0]["nessus_scan_hid"] is not None: + ness_export_url += "?history_id=" + item[0]["nessus_scan_hid"] this.nessscan.action(action=ness_export_url, method="POST", extra=post_data) - report_fileid = str(this.nessscan.res['file']) - report_token = str(this.nessscan.res['token']) + report_fileid = str(this.nessscan.res["file"]) + report_token = str(this.nessscan.res["token"]) this.nessscan.action( - action="scans/{}/export/{}/status".format(scan_id, report_fileid), - method="GET" + action="scans/{}/export/{}/status".format(scan_id, report_fileid), method="GET" ) - if hasattr(this.nessscan.res, "status") and not this.nessscan.res['status'] == "ready": + if ( + hasattr(this.nessscan.res, "status") + and not this.nessscan.res["status"] == "ready" + ): res.update({"status": "error", "details": {"reason": "report not available"}}) return jsonify(res) - tmp_filename = "nessus_{}_{}_{}.{}".format(scan_id, report_fileid, int(time.time()), report_format) - with open(UPLOAD_FOLDER+'/'+tmp_filename, 'wb') as handle: - report_url = "https://{}:{}/scans/exports/{}/download".format(this.scanner['server_host'], this.scanner['server_port'], report_token) + tmp_filename = "nessus_{}_{}_{}.{}".format( + scan_id, report_fileid, int(time.time()), report_format + ) + with open(UPLOAD_FOLDER + "/" + tmp_filename, "wb") as handle: + report_url = "https://{}:{}/scans/exports/{}/download".format( + this.scanner["server_host"], this.scanner["server_port"], report_token + ) response = 
requests.get(report_url, stream=True, verify=False) if not response.ok: - res.update({"status": "error", "details": {"reason": "something got wrong in d/l"}}) + res.update( + {"status": "error", "details": {"reason": "something got wrong in d/l"}} + ) return jsonify(res) for block in response.iter_content(1024): handle.write(block) - res.update({"status": "success", "details": { - "timestamp": int(time.time()), - "scan_id": scan_id, - "format": report_format, - "fileid": report_fileid, - "token": report_token - }}) + res.update( + { + "status": "success", + "details": { + "timestamp": int(time.time()), + "scan_id": scan_id, + "format": report_format, + "fileid": report_fileid, + "token": report_token, + }, + } + ) return jsonify(res) -@app.route('/engines/nessus/test', methods=['GET']) +@app.route("/engines/nessus/test", methods=["GET"]) def test(): if not APP_DEBUG: return jsonify({"page": "test"}) @@ -786,47 +902,53 @@ def test(): for arg in rule.arguments: options[arg] = "[{0}]".format(arg) - methods = ','.join(rule.methods) + methods = ",".join(rule.methods) url = url_for(rule.endpoint, **options) - res += urlparse.unquote("{0:50s} {1:20s} {2}
".format(rule.endpoint, methods, url)) + res += urlparse.unquote( + "{0:50s} {1:20s} {2}
".format( + rule.endpoint, methods, url + ) + ) return res @app.errorhandler(404) def page_not_found(e): - return jsonify({ - "page": "undefined", - "status": "error", - "reason": "Page not found" - }) + return jsonify({"page": "undefined", "status": "error", "reason": "Page not found"}) @app.before_first_request def main(): - if not os.path.exists(BASE_DIR+"/results"): - os.makedirs(BASE_DIR+"/results") - if not os.path.exists(BASE_DIR+"/reports"): - os.makedirs(BASE_DIR+"/reports") + if not os.path.exists(BASE_DIR + "/results"): + os.makedirs(BASE_DIR + "/results") + if not os.path.exists(BASE_DIR + "/reports"): + os.makedirs(BASE_DIR + "/reports") _loadconfig() -if __name__ == '__main__': +if __name__ == "__main__": parser = optparse.OptionParser() parser.add_option( - "-H", "--host", + "-H", + "--host", help="Hostname of the Flask app [default %s]" % APP_HOST, - default=APP_HOST) + default=APP_HOST, + ) parser.add_option( - "-P", "--port", + "-P", + "--port", help="Port for the Flask app [default %s]" % APP_PORT, - default=APP_PORT) + default=APP_PORT, + ) parser.add_option( - "-d", "--debug", + "-d", + "--debug", action="store_true", dest="debug", help=optparse.SUPPRESS_HELP, - default=APP_DEBUG) + default=APP_DEBUG, + ) options, _ = parser.parse_args() app.run(debug=options.debug, host=options.host, port=int(options.port), processes=1) diff --git a/engines/nmap/engine-nmap.py b/engines/nmap/engine-nmap.py index 091f5d8b..1d63384f 100644 --- a/engines/nmap/engine-nmap.py +++ b/engines/nmap/engine-nmap.py @@ -1,5 +1,6 @@ #!/usr/bin/python3 # -*- coding: utf-8 -*- + import os import subprocess import sys diff --git a/engines/openvas/engine-openvas-noexe.py b/engines/openvas/engine-openvas-noexe.py index 299f7d5f..4c3db4b6 100755 --- a/engines/openvas/engine-openvas-noexe.py +++ b/engines/openvas/engine-openvas-noexe.py @@ -8,17 +8,20 @@ import time import threading from urllib.parse import urlparse + # import random # import string from datetime import date, 
datetime from uuid import UUID from flask import Flask, request, jsonify + # from PatrowlEnginesUtils.PatrowlEngine import _json_serial from PatrowlEnginesUtils.PatrowlEngine import PatrowlEngine from PatrowlEnginesUtils.PatrowlEngine import PatrowlEngineFinding from PatrowlEnginesUtils.PatrowlEngineExceptions import PatrowlEngineExceptions import xml.etree.ElementTree as ET from dns.resolver import query + # from dns.reversename import from_address from openvas_lib import VulnscanManager, VulnscanException from threading import Semaphore @@ -40,7 +43,7 @@ base_dir=APP_BASE_DIR, name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS, - version=VERSION + version=VERSION, ) this = sys.modules[__name__] @@ -72,66 +75,68 @@ def handle_invalid_usage(error): return response -@app.route('/') +@app.route("/") def default(): """Route by default.""" return engine.default() -@app.route('/engines/openvas/') +@app.route("/engines/openvas/") def index(): """Return index page.""" return engine.index() -@app.route('/engines/openvas/liveness') +@app.route("/engines/openvas/liveness") def liveness(): """Return liveness page.""" return engine.liveness() -@app.route('/engines/openvas/readiness') +@app.route("/engines/openvas/readiness") def readiness(): """Return readiness page.""" return engine.readiness() -@app.route('/engines/openvas/test') +@app.route("/engines/openvas/test") def test(): """Return test page.""" return engine.test() -@app.route('/engines/openvas/info') +@app.route("/engines/openvas/info") def info(): """Get info on running engine.""" return engine.info() -@app.route('/engines/openvas/clean') +@app.route("/engines/openvas/clean") def clean(): """Clean all scans.""" return engine.clean() -@app.route('/engines/openvas/clean/') +@app.route("/engines/openvas/clean/") def clean_scan(scan_id): """Clean scan identified by id.""" return engine.clean_scan(scan_id) -@app.route('/engines/openvas/status') +@app.route("/engines/openvas/status") def status(): """Get status on engine and 
all scans.""" return engine.getstatus() -@app.route('/engines/openvas/status/') +@app.route("/engines/openvas/status/") def status_scan(scan_id): """Get status on scan identified by id.""" res = {"page": "status", "status": "UNKNOWN"} if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) if engine.scans[scan_id]["status"] == "ERROR": res.update({"status": "error", "reason": "todo"}) @@ -140,7 +145,9 @@ def status_scan(scan_id): elif engine.scans[scan_id]["status"] == "SCANNING": ov_scan_status = "unknown" if engine.scans[scan_id]["ov_scan_id"] != "": - ov_scan_status = this.openvas_cli.get_scan_status(engine.scans[scan_id]["ov_scan_id"]) + ov_scan_status = this.openvas_cli.get_scan_status( + engine.scans[scan_id]["ov_scan_id"] + ) # print(ov_scan_status) if ov_scan_status not in ["Requested", "Running", "Done"]: res.update({"status": "ERROR"}) @@ -152,101 +159,101 @@ def status_scan(scan_id): return jsonify(res) -@app.route('/engines/openvas/stopscans') +@app.route("/engines/openvas/stopscans") def stop(): """Stop all scans.""" return engine.stop() -@app.route('/engines/openvas/stop/') +@app.route("/engines/openvas/stop/") def stop_scan(scan_id): res = {"page": "status", "status": "success"} """Stop scan identified by id.""" if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) this.openvas_cli.stop_audit(scan_id) - if engine.scans[scan_id]['status'] not in ["FINISHED", "ERROR"]: - engine.scans[scan_id]['status'] = "STOPPED" + if engine.scans[scan_id]["status"] not in ["FINISHED", "ERROR"]: + engine.scans[scan_id]["status"] = "STOPPED" return res -@app.route('/engines/openvas/getreport/') +@app.route("/engines/openvas/getreport/") def 
getreport(scan_id): """Get report on finished scans.""" return engine.getreport(scan_id) def _loadconfig(): - conf_file = APP_BASE_DIR+'/openvas.json' + conf_file = APP_BASE_DIR + "/openvas.json" if os.path.exists(conf_file): json_data = open(conf_file) engine.scanner = json.load(json_data) - engine.scanner['status'] = "INIT" + engine.scanner["status"] = "INIT" # Check omp connectivity - if set(["omp_host", "omp_port", "omp_username", "omp_password"]).issubset(engine.scanner['options'].keys()): + if set(["omp_host", "omp_port", "omp_username", "omp_password"]).issubset( + engine.scanner["options"].keys() + ): try: this.openvas_cli = VulnscanManager( - str(engine.scanner['options']['omp_host']['value']), - str(engine.scanner['options']['omp_username']['value']), - str(engine.scanner['options']['omp_password']['value']), - int(engine.scanner['options']['omp_port']['value'])) + str(engine.scanner["options"]["omp_host"]["value"]), + str(engine.scanner["options"]["omp_username"]["value"]), + str(engine.scanner["options"]["omp_password"]["value"]), + int(engine.scanner["options"]["omp_port"]["value"]), + ) except VulnscanException as e: print("Error: {}".format(e)) else: print("Error: missing required options in config file".format(conf_file)) - engine.scanner['status'] = "ERROR" + engine.scanner["status"] = "ERROR" return {"status": "error", "reason": "missing required options"} for pl_name, pl_data in this.openvas_cli.get_port_lists().items(): - this.openvas_portlists.update({pl_name: pl_data['id']}) + this.openvas_portlists.update({pl_name: pl_data["id"]}) # Create custom port lists if "patrowl-all_tcp" not in this.openvas_portlists.keys(): new_pl_id = this.openvas_cli.create_port_list( - name="patrowl-all_tcp", - port_range="T:1-65535" + name="patrowl-all_tcp", port_range="T:1-65535" ) this.openvas_portlists.update({"patrowl-all_tcp": new_pl_id}) if "patrowl-quick_tcp" not in this.openvas_portlists.keys(): new_pl_id = this.openvas_cli.create_port_list( - 
name="patrowl-quick_tcp", - port_range="T:21-80,T:443,U:53" + name="patrowl-quick_tcp", port_range="T:21-80,T:443,U:53" ) this.openvas_portlists.update({"patrowl-quick_tcp": new_pl_id}) if "patrowl-tcp_80" not in this.openvas_portlists.keys(): new_pl_id = this.openvas_cli.create_port_list( - name="patrowl-tcp_80", - port_range="T:80" + name="patrowl-tcp_80", port_range="T:80" ) this.openvas_portlists.update({"patrowl-tcp_80": new_pl_id}) if "patrowl-tcp_443" not in this.openvas_portlists.keys(): new_pl_id = this.openvas_cli.create_port_list( - name="patrowl-tcp_443", - port_range="T:443" + name="patrowl-tcp_443", port_range="T:443" ) this.openvas_portlists.update({"patrowl-tcp_443": new_pl_id}) if "patrowl-tcp_22" not in this.openvas_portlists.keys(): new_pl_id = this.openvas_cli.create_port_list( - name="patrowl-tcp_22", - port_range="T:22" + name="patrowl-tcp_22", port_range="T:22" ) this.openvas_portlists.update({"patrowl-tcp_22": new_pl_id}) - engine.scanner['status'] = "READY" + engine.scanner["status"] = "READY" else: print("Error: config file '{}' not found".format(conf_file)) - engine.scanner['status'] = "ERROR" + engine.scanner["status"] = "ERROR" return {"status": "error", "reason": "config file not found"} -@app.route('/engines/openvas/reloadconfig', methods=['GET']) +@app.route("/engines/openvas/reloadconfig", methods=["GET"]) def reloadconfig(): res = {"page": "reloadconfig"} _loadconfig() @@ -254,53 +261,67 @@ def reloadconfig(): return jsonify(res) -@app.route('/engines/openvas/startscan', methods=['POST']) +@app.route("/engines/openvas/startscan", methods=["POST"]) def start_scan(): res = {"page": "startscan"} # Check the scanner is ready to start a new scan if len(engine.scans) == APP_MAXSCANS: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + 
APP_MAXSCANS + ), + } + ) return jsonify(res) status() - if engine.scanner['status'] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": "scanner not ready", - "status": engine.scanner['status'] - }}) + if engine.scanner["status"] != "READY": + res.update( + { + "status": "refused", + "details": { + "reason": "scanner not ready", + "status": engine.scanner["status"], + }, + } + ) return jsonify(res) data = json.loads(request.data) - if 'assets' not in data.keys() or 'scan_id' not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something is missing ('assets' ?)" - }}) + if "assets" not in data.keys() or "scan_id" not in data.keys(): + res.update( + { + "status": "refused", + "details": {"reason": "arg error, something is missing ('assets' ?)"}, + } + ) return jsonify(res) assets = [] for asset in data["assets"]: # Value if "value" not in asset.keys() or not asset["value"]: - res.update({ - "status": "error", - "reason": "arg error, something is missing ('asset.value')" - }) + res.update( + { + "status": "error", + "reason": "arg error, something is missing ('asset.value')", + } + ) return jsonify(res) # Supported datatypes if asset["datatype"] not in engine.scanner["allowed_asset_types"]: - res.update({ - "status": "error", - "reason": "arg error, bad value for '{}' datatype (not supported)".format(asset["value"]) - }) + res.update( + { + "status": "error", + "reason": "arg error, bad value for '{}' datatype (not supported)".format( + asset["value"] + ), + } + ) return jsonify(res) if asset["datatype"] == "url": @@ -309,64 +330,61 @@ def start_scan(): assets.append(asset["value"]) - scan_id = str(data['scan_id']) + scan_id = str(data["scan_id"]) - if data['scan_id'] in engine.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' already launched".format(data['scan_id']), + if data["scan_id"] in engine.scans.keys(): + res.update( + { + "status": "refused", + 
"details": { + "reason": "scan '{}' already launched".format(data["scan_id"]), + }, } - }) + ) return jsonify(res) # Checking default options ov_profile = DEFAULT_OV_PROFILE # "Full and fast" ov_profiles = this.openvas_cli.get_profiles - if "profile" in data['options'].keys(): - ov_p = str(data['options']['profile']) + if "profile" in data["options"].keys(): + ov_p = str(data["options"]["profile"]) if ov_p in ov_profiles.keys(): ov_profile = ov_p ov_port_list = DEFAULT_OV_PORTLIST # "patrowl-all_tcp" ov_port_lists = this.openvas_cli.get_port_lists(abs) - if "port_list" in data['options'].keys(): - ov_pl = str(data['options']['port_list']) + if "port_list" in data["options"].keys(): + ov_pl = str(data["options"]["port_list"]) if ov_pl in ov_port_lists.keys(): ov_port_list = ov_pl scan = { - 'assets': assets, - 'threads': [], - 'options': data['options'], - 'scan_id': scan_id, - 'ov_scan_id': "", - 'ov_target_id': "", - 'ov_profile': ov_profile, - 'ov_port_list': ov_port_list, - 'status': "STARTED", - 'started_at': int(time.time() * 1000), - 'issues': [], - 'summary': {} + "assets": assets, + "threads": [], + "options": data["options"], + "scan_id": scan_id, + "ov_scan_id": "", + "ov_target_id": "", + "ov_profile": ov_profile, + "ov_port_list": ov_port_list, + "status": "STARTED", + "started_at": int(time.time() * 1000), + "issues": [], + "summary": {}, } engine.scans.update({scan_id: scan}) thread = threading.Thread(target=_scan, args=(scan_id,)) thread.start() - engine.scans[scan_id]['threads'].append(thread) + engine.scans[scan_id]["threads"].append(thread) - res.update({ - "status": "accepted", - "details": { - "scan_id": scan['scan_id'] - } - }) + res.update({"status": "accepted", "details": {"scan_id": scan["scan_id"]}}) return jsonify(res) def _scan(scan_id): assets = [] - for asset in engine.scans[scan_id]['assets']: + for asset in engine.scans[scan_id]["assets"]: assets.append(asset) ov_profile = engine.scans[scan_id]["ov_profile"] @@ -378,21 +396,23 @@ def 
_scan(scan_id): target=assets, profile=ov_profile, port_list=ov_port_list, - callback_end=partial(lambda x: x.release(), Sem) + callback_end=partial(lambda x: x.release(), Sem), + ) + engine.scans[scan_id].update( + { + "ov_scan_id": ov_scan_id, + "ov_target_id": ov_target_id, + "scan_status": "SCANNING", + "status": "SCANNING", + } ) - engine.scans[scan_id].update({ - 'ov_scan_id': ov_scan_id, - 'ov_target_id': ov_target_id, - 'scan_status': "SCANNING", - 'status': "SCANNING" - }) Sem.acquire() # Finished scan ov_report_id = this.openvas_cli.get_report_id(ov_scan_id) ov_results_xml = this.openvas_cli.get_report_xml(ov_report_id) report_filename = "{}/results/{}.xml".format(APP_BASE_DIR, scan_id) - with open(report_filename, 'w') as report_file: + with open(report_filename, "w") as report_file: print(ET.tostring(ov_results_xml)) report_file.write(ET.tostring(ov_results_xml).decode()) @@ -410,13 +430,7 @@ def _parse_results(scan_id): issues = [] issue_id = 1 - nb_vulns = { - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - } + nb_vulns = {"info": 0, "low": 0, "medium": 0, "high": 0, "critical": 0} report_filename = "{}/results/{}.xml".format(APP_BASE_DIR, scan_id) @@ -461,13 +475,22 @@ def _parse_results(scan_id): issue_cvss = float(result.find("severity").text) - if result.find("nvt").find("cve") is not None and result.find("nvt").find("cve").text != "NOCVE": + if ( + result.find("nvt").find("cve") is not None + and result.find("nvt").find("cve").text != "NOCVE" + ): cvelist = str(result.find("nvt").find("cve").text) issue_meta.update({"CVE": cvelist.split(", ")}) - if result.find("nvt").find("bid") is not None and result.find("nvt").find("bid").text != "NOBID": + if ( + result.find("nvt").find("bid") is not None + and result.find("nvt").find("bid").text != "NOBID" + ): bid_list = str(result.find("nvt").find("bid").text) issue_meta.update({"BID": bid_list.split(", ")}) - if result.find("nvt").find("xref") is not None and 
result.find("nvt").find("xref").text != "NOXREF": + if ( + result.find("nvt").find("xref") is not None + and result.find("nvt").find("xref").text != "NOXREF" + ): xref_list = str(result.find("nvt").find("xref").text) issue_meta.update({"XREF": xref_list.split(", ")}) @@ -479,18 +502,17 @@ def _parse_results(scan_id): solution="n/a", severity=severity, confidence="firm", - raw=ET.tostring(result, encoding='utf-8', method='xml'), + raw=ET.tostring(result, encoding="utf-8", method="xml"), target_addrs=assets, meta_tags=["openvas"], meta_risk={"cvss_base_score": issue_cvss}, - meta_vuln_refs=issue_meta + meta_vuln_refs=issue_meta, ) issues.append(issue._PatrowlEngineFinding__to_dict()) nb_vulns[severity] += 1 issue_id += 1 - # report_id = engine.scans[scan_id]["report_id"] # for asset in engine.scans[scan_id]["findings"]: @@ -533,61 +555,74 @@ def _parse_results(scan_id): "nb_high": nb_vulns["high"], "nb_critical": 0, "engine_name": "openvas", - "engine_version": engine.scanner["version"] + "engine_version": engine.scanner["version"], } return issues, summary -@app.route('/engines/openvas/getfindings/', methods=['GET']) +@app.route("/engines/openvas/getfindings/", methods=["GET"]) def getfindings(scan_id): res = {"page": "getfindings", "scan_id": scan_id} # Check if the scan_id exists if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) # Check if the scan is finished status() - if engine.scans[scan_id]['status'] != "FINISHED": - res.update({"status": "error", "reason": "scan_id '{}' not finished (status={})".format(scan_id, engine.scans[scan_id]['status'])}) + if engine.scans[scan_id]["status"] != "FINISHED": + res.update( + { + "status": "error", + "reason": "scan_id '{}' not finished (status={})".format( + scan_id, engine.scans[scan_id]["status"] + ), + } + ) return jsonify(res) scan = 
{ "scan_id": scan_id, - "assets": engine.scans[scan_id]['assets'], - "options": engine.scans[scan_id]['options'], - "status": engine.scans[scan_id]['status'], - "started_at": engine.scans[scan_id]['started_at'], - "finished_at": engine.scans[scan_id]['finished_at'] + "assets": engine.scans[scan_id]["assets"], + "options": engine.scans[scan_id]["options"], + "status": engine.scans[scan_id]["status"], + "started_at": engine.scans[scan_id]["started_at"], + "finished_at": engine.scans[scan_id]["finished_at"], } - summary = engine.scans[scan_id]['summary'] - issues = engine.scans[scan_id]['issues'] + summary = engine.scans[scan_id]["summary"] + issues = engine.scans[scan_id]["issues"] # Store the findings in a file - with open(APP_BASE_DIR+"/results/openvas_"+scan_id+".json", 'w') as report_file: - json.dump({ - "scan": scan, - "summary": summary, - "issues": issues - }, report_file, default=_json_serial) + with open( + APP_BASE_DIR + "/results/openvas_" + scan_id + ".json", "w" + ) as report_file: + json.dump( + {"scan": scan, "summary": summary, "issues": issues}, + report_file, + default=_json_serial, + ) # Remove the scan from the active scan list clean_scan(scan_id) - res.update({"scan": scan, "summary": summary, "issues": issues, "status": "success"}) + res.update( + {"scan": scan, "summary": summary, "issues": issues, "status": "success"} + ) return jsonify(res) @app.before_first_request def main(): """First function called.""" - if not os.path.exists(APP_BASE_DIR+"/results"): - os.makedirs(APP_BASE_DIR+"/results") + if not os.path.exists(APP_BASE_DIR + "/results"): + os.makedirs(APP_BASE_DIR + "/results") _loadconfig() -if __name__ == '__main__': +if __name__ == "__main__": engine.run_app(app_debug=APP_DEBUG, app_host=APP_HOST, app_port=APP_PORT) diff --git a/engines/openvas/engine-openvas-omp.py b/engines/openvas/engine-openvas-omp.py index e879c587..24a3ec31 100755 --- a/engines/openvas/engine-openvas-omp.py +++ b/engines/openvas/engine-openvas-omp.py @@ 
-40,12 +40,13 @@ base_dir=APP_BASE_DIR, name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS, - version=VERSION + version=VERSION, ) this = modules[__name__] this.keys = [] + def get_criticity(score): """ Returns the level of criicity @@ -59,6 +60,7 @@ def get_criticity(score): criticity = "medium" return criticity + def is_uuid(uuid_string, version=4): """ This function uuid_string returns True is the uuid_string is a valid UUID. @@ -69,35 +71,57 @@ def is_uuid(uuid_string, version=4): except ValueError: return False + def get_options(payload): """ Extracts formatted options from the payload. """ - options = {"enable_create_target": True, "enable_create_task": True, "enable_start_task": True} + options = { + "enable_create_target": True, + "enable_create_task": True, + "enable_start_task": True, + } user_opts = loads(payload["options"]) if "enable_create_target" in user_opts: - options["enable_create_target"] = user_opts["enable_create_target"] == "True" or user_opts["enable_create_target"] == "true" + options["enable_create_target"] = ( + user_opts["enable_create_target"] == "True" + or user_opts["enable_create_target"] == "true" + ) if "enable_create_task" in user_opts: - options["enable_create_task"] = user_opts["enable_create_task"] == "True" or user_opts["enable_create_task"] == "true" + options["enable_create_task"] = ( + user_opts["enable_create_task"] == "True" + or user_opts["enable_create_task"] == "true" + ) if "enable_start_task" in user_opts: - options["enable_start_task"] = user_opts["enable_start_task"] == "True" or user_opts["enable_start_task"] == "true" + options["enable_start_task"] = ( + user_opts["enable_start_task"] == "True" + or user_opts["enable_start_task"] == "true" + ) return options + def omp_cmd(command): """ This function returns the output of an 'omp' command. 
""" - omp_cmd_base = ["omp", - "-h", engine.scanner["options"]["omp_host"]["value"], - "-p", engine.scanner["options"]["omp_port"]["value"], - "-u", engine.scanner["options"]["omp_username"]["value"], - "-w", engine.scanner["options"]["omp_password"]["value"]] + omp_cmd_base = [ + "omp", + "-h", + engine.scanner["options"]["omp_host"]["value"], + "-p", + engine.scanner["options"]["omp_port"]["value"], + "-u", + engine.scanner["options"]["omp_username"]["value"], + "-w", + engine.scanner["options"]["omp_password"]["value"], + ] try: result = check_output(omp_cmd_base + command).decode("utf-8") except Exception: result = "" return result + def get_target(target_name): """ This function returns the target_id of a target. If not, it returns None. @@ -111,6 +135,7 @@ def get_target(target_name): return target_id return None + def get_credentials(): """ This function returns the credentials_id from configuration. @@ -123,13 +148,17 @@ def get_credentials(): if not result.attrib["status"] == "200": return None for credential in result.findall("credential"): - if credential.find("name").text == engine.scanner["options"]["credential_name"]["value"]: + if ( + credential.find("name").text + == engine.scanner["options"]["credential_name"]["value"] + ): credentials_id = credential.attrib["id"] if not is_uuid(credentials_id): return None return credentials_id return None + def get_scan_config(): """ This function returns the scan_config_id from conf @@ -143,15 +172,21 @@ def get_scan_config(): return scan_config_id return None + def create_target(target_name): """ This function creates a target in OpenVAS and returns its target_id """ - result_xml = omp_cmd(["--xml", - "{target_name}{target_name}{ssh_port}".format( - target_name=target_name, - credentials_name=engine.scanner["credentials"], - ssh_port=22)]) + result_xml = omp_cmd( + [ + "--xml", + "{target_name}{target_name}{ssh_port}".format( + target_name=target_name, + credentials_name=engine.scanner["credentials"], + 
ssh_port=22, + ), + ] + ) try: result = ET.fromstring(result_xml) except Exception: @@ -163,6 +198,7 @@ def create_target(target_name): return None return target_id + def get_task(target_name): """ This function returns the task_id @@ -176,16 +212,28 @@ def get_task(target_name): return task_id return None + def create_task(target_name, target_id): """ This function creates a task_id in OpenVAS and returns its task_id """ - result = omp_cmd(["-C", "-c", engine.scanner["scan_config"], "--name", target_name, "--target", target_id]).split("\n") + result = omp_cmd( + [ + "-C", + "-c", + engine.scanner["scan_config"], + "--name", + target_name, + "--target", + target_id, + ] + ).split("\n") task_id = result[0] if not is_uuid(task_id): return None return task_id + def start_task(task_id): """ This function starts a task and returns a report_id @@ -196,6 +244,7 @@ def start_task(task_id): return None return report_id + def get_last_report(task_id): """ This function returns the last report_id of a task_id @@ -209,6 +258,7 @@ def get_last_report(task_id): return None return report_id + def get_report_status(task_id, report_id): """ This function get the status of a report_id @@ -219,6 +269,7 @@ def get_report_status(task_id, report_id): return report.split(" ")[2] return None + def get_multiple_report_status(assets): """ This function get the status of a set of assets {'task_id': xx, 'report_id': xx} @@ -234,11 +285,17 @@ def get_multiple_report_status(assets): for asset in assets: task_id = assets[asset]["task_id"] report_id = assets[asset]["report_id"] - report = result.find("task/[@id='{task_id}']/*/report[@id='{report_id}']".format( - task_id=task_id, report_id=report_id)) + report = result.find( + "task/[@id='{task_id}']/*/report[@id='{report_id}']".format( + task_id=task_id, report_id=report_id + ) + ) if report is None: - print("Can't find task_id={task_id}, report_id={report_id}".format( - task_id=task_id, report_id=report_id)) + print( + "Can't find 
task_id={task_id}, report_id={report_id}".format( + task_id=task_id, report_id=report_id + ) + ) assets_status.update({asset: {"status": "Failure"}}) else: scan_end = report.find("scan_end").text @@ -248,10 +305,12 @@ def get_multiple_report_status(assets): assets_status.update({asset: {"status": "Done"}}) return assets_status + def is_ip(string): - """ This dummy function returns True is the string is probably an IP """ + """This dummy function returns True is the string is probably an IP""" return re_search("[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+", string) is not None + @app.errorhandler(404) def page_not_found(e): """Page not found.""" @@ -325,7 +384,9 @@ def status_scan(scan_id): """Get status on scan identified by id.""" res = {"page": "status", "status": "UNKNOWN"} if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) if engine.scans[scan_id]["status"] == "ERROR": @@ -353,7 +414,12 @@ def status_scan(scan_id): try: _scan_urls(scan_id) except Exception as e: - res.update({"status": "error", "reason": "scan_urls did not worked ! ({})".format(e)}) + res.update( + { + "status": "error", + "reason": "scan_urls did not worked ! 
({})".format(e), + } + ) return jsonify(res) else: res.update({"status": "SCANNING"}) @@ -383,7 +449,7 @@ def getreport(scan_id): def _loadconfig(): - conf_file = APP_BASE_DIR+"/openvas.json" + conf_file = APP_BASE_DIR + "/openvas.json" if exists(conf_file): json_data = open(conf_file) engine.scanner = load(json_data) @@ -411,47 +477,61 @@ def start_scan(): # Check the scanner is ready to start a new scan if len(engine.scans) == APP_MAXSCANS: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) return jsonify(res) status() if engine.scanner["status"] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": "scanner not ready", - "status": engine.scanner["status"] - }}) + res.update( + { + "status": "refused", + "details": { + "reason": "scanner not ready", + "status": engine.scanner["status"], + }, + } + ) return jsonify(res) data = loads(request.data.decode("utf-8")) if "assets" not in data.keys() or "scan_id" not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something is missing ('assets' ?)" - }}) + res.update( + { + "status": "refused", + "details": {"reason": "arg error, something is missing ('assets' ?)"}, + } + ) return jsonify(res) assets = [] for asset in data["assets"]: # Value if "value" not in asset.keys() or not asset["value"]: - res.update({ - "status": "error", - "reason": "arg error, something is missing ('asset.value')" - }) + res.update( + { + "status": "error", + "reason": "arg error, something is missing ('asset.value')", + } + ) return jsonify(res) # Supported datatypes if asset["datatype"] not in engine.scanner["allowed_asset_types"]: - res.update({ - "status": "error", - "reason": "arg error, bad value for '{}' datatype (not supported)".format(asset["value"]) 
- }) + res.update( + { + "status": "error", + "reason": "arg error, bad value for '{}' datatype (not supported)".format( + asset["value"] + ), + } + ) return jsonify(res) if asset["datatype"] == "url": @@ -463,24 +543,28 @@ def start_scan(): scan_id = str(data["scan_id"]) if data["scan_id"] in engine.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' is probably already launched".format(data["scan_id"]), + res.update( + { + "status": "refused", + "details": { + "reason": "scan '{}' is probably already launched".format( + data["scan_id"] + ), + }, } - }) + ) return jsonify(res) scan = { - "assets": assets, - "threads": [], - "options": data["options"], - "scan_id": scan_id, - "status": "STARTED", - "lock": False, - "started_at": int(time() * 1000), - "findings": {}, - "in_failure": dict() + "assets": assets, + "threads": [], + "options": data["options"], + "scan_id": scan_id, + "status": "STARTED", + "lock": False, + "started_at": int(time() * 1000), + "findings": {}, + "in_failure": dict(), } options = get_options(data) @@ -499,7 +583,9 @@ def start_scan(): scan["in_failure"].update({asset: {"reason": "Fail to create target."}}) else: print("Target creation disabled") - scan["in_failure"].update({asset: {"reason": "Target creation disabled"}}) + scan["in_failure"].update( + {asset: {"reason": "Target creation disabled"}} + ) else: task_id = get_task(asset) if task_id is None and options["enable_create_task"]: @@ -509,10 +595,14 @@ def start_scan(): if task_id is None: if options["enable_create_task"]: print("Fail to create task {}".format(asset)) - scan["in_failure"].update({asset: {"reason": "Fail to create task."}}) + scan["in_failure"].update( + {asset: {"reason": "Fail to create task."}} + ) else: print("Task creation disabled") - scan["in_failure"].update({asset: {"reason": "Task creation disabled."}}) + scan["in_failure"].update( + {asset: {"reason": "Task creation disabled."}} + ) else: if options["enable_start_task"]: 
report_id = start_task(task_id) @@ -525,21 +615,37 @@ def start_scan(): if report_id is None: if options["enable_start_task"]: print("Fail to start task {}".format(task_id)) - scan["in_failure"].update({asset: {"reason": "Fail to start task {}".format(task_id)}}) + scan["in_failure"].update( + {asset: {"reason": "Fail to start task {}".format(task_id)}} + ) else: print("Task start disabled") - scan["in_failure"].update({asset: {"reason": "Task start disabled"}}) + scan["in_failure"].update( + {asset: {"reason": "Task start disabled"}} + ) else: print("OK for report_id {}".format(report_id)) - scan["assets"].update({asset: {"task_id": task_id, "report_id": report_id, "status": "accepted"}}) + scan["assets"].update( + { + asset: { + "task_id": task_id, + "report_id": report_id, + "status": "accepted", + } + } + ) if scan["assets"] == dict(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' is probably already launched".format(data["scan_id"]), + res.update( + { + "status": "refused", + "details": { + "reason": "scan '{}' is probably already launched".format( + data["scan_id"] + ), + }, } - }) + ) return jsonify(res) engine.scans.update({scan_id: scan}) @@ -547,12 +653,7 @@ def start_scan(): thread.start() engine.scans[scan_id]["threads"].append(thread) - res.update({ - "status": "accepted", - "details": { - "scan_id": scan["scan_id"] - } - }) + res.update({"status": "accepted", "details": {"scan_id": scan["scan_id"]}}) return jsonify(res) @@ -582,7 +683,9 @@ def _scan_urls(scan_id): if asset not in engine.scans[scan_id]["findings"]: engine.scans[scan_id]["findings"][asset] = {} try: - engine.scans[scan_id]["findings"][asset]["issues"] = get_report(asset, scan_id) + engine.scans[scan_id]["findings"][asset]["issues"] = get_report( + asset, scan_id + ) except Exception as e: print("scan_urls did not worked ! 
({})".format(e)) return False @@ -597,14 +700,27 @@ def get_report(asset, scan_id): report_id = engine.scans[scan_id]["assets"][asset]["report_id"] issues = [] - if not isfile("results/openvas_report_{scan_id}_{asset}.xml".format(scan_id=scan_id, asset=asset)): + if not isfile( + "results/openvas_report_{scan_id}_{asset}.xml".format( + scan_id=scan_id, asset=asset + ) + ): result = omp_cmd(["--get-report", report_id]) - result_file = open("results/openvas_report_{scan_id}_{asset}.xml".format(scan_id=scan_id, asset=asset), "w") + result_file = open( + "results/openvas_report_{scan_id}_{asset}.xml".format( + scan_id=scan_id, asset=asset + ), + "w", + ) result_file.write(result) result_file.close() try: - tree = ET.parse("results/openvas_report_{scan_id}_{asset}.xml".format(scan_id=scan_id, asset=asset)) + tree = ET.parse( + "results/openvas_report_{scan_id}_{asset}.xml".format( + scan_id=scan_id, asset=asset + ) + ) except Exception: # No Element found in XML file return {"status": "ERROR", "reason": "no issues found"} @@ -639,29 +755,29 @@ def _parse_results(scan_id): issues = [] summary = {} - nb_vulns = { - "info": 0, - "low": 0, - "medium": 0, - "high": 0 - } + nb_vulns = {"info": 0, "low": 0, "medium": 0, "high": 0} timestamp = int(time() * 1000) cvss_max = float(0) for failed_asset in engine.scans[scan_id]["in_failure"]: nb_vulns[get_criticity(cvss_max)] += 1 - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(cvss_max), "confidence": "certain", - "target": {"addr": [failed_asset], "protocol": "http"}, - "title": "No report found for '{}'".format(failed_asset), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": cvss_max}}, - "type": "openvas_report", - "timestamp": timestamp, - "description": engine.scans[scan_id]["in_failure"][failed_asset]["reason"], - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(cvss_max), + "confidence": "certain", + "target": {"addr": [failed_asset], "protocol": 
"http"}, + "title": "No report found for '{}'".format(failed_asset), + "solution": "n/a", + "metadata": {"risk": {"cvss_base_score": cvss_max}}, + "type": "openvas_report", + "timestamp": timestamp, + "description": engine.scans[scan_id]["in_failure"][failed_asset][ + "reason" + ], + } + ) def sortBySeverity(val): return float(val[0]) @@ -672,31 +788,43 @@ def sortBySeverity(val): description = "" cvss_max = float(0) # Sort issues by CVE severity - engine.scans[scan_id]["findings"][asset]["issues"].sort(key=sortBySeverity, reverse=True) + engine.scans[scan_id]["findings"][asset]["issues"].sort( + key=sortBySeverity, reverse=True + ) for eng in engine.scans[scan_id]["findings"][asset]["issues"]: if float(eng[0]) > 0: cvss_max = max(float(eng[0]), cvss_max) - description = description + "[{threat}] CVSS: {severity} - Associated CVE : {cve}".format( - threat=eng[2], - severity=eng[0], - cve=eng[1]) + "\n" + description = ( + description + + "[{threat}] CVSS: {severity} - Associated CVE : {cve}".format( + threat=eng[2], severity=eng[0], cve=eng[1] + ) + + "\n" + ) link = "https://{omp_host}/omp?cmd=get_report&report_id={report_id}".format( omp_host=engine.scanner["options"]["omp_host"]["value"], - report_id=report_id) + report_id=report_id, + ) nb_vulns[get_criticity(cvss_max)] += 1 - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(cvss_max), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http"}, - "title": "'{}' identified in openvas".format(asset), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": cvss_max}, "links": [link]}, - "type": "openvas_report", - "timestamp": timestamp, - "description": description, - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(cvss_max), + "confidence": "certain", + "target": {"addr": [asset], "protocol": "http"}, + "title": "'{}' identified in openvas".format(asset), + "solution": "n/a", + "metadata": { + "risk": {"cvss_base_score": 
cvss_max}, + "links": [link], + }, + "type": "openvas_report", + "timestamp": timestamp, + "description": description, + } + ) summary = { "nb_issues": len(issues), @@ -705,7 +833,7 @@ def sortBySeverity(val): "nb_medium": nb_vulns["medium"], "nb_high": nb_vulns["high"], "engine_name": "openvas", - "engine_version": engine.scanner["version"] + "engine_version": engine.scanner["version"], } return issues, summary @@ -717,13 +845,22 @@ def getfindings(scan_id): # check if the scan_id exists if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) # check if the scan is finished status() if engine.scans[scan_id]["status"] != "FINISHED": - res.update({"status": "error", "reason": "scan_id '{}' not finished (status={})".format(scan_id, engine.scans[scan_id]["status"])}) + res.update( + { + "status": "error", + "reason": "scan_id '{}' not finished (status={})".format( + scan_id, engine.scans[scan_id]["status"] + ), + } + ) return jsonify(res) issues, summary = _parse_results(scan_id) @@ -734,31 +871,36 @@ def getfindings(scan_id): "options": engine.scans[scan_id]["options"], "status": engine.scans[scan_id]["status"], "started_at": engine.scans[scan_id]["started_at"], - "finished_at": engine.scans[scan_id]["finished_at"] + "finished_at": engine.scans[scan_id]["finished_at"], } # Store the findings in a file - with open(APP_BASE_DIR+"/results/openvas_"+scan_id+".json", "w") as report_file: - dump({ - "scan": scan, - "summary": summary, - "issues": issues - }, report_file, default=_json_serial) + with open( + APP_BASE_DIR + "/results/openvas_" + scan_id + ".json", "w" + ) as report_file: + dump( + {"scan": scan, "summary": summary, "issues": issues}, + report_file, + default=_json_serial, + ) # remove the scan from the active scan list clean_scan(scan_id) - res.update({"scan": scan, "summary": 
summary, "issues": issues, "status": "success"}) + res.update( + {"scan": scan, "summary": summary, "issues": issues, "status": "success"} + ) return jsonify(res) @app.before_first_request def main(): """First function called.""" - if not exists(APP_BASE_DIR+"/results"): - makedirs(APP_BASE_DIR+"/results") + if not exists(APP_BASE_DIR + "/results"): + makedirs(APP_BASE_DIR + "/results") _loadconfig() print("Run engine") + if __name__ == "__main__": engine.run_app(app_debug=APP_DEBUG, app_host=APP_HOST, app_port=APP_PORT) diff --git a/engines/openvas/engine-openvas.py b/engines/openvas/engine-openvas.py index 9a3f51ef..157ea5fa 100644 --- a/engines/openvas/engine-openvas.py +++ b/engines/openvas/engine-openvas.py @@ -24,8 +24,10 @@ from dns.resolver import query from gvm.connections import TLSConnection from gvm.protocols.gmp import Gmp + # from gvm.protocols.gmpv7.types import AliveTest from gvm.protocols.gmpv208 import AliveTest + # from gvm.errors import GvmError # Own library @@ -37,16 +39,16 @@ # from pdb import set_trace as st app = Flask(__name__) -APP_DEBUG = os.environ.get('APP_DEBUG', '').lower() in ['true', '1', 'on', 'yes', 'y'] +APP_DEBUG = os.environ.get("APP_DEBUG", "").lower() in ["true", "1", "on", "yes", "y"] APP_HOST = "0.0.0.0" APP_PORT = 5016 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 5)) +APP_MAXSCANS = int(os.environ.get("APP_MAXSCANS", 5)) APP_ENGINE_NAME = "openvas" APP_BASE_DIR = dirname(realpath(__file__)) # DEFAULT_OV_PROFILE = "Full and fast" # DEFAULT_OV_PORTLIST = "patrowl-all_tcp" -DEFAULT_TIMEOUT = int(os.environ.get('DEFAULT_TIMEOUT', 600)) -DEFAULT_SCAN_TIMEOUT = int(os.environ.get('DEFAULT_SCAN_TIMEOUT', 432000)) # 2 days +DEFAULT_TIMEOUT = int(os.environ.get("DEFAULT_TIMEOUT", 600)) +DEFAULT_SCAN_TIMEOUT = int(os.environ.get("DEFAULT_SCAN_TIMEOUT", 432000)) # 2 days VERSION = "1.4.30" engine = PatrowlEngine( @@ -54,7 +56,7 @@ base_dir=APP_BASE_DIR, name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS, - version=VERSION + 
version=VERSION, ) this = modules[__name__] @@ -75,8 +77,8 @@ "DEFAULT": "Scan Config Default", } -if __name__ != '__main__': - gunicorn_logger = logging.getLogger('gunicorn.error') +if __name__ != "__main__": + gunicorn_logger = logging.getLogger("gunicorn.error") app.logger.handlers = gunicorn_logger.handlers app.logger.setLevel(gunicorn_logger.level) @@ -92,31 +94,42 @@ def is_uuid(uuid_string, version=4): def get_options(payload): """Extract formatted options from the payload.""" - options = {"enable_create_target": True, "enable_create_task": True, "enable_start_task": True} + options = { + "enable_create_target": True, + "enable_create_task": True, + "enable_start_task": True, + } user_opts = payload["options"] if "enable_create_target" in user_opts: - options["enable_create_target"] = True or user_opts["enable_create_target"] == "True" + options["enable_create_target"] = ( + True or user_opts["enable_create_target"] == "True" + ) if "enable_create_task" in user_opts: - options["enable_create_task"] = True or user_opts["enable_create_task"] == "True" + options["enable_create_task"] = ( + True or user_opts["enable_create_task"] == "True" + ) if "enable_start_task" in user_opts: options["enable_start_task"] = True or user_opts["enable_start_task"] == "True" return options -def get_target(target_name, scan_portlist_id=None, alive_test=AliveTest.TCP_SYN_SERVICE_PING): +def get_target( + target_name, scan_portlist_id=None, alive_test=AliveTest.TCP_SYN_SERVICE_PING +): """Return the target_id of a target. 
If not, it return None.""" valid_target_id = None connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with Gmp(connection) as gmp_cnx: gmp_cnx.authenticate( engine.scanner["options"]["gmp_username"]["value"], - engine.scanner["options"]["gmp_password"]["value"]) + engine.scanner["options"]["gmp_password"]["value"], + ) # targets_xml = gmp_cnx.get_targets(filter="~"+target_name) - targets_xml = gmp_cnx.get_targets(filter_string="~"+target_name) + targets_xml = gmp_cnx.get_targets(filter_string="~" + target_name) # print("get_target/targets_xml:", targets_xml) try: targets = ET.fromstring(targets_xml) @@ -133,7 +146,10 @@ def get_target(target_name, scan_portlist_id=None, alive_test=AliveTest.TCP_SYN_ valid_target_id = target.get("id") if not is_uuid(valid_target_id): valid_target_id = None - elif scan_portlist_id == target.find("port_list").get('id') and target_name in target.find("name").text: + elif ( + scan_portlist_id == target.find("port_list").get("id") + and target_name in target.find("name").text + ): valid_target_id = target.get("id") if not is_uuid(valid_target_id): valid_target_id = None @@ -184,7 +200,7 @@ def get_scan_config_name(scan_config_id=None, gmp=this.gmp): except Exception: return None - for config in configs.findall('config'): + for config in configs.findall("config"): if config.get("id") == scan_config_id: scan_config_name = config.find("name").text break @@ -200,12 +216,13 @@ def get_scan_config(name=None): connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with Gmp(connection) 
as gmp_cnx: gmp_cnx.authenticate( engine.scanner["options"]["gmp_username"]["value"], - engine.scanner["options"]["gmp_password"]["value"]) + engine.scanner["options"]["gmp_password"]["value"], + ) # configs_xml = gmp_cnx.get_configs() configs_xml = gmp_cnx.get_scan_configs() try: @@ -223,7 +240,9 @@ def get_scan_config(name=None): tmp_config_name = config.find("name").text if scan_config_name == tmp_config_name: scan_config_id = config.get("id") - if not is_uuid(scan_config_id, version=1) and not is_uuid(scan_config_id): + if not is_uuid(scan_config_id, version=1) and not is_uuid( + scan_config_id + ): return None return scan_config_id return None @@ -231,15 +250,17 @@ def get_scan_config(name=None): def create_target( - target_name, - target_hosts, - port_list_id=None, - port_list_name=None, - ssh_credential_id=None, ssh_credential_port=None, - smb_credential_id=None, - esxi_credential_id=None, - snmp_credential_id=None, - alive_test=AliveTest.TCP_SYN_SERVICE_PING): + target_name, + target_hosts, + port_list_id=None, + port_list_name=None, + ssh_credential_id=None, + ssh_credential_port=None, + smb_credential_id=None, + esxi_credential_id=None, + snmp_credential_id=None, + alive_test=AliveTest.TCP_SYN_SERVICE_PING, +): """Create a target in OpenVAS and returns its target_id.""" # Check alive_test param if alive_test not in OV_ALIVE_TESTS.keys(): @@ -247,12 +268,13 @@ def create_target( connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with Gmp(connection) as gmp_cnx: gmp_cnx.authenticate( engine.scanner["options"]["gmp_username"]["value"], - engine.scanner["options"]["gmp_password"]["value"]) + engine.scanner["options"]["gmp_password"]["value"], + ) new_target_xml = gmp_cnx.create_target( "{} - {} - {}".format(target_name, 
port_list_name, alive_test), hosts=target_hosts, @@ -262,7 +284,7 @@ def create_target( esxi_credential_id=esxi_credential_id, snmp_credential_id=snmp_credential_id, port_list_id=port_list_id, - alive_test=alive_test + alive_test=alive_test, ) try: new_target = ET.fromstring(new_target_xml) @@ -285,14 +307,17 @@ def get_task_by_target_name(target_name, scan_config_id=None): connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with Gmp(connection) as gmp_cnx: gmp_cnx.authenticate( engine.scanner["options"]["gmp_username"]["value"], - engine.scanner["options"]["gmp_password"]["value"]) + engine.scanner["options"]["gmp_password"]["value"], + ) # tasks_xml = gmp_cnx.get_tasks(filter="apply_overrides=1 min_qod=0 rows=-1 levels=hmlg") - tasks_xml = gmp_cnx.get_tasks(filter_string="apply_overrides=1 min_qod=0 rows=-1 levels=hmlg") + tasks_xml = gmp_cnx.get_tasks( + filter_string="apply_overrides=1 min_qod=0 rows=-1 levels=hmlg" + ) target_id = get_target(target_name) if target_id is None: return None @@ -304,7 +329,10 @@ def get_task_by_target_name(target_name, scan_config_id=None): return None for task in tasks.findall("task"): - if task.find('target').get("id") == target_id and task.find('config').get('id') == scan_config_id: + if ( + task.find("target").get("id") == target_id + and task.find("config").get("id") == scan_config_id + ): task_id = task.get("id") if not is_uuid(task_id): return None @@ -321,12 +349,13 @@ def get_scanners(name=None): connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with 
Gmp(connection) as gmp_cnx: gmp_cnx.authenticate( engine.scanner["options"]["gmp_username"]["value"], - engine.scanner["options"]["gmp_password"]["value"]) + engine.scanner["options"]["gmp_password"]["value"], + ) scanners_xml = gmp_cnx.get_scanners() try: scanners = ET.fromstring(scanners_xml) @@ -337,7 +366,7 @@ def get_scanners(name=None): for scanner in scanners.findall("scanner"): if name is not None: - if name == scanner.find('name').text: + if name == scanner.find("name").text: return [scanner.get("id")] else: scanners_list.append(scanner.get("id")) @@ -356,17 +385,19 @@ def create_task(target_name, target_id, scan_config_id=None, scanner_id=None): connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with Gmp(connection) as gmp_cnx: gmp_cnx.authenticate( engine.scanner["options"]["gmp_username"]["value"], - engine.scanner["options"]["gmp_password"]["value"]) + engine.scanner["options"]["gmp_password"]["value"], + ) new_task_xml = gmp_cnx.create_task( - name=target_name + " - {}".format(get_scan_config_name(scan_config_id, gmp=gmp_cnx)), + name=target_name + + " - {}".format(get_scan_config_name(scan_config_id, gmp=gmp_cnx)), config_id=scan_config_id, target_id=target_id, - scanner_id=scanner_id + scanner_id=scanner_id, ) try: new_task = ET.fromstring(new_task_xml) @@ -391,12 +422,13 @@ def start_task(task_id): connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with Gmp(connection) as gmp_cnx: gmp_cnx.authenticate( engine.scanner["options"]["gmp_username"]["value"], - 
engine.scanner["options"]["gmp_password"]["value"]) + engine.scanner["options"]["gmp_password"]["value"], + ) start_scan_results_xml = gmp_cnx.start_task(task_id) try: start_scan_results = ET.fromstring(start_scan_results_xml) @@ -421,12 +453,13 @@ def get_last_report(task_id): connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with Gmp(connection) as gmp_cnx: gmp_cnx.authenticate( engine.scanner["options"]["gmp_username"]["value"], - engine.scanner["options"]["gmp_password"]["value"]) + engine.scanner["options"]["gmp_password"]["value"], + ) task_xml = gmp_cnx.get_task(task_id) try: task = ET.fromstring(task_xml) @@ -467,8 +500,11 @@ def get_multiple_report_status(info, gmp_cnx): return None task_id = info["task_id"] report_id = info["report_id"] - report = result.find("task/[@id='{task_id}']/*/report[@id='{report_id}']".format( - task_id=task_id, report_id=report_id)) + report = result.find( + "task/[@id='{task_id}']/*/report[@id='{report_id}']".format( + task_id=task_id, report_id=report_id + ) + ) if report is None: assets_status.update({"status": "Failure"}) else: @@ -545,11 +581,11 @@ def split_port(asset_port): port_number = "0" port_protocol = "tcp" try: - if asset_port.split('/')[0].isnumeric(): - port_number = asset_port.split('/')[0] + if asset_port.split("/")[0].isnumeric(): + port_number = asset_port.split("/")[0] - if asset_port.split('/')[1] in ["tcp", "udp"]: - port_protocol = asset_port.split('/')[1] + if asset_port.split("/")[1] in ["tcp", "udp"]: + port_protocol = asset_port.split("/")[1] except Exception: pass return port_number, port_protocol @@ -603,19 +639,21 @@ def test(): def info(): """Get info on running engine.""" status() - return jsonify({ - "page": "info", - "engine_config": { - "name": engine.name, - 
"description": engine.description, - "version": engine.version, - "status": engine.status, - "reason": engine.scanner.get("reason", ""), - "allowed_asset_types": engine.allowed_asset_types, - "max_scans": engine.max_scans, - "nb_scans": len(engine.scans.keys()), + return jsonify( + { + "page": "info", + "engine_config": { + "name": engine.name, + "description": engine.description, + "version": engine.version, + "status": engine.status, + "reason": engine.scanner.get("reason", ""), + "allowed_asset_types": engine.allowed_asset_types, + "max_scans": engine.max_scans, + "nb_scans": len(engine.scans.keys()), + }, } - }) + ) @app.route("/engines/openvas/clean") @@ -645,61 +683,68 @@ def status(): for scan_id in engine.scans.keys(): assets_map = None if "assets_map" in engine.scans[scan_id].keys(): - assets_map = engine.scans[scan_id]['assets_map'] + assets_map = engine.scans[scan_id]["assets_map"] scan_data = { - "status": engine.scans[scan_id]['status'], - "started_at": engine.scans[scan_id]['started_at'], - "finished_at": engine.scans[scan_id]['finished_at'], - "assets": engine.scans[scan_id]['assets'], - "assets_map": assets_map + "status": engine.scans[scan_id]["status"], + "started_at": engine.scans[scan_id]["started_at"], + "finished_at": engine.scans[scan_id]["finished_at"], + "assets": engine.scans[scan_id]["assets"], + "assets_map": assets_map, } if "info" in engine.scans[scan_id].keys(): - scan_data.update({ - "info": engine.scans[scan_id]['info'], - }) + scan_data.update( + { + "info": engine.scans[scan_id]["info"], + } + ) scans.append({scan_id: scan_data}) - res.update({ - "nb_scans": len(engine.scans), - "status": engine.status, - "scans": scans}) + res.update({"nb_scans": len(engine.scans), "status": engine.status, "scans": scans}) return jsonify(res) def _status_scan(scan_id, gmp_cnx=None): scan_status = "SCANNING" - if engine.scans[scan_id]['status'] in ["STARTED", "FINISHED"]: + if engine.scans[scan_id]["status"] in ["STARTED", "FINISHED"]: return 
scan_status if gmp_cnx is None: connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with Gmp(connection) as gmp_cnx: gmp_cnx.authenticate( engine.scanner["options"]["gmp_username"]["value"], - engine.scanner["options"]["gmp_password"]["value"]) - scan_assets_status = get_multiple_report_status(engine.scans[scan_id]["info"], gmp_cnx) + engine.scanner["options"]["gmp_password"]["value"], + ) + scan_assets_status = get_multiple_report_status( + engine.scans[scan_id]["info"], gmp_cnx + ) connection.disconnect() else: - scan_assets_status = get_multiple_report_status(engine.scans[scan_id]["info"], gmp_cnx) + scan_assets_status = get_multiple_report_status( + engine.scans[scan_id]["info"], gmp_cnx + ) if scan_assets_status is None: - engine.scans[scan_id]['status'] = "UNKNOWN" + engine.scans[scan_id]["status"] = "UNKNOWN" return scan_status if scan_assets_status["status"] == "Done": - if 'report_available' in engine.scans[scan_id].keys() and engine.scans[scan_id]['report_available'] is True: + if ( + "report_available" in engine.scans[scan_id].keys() + and engine.scans[scan_id]["report_available"] is True + ): scan_status = "FINISHED" engine.scans[scan_id]["finished_at"] = int(time.time() * 1000) else: scan_status = "SCANNING" - engine.scans[scan_id]['status'] = scan_status + engine.scans[scan_id]["status"] = scan_status return scan_assets_status @@ -708,7 +753,9 @@ def status_scan(scan_id): """Get status on scan identified by id.""" res = {"page": "status", "status": "UNKNOWN"} if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) if 
engine.scans[scan_id]["status"] == "ERROR": @@ -716,14 +763,14 @@ def status_scan(scan_id): return jsonify(res) assets_status = _status_scan(scan_id) - if engine.scans[scan_id]['status'] in ["SCANNING", "STARTED", "FINISHED"]: - res.update({ - 'status': engine.scans[scan_id]['status'] - }) - if 'info' in engine.scans[scan_id].keys(): - res.update({ - 'info': engine.scans[scan_id]['info'], - }) + if engine.scans[scan_id]["status"] in ["SCANNING", "STARTED", "FINISHED"]: + res.update({"status": engine.scans[scan_id]["status"]}) + if "info" in engine.scans[scan_id].keys(): + res.update( + { + "info": engine.scans[scan_id]["info"], + } + ) if assets_status is None: res.update({"status": "error", "reason": "Cannot find any report_status"}) @@ -742,20 +789,23 @@ def stop_scan(scan_id): """Stop scan identified by id.""" res = {"page": "stop_scan", "status": "UNKNOWN"} if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) - task_id = engine.scans[scan_id]['info']['task_id'] + task_id = engine.scans[scan_id]["info"]["task_id"] try: connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with Gmp(connection) as gmp_cnx: gmp_cnx.authenticate( engine.scanner["options"]["gmp_username"]["value"], - engine.scanner["options"]["gmp_password"]["value"]) + engine.scanner["options"]["gmp_password"]["value"], + ) gmp_cnx.stop_task(task_id) engine.scans[scan_id]["status"] = "STOPPED" connection.disconnect() @@ -775,7 +825,7 @@ def getreport(scan_id): def _loadconfig(): """Load configuration file.""" - conf_file = APP_BASE_DIR+"/openvas.json" + conf_file = APP_BASE_DIR + 
"/openvas.json" if not exists(conf_file): app.logger.error("Error: config file '{}' not found".format(conf_file)) return False @@ -786,7 +836,7 @@ def _loadconfig(): engine.scanner["status"] = "ERROR" engine.scanner["reason"] = "Starting loading configuration file" except Exception as ex: - app.logger.error("Loadconfig: Error "+ex.__str__()) + app.logger.error("Loadconfig: Error " + ex.__str__()) return False try: @@ -794,26 +844,29 @@ def _loadconfig(): connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with Gmp(connection) as this.gmp: response = this.gmp.authenticate( engine.scanner["options"]["gmp_username"]["value"], - engine.scanner["options"]["gmp_password"]["value"]) + engine.scanner["options"]["gmp_password"]["value"], + ) except Exception as ex: engine.scanner["status"] = "ERROR" engine.status = "ERROR" - if(ex.__str__() == "timed out"): - engine.scanner["reason"] = "connection to {}:{} timed-out".format(connection.hostname, connection.port) + if ex.__str__() == "timed out": + engine.scanner["reason"] = "connection to {}:{} timed-out".format( + connection.hostname, connection.port + ) else: engine.scanner["reason"] = ex.__str__() - app.logger.error("Loadconfig: Error "+ex.__str__()) + app.logger.error("Loadconfig: Error " + ex.__str__()) return False # Check login response - if response.find("authenticate_response status=\"400\"") > 0: + if response.find('authenticate_response status="400"') > 0: engine.status = "ERROR" engine.scanner["status"] = "ERROR" engine.scanner["reason"] = "openvas login failed" @@ -827,20 +880,19 @@ def _loadconfig(): app.logger.error("Loadconfig: Unable to retrieve port lists.") return False - for pl in portlists.findall('port_list'): - pl_name = pl.find('name').text - pl_uuid = pl.get('id') + for 
pl in portlists.findall("port_list"): + pl_name = pl.find("name").text + pl_uuid = pl.get("id") this.openvas_portlists.update({pl_name: pl_uuid}) # Create custom port lists if "patrowl-all_tcp" not in this.openvas_portlists.keys(): try: new_pl_xml = this.gmp.create_port_list( - name="patrowl-all_tcp", - port_range="T:1-65535" + name="patrowl-all_tcp", port_range="T:1-65535" ) new_pl = ET.fromstring(new_pl_xml) - this.openvas_portlists.update({"patrowl-all_tcp": new_pl.get('id')}) + this.openvas_portlists.update({"patrowl-all_tcp": new_pl.get("id")}) except Exception: app.logger.error("Loadconfig: Unable to create port list 'patrowl-all_tcp'") return False @@ -848,23 +900,23 @@ def _loadconfig(): if "patrowl-quick_tcp" not in this.openvas_portlists.keys(): try: new_pl_xml = this.gmp.create_port_list( - name="patrowl-quick_tcp", - port_range="T:21-80,T:443,U:53" + name="patrowl-quick_tcp", port_range="T:21-80,T:443,U:53" ) new_pl = ET.fromstring(new_pl_xml) - this.openvas_portlists.update({"patrowl-quick_tcp": new_pl.get('id')}) + this.openvas_portlists.update({"patrowl-quick_tcp": new_pl.get("id")}) except Exception: - app.logger.error("Loadconfig: Unable to create port list 'patrowl-quick_tcp'") + app.logger.error( + "Loadconfig: Unable to create port list 'patrowl-quick_tcp'" + ) return False if "patrowl-tcp_80" not in this.openvas_portlists.keys(): try: new_pl_xml = this.gmp.create_port_list( - name="patrowl-tcp_80", - port_range="T:80" + name="patrowl-tcp_80", port_range="T:80" ) new_pl = ET.fromstring(new_pl_xml) - this.openvas_portlists.update({"patrowl-tcp_80": new_pl.get('id')}) + this.openvas_portlists.update({"patrowl-tcp_80": new_pl.get("id")}) except Exception: app.logger.error("Loadconfig: Unable to create port list 'patrowl-tcp_80'") return False @@ -872,11 +924,10 @@ def _loadconfig(): if "patrowl-tcp_443" not in this.openvas_portlists.keys(): try: new_pl_xml = this.gmp.create_port_list( - name="patrowl-tcp_443", - port_range="T:443" + 
name="patrowl-tcp_443", port_range="T:443" ) new_pl = ET.fromstring(new_pl_xml) - this.openvas_portlists.update({"patrowl-tcp_443": new_pl.get('id')}) + this.openvas_portlists.update({"patrowl-tcp_443": new_pl.get("id")}) except Exception: app.logger.error("Loadconfig: Unable to create port list 'patrowl-tcp_443'") return False @@ -884,23 +935,22 @@ def _loadconfig(): if "patrowl-tcp_22" not in this.openvas_portlists.keys(): try: new_pl_xml = this.gmp.create_port_list( - name="patrowl-tcp_22", - port_range="T:22" + name="patrowl-tcp_22", port_range="T:22" ) new_pl = ET.fromstring(new_pl_xml) - this.openvas_portlists.update({"patrowl-tcp_22": new_pl.get('id')}) + this.openvas_portlists.update({"patrowl-tcp_22": new_pl.get("id")}) except Exception: app.logger.error("Loadconfig: Unable to create port list 'patrowl-tcp_22'") return False try: - version_filename = APP_BASE_DIR+'/VERSION' + version_filename = APP_BASE_DIR + "/VERSION" if os.path.exists(version_filename): version_file = open(version_filename, "r") - engine.version = version_file.read().rstrip('\n') + engine.version = version_file.read().rstrip("\n") version_file.close() except Exception as ex: - app.logger.error("Loadconfig: Unable to read the VERSION file. "+ex.__str__()) + app.logger.error("Loadconfig: Unable to read the VERSION file. 
" + ex.__str__()) return False engine.scanner["status"] = "READY" @@ -927,48 +977,64 @@ def start_scan(): # Check the scanner is ready to start a new scan if len(engine.scans) == APP_MAXSCANS: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) return jsonify(res) status() if engine.scanner["status"] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": engine.scanner.get("reason", "scanner not ready"), - "status": engine.scanner["status"] - }}) + res.update( + { + "status": "refused", + "details": { + "reason": engine.scanner.get("reason", "scanner not ready"), + "status": engine.scanner["status"], + }, + } + ) return jsonify(res) data = loads(request.data.decode("utf-8")) if "assets" not in data.keys() or "scan_id" not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something is missing ('assets' ?)" - }}) - app.logger.error("StartScan: arg error, something is missing ('assets' or 'scan_id' ?)") + res.update( + { + "status": "refused", + "details": {"reason": "arg error, something is missing ('assets' ?)"}, + } + ) + app.logger.error( + "StartScan: arg error, something is missing ('assets' or 'scan_id' ?)" + ) return jsonify(res) assets = [] for asset in data["assets"]: # Value if "value" not in asset.keys() or not asset["value"]: - res.update({ - "status": "error", - "reason": "arg error, something is missing ('asset.value')" - }) + res.update( + { + "status": "error", + "reason": "arg error, something is missing ('asset.value')", + } + ) return jsonify(res) # Supported datatypes if asset["datatype"] not in engine.scanner["allowed_asset_types"]: - res.update({ - "status": "error", - "reason": "arg error, bad value for '{}' datatype (not supported)".format(asset["value"]) - }) 
+ res.update( + { + "status": "error", + "reason": "arg error, bad value for '{}' datatype (not supported)".format( + asset["value"] + ), + } + ) return jsonify(res) if asset["datatype"] == "url": @@ -980,16 +1046,16 @@ def start_scan(): scan_id = str(data["scan_id"]) scan = { - "assets": assets, - "threads": [], - "options": data["options"], - "scan_id": scan_id, - "status": "STARTED", - "reason": "", - "lock": False, - "started_at": int(time.time() * 1000), - "finished_at": "", - "findings": {} + "assets": assets, + "threads": [], + "options": data["options"], + "scan_id": scan_id, + "status": "STARTED", + "reason": "", + "lock": False, + "started_at": int(time.time() * 1000), + "finished_at": "", + "findings": {}, } engine.scans.update({scan_id: scan}) @@ -997,13 +1063,15 @@ def start_scan(): thread.start() engine.scans[scan_id]["threads"].append(thread) - res.update({ - "status": "accepted", - "details": { - # "scan_id": scan["scan_id"] - "scan_id": scan_id + res.update( + { + "status": "accepted", + "details": { + # "scan_id": scan["scan_id"] + "scan_id": scan_id + }, } - }) + ) return jsonify(res) @@ -1013,7 +1081,7 @@ def _scan_assets(scan_id): scan = engine.scans[scan_id] scan_config_name = None - if 'profile' in engine.scans[scan_id]["options"].keys(): + if "profile" in engine.scans[scan_id]["options"].keys(): scan_config_name = engine.scans[scan_id]["options"]["profile"] # print("scan_config_name:", scan_config_name) @@ -1024,7 +1092,7 @@ def _scan_assets(scan_id): if "OpenVAS Default" in this.openvas_portlists.keys(): scan_portlist_id = this.openvas_portlists["OpenVAS Default"] scan_portlist_name = "" - if 'port_list' in scan["options"].keys(): + if "port_list" in scan["options"].keys(): scan_portlist_name = scan["options"]["port_list"] if scan_portlist_name in this.openvas_portlists.keys(): scan_portlist_id = this.openvas_portlists[scan_portlist_name] @@ -1032,14 +1100,14 @@ def _scan_assets(scan_id): # print("scan_portlist_id:", scan_portlist_id) if 
scan_portlist_id is None: - engine.scans[scan_id]['status'] = "ERROR" - engine.scans[scan_id]['reason'] = "Port list unknown ('OpenVAS Default' ?)" + engine.scans[scan_id]["status"] = "ERROR" + engine.scans[scan_id]["reason"] = "Port list unknown ('OpenVAS Default' ?)" return False options = get_options(scan) assets = engine.scans[scan_id]["assets"] - assets_hash = hashlib.sha1(str(''.join(assets)).encode('utf-8')).hexdigest() + assets_hash = hashlib.sha1(str("".join(assets)).encode("utf-8")).hexdigest() engine.scans[scan_id]["assets_hash"] = assets_hash try: @@ -1050,18 +1118,23 @@ def _scan_assets(scan_id): target_name=assets_hash, target_hosts=engine.scans[scan_id]["assets"], port_list_id=scan_portlist_id, - port_list_name=scan_portlist_name) # Todo: add credentials if needed + port_list_name=scan_portlist_name, + ) # Todo: add credentials if needed if target_id is None: - engine.scans[scan_id]['status'] = "ERROR" - engine.scans[scan_id]['reason'] = "Unable to create a target ({})".format(assets_hash) + engine.scans[scan_id]["status"] = "ERROR" + engine.scans[scan_id]["reason"] = "Unable to create a target ({})".format( + assets_hash + ) return False task_id = get_task_by_target_name(assets_hash, scan_config_id) if task_id is None and options["enable_create_task"] is True: task_id = create_task(assets_hash, target_id, scan_config_id=scan_config_id) if task_id is None: - engine.scans[scan_id]['status'] = "ERROR" - engine.scans[scan_id]['reason'] = "Unable to create a task ({})".format(assets_hash) + engine.scans[scan_id]["status"] = "ERROR" + engine.scans[scan_id]["reason"] = "Unable to create a task ({})".format( + assets_hash + ) return False if options["enable_start_task"] is True: @@ -1072,30 +1145,35 @@ def _scan_assets(scan_id): report_id = get_last_report(task_id) if report_id is None: - engine.scans[scan_id]['status'] = "ERROR" - engine.scans[scan_id]['reason'] = "Unable to get a report ({})".format(assets_hash) + engine.scans[scan_id]["status"] = "ERROR" 
+ engine.scans[scan_id]["reason"] = "Unable to get a report ({})".format( + assets_hash + ) return False # Store the scan info - engine.scans[scan_id]['info'] = { - "task_id": task_id, - "report_id": report_id, - "status": "accepted" - } + engine.scans[scan_id]["info"] = { + "task_id": task_id, + "report_id": report_id, + "status": "accepted", + } except Exception as e: print(e) - engine.scans[scan_id]['status'] = "ERROR" - engine.scans[scan_id]['reason'] = "Error when trying to start the scan" + engine.scans[scan_id]["status"] = "ERROR" + engine.scans[scan_id]["reason"] = "Error when trying to start the scan" return False # Scan is now running - engine.scans[scan_id]['status'] = "SCANNING" + engine.scans[scan_id]["status"] = "SCANNING" # @todo: Wait max scan timeout max_scan_timeout = DEFAULT_SCAN_TIMEOUT try: - if 'max_timeout' in engine.scans[scan_id]['options'].keys() and engine.scans[scan_id]['options']['max_timeout'].isnumeric(): - max_scan_timeout = int(engine.scans[scan_id]['options']['max_timeout']) + if ( + "max_timeout" in engine.scans[scan_id]["options"].keys() + and engine.scans[scan_id]["options"]["max_timeout"].isnumeric() + ): + max_scan_timeout = int(engine.scans[scan_id]["options"]["max_timeout"]) except Exception: pass timeout = time.time() + max_scan_timeout @@ -1103,25 +1181,37 @@ def _scan_assets(scan_id): while True: time.sleep(5) if time.time() > timeout: - engine.scans[scan_id]['status'] = "ERROR" - engine.scans[scan_id]['reason'] = "Scan timeout exceeded: {} seconds.".format(timeout) + engine.scans[scan_id]["status"] = "ERROR" + engine.scans[scan_id]["reason"] = ( + "Scan timeout exceeded: {} seconds.".format(timeout) + ) break scan_assets_status = _status_scan(scan_id) - if engine.scans[scan_id]["status"].upper() in ["ERROR", "UNKNOWN", "STOPPED", "FINISHED"]: + if engine.scans[scan_id]["status"].upper() in [ + "ERROR", + "UNKNOWN", + "STOPPED", + "FINISHED", + ]: break elif engine.scans[scan_id]["status"].upper() == "STARTED": continue 
elif engine.scans[scan_id]["status"].upper() == "SCANNING": - if scan_assets_status["status"] == "Done" and "report_available" not in engine.scans[scan_id].keys(): + if ( + scan_assets_status["status"] == "Done" + and "report_available" not in engine.scans[scan_id].keys() + ): try: # Get the report from the OpenVAS instance engine.scans[scan_id]["findings"] = get_report(scan_id) except Exception as e: print(e) - engine.scans[scan_id]['status'] = "ERROR" - engine.scans[scan_id]['reason'] = "Unable to get findings from scan '{}'.".format(scan_id) + engine.scans[scan_id]["status"] = "ERROR" + engine.scans[scan_id]["reason"] = ( + "Unable to get findings from scan '{}'.".format(scan_id) + ) break # Parse the results @@ -1129,8 +1219,10 @@ def _scan_assets(scan_id): issues, summary = _parse_results(scan_id) except Exception as e: print(e) - engine.scans[scan_id]['status'] = "ERROR" - engine.scans[scan_id]['reason'] = "Unable to parse findings from scan '{}'.".format(scan_id) + engine.scans[scan_id]["status"] = "ERROR" + engine.scans[scan_id]["reason"] = ( + "Unable to parse findings from scan '{}'.".format(scan_id) + ) break scan = { @@ -1139,16 +1231,18 @@ def _scan_assets(scan_id): "options": engine.scans[scan_id]["options"], "status": engine.scans[scan_id]["status"], "started_at": engine.scans[scan_id]["started_at"], - "finished_at": engine.scans[scan_id]["finished_at"] + "finished_at": engine.scans[scan_id]["finished_at"], } # Store the findings in a file - with open(APP_BASE_DIR+"/results/openvas_"+scan_id+".json", "w") as rf: - dump({ - "scan": scan, - "summary": summary, - "issues": issues - }, rf, default=_json_serial) + with open( + APP_BASE_DIR + "/results/openvas_" + scan_id + ".json", "w" + ) as rf: + dump( + {"scan": scan, "summary": summary, "issues": issues}, + rf, + default=_json_serial, + ) engine.scans[scan_id]["status"] = "FINISHED" engine.scans[scan_id]["finished_at"] = int(time.time() * 1000) @@ -1168,12 +1262,13 @@ def get_report(scan_id): 
connection = TLSConnection( hostname=engine.scanner["options"]["gmp_host"]["value"], port=engine.scanner["options"]["gmp_port"]["value"], - timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)) + timeout=int(engine.scanner["options"].get("timeout", DEFAULT_TIMEOUT)), ) with Gmp(connection) as gmp_cnx: gmp_cnx.authenticate( engine.scanner["options"]["gmp_username"]["value"], - engine.scanner["options"]["gmp_password"]["value"]) + engine.scanner["options"]["gmp_password"]["value"], + ) if not isfile("results/openvas_report_{}_{}.xml".format(scan_id, assets_hash)): # # result = gmp_cnx.get_reports(filter="report_id={} levels=hmlg apply_overrides=0 rows=-1 min_qod=70 sort-reverse=severity notes=1 overrides=1".format(report_id), details=1, override_details=1, note_details=1) # # result = gmp_cnx.get_reports(filter="task_id={} levels=hmlg apply_overrides=0 rows=-1 min_qod=70 sort-reverse=severity notes=1 overrides=1".format(task_id), details=1, override_details=1, note_details=1) @@ -1190,7 +1285,7 @@ def get_report(scan_id): # filter="levels=hmlg apply_overrides=0 rows=-1 min_qod=70 sort-reverse=severity notes=1 overrides=1", filter_string="levels=hmlg apply_overrides=0 rows=-1 min_qod=70 sort-reverse=severity notes=1 overrides=1", details=1, - ignore_pagination=1 + ignore_pagination=1, ) # result = gmp_cnx.get_results(filter="task_id={} levels=hmlg apply_overrides=0 rows=-1 min_qod=70 sort-reverse=severity notes=1 overrides=1".format(task_id), details=1, override_details=1, note_details=1) @@ -1203,12 +1298,16 @@ def get_report(scan_id): # override_details=1, # note_details=1 # ) - result_file = open("results/openvas_report_{}_{}.xml".format(scan_id, assets_hash), "w") + result_file = open( + "results/openvas_report_{}_{}.xml".format(scan_id, assets_hash), "w" + ) result_file.write(result) result_file.close() try: - tree = ET.parse("results/openvas_report_{}_{}.xml".format(scan_id, assets_hash)) + tree = ET.parse( + 
"results/openvas_report_{}_{}.xml".format(scan_id, assets_hash) + ) except Exception as e: # No Element found in XML file app.logger.error(e) @@ -1217,7 +1316,7 @@ def get_report(scan_id): # Build the asset mapping assets_map = {} - for asset in engine.scans[scan_id]['assets']: + for asset in engine.scans[scan_id]["assets"]: asset_datatype = "fqdn" siblings = [asset] if is_domain(asset): @@ -1241,19 +1340,21 @@ def get_report(scan_id): app.logger.error(e) pass - assets_map.update({ - asset: { - 'siblings': list(set(siblings)), - 'datatype': asset_datatype, - 'has_issues': False + assets_map.update( + { + asset: { + "siblings": list(set(siblings)), + "datatype": asset_datatype, + "has_issues": False, + } } - }) + ) - engine.scans[scan_id]['assets_map'] = assets_map + engine.scans[scan_id]["assets_map"] = assets_map report = tree.getroot().find("report") # Use with get_reports # report = tree.getroot() # Use with get_results - for result in report.findall('.//result'): + for result in report.findall(".//result"): try: if result.find("host") is None: continue @@ -1261,16 +1362,23 @@ def get_report(scan_id): host_name = result.find("host").find("hostname") for a in assets_map.keys(): - if host_ip in assets_map[a]['siblings']: + if host_ip in assets_map[a]["siblings"]: issues.append(result) - engine.scans[scan_id]['assets_map'][a]['has_issues'] = True - elif host_name is not None and host_name.text in assets_map[a]['siblings']: + engine.scans[scan_id]["assets_map"][a]["has_issues"] = True + elif ( + host_name is not None + and host_name.text in assets_map[a]["siblings"] + ): issues.append(result) - engine.scans[scan_id]['assets_map'][a]['has_issues'] = True + engine.scans[scan_id]["assets_map"][a]["has_issues"] = True except Exception as e: # probably unknown issue's host, skip it - app.logger.error("Warning: failed to process issue: {}".format(ET.tostring(result, encoding='utf8', method='xml'))) + app.logger.error( + "Warning: failed to process issue: {}".format( + 
ET.tostring(result, encoding="utf8", method="xml") + ) + ) app.logger.error(e) connection.disconnect() @@ -1282,49 +1390,43 @@ def _parse_results(scan_id): issues = [] summary = {} - nb_vulns = { - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - } + nb_vulns = {"info": 0, "low": 0, "medium": 0, "high": 0, "critical": 0} timestamp = int(time.time() * 1000) # No issue if engine.scans[scan_id]["findings"] == {}: - for asset in engine.scans[scan_id]['assets']: - issues.append({ - "issue_id": len(issues)+1, - "severity": "info", "confidence": "certain", - "target": { - "addr": [asset], - "protocol": "tcp" - }, - "title": "No results found.", - "solution": "n/a", - "metadata": {}, - "type": "openvas_report", - "timestamp": timestamp, - "description": "No results found during the scan.", - }) - - for asset in engine.scans[scan_id]['assets_map'].keys(): - if engine.scans[scan_id]['assets_map'][asset]['has_issues'] is False: - issues.append({ - "issue_id": len(issues)+1, - "severity": "info", "confidence": "certain", - "target": { - "addr": [asset], - "protocol": "tcp" - }, - "title": "No results found.", - "solution": "n/a", - "metadata": {}, - "type": "openvas_report", - "timestamp": timestamp, - "description": "No results found during the scan.", - }) + for asset in engine.scans[scan_id]["assets"]: + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "info", + "confidence": "certain", + "target": {"addr": [asset], "protocol": "tcp"}, + "title": "No results found.", + "solution": "n/a", + "metadata": {}, + "type": "openvas_report", + "timestamp": timestamp, + "description": "No results found during the scan.", + } + ) + + for asset in engine.scans[scan_id]["assets_map"].keys(): + if engine.scans[scan_id]["assets_map"][asset]["has_issues"] is False: + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "info", + "confidence": "certain", + "target": {"addr": [asset], "protocol": "tcp"}, + "title": "No results found.", + 
"solution": "n/a", + "metadata": {}, + "type": "openvas_report", + "timestamp": timestamp, + "description": "No results found during the scan.", + } + ) titles = [] for result in engine.scans[scan_id]["findings"]: @@ -1332,15 +1434,21 @@ def _parse_results(scan_id): if result.find("nvt") is None: continue # Do not report an outdated or end-of-life scan engine - if result.find("nvt") is not None and "Report outdated" in result.find("nvt").find("name").text: + if ( + result.find("nvt") is not None + and "Report outdated" in result.find("nvt").find("name").text + ): continue - if result.find("nvt") is not None and "Important Announcement" in result.find("nvt").find("name").text: + if ( + result.find("nvt") is not None + and "Important Announcement" in result.find("nvt").find("name").text + ): continue if result.find("severity") is not None: severity = float(result.find("severity").text) else: - severity = 'info' + severity = "info" cve = [] if result.find("nvt").find("cve") is not None: cve = [result.find("nvt").find("cve").text] @@ -1364,14 +1472,21 @@ def _parse_results(scan_id): asset_name = result.find("host").text asset_hostname = result.find("host").find("hostname") asset_names = [] - for a in engine.scans[scan_id]['assets_map'].keys(): - if asset_name in engine.scans[scan_id]['assets_map'][a]['siblings']: - if engine.scans[scan_id]['assets_map'][a]['datatype'] in ['ip-range', 'ip-subnet']: + for a in engine.scans[scan_id]["assets_map"].keys(): + if asset_name in engine.scans[scan_id]["assets_map"][a]["siblings"]: + if engine.scans[scan_id]["assets_map"][a]["datatype"] in [ + "ip-range", + "ip-subnet", + ]: asset_names.append(asset_name) else: asset_names.append(a) - if asset_hostname is not None and asset_hostname.text in engine.scans[scan_id]['assets_map'][a]['siblings']: + if ( + asset_hostname is not None + and asset_hostname.text + in engine.scans[scan_id]["assets_map"][a]["siblings"] + ): asset_names.append(asset_hostname.text) 
asset_names.append(asset_name) @@ -1398,89 +1513,96 @@ def _parse_results(scan_id): nb_vulns[criticity] += 1 # CVE - if (refs): - for ref in refs.findall('ref'): - if ref.attrib['type'] == 'cve': - cve.append(ref.attrib['id']) + if refs: + for ref in refs.findall("ref"): + if ref.attrib["type"] == "cve": + cve.append(ref.attrib["id"]) # form description description = "[{}] CVSS: {}\n\n".format(threat, severity) if len(cve) > 0: description += "Associated CVE: {}\n\n".format(", ".join(cve)) - if (xmlDesc): + if xmlDesc: description += xmlDesc + "\n\n" - if (tags): - description += tags.replace('|', '\n') + "\n\n" + if tags: + description += tags.replace("|", "\n") + "\n\n" # Solution - solution_data = re.search('\|solution=(.+?)\|', tags) + solution_data = re.search("\|solution=(.+?)\|", tags) if solution_data and solution_data[0] != "|": solution = solution_data.group(1) # metadata - finding_metadata = { - "risk": {"cvss_base_score": cvss_base}, - "vuln_refs": {} - } + finding_metadata = {"risk": {"cvss_base_score": cvss_base}, "vuln_refs": {}} # CVE if len(cve) > 0: - finding_metadata.update({ - "vuln_refs": {"CVE": cve} - }) + finding_metadata.update({"vuln_refs": {"CVE": cve}}) # CPE try: if name == "CPE Inventory": - finding_metadata.update({ - "vuln_refs": {"CPE": [c.split("|")[1] for c in xmlDesc.split("\n")]} - }) + finding_metadata.update( + { + "vuln_refs": { + "CPE": [c.split("|")[1] for c in xmlDesc.split("\n")] + } + } + ) except Exception: pass try: if name == "CPE Inventory": - finding_metadata.update({ - "vuln_refs": {"CPE": [c.split("|")[1] for c in xmlDesc.split("\n\n")]} - }) + finding_metadata.update( + { + "vuln_refs": { + "CPE": [c.split("|")[1] for c in xmlDesc.split("\n\n")] + } + } + ) except Exception: pass # if (xmlDesc) and "CPE:" in str(xmlDesc): # print(xmlDesc) - # cpe_list = finding_metadata["vuln_refs"]["CPE"] - # for desc_line in xmlDesc.split("\n"): - # if desc_line.startswith("CPE:"): - # cpe_list.append(desc_line.split("\t")[1]) 
- # - # finding_metadata.update({ - # "vuln_refs": {"CPE": cpe_list} - # }) + # cpe_list = finding_metadata["vuln_refs"]["CPE"] + # for desc_line in xmlDesc.split("\n"): + # if desc_line.startswith("CPE:"): + # cpe_list.append(desc_line.split("\t")[1]) + # + # finding_metadata.update({ + # "vuln_refs": {"CPE": cpe_list} + # }) # create issue - issues.append({ - "issue_id": len(issues)+1, - "severity": criticity, "confidence": "certain", - "target": { - "addr": asset_names, - "protocol": asset_port_protocol - }, - "title": title, - "solution": solution, - "metadata": finding_metadata, - "type": "openvas_report", - "timestamp": timestamp, - "description": description, - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": criticity, + "confidence": "certain", + "target": {"addr": asset_names, "protocol": asset_port_protocol}, + "title": title, + "solution": solution, + "metadata": finding_metadata, + "type": "openvas_report", + "timestamp": timestamp, + "description": description, + } + ) # print("new_issue", issues[-1]) xmlDesc = "" except Exception as e: # probably unknown issue's host, skip it - app.logger.error("Warning: failed to process issue: {}".format(ET.tostring(result, encoding='utf8', method='xml'))) + app.logger.error( + "Warning: failed to process issue: {}".format( + ET.tostring(result, encoding="utf8", method="xml") + ) + ) app.logger.error(e) - if hasattr(e, 'message'): + if hasattr(e, "message"): app.logger.error(e.message) summary = { @@ -1491,7 +1613,7 @@ def _parse_results(scan_id): "nb_high": nb_vulns["high"], "nb_critical": nb_vulns["critical"], "engine_name": "openvas", - "engine_version": engine.scanner["version"] + "engine_version": engine.scanner["version"], } return issues, summary @@ -1504,46 +1626,60 @@ def getfindings(scan_id): # check if the scan_id exists if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", 
"reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) # check if the scan is finished # status() _status_scan(scan_id) if engine.scans[scan_id]["status"] != "FINISHED": - res.update({ - "status": "error", - "reason": "scan_id '{}' not finished (status={})".format(scan_id, engine.scans[scan_id]["status"]) - }) + res.update( + { + "status": "error", + "reason": "scan_id '{}' not finished (status={})".format( + scan_id, engine.scans[scan_id]["status"] + ), + } + ) return jsonify(res) try: - with open(APP_BASE_DIR+"/results/openvas_"+scan_id+".json", "r") as rf: + with open(APP_BASE_DIR + "/results/openvas_" + scan_id + ".json", "r") as rf: json_report = load(rf) except Exception: - res.update({ - "status": "error", - "reason": "Unable to get report and findings from scan '{}'".format(scan_id) - }) + res.update( + { + "status": "error", + "reason": "Unable to get report and findings from scan '{}'".format( + scan_id + ), + } + ) return jsonify(res) - res.update({ - "scan": json_report['scan'], - "summary": json_report['summary'], - "issues": json_report['issues'], - "status": "success" - }) + res.update( + { + "scan": json_report["scan"], + "summary": json_report["summary"], + "issues": json_report["issues"], + "status": "success", + } + ) return jsonify(res) @app.before_first_request def main(): """First function called.""" - if not exists(APP_BASE_DIR+"/results"): - makedirs(APP_BASE_DIR+"/results") + if not exists(APP_BASE_DIR + "/results"): + makedirs(APP_BASE_DIR + "/results") res = _loadconfig() if res is False: - app.logger.error("Unable to initialize the engine with provided configuration file.") + app.logger.error( + "Unable to initialize the engine with provided configuration file." 
+ ) if __name__ == "__main__": diff --git a/engines/owl_dns/engine-owl_dns.py b/engines/owl_dns/engine-owl_dns.py index fd54aad3..63d805b5 100644 --- a/engines/owl_dns/engine-owl_dns.py +++ b/engines/owl_dns/engine-owl_dns.py @@ -1,6 +1,8 @@ #!/usr/bin/python3 # -*- coding: utf-8 -*- -import os, sys, json, time, urllib, hashlib, threading, datetime, copy, dns.resolver, socket, optparse, random, string + +import os, sys, json, time, urllib, hashlib, threading +import datetime, copy, dns.resolver, socket, optparse, random, string from flask import Flask, request, jsonify, redirect, url_for, send_from_directory import validators import requests @@ -694,9 +696,9 @@ def _do_seg_check(scan_id, asset_value): this.scans[scan_id]["findings"]["seg_dict"][asset_value] = copy.deepcopy( seg_dict ) - this.scans[scan_id]["findings"]["seg_dict_dns_records"][ - asset_value - ] = copy.deepcopy(dns_records) + this.scans[scan_id]["findings"]["seg_dict_dns_records"][asset_value] = ( + copy.deepcopy(dns_records) + ) else: this.scans[scan_id]["findings"]["no_seg"] = { asset_value: "MX records found but no Secure Email Gateway set" diff --git a/engines/sslscan/engine-sslscan.py b/engines/sslscan/engine-sslscan.py index 143127a6..c447531b 100644 --- a/engines/sslscan/engine-sslscan.py +++ b/engines/sslscan/engine-sslscan.py @@ -16,10 +16,10 @@ from PatrowlEnginesUtils.PatrowlEngine import PatrowlEngineFinding from PatrowlEnginesUtils.PatrowlEngineExceptions import PatrowlEngineExceptions -APP_DEBUG = os.environ.get('DEBUG', '').lower() in ['true', '1', 'yes', 'y', 'on'] +APP_DEBUG = os.environ.get("DEBUG", "").lower() in ["true", "1", "yes", "y", "on"] APP_HOST = "0.0.0.0" APP_PORT = 5014 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 25)) +APP_MAXSCANS = int(os.environ.get("APP_MAXSCANS", 25)) APP_ENGINE_NAME = "sslscan" APP_BASE_DIR = os.path.dirname(os.path.realpath(__file__)) VERSION = "1.4.18" @@ -31,7 +31,7 @@ base_dir=APP_BASE_DIR, name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS, - 
version=VERSION + version=VERSION, ) @@ -49,97 +49,97 @@ def handle_invalid_usage(error): return response -@app.route('/') +@app.route("/") def default(): """Route by default.""" return engine.default() -@app.route('/engines/sslscan/') +@app.route("/engines/sslscan/") def index(): """Return index page.""" return engine.index() -@app.route('/engines/sslscan/liveness') +@app.route("/engines/sslscan/liveness") def liveness(): """Return liveness page.""" return engine.liveness() -@app.route('/engines/sslscan/readiness') +@app.route("/engines/sslscan/readiness") def readiness(): """Return readiness page.""" return engine.readiness() -@app.route('/engines/sslscan/test') +@app.route("/engines/sslscan/test") def test(): """Return test page.""" return engine.test() -@app.route('/engines/sslscan/reloadconfig') +@app.route("/engines/sslscan/reloadconfig") def reloadconfig(): """Reload the configuration file.""" return engine.reloadconfig() -@app.route('/engines/sslscan/info') +@app.route("/engines/sslscan/info") def info(): """Get info on running engine.""" return engine.info() -@app.route('/engines/sslscan/clean') +@app.route("/engines/sslscan/clean") def clean(): """Clean all scans.""" return engine.clean() -@app.route('/engines/sslscan/clean/') +@app.route("/engines/sslscan/clean/") def clean_scan(scan_id): """Clean scan identified by id.""" return engine.clean_scan(scan_id) -@app.route('/engines/sslscan/status') +@app.route("/engines/sslscan/status") def status(): """Get status on engine and all scans.""" return engine.getstatus() -@app.route('/engines/sslscan/status/') +@app.route("/engines/sslscan/status/") def status_scan(scan_id): """Get status on scan identified by id.""" return engine.getstatus_scan(scan_id) -@app.route('/engines/sslscan/stopscans') +@app.route("/engines/sslscan/stopscans") def stop(): """Stop all scans.""" return engine.stop() -@app.route('/engines/sslscan/stop/') +@app.route("/engines/sslscan/stop/") def stop_scan(scan_id): """Stop scan identified 
by id.""" return engine.stop_scan(scan_id) -@app.route('/engines/sslscan/getfindings/') +@app.route("/engines/sslscan/getfindings/") def getfindings(scan_id): """Get findings on finished scans.""" return engine.getfindings(scan_id) -@app.route('/engines/sslscan/getreport/') +@app.route("/engines/sslscan/getreport/") def getreport(scan_id): """Get report on finished scans.""" return engine.getreport(scan_id) -@app.route('/engines/sslscan/startscan', methods=['POST']) +@app.route("/engines/sslscan/startscan", methods=["POST"]) def startscan(): """Start a new scan.""" # Check params and prepare the PatrowlEngineScan @@ -157,8 +157,8 @@ def startscan(): asset_ports = ["443"] # Create the results folder - if not os.path.exists(APP_BASE_DIR+"/results/"+scan_id): - os.makedirs(APP_BASE_DIR+"/results/"+scan_id) + if not os.path.exists(APP_BASE_DIR + "/results/" + scan_id): + os.makedirs(APP_BASE_DIR + "/results/" + scan_id) assets_list = [] for asset in engine.scans[scan_id]["assets"]: @@ -174,14 +174,12 @@ def startscan(): for asset_port in asset_ports: th = threading.Thread( target=_scan_thread, - kwargs={ - "scan_id": scan_id, - "asset": asset, - "asset_port": asset_port}) + kwargs={"scan_id": scan_id, "asset": asset, "asset_port": asset_port}, + ) th.start() - engine.scans[scan_id]['threads'].append(th) + engine.scans[scan_id]["threads"].append(th) - engine.scans[scan_id]['status'] = "SCANNING" + engine.scans[scan_id]["status"] = "SCANNING" # Finish res.update({"status": "accepted"}) @@ -191,26 +189,32 @@ def startscan(): def _scan_thread(scan_id, asset, asset_port): # issue_id = 0 # findings = [] - output_dir = APP_BASE_DIR+"/results/"+scan_id + output_dir = APP_BASE_DIR + "/results/" + scan_id if not os.path.exists(output_dir): os.makedirs(output_dir) cmd = "{} --show-certificate --xml={}/{}.xml {}:{}".format( engine.options["bin_path"], - output_dir, asset+"_"+asset_port, asset, asset_port) + output_dir, + asset + "_" + asset_port, + asset, + asset_port, + ) p = 
subprocess.Popen(cmd, shell=True, stdout=open("/dev/null", "w")) while p.poll() is None: # print("still running") time.sleep(1) _parse_xml_results(scan_id, asset, asset_port) - engine.scans[scan_id]['status'] = "FINISHED" + engine.scans[scan_id]["status"] = "FINISHED" def _parse_xml_results(scan_id, asset, asset_port): issue_id = 0 findings = [] - filename = APP_BASE_DIR+"/results/"+scan_id+"/"+asset+"_"+asset_port+".xml" + filename = ( + APP_BASE_DIR + "/results/" + scan_id + "/" + asset + "_" + asset_port + ".xml" + ) # Check file try: findings_tree = ET.parse(filename) @@ -227,12 +231,15 @@ def _parse_xml_results(scan_id, asset, asset_port): issue_id=issue_id, type="ssltest_scan_summary", title="SSLScan scan on '{}:{}'".format(asset, asset_port), - description=ET.tostring(xml_root, encoding='utf-8', method='xml').decode('utf-8'), + description=ET.tostring(xml_root, encoding="utf-8", method="xml").decode( + "utf-8" + ), solution="n/a", severity="info", confidence="firm", - raw=ET.tostring(xml_root, encoding='utf-8', method='xml').decode('utf-8'), - target_addrs=[asset]) + raw=ET.tostring(xml_root, encoding="utf-8", method="xml").decode("utf-8"), + target_addrs=[asset], + ) findings.append(new_finding) if scan_results is not None: @@ -240,7 +247,10 @@ def _parse_xml_results(scan_id, asset, asset_port): issue_id += 1 ciphersuites_issue = _get_ciphersuites( items=scan_results.findall("cipher"), - issue_id=issue_id, asset=asset, asset_port=asset_port) + issue_id=issue_id, + asset=asset, + asset_port=asset_port, + ) if ciphersuites_issue: findings.append(ciphersuites_issue) @@ -249,7 +259,10 @@ def _parse_xml_results(scan_id, asset, asset_port): issue_id += 1 certificate_pem_issue = _get_certificate_blob( cert_blob=scan_results.find("certificate").find("certificate-blob"), - issue_id=issue_id, asset=asset, asset_port=asset_port) + issue_id=issue_id, + asset=asset, + asset_port=asset_port, + ) if certificate_pem_issue: findings.append(certificate_pem_issue) @@ -257,7 
+270,10 @@ def _parse_xml_results(scan_id, asset, asset_port): issue_id += 1 is_cert_expired_issue = _is_certificate_expired( cert_tags=scan_results.find(".//certificate/expired/.."), - issue_id=issue_id, asset=asset, asset_port=asset_port) + issue_id=issue_id, + asset=asset, + asset_port=asset_port, + ) if is_cert_expired_issue: findings.append(is_cert_expired_issue) @@ -265,7 +281,10 @@ def _parse_xml_results(scan_id, asset, asset_port): issue_id += 1 is_cert_selfsigned_issue = _is_certificate_selfsigned( cert_tags=scan_results.find(".//certificate/self-signed/.."), - issue_id=issue_id, asset=asset, asset_port=asset_port) + issue_id=issue_id, + asset=asset, + asset_port=asset_port, + ) if is_cert_selfsigned_issue: findings.append(is_cert_selfsigned_issue) @@ -273,7 +292,10 @@ def _parse_xml_results(scan_id, asset, asset_port): issue_id += 1 hb_vuln = _get_heartbleed_vuln( items=scan_results.findall("heartbleed"), - issue_id=issue_id, asset=asset, asset_port=asset_port) + issue_id=issue_id, + asset=asset, + asset_port=asset_port, + ) if hb_vuln: findings.append(hb_vuln) @@ -281,7 +303,10 @@ def _parse_xml_results(scan_id, asset, asset_port): issue_id += 1 is_fallback_supported_issue = _is_fallback_supported( fallback=scan_results.find("fallback"), - issue_id=issue_id, asset=asset, asset_port=asset_port) + issue_id=issue_id, + asset=asset, + asset_port=asset_port, + ) if is_fallback_supported_issue: findings.append(is_fallback_supported_issue) @@ -289,7 +314,10 @@ def _parse_xml_results(scan_id, asset, asset_port): issue_id += 1 is_secure_renegotiation_issue = _is_secure_renegotiation_supported( sec_rng=scan_results.find("renegotiation"), - issue_id=issue_id, asset=asset, asset_port=asset_port) + issue_id=issue_id, + asset=asset, + asset_port=asset_port, + ) if is_secure_renegotiation_issue: findings.append(is_secure_renegotiation_issue) @@ -297,7 +325,10 @@ def _parse_xml_results(scan_id, asset, asset_port): # issue_id is handled inside the function wp_vuln = 
_spot_weak_protocol( protocols=scan_results.findall("protocol"), - issue_id=issue_id, asset=asset, asset_port=asset_port) + issue_id=issue_id, + asset=asset, + asset_port=asset_port, + ) if wp_vuln: for weak_pr in wp_vuln: issue_id = weak_pr.__dict__["issue_id"] @@ -307,7 +338,10 @@ def _parse_xml_results(scan_id, asset, asset_port): # issue_id is handled inside the function wc_vuln = _spot_weak_ciphersuites( ciphers=scan_results.findall("cipher"), - issue_id=issue_id, asset=asset, asset_port=asset_port) + issue_id=issue_id, + asset=asset, + asset_port=asset_port, + ) if wc_vuln: for weak_cs in wc_vuln: issue_id = weak_cs.__dict__["issue_id"] @@ -332,34 +366,39 @@ def _get_heartbleed_vuln(items, issue_id, asset, asset_port): for item in items: if item.get("vulnerable") == "1": hb_desc += "sslversion='{}' --> is VULNERABLE\n".format( - item.get("sslversion")) + item.get("sslversion") + ) is_vulnerable = True else: hb_desc += "sslversion='{}' --> is not vulnerable\n".format( - item.get("sslversion")) + item.get("sslversion") + ) if is_vulnerable: return PatrowlEngineFinding( issue_id=issue_id, type="ssltest_heartbleed", - title="Heartbleed check on '{}:{}': VULNERABLE".format( - asset, asset_port), + title="Heartbleed check on '{}:{}': VULNERABLE".format(asset, asset_port), description=hb_desc, solution="Update the version of the OpenSSL component used by the \ - service listening on port '{}'".format(asset_port), + service listening on port '{}'".format( + asset_port + ), severity="high", confidence="firm", raw=hb_desc, target_addrs=[asset], meta_tags=["heartbleed", "ssl", "tls"], meta_links=hb_links, - meta_vuln_refs={{"CVE": ["CVE-2014-0160"]}}) + meta_vuln_refs={{"CVE": ["CVE-2014-0160"]}}, + ) else: return PatrowlEngineFinding( issue_id=issue_id, type="ssltest_heartbleed", title="Heartbleed check on '{}:{}': not vulnerable".format( - asset, asset_port), + asset, asset_port + ), description=hb_desc, solution="n/a", severity="info", @@ -367,7 +406,8 @@ def 
_get_heartbleed_vuln(items, issue_id, asset, asset_port): raw=hb_desc, target_addrs=[asset], meta_tags=["heartbleed", "ssl", "tls"], - meta_links=hb_links) + meta_links=hb_links, + ) def _get_ciphersuites(items, issue_id, asset, asset_port): @@ -377,29 +417,33 @@ def _get_ciphersuites(items, issue_id, asset, asset_port): issue_desc = "Supported ciphersuites:\n" for item in items: add_info = "" - if 'curve' in item.keys(): + if "curve" in item.keys(): add_info += "Curve: {}".format(item.get("curve")) - if 'dhebits' in item.keys(): + if "dhebits" in item.keys(): add_info += "DHEbits: {}".format(item.get("dhebits")) - if 'ecdhebits' in item.keys(): + if "ecdhebits" in item.keys(): add_info += "ECDHEbits: {}".format(item.get("ecdhebits")) issue_desc += "{:30} SSLVersion: {:8} Bits: {:4} Status: {:10} {}\n".format( - item.get("cipher"), item.get("sslversion"), - item.get("bits"), item.get("status"), add_info + item.get("cipher"), + item.get("sslversion"), + item.get("bits"), + item.get("status"), + add_info, ) return PatrowlEngineFinding( issue_id=issue_id, type="ssltest_supported_ciphersuites", - title="Supported ciphersuites on '{}:{}'.".format( - asset, asset_port), + title="Supported ciphersuites on '{}:{}'.".format(asset, asset_port), description=issue_desc, solution="n/a", severity="info", confidence="firm", raw=issue_desc, target_addrs=[asset], - meta_tags=["ciphersuites", "ssl", "tls"]) + meta_tags=["ciphersuites", "ssl", "tls"], + ) + def _spot_weak_protocol(protocols, issue_id, asset, asset_port): if protocols is None: @@ -408,92 +452,134 @@ def _spot_weak_protocol(protocols, issue_id, asset, asset_port): for protocol in protocols: if protocol.attrib["type"] == "ssl" and protocol.attrib["enabled"] == "1": issue_id += 1 - res.append(PatrowlEngineFinding( - issue_id=issue_id, - type="tls_supported_protocols", - title="Weak TLS protocol detected : SSLv{}".format(protocol.attrib["version"]), - description="Weak TLS protocol SSLv{} was detected on {}:{}".format( - 
protocol.attrib["version"], asset, asset_port), - solution="Deactivate SSLv{} on your server".format(protocol.attrib["version"]), - severity="high", - confidence="firm", - raw=protocol.attrib, - target_addrs=[asset], - meta_tags=["ssl", "tls"])) - if protocol.attrib["type"] == "tls" and \ - protocol.attrib["version"] in ("1.0", "1.1") and \ - protocol.attrib["enabled"] == "1": + res.append( + PatrowlEngineFinding( + issue_id=issue_id, + type="tls_supported_protocols", + title="Weak TLS protocol detected : SSLv{}".format( + protocol.attrib["version"] + ), + description="Weak TLS protocol SSLv{} was detected on {}:{}".format( + protocol.attrib["version"], asset, asset_port + ), + solution="Deactivate SSLv{} on your server".format( + protocol.attrib["version"] + ), + severity="high", + confidence="firm", + raw=protocol.attrib, + target_addrs=[asset], + meta_tags=["ssl", "tls"], + ) + ) + if ( + protocol.attrib["type"] == "tls" + and protocol.attrib["version"] in ("1.0", "1.1") + and protocol.attrib["enabled"] == "1" + ): issue_id += 1 - res.append(PatrowlEngineFinding( - issue_id=issue_id, - type="tls_supported_protocols", - title="Weak TLS protocol detected : TLSv{}".format(protocol.attrib["version"]), - description="Weak TLS protocol TLSv{} was detected on {}:{}".format( - protocol.attrib["version"], asset, asset_port), - solution="Deactivate TLSv{} on your server".format(protocol.attrib["version"]), - severity="medium", - confidence="firm", - raw=protocol.attrib, - target_addrs=[asset], - meta_tags=["ssl", "tls"])) + res.append( + PatrowlEngineFinding( + issue_id=issue_id, + type="tls_supported_protocols", + title="Weak TLS protocol detected : TLSv{}".format( + protocol.attrib["version"] + ), + description="Weak TLS protocol TLSv{} was detected on {}:{}".format( + protocol.attrib["version"], asset, asset_port + ), + solution="Deactivate TLSv{} on your server".format( + protocol.attrib["version"] + ), + severity="medium", + confidence="firm", + raw=protocol.attrib, 
+ target_addrs=[asset], + meta_tags=["ssl", "tls"], + ) + ) return res + def _spot_weak_ciphersuites(ciphers, issue_id, asset, asset_port): if ciphers is None: return False res = [] for cipher in ciphers: - if cipher.attrib["strength"] in ("anonymous", "medium") and \ - cipher.attrib["status"] in ("preferred", "accepted"): + if cipher.attrib["strength"] in ("anonymous", "medium") and cipher.attrib[ + "status" + ] in ("preferred", "accepted"): issue_id += 1 - res.append(PatrowlEngineFinding( - issue_id=issue_id, - type="tls_supported_ciphersuites", - title="Unsecure TLS ciphersuite detected : {}".format(cipher.attrib["cipher"]), - description="Unsecure TLS ciphersuite {} was detected on {}:{}".format( - cipher.attrib["cipher"], asset, asset_port), - solution="Deactivate the ciphersuite {} on your TLS configuration".format(cipher.attrib["cipher"]), - severity="medium", - confidence="firm", - raw=cipher.attrib, - target_addrs=[asset], - meta_tags=["ssl", "tls", "ciphersuites"])) - if cipher.attrib["strength"] in ("null", "weak") and \ - cipher.attrib["status"] in ("preferred", "accepted"): + res.append( + PatrowlEngineFinding( + issue_id=issue_id, + type="tls_supported_ciphersuites", + title="Unsecure TLS ciphersuite detected : {}".format( + cipher.attrib["cipher"] + ), + description="Unsecure TLS ciphersuite {} was detected on {}:{}".format( + cipher.attrib["cipher"], asset, asset_port + ), + solution="Deactivate the ciphersuite {} on your TLS configuration".format( + cipher.attrib["cipher"] + ), + severity="medium", + confidence="firm", + raw=cipher.attrib, + target_addrs=[asset], + meta_tags=["ssl", "tls", "ciphersuites"], + ) + ) + if cipher.attrib["strength"] in ("null", "weak") and cipher.attrib[ + "status" + ] in ("preferred", "accepted"): issue_id += 1 - res.append(PatrowlEngineFinding( - issue_id=issue_id, - type="tls_supported_ciphersuites", - title="Dangerous (weak) TLS ciphersuite detected : {}".format(cipher.attrib["cipher"]), - description="Weak TLS 
ciphersuite {} was detected on {}:{}".format( - cipher.attrib["cipher"], asset, asset_port), - solution="Deactivate the ciphersuite {} on your TLS configuration".format(cipher.attrib["cipher"]), - severity="medium", - confidence="firm", - raw=cipher.attrib, - target_addrs=[asset], - meta_tags=["ssl", "tls", "ciphersuites"])) + res.append( + PatrowlEngineFinding( + issue_id=issue_id, + type="tls_supported_ciphersuites", + title="Dangerous (weak) TLS ciphersuite detected : {}".format( + cipher.attrib["cipher"] + ), + description="Weak TLS ciphersuite {} was detected on {}:{}".format( + cipher.attrib["cipher"], asset, asset_port + ), + solution="Deactivate the ciphersuite {} on your TLS configuration".format( + cipher.attrib["cipher"] + ), + severity="medium", + confidence="firm", + raw=cipher.attrib, + target_addrs=[asset], + meta_tags=["ssl", "tls", "ciphersuites"], + ) + ) return res + def _get_certificate_blob(cert_blob, issue_id, asset, asset_port): if cert_blob is None: return False - cert_hash = hashlib.sha1(str(cert_blob.text).encode('utf-8')).hexdigest().upper() + cert_hash = hashlib.sha1(str(cert_blob.text).encode("utf-8")).hexdigest().upper() return PatrowlEngineFinding( issue_id=issue_id, type="ssltest_certificate_pem", title="Certificate was retrieved from '{}:{}' with hash '{}'.".format( - asset, asset_port, cert_hash[:6]), + asset, asset_port, cert_hash[:6] + ), description="Following certificate was retrieved from the server:\n\ - {}".format(cert_blob.text), + {}".format( + cert_blob.text + ), solution="n/a", severity="info", confidence="firm", raw=cert_blob.text, target_addrs=[asset], - meta_tags=["certificate", "ssl", "tls", "pem"]) + meta_tags=["certificate", "ssl", "tls", "pem"], + ) def _is_certificate_expired(cert_tags, issue_id, asset, asset_port): @@ -510,21 +596,26 @@ def _is_certificate_expired(cert_tags, issue_id, asset, asset_port): title="Certificate from '{}:{}' is expired.".format(asset, asset_port), description="The SSL/TLS certificate 
retrieved from the server is \ expired:\nNot valid before: {}\nNot valid after: {}".format( - cert_tags.find("not-valid-before").text, - cert_tags.find("not-valid-after").text), + cert_tags.find("not-valid-before").text, + cert_tags.find("not-valid-after").text, + ), solution="Renew the certificate on the service listening on \ - '{}:{}'.".format(asset, asset_port), + '{}:{}'.".format( + asset, asset_port + ), severity="high", confidence="firm", raw=expired_text, target_addrs=[asset], - meta_tags=["certificate", "ssl", "tls", "expired"]) + meta_tags=["certificate", "ssl", "tls", "expired"], + ) + def _is_fallback_supported(fallback, issue_id, asset, asset_port): if fallback is None: return False fallback_support = fallback.attrib["supported"] - if fallback_support == '1': + if fallback_support == "1": return False return PatrowlEngineFinding( @@ -532,45 +623,54 @@ def _is_fallback_supported(fallback, issue_id, asset, asset_port): type="ssltest_fallback_support", title="Downgrade attack prevention is not supported", description="Downgrade attack prevention is not supported on {}:{}".format( - asset, asset_port), + asset, asset_port + ), solution="Enable TLS_FALLBACK_SCSV option on your server", severity="low", confidence="firm", raw=fallback.attrib, target_addrs=[asset], - meta_tags=["ssl", "tls"]) + meta_tags=["ssl", "tls"], + ) + def _is_secure_renegotiation_supported(sec_rng, issue_id, asset, asset_port): if sec_rng is None: return False - if sec_rng.attrib["supported"] != '1': + if sec_rng.attrib["supported"] != "1": return PatrowlEngineFinding( issue_id=issue_id, type="ssltest_secure_renegotiation", title="Secure renegotiation is not supported", description="Secure renegotiation is not supported on {}:{}".format( - asset, asset_port), + asset, asset_port + ), solution="Enable secure renegotiation on your server", severity="medium", confidence="firm", raw=sec_rng.attrib, target_addrs=[asset], - meta_tags=["ssl", "tls"]) - if sec_rng.attrib["supported"] == '1' 
and sec_rng.attrib["secure"] != '1': + meta_tags=["ssl", "tls"], + ) + if sec_rng.attrib["supported"] == "1" and sec_rng.attrib["secure"] != "1": return PatrowlEngineFinding( issue_id=issue_id, type="ssltest_secure_renegotiation", title="Unsecure renegotiation is enabled", - description="Unsecure renegotiation is enabled on {}:{}".format(asset, asset_port), + description="Unsecure renegotiation is enabled on {}:{}".format( + asset, asset_port + ), solution="Disable unsecure renegotiation on your server", severity="high", confidence="firm", raw=sec_rng.attrib, target_addrs=[asset], - meta_tags=["ssl", "tls"]) + meta_tags=["ssl", "tls"], + ) return False + def _is_certificate_selfsigned(cert_tags, issue_id, asset, asset_port): if cert_tags is None: return False @@ -582,30 +682,32 @@ def _is_certificate_selfsigned(cert_tags, issue_id, asset, asset_port): return PatrowlEngineFinding( issue_id=issue_id, type="ssltest_certificate_selfsigned", - title="Certificate from '{}:{}' is self-signed.".format( - asset, asset_port), + title="Certificate from '{}:{}' is self-signed.".format(asset, asset_port), description="The SSL/TLS certificate retrieved from the server is \ self-signed.", solution="Renew the certificate on the service listening on '{}:{}' \ - and sign it with a trusted CA.".format(asset, asset_port), + and sign it with a trusted CA.".format( + asset, asset_port + ), severity="high", confidence="firm", raw=selfsigned_text, target_addrs=[asset], - meta_tags=["certificate", "ssl", "tls", "self-signed"]) + meta_tags=["certificate", "ssl", "tls", "self-signed"], + ) @app.before_first_request def main(): """First function called.""" - if not os.path.exists(APP_BASE_DIR+"/results"): - os.makedirs(APP_BASE_DIR+"/results") + if not os.path.exists(APP_BASE_DIR + "/results"): + os.makedirs(APP_BASE_DIR + "/results") engine._loadconfig() - version_filename = APP_BASE_DIR+'/VERSION' + version_filename = APP_BASE_DIR + "/VERSION" if os.path.exists(version_filename): 
version_file = open(version_filename, "r") - engine.version = version_file.read().rstrip('\n') + engine.version = version_file.read().rstrip("\n") version_file.close() # Check if sslscan is available @@ -613,5 +715,5 @@ def main(): sys.exit(-1) -if __name__ == '__main__': +if __name__ == "__main__": engine.run_app(app_debug=APP_DEBUG, app_host=APP_HOST, app_port=APP_PORT) diff --git a/engines/urlvoid/engine-urlvoid.py b/engines/urlvoid/engine-urlvoid.py index 70e95e76..b1e2646b 100755 --- a/engines/urlvoid/engine-urlvoid.py +++ b/engines/urlvoid/engine-urlvoid.py @@ -16,6 +16,7 @@ from PatrowlEnginesUtils.PatrowlEngine import _json_serial from PatrowlEnginesUtils.PatrowlEngine import PatrowlEngine + # from PatrowlEnginesUtils.PatrowlEngine import PatrowlEngineFinding from PatrowlEnginesUtils.PatrowlEngineExceptions import PatrowlEngineExceptions @@ -23,7 +24,7 @@ APP_DEBUG = False APP_HOST = "0.0.0.0" APP_PORT = 5008 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 25)) +APP_MAXSCANS = int(os.environ.get("APP_MAXSCANS", 25)) APP_ENGINE_NAME = "urlvoid" APP_BASE_DIR = os.path.dirname(os.path.realpath(__file__)) VERSION = "1.4.26" @@ -33,7 +34,7 @@ base_dir=APP_BASE_DIR, name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS, - version=VERSION + version=VERSION, ) this = sys.modules[__name__] @@ -54,86 +55,86 @@ def handle_invalid_usage(error): return response -@app.route('/') +@app.route("/") def default(): """Route by default.""" return engine.default() -@app.route('/engines/urlvoid/') +@app.route("/engines/urlvoid/") def index(): """Return index page.""" return engine.index() -@app.route('/engines/urlvoid/liveness') +@app.route("/engines/urlvoid/liveness") def liveness(): """Return liveness page.""" return engine.liveness() -@app.route('/engines/urlvoid/readiness') +@app.route("/engines/urlvoid/readiness") def readiness(): """Return readiness page.""" return engine.readiness() -@app.route('/engines/urlvoid/test') +@app.route("/engines/urlvoid/test") def test(): """Return 
test page.""" return engine.test() -@app.route('/engines/urlvoid/info') +@app.route("/engines/urlvoid/info") def info(): """Get info on running engine.""" return engine.info() -@app.route('/engines/urlvoid/clean') +@app.route("/engines/urlvoid/clean") def clean(): """Clean all scans.""" return engine.clean() -@app.route('/engines/urlvoid/clean/') +@app.route("/engines/urlvoid/clean/") def clean_scan(scan_id): """Clean scan identified by id.""" return engine.clean_scan(scan_id) -@app.route('/engines/urlvoid/status') +@app.route("/engines/urlvoid/status") def status(): """Get status on engine and all scans.""" return engine.getstatus() -@app.route('/engines/urlvoid/status/') +@app.route("/engines/urlvoid/status/") def status_scan(scan_id): """Get status on scan identified by id.""" return engine.getstatus_scan(scan_id) -@app.route('/engines/urlvoid/stopscans') +@app.route("/engines/urlvoid/stopscans") def stop(): """Stop all scans.""" return engine.stop() -@app.route('/engines/urlvoid/stop/') +@app.route("/engines/urlvoid/stop/") def stop_scan(scan_id): """Stop scan identified by id.""" return engine.stop_scan(scan_id) -@app.route('/engines/urlvoid/getreport/') +@app.route("/engines/urlvoid/getreport/") def getreport(scan_id): """Get report on finished scans.""" return engine.getreport(scan_id) def _loadconfig(): - conf_file = APP_BASE_DIR+'/urlvoid.json' + conf_file = APP_BASE_DIR + "/urlvoid.json" if os.path.exists(conf_file): json_data = open(conf_file) engine.scanner = json.load(json_data) @@ -143,14 +144,14 @@ def _loadconfig(): this.keys.append(apikey) del engine.scanner["apikeys"] - engine.scanner['status'] = "READY" + engine.scanner["status"] = "READY" else: - print ("Error: config file '{}' not found".format(conf_file)) + print("Error: config file '{}' not found".format(conf_file)) return {"status": "error", "reason": "config file not found"} -@app.route('/engines/urlvoid/reloadconfig', methods=['GET']) +@app.route("/engines/urlvoid/reloadconfig", 
methods=["GET"]) def reloadconfig(): res = {"page": "reloadconfig"} _loadconfig() @@ -158,53 +159,67 @@ def reloadconfig(): return jsonify(res) -@app.route('/engines/urlvoid/startscan', methods=['POST']) +@app.route("/engines/urlvoid/startscan", methods=["POST"]) def start_scan(): res = {"page": "startscan"} # Check the scanner is ready to start a new scan if len(engine.scans) == APP_MAXSCANS: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) return jsonify(res) status() - if engine.scanner['status'] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": "scanner not ready", - "status": engine.scanner['status'] - }}) + if engine.scanner["status"] != "READY": + res.update( + { + "status": "refused", + "details": { + "reason": "scanner not ready", + "status": engine.scanner["status"], + }, + } + ) return jsonify(res) data = json.loads(request.data) - if 'assets' not in data.keys() or 'scan_id' not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something is missing ('assets' ?)" - }}) + if "assets" not in data.keys() or "scan_id" not in data.keys(): + res.update( + { + "status": "refused", + "details": {"reason": "arg error, something is missing ('assets' ?)"}, + } + ) return jsonify(res) assets = [] for asset in data["assets"]: # Value if "value" not in asset.keys() or not asset["value"]: - res.update({ - "status": "error", - "reason": "arg error, something is missing ('asset.value')" - }) + res.update( + { + "status": "error", + "reason": "arg error, something is missing ('asset.value')", + } + ) return jsonify(res) # Supported datatypes if asset["datatype"] not in engine.scanner["allowed_asset_types"]: - res.update({ - "status": "error", - "reason": "arg error, bad value for 
'{}' datatype (not supported)".format(asset["value"]) - }) + res.update( + { + "status": "error", + "reason": "arg error, bad value for '{}' datatype (not supported)".format( + asset["value"] + ), + } + ) return jsonify(res) if asset["datatype"] == "url": @@ -213,56 +228,59 @@ def start_scan(): assets.append(asset["value"]) - scan_id = str(data['scan_id']) + scan_id = str(data["scan_id"]) - if data['scan_id'] in engine.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' already launched".format(data['scan_id']), + if data["scan_id"] in engine.scans.keys(): + res.update( + { + "status": "refused", + "details": { + "reason": "scan '{}' already launched".format(data["scan_id"]), + }, } - }) + ) return jsonify(res) scan = { # 'assets': data['assets'], - 'assets': assets, - 'threads': [], - 'options': data['options'], - 'scan_id': scan_id, - 'status': "STARTED", - 'started_at': int(time.time() * 1000), - 'findings': {} + "assets": assets, + "threads": [], + "options": data["options"], + "scan_id": scan_id, + "status": "STARTED", + "started_at": int(time.time() * 1000), + "findings": {}, } engine.scans.update({scan_id: scan}) th = threading.Thread(target=_scan_urls, args=(scan_id,)) th.start() - engine.scans[scan_id]['threads'].append(th) + engine.scans[scan_id]["threads"].append(th) - res.update({ - "status": "accepted", - "details": { - "scan_id": scan['scan_id'] - } - }) + res.update({"status": "accepted", "details": {"scan_id": scan["scan_id"]}}) return jsonify(res) def _scan_urls(scan_id): assets = [] - for asset in engine.scans[scan_id]['assets']: + for asset in engine.scans[scan_id]["assets"]: assets.append(asset) for asset in assets: - apikey = this.keys[random.randint(0, len(this.keys)-1)] + apikey = this.keys[random.randint(0, len(this.keys) - 1)] if asset not in engine.scans[scan_id]["findings"]: engine.scans[scan_id]["findings"][asset] = {} try: - engine.scans[scan_id]["findings"][asset]['issues'] = get_report(asset, 
apikey) + engine.scans[scan_id]["findings"][asset]["issues"] = get_report( + asset, apikey + ) except Exception as ex: - app.logger.error("_scan_urls failed {}".format(re.sub(r'/'+apikey+'/',r'/***/',ex.__str__()))) + app.logger.error( + "_scan_urls failed {}".format( + re.sub(r"/" + apikey + "/", r"/***/", ex.__str__()) + ) + ) return False return True @@ -270,9 +288,7 @@ def _scan_urls(scan_id): def get_report(asset, apikey): """Get URLVoid XML report.""" - scan_url = "{}{}/host/{}/".format( - "http://api.urlvoid.com/api1000/", apikey, asset - ) + scan_url = "{}{}/host/{}/".format("http://api.urlvoid.com/api1000/", apikey, asset) xml = requests.get(scan_url) issues = [] tree = ElementTree.fromstring(xml.text) @@ -287,45 +303,53 @@ def _parse_results(scan_id): issues = [] summary = {} - nb_vulns = { - "info": 0, - "low": 0, - "medium": 0, - "high": 0 - } + nb_vulns = {"info": 0, "low": 0, "medium": 0, "high": 0} ts = int(time.time() * 1000) for asset in engine.scans[scan_id]["findings"]: if len(engine.scans[scan_id]["findings"][asset]["issues"]) != 0: - description = "On the host {} appear in {} identified in blacklist engines or online reputation tools :\n".format(asset, len(engine.scans[scan_id]["findings"][asset]["issues"])) + description = "On the host {} appear in {} identified in blacklist engines or online reputation tools :\n".format( + asset, len(engine.scans[scan_id]["findings"][asset]["issues"]) + ) for eng in engine.scans[scan_id]["findings"][asset]["issues"]: description = description + eng + "\n" - description = description + "For more detail go 'http://www.urlvoid.com/scan/" + asset + "/'" + description = ( + description + + "For more detail go 'http://www.urlvoid.com/scan/" + + asset + + "/'" + ) nb_vulns["high"] += 1 - issues.append({ - "issue_id": len(issues)+1, - "severity": "high", "confidence": "certain", + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "high", + "confidence": "certain", "target": {"addr": [asset], "protocol": 
"http"}, "title": "'{}' identified in urlvoid".format(asset), "solution": "n/a", "metadata": {"tags": ["http"]}, "type": "urlvoid_report", "timestamp": ts, - "description": description + "description": description, } ) else: nb_vulns["info"] += 1 - issues.append({ - "issue_id": len(issues)+1, - "severity": "info", "confidence": "certain", + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "info", + "confidence": "certain", "target": {"addr": [asset], "protocol": "http"}, "title": "'{}' have not been identified in urlvoid".format(asset), "solution": "n/a", "metadata": {"tags": ["http"]}, "type": "urlvoid_report", "timestamp": ts, - "description": "{} have not identified in blacklist engines or online reputation tools".format(asset) + "description": "{} have not identified in blacklist engines or online reputation tools".format( + asset + ), } ) @@ -336,60 +360,73 @@ def _parse_results(scan_id): "nb_medium": nb_vulns["medium"], "nb_high": nb_vulns["high"], "engine_name": "urlvoid", - "engine_version": engine.scanner["version"] + "engine_version": engine.scanner["version"], } return issues, summary -@app.route('/engines/urlvoid/getfindings/', methods=['GET']) +@app.route("/engines/urlvoid/getfindings/", methods=["GET"]) def getfindings(scan_id): res = {"page": "getfindings", "scan_id": scan_id} # check if the scan_id exists if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) # check if the scan is finished status() - if engine.scans[scan_id]['status'] != "FINISHED": - res.update({"status": "error", "reason": "scan_id '{}' not finished (status={})".format(scan_id, engine.scans[scan_id]['status'])}) + if engine.scans[scan_id]["status"] != "FINISHED": + res.update( + { + "status": "error", + "reason": "scan_id '{}' not finished (status={})".format( + scan_id, 
engine.scans[scan_id]["status"] + ), + } + ) return jsonify(res) issues, summary = _parse_results(scan_id) scan = { "scan_id": scan_id, - "assets": engine.scans[scan_id]['assets'], - "options": engine.scans[scan_id]['options'], - "status": engine.scans[scan_id]['status'], - "started_at": engine.scans[scan_id]['started_at'], - "finished_at": engine.scans[scan_id]['finished_at'] + "assets": engine.scans[scan_id]["assets"], + "options": engine.scans[scan_id]["options"], + "status": engine.scans[scan_id]["status"], + "started_at": engine.scans[scan_id]["started_at"], + "finished_at": engine.scans[scan_id]["finished_at"], } # Store the findings in a file - with open(APP_BASE_DIR+"/results/urlvoid_"+scan_id+".json", 'w') as report_file: - json.dump({ - "scan": scan, - "summary": summary, - "issues": issues - }, report_file, default=_json_serial) + with open( + APP_BASE_DIR + "/results/urlvoid_" + scan_id + ".json", "w" + ) as report_file: + json.dump( + {"scan": scan, "summary": summary, "issues": issues}, + report_file, + default=_json_serial, + ) # remove the scan from the active scan list clean_scan(scan_id) - res.update({"scan": scan, "summary": summary, "issues": issues, "status": "success"}) + res.update( + {"scan": scan, "summary": summary, "issues": issues, "status": "success"} + ) return jsonify(res) @app.before_first_request def main(): """First function called.""" - if not os.path.exists(APP_BASE_DIR+"/results"): - os.makedirs(APP_BASE_DIR+"/results") + if not os.path.exists(APP_BASE_DIR + "/results"): + os.makedirs(APP_BASE_DIR + "/results") _loadconfig() -if __name__ == '__main__': +if __name__ == "__main__": engine.run_app(app_debug=APP_DEBUG, app_host=APP_HOST, app_port=APP_PORT) diff --git a/engines/wpscan/engine-wpscan.py b/engines/wpscan/engine-wpscan.py index 508d7c56..4d41c45a 100755 --- a/engines/wpscan/engine-wpscan.py +++ b/engines/wpscan/engine-wpscan.py @@ -2,7 +2,7 @@ """ WPSCAN PatrOwl engine application. 
-Copyright (C) 2021 Nicolas Mattiocco - @MaKyOtOx +Copyright (C) 2024 Nicolas Mattiocco - @MaKyOtOx Licensed under the AGPLv3 License Written by Nicolas BEGUIER (nicolas.beguier@adevinta.com) """ @@ -32,10 +32,10 @@ LOG = getLogger("werkzeug") app = Flask(__name__) -APP_DEBUG = os.environ.get('DEBUG', '').lower() in ['true', '1', 'yes', 'y', 'on'] +APP_DEBUG = os.environ.get("DEBUG", "").lower() in ["true", "1", "yes", "y", "on"] APP_HOST = "0.0.0.0" APP_PORT = 5023 -APP_MAXSCANS = int(os.environ.get('APP_MAXSCANS', 5)) +APP_MAXSCANS = int(os.environ.get("APP_MAXSCANS", 5)) APP_ENGINE_NAME = "wpscan" APP_BASE_DIR = dirname(realpath(__file__)) VERSION = "1.4.28" @@ -45,7 +45,7 @@ base_dir=APP_BASE_DIR, name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS, - version=VERSION + version=VERSION, ) this = modules[__name__] @@ -89,7 +89,8 @@ def get_api_token(api_token_list): try: token_status_req = SESSION.get( "https://wpscan.com/api/v3/status", - headers={"Authorization": f"Token token={api_token}"}) + headers={"Authorization": f"Token token={api_token}"}, + ) except Exception: continue if token_status_req.status_code != 200: @@ -177,7 +178,9 @@ def status_scan(scan_id): """Get status on scan identified by id.""" res = {"page": "status", "status": "UNKNOWN"} if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) if engine.scans[scan_id]["status"] == "ERROR": @@ -196,19 +199,27 @@ def status_scan(scan_id): engine.scans[scan_id]["status"] = "ERROR" return jsonify(res) - if not proc or (psutil.pid_exists(proc.pid) and psutil.Process(proc.pid).status() in ["sleeping", "running"]): + if not proc or ( + psutil.pid_exists(proc.pid) + and psutil.Process(proc.pid).status() in ["sleeping", "running"] + ): has_running_thread = True - res.update({ - "status": "SCANNING", - "info": { - asset: { - # "pid": 
proc.pid, - # "cmd": engine.scans[scan_id]["reports"][asset]["proc_cmd"]} - } + res.update( + { + "status": "SCANNING", + "info": { + asset: { + # "pid": proc.pid, + # "cmd": engine.scans[scan_id]["reports"][asset]["proc_cmd"]} + } + }, } - }) + ) - elif psutil.pid_exists(proc.pid) and psutil.Process(proc.pid).status() == "zombie": + elif ( + psutil.pid_exists(proc.pid) + and psutil.Process(proc.pid).status() == "zombie" + ): psutil.Process(proc.pid).terminate() if has_running_thread is False: @@ -237,8 +248,8 @@ def getreport(scan_id): def _loadconfig(): - conf_file = APP_BASE_DIR+"/wpscan.json" - if len(argv) > 1 and exists(APP_BASE_DIR+"/"+argv[1]): + conf_file = APP_BASE_DIR + "/wpscan.json" + if len(argv) > 1 and exists(APP_BASE_DIR + "/" + argv[1]): conf_file = APP_BASE_DIR + "/" + argv[1] if exists(conf_file): json_data = open(conf_file) @@ -260,10 +271,10 @@ def _loadconfig(): LOG.error("Error: You have to specify APIToken in options") return {"status": "error", "reason": "You have to specify APIToken in options"} - version_filename = APP_BASE_DIR+'/VERSION' + version_filename = APP_BASE_DIR + "/VERSION" if os.path.exists(version_filename): version_file = open(version_filename, "r") - engine.version = version_file.read().rstrip('\n') + engine.version = version_file.read().rstrip("\n") version_file.close() LOG.info("[OK] APIToken") @@ -284,47 +295,61 @@ def start_scan(): # Check the scanner is ready to start a new scan if len(engine.scans) == APP_MAXSCANS: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) return jsonify(res) status() if engine.scanner["status"] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": "scanner not ready", - "status": engine.scanner["status"] - }}) + res.update( + { + "status": 
"refused", + "details": { + "reason": "scanner not ready", + "status": engine.scanner["status"], + }, + } + ) return jsonify(res) data = json.loads(request.data.decode("utf-8")) if "assets" not in data.keys() or "scan_id" not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something is missing ('assets' ?)" - }}) + res.update( + { + "status": "refused", + "details": {"reason": "arg error, something is missing ('assets' ?)"}, + } + ) return jsonify(res) assets = [] for asset in data["assets"]: # Value if "value" not in asset.keys() or not asset["value"]: - res.update({ - "status": "error", - "reason": "arg error, something is missing ('asset.value')" - }) + res.update( + { + "status": "error", + "reason": "arg error, something is missing ('asset.value')", + } + ) return jsonify(res) # Supported datatypes if asset["datatype"] not in engine.scanner["allowed_asset_types"]: - res.update({ - "status": "error", - "reason": "arg error, bad value for '{}' datatype (not supported)".format(asset["value"]) - }) + res.update( + { + "status": "error", + "reason": "arg error, bad value for '{}' datatype (not supported)".format( + asset["value"] + ), + } + ) return jsonify(res) assets.append(asset["value"]) @@ -332,22 +357,26 @@ def start_scan(): scan_id = str(data["scan_id"]) if data["scan_id"] in engine.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' is probably already launched".format(data["scan_id"]), + res.update( + { + "status": "refused", + "details": { + "reason": "scan '{}' is probably already launched".format( + data["scan_id"] + ), + }, } - }) + ) return jsonify(res) scan = { - "assets": assets, - "threads": [], - "options": data["options"], - "scan_id": scan_id, - "status": "STARTED", - "started_at": int(time() * 1000), - "findings": {} + "assets": assets, + "threads": [], + "options": data["options"], + "scan_id": scan_id, + "status": "STARTED", + "started_at": int(time() * 1000), 
+ "findings": {}, } options = get_options(data) @@ -356,16 +385,17 @@ def start_scan(): engine.scans.update({scan_id: scan}) for a in engine.scans[scan_id]["assets"]: - thread = Thread(target=_scan_urls, args=(scan_id, a,)) + thread = Thread( + target=_scan_urls, + args=( + scan_id, + a, + ), + ) thread.start() engine.scans[scan_id]["threads"].append(thread) - res.update({ - "status": "accepted", - "details": { - "scan_id": scan["scan_id"] - } - }) + res.update({"status": "accepted", "details": {"scan_id": scan["scan_id"]}}) return jsonify(res) @@ -380,19 +410,23 @@ def _scan_urls(scan_id, asset): wpscan_cmd += " --url '{}'".format(wordpress_hostname) wpscan_cmd += " --disable-tls-checks" wpscan_cmd += " --update" # Update database - wpscan_cmd += " --clear-cache" # Clear cache + wpscan_cmd += " --clear-cache" # Clear cache # Patrowl specific User-Agent wpscan_cmd += " --ua 'Patrowl Engine WPSCAN v{}'".format(VERSION) # Write report on disk - if 'reports' not in engine.scans[scan_id].keys(): + if "reports" not in engine.scans[scan_id].keys(): engine.scans[scan_id]["reports"] = {} if asset not in engine.scans[scan_id]["reports"].keys(): engine.scans[scan_id]["reports"][asset] = {} - engine.scans[scan_id]["reports"][asset]["report_path"] = "{}/results/{}_{}.json".format(APP_BASE_DIR, scan_id, wordpress_hostname_hash) - wpscan_cmd += " --output '{}'".format(engine.scans[scan_id]["reports"][asset]["report_path"]) + engine.scans[scan_id]["reports"][asset]["report_path"] = ( + "{}/results/{}_{}.json".format(APP_BASE_DIR, scan_id, wordpress_hostname_hash) + ) + wpscan_cmd += " --output '{}'".format( + engine.scans[scan_id]["reports"][asset]["report_path"] + ) wpscan_cmd += " --format json" # Add API Token if credits remaining @@ -405,10 +439,15 @@ def _scan_urls(scan_id, asset): extra_args = engine.scanner["options"]["extra_args"] if re.fullmatch("[a-zA-Z0-9\-_\ :/\.]+", extra_args): wpscan_cmd += " " + extra_args - + LOG.debug(wpscan_cmd) - 
engine.scans[scan_id]["reports"][asset]["proc"] = subprocess.Popen(wpscan_cmd, shell=True, stdout=open("/dev/null", "w"), stderr=open("/dev/null", "w")) + engine.scans[scan_id]["reports"][asset]["proc"] = subprocess.Popen( + wpscan_cmd, + shell=True, + stdout=open("/dev/null", "w"), + stderr=open("/dev/null", "w"), + ) engine.scans[scan_id]["reports"][asset]["proc_cmd"] = wpscan_cmd return True @@ -418,7 +457,9 @@ def get_report(scan_id): """Get report.""" result = dict() try: - result_file = open("results/wpscan_report_{scan_id}.txt".format(scan_id=scan_id), "r") + result_file = open( + "results/wpscan_report_{scan_id}.txt".format(scan_id=scan_id), "r" + ) result = json.loads(result_file.read()) result_file.close() except Exception: @@ -444,7 +485,9 @@ def _parse_item(key, value, prefix=""): elif isinstance(value, list): values = "{}:\n".format(key) for subitem in value: - values += '{}-'.format(prefix)+_parse_item(None, subitem, "{}{}".format(prefix, " "*2)) + values += "{}-".format(prefix) + _parse_item( + None, subitem, "{}{}".format(prefix, " " * 2) + ) return values elif isinstance(value, dict): @@ -454,9 +497,13 @@ def _parse_item(key, value, prefix=""): values = "{}:\n".format(key) for subitem in value.keys(): if isinstance(value[subitem], (str, int, bool)): - values += '{}-'.format(prefix)+_parse_item(subitem, value[subitem], ' ') + values += "{}-".format(prefix) + _parse_item( + subitem, value[subitem], " " + ) else: - values += '{}- '.format(prefix)+_parse_item(subitem, value[subitem], "{}{}".format(prefix, " "*2)) + values += "{}- ".format(prefix) + _parse_item( + subitem, value[subitem], "{}{}".format(prefix, " " * 2) + ) return values return res @@ -465,31 +512,30 @@ def _parse_results(scan_id): issues = [] summary = {} - nb_vulns = { - "info": 0, - "low": 0, - "medium": 0, - "high": 0, - "critical": 0 - } + nb_vulns = {"info": 0, "low": 0, "medium": 0, "high": 0, "critical": 0} timestamp = int(time() * 1000) for asset in 
engine.scans[scan_id]["reports"].keys(): - with open(engine.scans[scan_id]["reports"][asset]["report_path"], "r") as results_file: + with open( + engine.scans[scan_id]["reports"][asset]["report_path"], "r" + ) as results_file: content = json.loads(results_file.read()) if "scan_aborted" in content and content["scan_aborted"]: - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(0), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Scan error: {}".format(content["scan_aborted"]).rstrip(), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": 0}}, - "type": "wpscan_report", - "timestamp": timestamp, - "description": content["scan_aborted"], - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(0), + "confidence": "certain", + "target": {"addr": [asset], "protocol": "http", "parent": asset}, + "title": "Scan error: {}".format(content["scan_aborted"]).rstrip(), + "solution": "n/a", + "metadata": {"risk": {"cvss_base_score": 0}}, + "type": "wpscan_report", + "timestamp": timestamp, + "description": content["scan_aborted"], + } + ) nb_vulns[get_criticity(0)] += 1 continue @@ -500,197 +546,329 @@ def _parse_results(scan_id): if interesting_finding["type"] == "headers": for interesting_entry in interesting_finding["interesting_entries"]: if interesting_entry.lower().startswith("server:"): - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(0), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Header: {}".format(interesting_entry).rstrip(), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": 0}}, - "type": "wpscan_report", - "timestamp": timestamp, - "description": _parse_description(interesting_finding), - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(0), + "confidence": "certain", + "target": { + "addr": [asset], + "protocol": 
"http", + "parent": asset, + }, + "title": "Header: {}".format( + interesting_entry + ).rstrip(), + "solution": "n/a", + "metadata": {"risk": {"cvss_base_score": 0}}, + "type": "wpscan_report", + "timestamp": timestamp, + "description": _parse_description( + interesting_finding + ), + } + ) nb_vulns[get_criticity(0)] += 1 # Medium findings elif interesting_finding["type"] in ["xmlrpc"]: - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(5), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "XMLRPC enabled", - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": 0}}, - "type": "wpscan_report", - "timestamp": timestamp, - "description": _parse_description(interesting_finding), - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(5), + "confidence": "certain", + "target": { + "addr": [asset], + "protocol": "http", + "parent": asset, + }, + "title": "XMLRPC enabled", + "solution": "n/a", + "metadata": {"risk": {"cvss_base_score": 0}}, + "type": "wpscan_report", + "timestamp": timestamp, + "description": _parse_description(interesting_finding), + } + ) nb_vulns[get_criticity(5)] += 1 # Info findings else: - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(0), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Interesting finding: {}".format(interesting_finding['type']).rstrip(), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": 0}}, - "type": "wpscan_report", - "timestamp": timestamp, - "description": _parse_description(interesting_finding), - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(0), + "confidence": "certain", + "target": { + "addr": [asset], + "protocol": "http", + "parent": asset, + }, + "title": "Interesting finding: {}".format( + interesting_finding["type"] + ).rstrip(), + "solution": "n/a", + "metadata": {"risk": 
{"cvss_base_score": 0}}, + "type": "wpscan_report", + "timestamp": timestamp, + "description": _parse_description(interesting_finding), + } + ) nb_vulns[get_criticity(0)] += 1 # Themes if "main_theme" in content and content["main_theme"]: - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(0), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Theme: {}".format(content["main_theme"]["slug"]).rstrip(), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": 0}}, - "type": "wpscan_report", - "timestamp": timestamp, - "description": _parse_description(content["main_theme"]), - }) - nb_vulns[get_criticity(0)] += 1 - for vulnerability in content["main_theme"]["vulnerabilities"]: - metadata = {"risk": {"cvss_base_score": 0}} - if "references" in vulnerability and "url" in vulnerability["references"]: - metadata = {"risk": {"cvss_base_score": 0}, "links": vulnerability["references"]["url"]} - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(8), "confidence": "certain", + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(0), + "confidence": "certain", "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Theme {} vulnerability: {}".format(content["main_theme"]["slug"], vulnerability['title']).rstrip(), - "solution": "n/a", - "metadata": metadata, - "type": "wpscan_report", - "timestamp": timestamp, - "description": _parse_description(vulnerability), - }) - nb_vulns[get_criticity(8)] += 1 - for parent in content["main_theme"]["parents"]: - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(0), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Theme: {}".format(parent["slug"]).rstrip(), + "title": "Theme: {}".format(content["main_theme"]["slug"]).rstrip(), "solution": "n/a", "metadata": {"risk": {"cvss_base_score": 0}}, "type": 
"wpscan_report", "timestamp": timestamp, - "description": _parse_description(parent), - }) - nb_vulns[get_criticity(0)] += 1 - for vulnerability in parent["vulnerabilities"]: - metadata = {"risk": {"cvss_base_score": 0}} - if "references" in vulnerability and "url" in vulnerability["references"]: - metadata = {"risk": {"cvss_base_score": 0}, "links": vulnerability["references"]["url"]} - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(8), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Theme {} vulnerability: {}".format(parent["slug"], vulnerability['title']).rstrip(), + "description": _parse_description(content["main_theme"]), + } + ) + nb_vulns[get_criticity(0)] += 1 + for vulnerability in content["main_theme"]["vulnerabilities"]: + metadata = {"risk": {"cvss_base_score": 0}} + if ( + "references" in vulnerability + and "url" in vulnerability["references"] + ): + metadata = { + "risk": {"cvss_base_score": 0}, + "links": vulnerability["references"]["url"], + } + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(8), + "confidence": "certain", + "target": { + "addr": [asset], + "protocol": "http", + "parent": asset, + }, + "title": "Theme {} vulnerability: {}".format( + content["main_theme"]["slug"], vulnerability["title"] + ).rstrip(), "solution": "n/a", "metadata": metadata, "type": "wpscan_report", "timestamp": timestamp, "description": _parse_description(vulnerability), - }) + } + ) + nb_vulns[get_criticity(8)] += 1 + for parent in content["main_theme"]["parents"]: + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(0), + "confidence": "certain", + "target": { + "addr": [asset], + "protocol": "http", + "parent": asset, + }, + "title": "Theme: {}".format(parent["slug"]).rstrip(), + "solution": "n/a", + "metadata": {"risk": {"cvss_base_score": 0}}, + "type": "wpscan_report", + "timestamp": timestamp, + "description": 
_parse_description(parent), + } + ) + nb_vulns[get_criticity(0)] += 1 + for vulnerability in parent["vulnerabilities"]: + metadata = {"risk": {"cvss_base_score": 0}} + if ( + "references" in vulnerability + and "url" in vulnerability["references"] + ): + metadata = { + "risk": {"cvss_base_score": 0}, + "links": vulnerability["references"]["url"], + } + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(8), + "confidence": "certain", + "target": { + "addr": [asset], + "protocol": "http", + "parent": asset, + }, + "title": "Theme {} vulnerability: {}".format( + parent["slug"], vulnerability["title"] + ).rstrip(), + "solution": "n/a", + "metadata": metadata, + "type": "wpscan_report", + "timestamp": timestamp, + "description": _parse_description(vulnerability), + } + ) nb_vulns[get_criticity(8)] += 1 # Plugins if "plugins" in content and content["plugins"]: for plugin_name in content["plugins"]: - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(0), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Plugin: {}".format(plugin_name).rstrip(), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": 0}}, - "type": "wpscan_report", - "timestamp": timestamp, - "description": _parse_description(content["plugins"][plugin_name]), - }) - nb_vulns[get_criticity(0)] += 1 - for vulnerability in content["plugins"][plugin_name]["vulnerabilities"]: - metadata = {"risk": {"cvss_base_score": 0}} - if "references" in vulnerability and "url" in vulnerability["references"]: - metadata = {"risk": {"cvss_base_score": 0}, "links": vulnerability["references"]["url"]} - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(8), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Plugin {} vulnerability: {}".format(plugin_name, vulnerability['title']).rstrip(), + issues.append( + { + "issue_id": len(issues) + 1, + 
"severity": get_criticity(0), + "confidence": "certain", + "target": { + "addr": [asset], + "protocol": "http", + "parent": asset, + }, + "title": "Plugin: {}".format(plugin_name).rstrip(), "solution": "n/a", - "metadata": metadata, + "metadata": {"risk": {"cvss_base_score": 0}}, "type": "wpscan_report", "timestamp": timestamp, - "description": _parse_description(vulnerability), - }) + "description": _parse_description( + content["plugins"][plugin_name] + ), + } + ) + nb_vulns[get_criticity(0)] += 1 + for vulnerability in content["plugins"][plugin_name]["vulnerabilities"]: + metadata = {"risk": {"cvss_base_score": 0}} + if ( + "references" in vulnerability + and "url" in vulnerability["references"] + ): + metadata = { + "risk": {"cvss_base_score": 0}, + "links": vulnerability["references"]["url"], + } + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(8), + "confidence": "certain", + "target": { + "addr": [asset], + "protocol": "http", + "parent": asset, + }, + "title": "Plugin {} vulnerability: {}".format( + plugin_name, vulnerability["title"] + ).rstrip(), + "solution": "n/a", + "metadata": metadata, + "type": "wpscan_report", + "timestamp": timestamp, + "description": _parse_description(vulnerability), + } + ) nb_vulns[get_criticity(8)] += 1 # Users if "users" in content and content["users"]: for user_name in content["users"]: - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(0), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "User: {}".format(user_name).rstrip(), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": 0}}, - "type": "wpscan_report", - "timestamp": timestamp, - "description": _parse_description(content["users"][user_name]), - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(0), + "confidence": "certain", + "target": { + "addr": [asset], + "protocol": "http", + "parent": asset, + }, + "title": 
"User: {}".format(user_name).rstrip(), + "solution": "n/a", + "metadata": {"risk": {"cvss_base_score": 0}}, + "type": "wpscan_report", + "timestamp": timestamp, + "description": _parse_description(content["users"][user_name]), + } + ) nb_vulns[get_criticity(0)] += 1 # Version if "version" in content and content["version"]: - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(0), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Version: {} [{}]".format(content["version"]["number"], content["version"]["release_date"]).rstrip(), - "solution": "n/a", - "metadata": {"risk": {"cvss_base_score": 0}}, - "type": "wpscan_report", - "timestamp": timestamp, - "description": _parse_description(content["version"]), - }) - nb_vulns[get_criticity(0)] += 1 - if "status" in content["version"] and content["version"]["status"] != "latest": - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(5), "confidence": "certain", + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(0), + "confidence": "certain", "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Version {} [{}] is {}".format(content["version"]["number"], content["version"]["release_date"], content["version"]["status"]).rstrip(), + "title": "Version: {} [{}]".format( + content["version"]["number"], content["version"]["release_date"] + ).rstrip(), "solution": "n/a", "metadata": {"risk": {"cvss_base_score": 0}}, "type": "wpscan_report", "timestamp": timestamp, "description": _parse_description(content["version"]), - }) + } + ) + nb_vulns[get_criticity(0)] += 1 + if ( + "status" in content["version"] + and content["version"]["status"] != "latest" + ): + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(5), + "confidence": "certain", + "target": { + "addr": [asset], + "protocol": "http", + "parent": asset, + }, + "title": "Version {} [{}] is {}".format( 
+ content["version"]["number"], + content["version"]["release_date"], + content["version"]["status"], + ).rstrip(), + "solution": "n/a", + "metadata": {"risk": {"cvss_base_score": 0}}, + "type": "wpscan_report", + "timestamp": timestamp, + "description": _parse_description(content["version"]), + } + ) nb_vulns[get_criticity(5)] += 1 for vulnerability in content["version"]["vulnerabilities"]: metadata = {"risk": {"cvss_base_score": 0}} - if "references" in vulnerability and "url" in vulnerability["references"]: - metadata = {"risk": {"cvss_base_score": 0}, "links": vulnerability["references"]["url"]} - issues.append({ - "issue_id": len(issues)+1, - "severity": get_criticity(8), "confidence": "certain", - "target": {"addr": [asset], "protocol": "http", "parent": asset}, - "title": "Wordpress version {} vulnerability: {}".format(content["version"]["number"], vulnerability['title']).rstrip(), - "solution": "n/a", - "metadata": metadata, - "type": "wpscan_report", - "timestamp": timestamp, - "description": _parse_description(vulnerability), - }) + if ( + "references" in vulnerability + and "url" in vulnerability["references"] + ): + metadata = { + "risk": {"cvss_base_score": 0}, + "links": vulnerability["references"]["url"], + } + issues.append( + { + "issue_id": len(issues) + 1, + "severity": get_criticity(8), + "confidence": "certain", + "target": { + "addr": [asset], + "protocol": "http", + "parent": asset, + }, + "title": "Wordpress version {} vulnerability: {}".format( + content["version"]["number"], vulnerability["title"] + ).rstrip(), + "solution": "n/a", + "metadata": metadata, + "type": "wpscan_report", + "timestamp": timestamp, + "description": _parse_description(vulnerability), + } + ) nb_vulns[get_criticity(8)] += 1 summary = { @@ -701,7 +879,7 @@ def _parse_results(scan_id): "nb_high": nb_vulns["high"], "nb_critical": nb_vulns["critical"], "engine_name": "wpscan", - "engine_version": engine.scanner["version"] + "engine_version": engine.scanner["version"], } 
return issues, summary @@ -712,20 +890,28 @@ def getfindings(scan_id): """Get findings.""" res = {"page": "getfindings", "scan_id": scan_id} if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) # check if the scan is finished status() if engine.scans[scan_id]["status"] != "FINISHED": - res.update({"status": "error", "reason": "scan_id '{}' not finished".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not finished".format(scan_id)} + ) return jsonify(res) has_error = False has_error_reason = "" for asset in engine.scans[scan_id]["reports"].keys(): proc = engine.scans[scan_id]["reports"][asset]["proc"] - if hasattr(proc, "pid") and psutil.pid_exists(proc.pid) and psutil.Process(proc.pid).status() in ["sleeping", "running"]: + if ( + hasattr(proc, "pid") + and psutil.pid_exists(proc.pid) + and psutil.Process(proc.pid).status() in ["sleeping", "running"] + ): has_error = True has_error_reason = "Scan in progress" break @@ -738,7 +924,9 @@ def getfindings(scan_id): # Check if report is a valid json try: - with open(engine.scans[scan_id]["reports"][asset]["report_path"], "r") as results_file: + with open( + engine.scans[scan_id]["reports"][asset]["report_path"], "r" + ) as results_file: content = results_file.read() json.loads(content) except json.decoder.JSONDecodeError: @@ -755,30 +943,32 @@ def getfindings(scan_id): return jsonify(res) issues, summary = _parse_results(scan_id) - scan = { - "scan_id": scan_id - } + scan = {"scan_id": scan_id} # Store the findings in a file - with open(APP_BASE_DIR+"/results/wpscan_"+scan_id+".json", 'w') as report_file: - json.dump({ - "scan": scan, - "summary": summary, - "issues": issues - }, report_file, default=_json_serial) + with open( + APP_BASE_DIR + "/results/wpscan_" + scan_id + ".json", "w" + ) as report_file: + 
json.dump( + {"scan": scan, "summary": summary, "issues": issues}, + report_file, + default=_json_serial, + ) # remove the scan from the active scan list clean_scan(scan_id) - res.update({"scan": scan, "summary": summary, "issues": issues, "status": "success"}) + res.update( + {"scan": scan, "summary": summary, "issues": issues, "status": "success"} + ) return jsonify(res) @app.before_first_request def main(): """First function called.""" - if not exists(APP_BASE_DIR+"/results"): - os.makedirs(APP_BASE_DIR+"/results") + if not exists(APP_BASE_DIR + "/results"): + os.makedirs(APP_BASE_DIR + "/results") _loadconfig() LOG.debug("Run engine") From 33d1d9fa9bb53dbf2695d42e9306906a3f6e207d Mon Sep 17 00:00:00 2001 From: sebastien Date: Thu, 15 Feb 2024 17:52:02 +0100 Subject: [PATCH 3/4] Black format & skeleton update ARS-319 --- engines/arachni/Dockerfile | 2 +- engines/arachni/VERSION | 2 +- engines/certstream/Dockerfile | 2 +- engines/certstream/VERSION | 2 +- engines/cortex/Dockerfile | 2 +- engines/cortex/VERSION | 2 +- engines/cybelangel/Dockerfile | 2 +- engines/cybelangel/VERSION | 2 +- engines/droopescan/Dockerfile | 2 +- engines/droopescan/VERSION | 2 +- engines/eyewitness/Dockerfile | 2 +- engines/eyewitness/VERSION | 2 +- engines/nessus/Dockerfile | 2 +- engines/nessus/VERSION | 2 +- engines/nmap/Dockerfile | 2 +- engines/nmap/VERSION | 2 +- engines/openvas/Dockerfile | 2 +- engines/openvas/VERSION | 2 +- engines/owl_code/Dockerfile | 2 +- engines/owl_code/VERSION | 2 +- engines/owl_dns/Dockerfile | 2 +- engines/owl_dns/VERSION | 2 +- engines/owl_dns/__init__.py | 2 +- engines/owl_dns/owl_dns.json.sample | 2 +- engines/owl_leaks/Dockerfile | 2 +- engines/owl_leaks/VERSION | 2 +- engines/owl_request/Dockerfile | 2 +- engines/owl_request/VERSION | 2 +- engines/pastebin_monitor/Dockerfile | 2 +- engines/pastebin_monitor/VERSION | 2 +- engines/shhgit/Dockerfile | 2 +- engines/shhgit/VERSION | 2 +- engines/ssllabs/Dockerfile | 2 +- engines/ssllabs/VERSION | 2 +- 
engines/sslscan/Dockerfile | 2 +- engines/sslscan/VERSION | 2 +- engines/urlvoid/Dockerfile | 2 +- engines/urlvoid/VERSION | 2 +- engines/virustotal/Dockerfile | 2 +- engines/virustotal/VERSION | 2 +- engines/wpscan/Dockerfile | 2 +- engines/wpscan/VERSION | 2 +- engines/wpscan/__init__.py | 2 +- engines/wpscan/wpscan.json.sample | 2 +- skeleton/Dockerfile | 35 ++++ skeleton/VERSION | 1 + skeleton/engine-skeleton.py | 274 ++++++++++++++++------------ 47 files changed, 233 insertions(+), 165 deletions(-) create mode 100644 skeleton/Dockerfile create mode 100644 skeleton/VERSION diff --git a/engines/arachni/Dockerfile b/engines/arachni/Dockerfile index a60f68e2..41cc195d 100644 --- a/engines/arachni/Dockerfile +++ b/engines/arachni/Dockerfile @@ -1,5 +1,5 @@ FROM ubuntu:20.04 -LABEL Name="Arachni\ \(Patrowl engine\)" Version="1.4.29" +LABEL Name="Arachni\ \(Patrowl engine\)" Version="1.4.30" ENV VERSION_FRAMEWORK 1.5.1 ENV VERSION_ARACHNI $VERSION_FRAMEWORK-0.5.12 diff --git a/engines/arachni/VERSION b/engines/arachni/VERSION index 8a40a8cf..5b3274b4 100644 --- a/engines/arachni/VERSION +++ b/engines/arachni/VERSION @@ -1 +1 @@ -1.4.29 +1.4.30 diff --git a/engines/certstream/Dockerfile b/engines/certstream/Dockerfile index d30d08a7..4722ca85 100644 --- a/engines/certstream/Dockerfile +++ b/engines/certstream/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="CertStream\ \(Patrowl engine\)" Version="1.4.28" +LABEL Name="CertStream\ \(Patrowl engine\)" Version="1.4.29" # Install dependencies RUN apk add --update \ diff --git a/engines/certstream/VERSION b/engines/certstream/VERSION index c87f986a..8a40a8cf 100644 --- a/engines/certstream/VERSION +++ b/engines/certstream/VERSION @@ -1 +1 @@ -1.4.28 +1.4.29 diff --git a/engines/cortex/Dockerfile b/engines/cortex/Dockerfile index ad644f7e..12b5019f 100644 --- a/engines/cortex/Dockerfile +++ b/engines/cortex/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="Cortex\ \(Patrowl engine\)" Version="1.4.28" 
+LABEL Name="Cortex\ \(Patrowl engine\)" Version="1.4.29" # Set the working directory RUN mkdir -p /opt/patrowl-engines/cortex diff --git a/engines/cortex/VERSION b/engines/cortex/VERSION index c87f986a..8a40a8cf 100644 --- a/engines/cortex/VERSION +++ b/engines/cortex/VERSION @@ -1 +1 @@ -1.4.28 +1.4.29 diff --git a/engines/cybelangel/Dockerfile b/engines/cybelangel/Dockerfile index a261d91e..7ed447eb 100644 --- a/engines/cybelangel/Dockerfile +++ b/engines/cybelangel/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="CybelAngel\ \(Patrowl engine\)" Version="1.4.29" +LABEL Name="CybelAngel\ \(Patrowl engine\)" Version="1.4.30" # Create the target repo RUN mkdir -p /opt/patrowl-engines/cybelangel diff --git a/engines/cybelangel/VERSION b/engines/cybelangel/VERSION index 8a40a8cf..5b3274b4 100644 --- a/engines/cybelangel/VERSION +++ b/engines/cybelangel/VERSION @@ -1 +1 @@ -1.4.29 +1.4.30 diff --git a/engines/droopescan/Dockerfile b/engines/droopescan/Dockerfile index b3f9de3e..b920457d 100644 --- a/engines/droopescan/Dockerfile +++ b/engines/droopescan/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="droopescan\ \(Patrowl engine\)" Version="1.4.31" +LABEL Name="droopescan\ \(Patrowl engine\)" Version="1.4.30" # Set the working directory RUN mkdir -p /opt/patrowl-engines/droopescan diff --git a/engines/droopescan/VERSION b/engines/droopescan/VERSION index d156665a..5b3274b4 100644 --- a/engines/droopescan/VERSION +++ b/engines/droopescan/VERSION @@ -1 +1 @@ -1.4.31 +1.4.30 diff --git a/engines/eyewitness/Dockerfile b/engines/eyewitness/Dockerfile index 8815d63e..cb962811 100644 --- a/engines/eyewitness/Dockerfile +++ b/engines/eyewitness/Dockerfile @@ -1,5 +1,5 @@ FROM phusion/baseimage:bionic-1.0.0 -LABEL Name="EyeWitness\ \(Patrowl engine\)" Version="1.4.29" +LABEL Name="EyeWitness\ \(Patrowl engine\)" Version="1.4.30" ARG S6_OVERLAY_VERSION=2.0.0.1 diff --git a/engines/eyewitness/VERSION b/engines/eyewitness/VERSION index 8a40a8cf..5b3274b4 
100644 --- a/engines/eyewitness/VERSION +++ b/engines/eyewitness/VERSION @@ -1 +1 @@ -1.4.29 +1.4.30 diff --git a/engines/nessus/Dockerfile b/engines/nessus/Dockerfile index e34dde62..3992495b 100644 --- a/engines/nessus/Dockerfile +++ b/engines/nessus/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="Nessus\ \(Patrowl engine\)" Version="1.4.29" +LABEL Name="Nessus\ \(Patrowl engine\)" Version="1.4.30" ENV LOGLEVEL info diff --git a/engines/nessus/VERSION b/engines/nessus/VERSION index 8a40a8cf..5b3274b4 100644 --- a/engines/nessus/VERSION +++ b/engines/nessus/VERSION @@ -1 +1 @@ -1.4.29 +1.4.30 diff --git a/engines/nmap/Dockerfile b/engines/nmap/Dockerfile index 01a3ca80..7f8f02af 100644 --- a/engines/nmap/Dockerfile +++ b/engines/nmap/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="Nmap\ \(Patrowl engine\)" Version="1.4.47" +LABEL Name="Nmap\ \(Patrowl engine\)" Version="1.4.48" # Set the working directory RUN mkdir -p /opt/patrowl-engines/nmap diff --git a/engines/nmap/VERSION b/engines/nmap/VERSION index 377504d5..3d558d0b 100644 --- a/engines/nmap/VERSION +++ b/engines/nmap/VERSION @@ -1 +1 @@ -1.4.47 +1.4.48 diff --git a/engines/openvas/Dockerfile b/engines/openvas/Dockerfile index 3ae769c8..ff5fea8a 100644 --- a/engines/openvas/Dockerfile +++ b/engines/openvas/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="OpenVAS\ \(Patrowl engine\)" Version="1.4.32" +LABEL Name="OpenVAS\ \(Patrowl engine\)" Version="1.4.33" ENV CRYPTOGRAPHY_DONT_BUILD_RUST=1 diff --git a/engines/openvas/VERSION b/engines/openvas/VERSION index 00bbe72a..9baec2fd 100644 --- a/engines/openvas/VERSION +++ b/engines/openvas/VERSION @@ -1 +1 @@ -1.4.32 +1.4.33 diff --git a/engines/owl_code/Dockerfile b/engines/owl_code/Dockerfile index 62b2ed01..5910f79d 100644 --- a/engines/owl_code/Dockerfile +++ b/engines/owl_code/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="Patrowl\ Code\ Security\ review\ \(Patrowl engine\)" Version="1.4.29" +LABEL 
Name="Patrowl\ Code\ Security\ review\ \(Patrowl engine\)" Version="1.4.30" # Install dependencies RUN apk add --update --no-cache \ diff --git a/engines/owl_code/VERSION b/engines/owl_code/VERSION index 8a40a8cf..5b3274b4 100644 --- a/engines/owl_code/VERSION +++ b/engines/owl_code/VERSION @@ -1 +1 @@ -1.4.29 +1.4.30 diff --git a/engines/owl_dns/Dockerfile b/engines/owl_dns/Dockerfile index 7a5e85aa..eb05516e 100644 --- a/engines/owl_dns/Dockerfile +++ b/engines/owl_dns/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="Patrowl\ DNS\ \(Patrowl engine\)" Version="1.5.7" +LABEL Name="Patrowl\ DNS\ \(Patrowl engine\)" Version="1.5.8" # Install dependencies RUN apk add --update --no-cache \ diff --git a/engines/owl_dns/VERSION b/engines/owl_dns/VERSION index f01291b8..1cc9c180 100644 --- a/engines/owl_dns/VERSION +++ b/engines/owl_dns/VERSION @@ -1 +1 @@ -1.5.7 +1.5.8 diff --git a/engines/owl_dns/__init__.py b/engines/owl_dns/__init__.py index 8ca87e46..8b177675 100644 --- a/engines/owl_dns/__init__.py +++ b/engines/owl_dns/__init__.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- __title__ = "patrowl_engine_owl_dns" -__version__ = "1.5.7" +__version__ = "1.5.8" __author__ = "Nicolas MATTIOCCO" __license__ = "AGPLv3" __copyright__ = "Copyright (C) 2018-2024 Nicolas Mattiocco - @MaKyOtOx" diff --git a/engines/owl_dns/owl_dns.json.sample b/engines/owl_dns/owl_dns.json.sample index e0a651b4..00699445 100644 --- a/engines/owl_dns/owl_dns.json.sample +++ b/engines/owl_dns/owl_dns.json.sample @@ -1,6 +1,6 @@ { "name": "PatrOwl - Dns module", - "version": "1.5.7", + "version": "1.5.8", "description": "DNS Scanner", "allowed_asset_types": ["ip", "domain", "fqdn", "keyword"], "sublist3r_bin_path": "/opt/patrowl-engines/owl_dns/external-libs/Sublist3r", diff --git a/engines/owl_leaks/Dockerfile b/engines/owl_leaks/Dockerfile index 7f0ede94..23c1376b 100644 --- a/engines/owl_leaks/Dockerfile +++ b/engines/owl_leaks/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL 
Name="Patrowl\ Data\ Leaks\ \(Patrowl engine\)" Version="1.4.29" +LABEL Name="Patrowl\ Data\ Leaks\ \(Patrowl engine\)" Version="1.4.30" # Install dependencies RUN apk add --update \ diff --git a/engines/owl_leaks/VERSION b/engines/owl_leaks/VERSION index 8a40a8cf..5b3274b4 100644 --- a/engines/owl_leaks/VERSION +++ b/engines/owl_leaks/VERSION @@ -1 +1 @@ -1.4.29 +1.4.30 diff --git a/engines/owl_request/Dockerfile b/engines/owl_request/Dockerfile index 18c06d7f..801f6bb7 100644 --- a/engines/owl_request/Dockerfile +++ b/engines/owl_request/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="Request\ \(Patrowl engine\)" Version="1.4.27" +LABEL Name="Request\ \(Patrowl engine\)" Version="1.4.28" # Create the target repo RUN mkdir -p /opt/patrowl-engines/owl_request diff --git a/engines/owl_request/VERSION b/engines/owl_request/VERSION index 5e99adfc..c87f986a 100644 --- a/engines/owl_request/VERSION +++ b/engines/owl_request/VERSION @@ -1 +1 @@ -1.4.27 +1.4.28 diff --git a/engines/pastebin_monitor/Dockerfile b/engines/pastebin_monitor/Dockerfile index cbfaf048..623b3a49 100644 --- a/engines/pastebin_monitor/Dockerfile +++ b/engines/pastebin_monitor/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="PastebinMonitor\ \(Patrowl engine\)" Version="1.4.28" +LABEL Name="PastebinMonitor\ \(Patrowl engine\)" Version="1.4.29" # Install dependencies RUN apk add --update --no-cache \ diff --git a/engines/pastebin_monitor/VERSION b/engines/pastebin_monitor/VERSION index c87f986a..8a40a8cf 100644 --- a/engines/pastebin_monitor/VERSION +++ b/engines/pastebin_monitor/VERSION @@ -1 +1 @@ -1.4.28 +1.4.29 diff --git a/engines/shhgit/Dockerfile b/engines/shhgit/Dockerfile index 7863d1dc..585ff950 100644 --- a/engines/shhgit/Dockerfile +++ b/engines/shhgit/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="SHHGit\ \(Patrowl engine\)" Version="1.4.32" +LABEL Name="SHHGit\ \(Patrowl engine\)" Version="1.4.33" # Create the target repo RUN mkdir -p 
/opt/patrowl-engines/shhgit diff --git a/engines/shhgit/VERSION b/engines/shhgit/VERSION index 00bbe72a..9baec2fd 100644 --- a/engines/shhgit/VERSION +++ b/engines/shhgit/VERSION @@ -1 +1 @@ -1.4.32 +1.4.33 diff --git a/engines/ssllabs/Dockerfile b/engines/ssllabs/Dockerfile index e4a731fe..4a214de2 100644 --- a/engines/ssllabs/Dockerfile +++ b/engines/ssllabs/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="SSL-Labs\ \(Patrowl engine\)" Version="1.4.30" +LABEL Name="SSL-Labs\ \(Patrowl engine\)" Version="1.4.31" # Install dependencies RUN apk add --update --no-cache \ diff --git a/engines/ssllabs/VERSION b/engines/ssllabs/VERSION index 5b3274b4..d156665a 100644 --- a/engines/ssllabs/VERSION +++ b/engines/ssllabs/VERSION @@ -1 +1 @@ -1.4.30 +1.4.31 diff --git a/engines/sslscan/Dockerfile b/engines/sslscan/Dockerfile index 12cfb652..72cab1ac 100644 --- a/engines/sslscan/Dockerfile +++ b/engines/sslscan/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="SSLScan\ \(Patrowl engine\)" Version="1.4.33" +LABEL Name="SSLScan\ \(Patrowl engine\)" Version="1.4.34" ENV CFLAGS "-D__USE_GNU" diff --git a/engines/sslscan/VERSION b/engines/sslscan/VERSION index 9baec2fd..8b9619b3 100644 --- a/engines/sslscan/VERSION +++ b/engines/sslscan/VERSION @@ -1 +1 @@ -1.4.33 +1.4.34 diff --git a/engines/urlvoid/Dockerfile b/engines/urlvoid/Dockerfile index a4a86b91..34de023b 100644 --- a/engines/urlvoid/Dockerfile +++ b/engines/urlvoid/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="URLVoid\ \(Patrowl engine\)" Version="1.4.27" +LABEL Name="URLVoid\ \(Patrowl engine\)" Version="1.4.28" # Create the target repo RUN mkdir -p /opt/patrowl-engines/urlvoid diff --git a/engines/urlvoid/VERSION b/engines/urlvoid/VERSION index 5e99adfc..c87f986a 100644 --- a/engines/urlvoid/VERSION +++ b/engines/urlvoid/VERSION @@ -1 +1 @@ -1.4.27 +1.4.28 diff --git a/engines/virustotal/Dockerfile b/engines/virustotal/Dockerfile index 52e3be8e..7da94e64 100644 --- 
a/engines/virustotal/Dockerfile +++ b/engines/virustotal/Dockerfile @@ -1,5 +1,5 @@ FROM alpine:3.16.3 -LABEL Name="VirusTotal\ \(Patrowl engine\)" Version="1.4.29" +LABEL Name="VirusTotal\ \(Patrowl engine\)" Version="1.4.30" # Create the target repo RUN mkdir -p /opt/patrowl-engines/virustotal diff --git a/engines/virustotal/VERSION b/engines/virustotal/VERSION index 8a40a8cf..5b3274b4 100644 --- a/engines/virustotal/VERSION +++ b/engines/virustotal/VERSION @@ -1 +1 @@ -1.4.29 +1.4.30 diff --git a/engines/wpscan/Dockerfile b/engines/wpscan/Dockerfile index 2c74ecbc..de9b4982 100644 --- a/engines/wpscan/Dockerfile +++ b/engines/wpscan/Dockerfile @@ -25,7 +25,7 @@ RUN chmod -R a+r /usr/local/bundle # -- WPScan Deployment FROM ruby:3.0.2-alpine -LABEL Name="WPScan\ \(Patrowl engine\)" Version="1.4.28" +LABEL Name="WPScan\ \(Patrowl engine\)" Version="1.4.29" RUN adduser -h /wpscan -g WPScan -D wpscan COPY --from=builder /usr/local/bundle /usr/local/bundle diff --git a/engines/wpscan/VERSION b/engines/wpscan/VERSION index c87f986a..8a40a8cf 100644 --- a/engines/wpscan/VERSION +++ b/engines/wpscan/VERSION @@ -1 +1 @@ -1.4.28 +1.4.29 diff --git a/engines/wpscan/__init__.py b/engines/wpscan/__init__.py index d797e2bf..2ada8b22 100644 --- a/engines/wpscan/__init__.py +++ b/engines/wpscan/__init__.py @@ -2,7 +2,7 @@ # -*- coding: utf-8 -*- __title__ = 'patrowl_engine_wpscan' -__version__ = '1.4.28' +__version__ = '1.4.29' __author__ = 'Nicolas Béguier' __license__ = 'AGPLv3' __copyright__ = 'Copyright (C) 2020-2021 Nicolas Mattiocco - @MaKyOtOx' diff --git a/engines/wpscan/wpscan.json.sample b/engines/wpscan/wpscan.json.sample index e17be957..da2aa056 100644 --- a/engines/wpscan/wpscan.json.sample +++ b/engines/wpscan/wpscan.json.sample @@ -1,6 +1,6 @@ { "name": "Wpscan API", - "version": "1.4.28", + "version": "1.4.29", "description": "Wpscan API", "allowed_asset_types": ["fqdn", "ip", "domain", "url"], "options": { diff --git a/skeleton/Dockerfile b/skeleton/Dockerfile 
new file mode 100644 index 00000000..285293b7 --- /dev/null +++ b/skeleton/Dockerfile @@ -0,0 +1,35 @@ +FROM alpine:3.16.3 +LABEL Name="Patrowl\ Skeleton" Version="1.0.0" + +# Install dependencies +RUN apk add --update --no-cache \ + python3 python3-dev py3-pip \ + && rm -rf /var/cache/apk/* + +# Create the target repo +RUN mkdir -p /opt/patrowl-engines/skeleton +RUN mkdir -p /opt/patrowl-engines/skeleton/results +RUN mkdir -p /opt/patrowl-engines/skeleton/logs + +# Set the working directory to /opt/ +WORKDIR /opt/patrowl-engines/skeleton + +# Copy the current directory contents into the container at / +COPY __init__.py . +COPY engine-skeleton.py . +COPY skeleton.json.sample skeleton.json +COPY requirements.txt . +COPY README.md . +COPY VERSION . + +# Install python modules for engine +WORKDIR /opt/patrowl-engines/skeleton/ +RUN pip3 install --upgrade pip +RUN pip3 install --trusted-host pypi.python.org -r requirements.txt + +# TCP port exposed by the container (NAT) +EXPOSE 5999 +# (change this) + +# Run app.py when the container launches +CMD ["gunicorn", "engine-skeleton:app", "-b", "0.0.0.0:5999", "--access-logfile", "-", "--threads", "10"] diff --git a/skeleton/VERSION b/skeleton/VERSION new file mode 100644 index 00000000..3eefcb9d --- /dev/null +++ b/skeleton/VERSION @@ -0,0 +1 @@ +1.0.0 diff --git a/skeleton/engine-skeleton.py b/skeleton/engine-skeleton.py index f1b58ab2..f111309f 100755 --- a/skeleton/engine-skeleton.py +++ b/skeleton/engine-skeleton.py @@ -10,6 +10,7 @@ from flask import Flask, request, jsonify from PatrowlEnginesUtils.PatrowlEngine import _json_serial from PatrowlEnginesUtils.PatrowlEngine import PatrowlEngine + # from PatrowlEnginesUtils.PatrowlEngine import PatrowlEngineFinding from PatrowlEnginesUtils.PatrowlEngineExceptions import PatrowlEngineExceptions @@ -22,10 +23,7 @@ APP_BASE_DIR = os.path.dirname(os.path.realpath(__file__)) engine = PatrowlEngine( - app=app, - base_dir=APP_BASE_DIR, - name=APP_ENGINE_NAME, -
max_scans=APP_MAXSCANS + app=app, base_dir=APP_BASE_DIR, name=APP_ENGINE_NAME, max_scans=APP_MAXSCANS ) this = sys.modules[__name__] @@ -46,95 +44,95 @@ def handle_invalid_usage(error): return response -@app.route('/') +@app.route("/") def default(): """Route by default.""" return engine.default() -@app.route('/engines/skeleton/') +@app.route("/engines/skeleton/") def index(): """Return index page.""" return engine.index() -@app.route('/engines/skeleton/liveness') +@app.route("/engines/skeleton/liveness") def liveness(): """Return liveness page.""" return engine.liveness() -@app.route('/engines/skeleton/readiness') +@app.route("/engines/skeleton/readiness") def readiness(): """Return readiness page.""" return engine.readiness() -@app.route('/engines/skeleton/test') +@app.route("/engines/skeleton/test") def test(): """Return test page.""" return engine.test() -@app.route('/engines/skeleton/info') +@app.route("/engines/skeleton/info") def info(): """Get info on running engine.""" return engine.info() -@app.route('/engines/skeleton/clean') +@app.route("/engines/skeleton/clean") def clean(): """Clean all scans.""" return engine.clean() -@app.route('/engines/skeleton/clean/') +@app.route("/engines/skeleton/clean/") def clean_scan(scan_id): """Clean scan identified by id.""" return engine.clean_scan(scan_id) -@app.route('/engines/skeleton/status') +@app.route("/engines/skeleton/status") def status(): """Get status on engine and all scans.""" return engine.getstatus() -@app.route('/engines/skeleton/status/') +@app.route("/engines/skeleton/status/") def status_scan(scan_id): """Get status on scan identified by id.""" return engine.getstatus_scan(scan_id) -@app.route('/engines/skeleton/stopscans') +@app.route("/engines/skeleton/stopscans") def stop(): """Stop all scans.""" return engine.stop() -@app.route('/engines/skeleton/stop/') +@app.route("/engines/skeleton/stop/") def stop_scan(scan_id): """Stop scan identified by id.""" return engine.stop_scan(scan_id) 
-@app.route('/engines/skeleton/getreport/') +@app.route("/engines/skeleton/getreport/") def getreport(scan_id): """Get report on finished scans.""" return engine.getreport(scan_id) def _loadconfig(): - conf_file = APP_BASE_DIR+'/skeleton.json' + conf_file = APP_BASE_DIR + "/skeleton.json" if os.path.exists(conf_file): json_data = open(conf_file) engine.scanner = json.load(json_data) - engine.scanner['status'] = "READY" + engine.scanner["status"] = "READY" - version_filename = APP_BASE_DIR+'/VERSION' + version_filename = APP_BASE_DIR + "/VERSION" if os.path.exists(version_filename): version_file = open(version_filename, "r") - engine.version = version_file.read().rstrip('\n') + engine.version = version_file.read().rstrip("\n") version_file.close() else: @@ -142,7 +140,7 @@ def _loadconfig(): return {"status": "error", "reason": "config file not found"} -@app.route('/engines/skeleton/reloadconfig', methods=['GET']) +@app.route("/engines/skeleton/reloadconfig", methods=["GET"]) def reloadconfig(): res = {"page": "reloadconfig"} _loadconfig() @@ -150,53 +148,67 @@ def reloadconfig(): return jsonify(res) -@app.route('/engines/skeleton/startscan', methods=['POST']) +@app.route("/engines/skeleton/startscan", methods=["POST"]) def start_scan(): res = {"page": "startscan"} # Check the scanner is ready to start a new scan if len(engine.scans) == APP_MAXSCANS: - res.update({ - "status": "error", - "reason": "Scan refused: max concurrent active scans reached ({})".format(APP_MAXSCANS) - }) + res.update( + { + "status": "error", + "reason": "Scan refused: max concurrent active scans reached ({})".format( + APP_MAXSCANS + ), + } + ) return jsonify(res) status() - if engine.scanner['status'] != "READY": - res.update({ - "status": "refused", - "details": { - "reason": "scanner not ready", - "status": engine.scanner['status'] - }}) + if engine.scanner["status"] != "READY": + res.update( + { + "status": "refused", + "details": { + "reason": "scanner not ready", + "status": 
engine.scanner["status"], + }, + } + ) return jsonify(res) data = json.loads(request.data) - if 'assets' not in data.keys() or 'scan_id' not in data.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "arg error, something is missing ('assets' ?)" - }}) + if "assets" not in data.keys() or "scan_id" not in data.keys(): + res.update( + { + "status": "refused", + "details": {"reason": "arg error, something is missing ('assets' ?)"}, + } + ) return jsonify(res) assets = [] for asset in data["assets"]: # Value if "value" not in asset.keys() or not asset["value"]: - res.update({ - "status": "error", - "reason": "arg error, something is missing ('asset.value')" - }) + res.update( + { + "status": "error", + "reason": "arg error, something is missing ('asset.value')", + } + ) return jsonify(res) # Supported datatypes if asset["datatype"] not in engine.scanner["allowed_asset_types"]: - res.update({ - "status": "error", - "reason": "arg error, bad value for '{}' datatype (not supported)".format(asset["value"]) - }) + res.update( + { + "status": "error", + "reason": "arg error, bad value for '{}' datatype (not supported)".format( + asset["value"] + ), + } + ) return jsonify(res) if asset["datatype"] == "url": @@ -205,53 +217,50 @@ def start_scan(): assets.append(asset["value"]) - scan_id = str(data['scan_id']) + scan_id = str(data["scan_id"]) - if data['scan_id'] in engine.scans.keys(): - res.update({ - "status": "refused", - "details": { - "reason": "scan '{}' already launched".format(data['scan_id']), + if data["scan_id"] in engine.scans.keys(): + res.update( + { + "status": "refused", + "details": { + "reason": "scan '{}' already launched".format(data["scan_id"]), + }, } - }) + ) return jsonify(res) scan = { # 'assets': data['assets'], - 'assets': assets, - 'threads': [], - 'options': data['options'], - 'scan_id': scan_id, - 'status': "STARTED", - 'started_at': int(time.time() * 1000), - 'findings': {} + "assets": assets, + "threads": [], + "options": 
data["options"], + "scan_id": scan_id, + "status": "STARTED", + "started_at": int(time.time() * 1000), + "findings": {}, } engine.scans.update({scan_id: scan}) thread = threading.Thread(target=_scan_urls, args=(scan_id,)) thread.start() - engine.scans[scan_id]['threads'].append(thread) + engine.scans[scan_id]["threads"].append(thread) - res.update({ - "status": "accepted", - "details": { - "scan_id": scan['scan_id'] - } - }) + res.update({"status": "accepted", "details": {"scan_id": scan["scan_id"]}}) return jsonify(res) def _scan_urls(scan_id): assets = [] - for asset in engine.scans[scan_id]['assets']: + for asset in engine.scans[scan_id]["assets"]: assets.append(asset) for asset in assets: if asset not in engine.scans[scan_id]["findings"]: engine.scans[scan_id]["findings"][asset] = {} try: - engine.scans[scan_id]["findings"][asset]['issues'] = get_report(asset) + engine.scans[scan_id]["findings"][asset]["issues"] = get_report(asset) except Exception as e: print("_scan_urls: API Connexion error (quota?)") print(e) @@ -273,45 +282,55 @@ def _parse_results(scan_id): issues = [] summary = {} - nb_vulns = { - "info": 0, - "low": 0, - "medium": 0, - "high": 0 - } + nb_vulns = {"info": 0, "low": 0, "medium": 0, "high": 0} timestamp = int(time.time() * 1000) for asset in engine.scans[scan_id]["findings"]: if engine.scans[scan_id]["findings"][asset]["issues"]: - description = "On the host {} appear in {} identified in blacklist engines or online reputation tools :\n".format(asset, len(engine.scans[scan_id]["findings"][asset]["issues"])) + description = "On the host {} appear in {} identified in blacklist engines or online reputation tools :\n".format( + asset, len(engine.scans[scan_id]["findings"][asset]["issues"]) + ) for eng in engine.scans[scan_id]["findings"][asset]["issues"]: description = description + eng + "\n" - description = description + "For more detail go 'http://www.skeleton.com/scan/" + asset + "/'" + description = ( + description + + "For more detail go 
'http://www.skeleton.com/scan/" + + asset + + "/'" + ) nb_vulns["high"] += 1 - issues.append({ - "issue_id": len(issues)+1, - "severity": "high", "confidence": "certain", - "target": {"addr": [asset], "protocol": "http"}, - "title": "'{}' identified in skeleton".format(asset), - "solution": "n/a", - "metadata": {"tags": ["http"]}, - "type": "skeleton_report", - "timestamp": timestamp, - "description": description - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "high", + "confidence": "certain", + "target": {"addr": [asset], "protocol": "http"}, + "title": "'{}' identified in skeleton".format(asset), + "solution": "n/a", + "metadata": {"tags": ["http"]}, + "type": "skeleton_report", + "timestamp": timestamp, + "description": description, + } + ) else: nb_vulns["info"] += 1 - issues.append({ - "issue_id": len(issues)+1, - "severity": "info", "confidence": "certain", - "target": {"addr": [asset], "protocol": "http"}, - "title": "'{}' have not been identified in skeleton".format(asset), - "solution": "n/a", - "metadata": {"tags": ["http"]}, - "type": "skeleton_report", - "timestamp": timestamp, - "description": "{} have not identified in blacklist engines or online reputation tools".format(asset) - }) + issues.append( + { + "issue_id": len(issues) + 1, + "severity": "info", + "confidence": "certain", + "target": {"addr": [asset], "protocol": "http"}, + "title": "'{}' have not been identified in skeleton".format(asset), + "solution": "n/a", + "metadata": {"tags": ["http"]}, + "type": "skeleton_report", + "timestamp": timestamp, + "description": "{} have not identified in blacklist engines or online reputation tools".format( + asset + ), + } + ) summary = { "nb_issues": len(issues), @@ -320,60 +339,73 @@ def _parse_results(scan_id): "nb_medium": nb_vulns["medium"], "nb_high": nb_vulns["high"], "engine_name": "skeleton", - "engine_version": engine.scanner["version"] + "engine_version": engine.scanner["version"], } return issues, summary 
-@app.route('/engines/skeleton/getfindings/', methods=['GET']) +@app.route("/engines/skeleton/getfindings/", methods=["GET"]) def getfindings(scan_id): res = {"page": "getfindings", "scan_id": scan_id} # check if the scan_id exists if scan_id not in engine.scans.keys(): - res.update({"status": "error", "reason": "scan_id '{}' not found".format(scan_id)}) + res.update( + {"status": "error", "reason": "scan_id '{}' not found".format(scan_id)} + ) return jsonify(res) # check if the scan is finished status() - if engine.scans[scan_id]['status'] != "FINISHED": - res.update({"status": "error", "reason": "scan_id '{}' not finished (status={})".format(scan_id, engine.scans[scan_id]['status'])}) + if engine.scans[scan_id]["status"] != "FINISHED": + res.update( + { + "status": "error", + "reason": "scan_id '{}' not finished (status={})".format( + scan_id, engine.scans[scan_id]["status"] + ), + } + ) return jsonify(res) issues, summary = _parse_results(scan_id) scan = { "scan_id": scan_id, - "assets": engine.scans[scan_id]['assets'], - "options": engine.scans[scan_id]['options'], - "status": engine.scans[scan_id]['status'], - "started_at": engine.scans[scan_id]['started_at'], - "finished_at": engine.scans[scan_id]['finished_at'] + "assets": engine.scans[scan_id]["assets"], + "options": engine.scans[scan_id]["options"], + "status": engine.scans[scan_id]["status"], + "started_at": engine.scans[scan_id]["started_at"], + "finished_at": engine.scans[scan_id]["finished_at"], } # Store the findings in a file - with open(APP_BASE_DIR+"/results/skeleton_"+scan_id+".json", 'w') as report_file: - json.dump({ - "scan": scan, - "summary": summary, - "issues": issues - }, report_file, default=_json_serial) + with open( + APP_BASE_DIR + "/results/skeleton_" + scan_id + ".json", "w" + ) as report_file: + json.dump( + {"scan": scan, "summary": summary, "issues": issues}, + report_file, + default=_json_serial, + ) # remove the scan from the active scan list clean_scan(scan_id) - 
res.update({"scan": scan, "summary": summary, "issues": issues, "status": "success"}) + res.update( + {"scan": scan, "summary": summary, "issues": issues, "status": "success"} + ) return jsonify(res) @app.before_first_request def main(): """First function called.""" - if not os.path.exists(APP_BASE_DIR+"/results"): - os.makedirs(APP_BASE_DIR+"/results") + if not os.path.exists(APP_BASE_DIR + "/results"): + os.makedirs(APP_BASE_DIR + "/results") _loadconfig() -if __name__ == '__main__': +if __name__ == "__main__": engine.run_app(app_debug=APP_DEBUG, app_host=APP_HOST, app_port=APP_PORT) From ba07a1ae3f4cd4b4936a1bc8bcc7f7ff91b37617 Mon Sep 17 00:00:00 2001 From: sebastien Date: Tue, 20 Feb 2024 11:42:17 +0100 Subject: [PATCH 4/4] Updated VERSION --- VERSION | 2 +- engines/sslscan/engine-sslscan.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/VERSION b/VERSION index a7ccabdb..07a45d78 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.5.20 +1.5.21 diff --git a/engines/sslscan/engine-sslscan.py b/engines/sslscan/engine-sslscan.py index c447531b..683fbe89 100644 --- a/engines/sslscan/engine-sslscan.py +++ b/engines/sslscan/engine-sslscan.py @@ -390,7 +390,7 @@ def _get_heartbleed_vuln(items, issue_id, asset, asset_port): target_addrs=[asset], meta_tags=["heartbleed", "ssl", "tls"], meta_links=hb_links, - meta_vuln_refs={{"CVE": ["CVE-2014-0160"]}}, + meta_vuln_refs=[{"CVE": ["CVE-2014-0160"]}], ) else: return PatrowlEngineFinding(