
add documentation and change result (todo)
ElNiak committed Jun 20, 2024
1 parent 018db22 commit 8da788f
Showing 17 changed files with 7,554 additions and 390 deletions.
6 changes: 0 additions & 6 deletions bounty_drive/attacks/dorks/dorking_config.py

This file was deleted.

11 changes: 0 additions & 11 deletions bounty_drive/attacks/dorks/github_config.py

This file was deleted.

11 changes: 9 additions & 2 deletions bounty_drive/attacks/dorks/github_dorking.py
@@ -11,11 +11,18 @@
import requests
from termcolor import cprint

from attacks.dorks.github_config import GITHUB_API_URL, TOKENS_LIST
from utils.app_config import *

token_index = 0

# GitHub Dorking
GITHUB_API_URL = "https://api.github.com"
TOKENS_LIST = ["your_github_token"] # Add your GitHub tokens here
DORK_LIST = ["example_dork1", "example_dork2"] # Add your dorks here
QUERIES_LIST = ["example_query"] # Add your queries here
ORGANIZATIONS_LIST = ["example_organization"] # Add your organizations here
USERS_LIST = ["example_user"] # Add your users here


def token_round_robin():
global token_index
@@ -105,5 +112,5 @@ def github_search_with_proxy(dork_tuple, proxy, category, retries=3, advanced=Fa
return category, None # Indicate failure after retries


# def load_github_dorks_and_search(extension=DEFAULT_EXTENSION, total_output=DEFAULT_TOTAL_OUTPUT, page_no=DEFAULT_PAGE_NO, proxies=None):
# def launch_github_dorks_and_search_attack(extension=DEFAULT_EXTENSION, total_output=DEFAULT_TOTAL_OUTPUT, page_no=DEFAULT_PAGE_NO, proxies=None):
# pass
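
Since the body of token_round_robin is collapsed above, here is a minimal sketch of how such a rotator is typically written; the exact implementation in the repository may differ:

# Hypothetical sketch -- assumes the module-level TOKENS_LIST and token_index above.
def token_round_robin():
    """Return the next GitHub token, cycling through TOKENS_LIST."""
    global token_index
    token = TOKENS_LIST[token_index % len(TOKENS_LIST)]
    token_index += 1
    return token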
280 changes: 194 additions & 86 deletions bounty_drive/attacks/dorks/google_dorking.py
@@ -19,9 +19,13 @@

from scraping.web_scraper import parse_google_search_results, render_js_and_get_text

from vpn_proxies.proxies_manager import prepare_proxies, round_robin_proxies
from vpn_proxies.proxies_manager import get_proxies_and_cycle, prepare_proxies
from requester.request_manager import param_converter, start_request
from reporting.results_manager import get_processed_dorks, safe_add_result
from reporting.results_manager import (
get_processed_dorks,
save_dorking_query,
google_dorking_results,
)

dork_id_lock = threading.Lock()

@@ -37,6 +41,29 @@ def google_search_with_proxy(
advanced=False,
dork_id=0,
):
"""Performs a Google search using a proxy.
This function takes in various parameters to perform a Google search using a proxy.
It generates a full query based on the provided dork query, config, and domain.
It prepares the necessary parameters and proxies, and then performs the search using the `perform_searches` function.
Args:
dork_query (str): The dork query to be used for the Google search.
proxy (str): The proxy to be used for the Google search.
category (str): The category of the search.
config (dict): The configuration file.
domain (str): The domain to be searched.
processed_dorks (list): A list of processed dorks.
retries (int, optional): The number of retries for the search. Defaults to 1.
advanced (bool, optional): Whether to use advanced search options. Defaults to False.
dork_id (int, optional): The ID of the dork. Defaults to 0.
Raises:
Exception: If the config file is not provided.
Returns:
dict: The search results.
"""

if not config:
raise Exception("Config file should be provided")
@@ -82,6 +109,26 @@ def perform_searches(
Expand Down Expand Up @@ -82,6 +109,26 @@ def perform_searches(
processed_dorks,
use_session,
):
"""Perform searches using Google dorking.
This function performs searches using the Google dorking technique. It takes various parameters
to customize the search query and behavior.
Args:
full_query (str): The full search query.
proxies (dict): A dictionary of proxies to be used for the search.
category (str): The category of the search.
params (dict): Additional parameters for the search.
retries (int): The number of retries in case of failure.
config (dict): Configuration settings for the search.
advanced (bool): Flag indicating whether to use advanced search techniques.
dork_id (int): The ID of the dork.
processed_dorks (list): A list of processed dorks.
use_session (bool): Flag indicating whether to use session for the search.
Returns:
int: The ID of the executed search.
"""

params["q"] = full_query
dork_id = execute_search_with_retries(
@@ -112,6 +159,24 @@ def execute_search_with_retries(
processed_dorks,
use_session=False,
):
"""
Execute a search with retries using Google dorking.
Args:
query (str): The search query.
proxies (dict): The proxies to be used for the request.
category (str): The category of the search.
params (dict): The parameters for the request.
retries (int): The number of retries.
config (dict): The configuration settings.
advanced (bool): Whether to use advanced search techniques.
dork_id (int): The ID of the dork.
processed_dorks (list): The list of already processed dorks.
use_session (bool, optional): Whether to use a session for the request. Defaults to False.
Returns:
int: The updated dork ID.
"""
base_url = "https://www.google.com/search"
headers = {
"User-Agent": random.choice(USER_AGENTS),
@@ -207,7 +272,9 @@ def execute_search_with_retries(
proxies, advanced, query, html_content
)
result = dork_id, category, urls, query
safe_add_result(result, config)
# save_dorking_query(result, config)
with dork_id_lock:
google_dorking_results.append((result, config))
# with dork_id_lock:
# dork_id += 1
# TODO: to be faster, also record non-functional dorks
@@ -250,6 +317,23 @@ def execute_search_with_retries(


def generate_dork_query(query, config, domain):
"""Generate a dork query for Google dorking.
This function takes a query, configuration, and domain as input and generates a dork query
for Google dorking. It cleans up the query by removing existing inurl: and intext: tags,
ensures the query is properly enclosed in quotes if it contains quotes, and incorporates
the subdomain into the search query if specified. It also appends the file extension to the
query if specified in the configuration.
Args:
query (str): The query string.
config (dict): The configuration settings.
domain (str): The domain to incorporate into the search query.
Returns:
str: The generated dork query.
"""
# Clean up the query by removing existing inurl: and intext: tags
if len(query) > 0:
for tag in ["inurl:", "intext:"]:
@@ -291,6 +375,14 @@ def generate_dork_query(query, config, domain):
def filter_search_tasks(search_tasks, processed_dorks):
"""
Filters out the already processed dorks from search tasks.
Args:
search_tasks (dict): A dictionary containing search tasks categorized by their respective categories.
processed_dorks (list): A list of already processed dorks.
Returns:
dict: A dictionary containing filtered search tasks with the already processed dorks removed.
"""
filtered_tasks = {}
for category, dorks in search_tasks.items():
@@ -300,97 +392,113 @@
return filtered_tasks
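
To make the filtering contract concrete, a small hypothetical example (values are illustrative only):

    search_tasks = {"xss": ["inurl:search.php?q=", "intext:error"]}
    processed_dorks = ["inurl:search.php?q="]
    filter_search_tasks(search_tasks, processed_dorks)
    # -> {"xss": ["intext:error"]}, assuming the collapsed loop above skips
    #    dorks already present in processed_dorks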


Removed:

def load_google_dorks_and_search(config, categories):
    proxies, proxy_cycle = get_proxies_and_cycle(config)

    search_tasks = {}

    for category in categories:
        search_tasks[category] = []
        dork_files = glob.glob(f"attacks/dorks/google/{category}/*.txt", recursive=True)
        for dork_file in dork_files:
            with open(dork_file, "r") as file:
                lines = file.readlines()
                dorks = [line.strip() for line in lines]
                search_tasks[category] += dorks

    cprint(
        f"Total number of dorks: {sum([len(search_tasks[task]) for task in search_tasks])}",
        "yellow",
        file=sys.stderr,
    )
    processed_dorks = get_processed_dorks(config)

    if not search_tasks:
        cprint(f"No dorks to process.", "red", file=sys.stderr)
        return

    if config["use_vpn"]:
        raise NotImplementedError(
            "VPN is not supported in this version - Error in library"
        )
        thread = threading.Thread(target=change_vpn)
        thread.start()

    number_of_worker = min(len(proxies), 30)
    cprint(f"Number of workers: {number_of_worker}", "yellow", file=sys.stderr)

    search_tasks_with_proxy = []
    for task in search_tasks:
        for domain in config["subdomain"]:
            for dork in search_tasks[task]:
                proxy = next(proxy_cycle)
                search_tasks_with_proxy.append(
                    {"dork": dork, "proxy": proxy, "category": task, "domain": domain}
                )
    cprint(
        f"Total number of dorks: {len(search_tasks_with_proxy)}",
        "yellow",
        file=sys.stderr,
    )

    with concurrent.futures.ThreadPoolExecutor(
        max_workers=number_of_worker
    ) as executor:
        future_to_search = {
            executor.submit(
                google_search_with_proxy,
                task["dork"],
                task["proxy"],
                task["category"],
                config,
                task["domain"],
                processed_dorks,
            ): task
            for task in search_tasks_with_proxy
        }
        for future in tqdm(
            concurrent.futures.as_completed(future_to_search),
            total=len(future_to_search),
            desc="Searching for vulnerable website",
            unit="site",
        ):
            # task = future_to_search[future]
            # try:
            future.result()
            # update_csv(config["experiment_file_path"], task, success=True)
            # except Exception as e:
            #     cprint(f"Error processing {task['dork']}: {e}", "red", file=sys.stderr)
            #     # update_csv(config["experiment_file_path"], task, success=False)


def get_proxies_and_cycle(config):
    proxies = config["proxies"]
    if config["use_proxy"] and len(proxies) == 0:
        cprint(
            f"Using proxies -> you should have at least one UP",
            "red",
            file=sys.stderr,
        )
        exit()

    if not config["use_proxy"]:
        proxies = [None]

    proxy_cycle = round_robin_proxies(proxies)
    return proxies, proxy_cycle

Added (get_proxies_and_cycle is now imported from vpn_proxies.proxies_manager):

def launch_google_dorks_and_search_attack(config, categories):
    """Launches a Google dorks and search attack.

    This function takes a configuration object and a list of categories as input.
    It performs a Google dorks search for each category and launches a search attack
    using the obtained dorks.

    Args:
        config (dict): A configuration object containing various settings for the attack.
        categories (list): A list of categories to perform the search attack on.

    Raises:
        NotImplementedError: If VPN is enabled in the configuration, as VPN is not supported in this version.
        Exception: If any other error occurs during the search attack.

    Returns:
        None
    """
    try:
        proxies, proxy_cycle = get_proxies_and_cycle(config)

        search_tasks = {}

        for category in categories:
            search_tasks[category] = []
            dork_files = glob.glob(
                f"attacks/dorks/google/{category}/*.txt", recursive=True
            )
            for dork_file in dork_files:
                with open(dork_file, "r") as file:
                    lines = file.readlines()
                    dorks = [line.strip() for line in lines]
                    search_tasks[category] += dorks

        cprint(
            f"Total number of dorks: {sum([len(search_tasks[task]) for task in search_tasks])}",
            "yellow",
            file=sys.stderr,
        )
        processed_dorks = get_processed_dorks(config)

        if not search_tasks:
            cprint(f"No dorks to process.", "red", file=sys.stderr)
            return

        if config["use_vpn"]:
            raise NotImplementedError(
                "VPN is not supported in this version - Error in library"
            )
            thread = threading.Thread(target=change_vpn)
            thread.start()

        number_of_worker = min(len(proxies), 30)
        cprint(f"Number of workers: {number_of_worker}", "yellow", file=sys.stderr)

        search_tasks_with_proxy = []
        for task in search_tasks:
            for domain in config["subdomain"]:
                for dork in search_tasks[task]:
                    proxy = next(proxy_cycle)
                    search_tasks_with_proxy.append(
                        {
                            "dork": dork,
                            "proxy": proxy,
                            "category": task,
                            "domain": domain,
                        }
                    )
        cprint(
            f"Total number of dorks: {len(search_tasks_with_proxy)}",
            "yellow",
            file=sys.stderr,
        )

        with concurrent.futures.ThreadPoolExecutor(
            max_workers=number_of_worker
        ) as executor:
            future_to_search = {
                executor.submit(
                    google_search_with_proxy,
                    task["dork"],
                    task["proxy"],
                    task["category"],
                    config,
                    task["domain"],
                    processed_dorks,
                ): task
                for task in search_tasks_with_proxy
            }
            for future in tqdm(
                concurrent.futures.as_completed(future_to_search),
                total=len(future_to_search),
                desc="Searching for vulnerable website",
                unit="site",
            ):
                future.result()
    except KeyboardInterrupt:
        cprint(
            "Process interrupted by user during google dorking phase ... Saving results",
            "red",
            file=sys.stderr,
        )
        concurrent.futures.thread._threads_queues.clear()
        # https://stackoverflow.com/questions/49992329/the-workers-in-threadpoolexecutor-is-not-really-daemon
        for result, config in google_dorking_results:
            save_dorking_query(result, config)
        quit()
    except Exception as e:
        cprint(f"Error searching for dorks: {e}", "red", file=sys.stderr)
        raise e
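
For orientation, a hypothetical invocation of the renamed entry point; the config keys mirror those read in the code above, and all values are placeholders:

    config = {
        "use_proxy": True,
        "proxies": ["http://127.0.0.1:8080"],  # placeholder proxy
        "use_vpn": False,
        "subdomain": ["example.com"],  # placeholder target
    }
    launch_google_dorks_and_search_attack(config, categories=["xss", "sqli"])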
2 changes: 1 addition & 1 deletion bounty_drive/attacks/sqli/sqli.py
@@ -96,7 +96,7 @@ def test_sqli_with_proxy(url_proxy):
return url, vuln # Error or can't determine


def test_vulnerability_sqli(proxies):
def launch_sqli_attack(proxies):
"""
Test a list of websites for SQL injection vulnerability using multithreading and proxies.
"""