diff --git a/.gitignore b/.gitignore index 8e012419..d7f264ec 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,7 @@ python/__pycache__/ python/*/__pycache__/ python/.vscode/ python/*/log/ +python/.xray-configs .tmp golang/CFScanner windows/*.rsuser diff --git a/python/README.md b/python/README.md index 30a40a48..ed12ba54 100644 --- a/python/README.md +++ b/python/README.md @@ -212,6 +212,9 @@ Contributors names and contact info * Fixed a bug in custom config template * 1.1.0 * Added random sampling +* 1.2.0 + * Added progress bar + * Improved logging [python]: https://img.shields.io/badge/-Python-3776AB?logo=python&logoColor=white -[version]: https://img.shields.io/badge/Version-1.1.0-blue +[version]: https://img.shields.io/badge/Version-1.2.0-blue diff --git a/python/args/parser.py b/python/args/parser.py index 1b6a6a90..f2205b2e 100644 --- a/python/args/parser.py +++ b/python/args/parser.py @@ -1,9 +1,9 @@ import argparse -from report.clog import CLogger from report.print import color_text +from rich.console import Console -logger = CLogger("args") +console = Console() def _title(text): @@ -217,11 +217,12 @@ def formatter(prog): return argparse.HelpFormatter( parsed_args.sample_size = float(parsed_args.sample_size) elif parsed_args.sample_size >= 1: if parsed_args.sample_size % 1 > 0.000001: - logger.warn( - f"Sample size rounded to integer value: {round(parsed_args.sample_size)}" + console.log( + f"[yellow]Sample size rounded to integer value: {round(parsed_args.sample_size)}[/yellow]" ) parsed_args.sample_size = round(parsed_args.sample_size) else: - raise ValueError(color_text("Sample size must be a positive number.", rgb=(255, 0, 0))) + raise ValueError(color_text( + "Sample size must be a positive number.", rgb=(255, 0, 0))) return parsed_args diff --git a/python/args/testconfig.py b/python/args/testconfig.py index c2afb06c..dbe342f3 100644 --- a/python/args/testconfig.py +++ b/python/args/testconfig.py @@ -2,14 +2,13 @@ import json import os -from report.clog import CLogger from utils.exceptions import BinaryNotFoundError, TemplateReadError from utils.os import detect_system from utils.requests import download_file from xray import templates from xray.binary import download_binary -logger = CLogger("testconfig") +from utils.exceptions import * PATH = os.path.dirname(os.path.abspath(__file__)) PARENT_PATH = os.path.dirname(PATH) @@ -47,10 +46,10 @@ def from_args(cls, args: argparse.Namespace): (file_content["path"].lstrip("/")) if args.template_path is None: - test_config.custom_template = False # user did not provide a custom template + test_config.custom_template = False # user did not provide a custom template test_config.proxy_config_template = templates.vmess_ws_tls else: - test_config.custom_template = True # user provided a custom template + test_config.custom_template = True # user provided a custom template try: with open(args.template_path, "r") as infile: test_config.proxy_config_template = infile.read() @@ -78,7 +77,7 @@ def from_args(cls, args: argparse.Namespace): test_config.max_ul_latency = args.max_ul_latency test_config.n_tries = args.n_tries test_config.novpn = args.no_vpn - + test_config.sample_size = args.sample_size system_info = detect_system() @@ -92,9 +91,12 @@ def from_args(cls, args: argparse.Namespace): ) test_config.binpath = args.binpath else: - test_config.binpath = download_binary( - system_info=system_info, - bin_dir=PARENT_PATH - ) + try: + test_config.binpath = download_binary( + system_info=system_info, + bin_dir=PARENT_PATH + ) + except 
Exception as e: + raise BinaryDownloadError(str(e)) return test_config diff --git a/python/cfscanner.py b/python/cfscanner.py index 7d2ff51c..eab4f96d 100644 --- a/python/cfscanner.py +++ b/python/cfscanner.py @@ -8,96 +8,176 @@ from args.parser import parse_args from args.testconfig import TestConfig -from report.clog import CLogger -from report.print import print_ok +from rich import print as rprint +from rich.console import Console +from rich.progress import Progress from speedtest.conduct import test_ip from speedtest.tools import mean_jitter from subnets import cidr_to_ip_list, get_num_ips_in_cidr, read_cidrs -from utils.exceptions import BinaryNotFoundError, TemplateReadError +from utils.exceptions import * from utils.os import create_dir -log = CLogger("cfscanner-python") +console = Console() SCRIPTDIR = os.path.dirname(os.path.realpath(__file__)) -CONFIGDIR = f"{SCRIPTDIR}/../config" -RESULTDIR = f"{SCRIPTDIR}/../result" +CONFIGDIR = f"{SCRIPTDIR}/.xray-configs" +RESULTDIR = f"{SCRIPTDIR}/result" START_DT_STR = datetime.now().strftime(r"%Y%m%d_%H%M%S") INTERIM_RESULTS_PATH = os.path.join(RESULTDIR, f'{START_DT_STR}_result.csv') if __name__ == "__main__": + console = Console() + args = parse_args() if not args.no_vpn: - create_dir(CONFIGDIR) + with console.status(f"[green]Creating config dir \"{CONFIGDIR}\"[/green]"): + try: + create_dir(CONFIGDIR) + except Exception as e: + console.log("[red]Could not create config directory[/red]") + exit(1) + console.log(f"[blue]Config directory created \"{CONFIGDIR}\"[/blue]") configFilePath = args.config_path - create_dir(RESULTDIR) + with console.status(f"[green]Creating results directory \"{RESULTDIR}\"[/green]"): + try: + create_dir(RESULTDIR) + except Exception as e: + console.log("[red]Could not create results directory[/red]") + console.log(f"[blue]Results directory created \"{RESULTDIR}\"[/blue]") # create empty result file - with open(INTERIM_RESULTS_PATH, "w") as empty_file: - titles = [ - "ip", "avg_download_speed", "avg_upload_speed", - "avg_download_latency", "avg_upload_latency", - "avg_download_jitter", "avg_upload_jitter" - ] - titles += [f"download_speed_{i+1}" for i in range(args.n_tries)] - titles += [f"upload_speed_{i+1}" for i in range(args.n_tries)] - titles += [f"download_latency_{i+1}" for i in range(args.n_tries)] - titles += [f"upload_latency_{i+1}" for i in range(args.n_tries)] - empty_file.write(",".join(titles) + "\n") + with console.status(f"[green]Creating empty result file {INTERIM_RESULTS_PATH}[/green]"): + try: + with open(INTERIM_RESULTS_PATH, "w") as empty_file: + titles = [ + "ip", "avg_download_speed", "avg_upload_speed", + "avg_download_latency", "avg_upload_latency", + "avg_download_jitter", "avg_upload_jitter" + ] + titles += [f"download_speed_{i+1}" for i in range(args.n_tries)] + titles += [f"upload_speed_{i+1}" for i in range(args.n_tries)] + titles += [f"download_latency_{i+1}" for i in range(args.n_tries)] + titles += [f"upload_latency_{i+1}" for i in range(args.n_tries)] + empty_file.write(",".join(titles) + "\n") + except Exception as e: + console.log(f"[red]Could not create empty result file:\n\"{INTERIM_RESULTS_PATH}\"[/red]") threadsCount = args.threads if args.subnets: - cidr_list = read_cidrs(args.subnets) + with console.status(f"[green]Reading subnets from \"{args.subnets}\"[/green]"): + try: + cidr_list = read_cidrs(args.subnets) + except SubnetsReadError as e: + console.log(f"[red]Could not read subnets. 
{e}[/red]") + exit(1) + except Exception as e: + console.log(f"Unknown error in reading subnets: {e}") + exit(1) + console.log( + f"[blue]Subnets successfully read from \"{args.subnets}\"[/blue]") else: - cidr_list = read_cidrs( - "https://raw.githubusercontent.com/MortezaBashsiz/CFScanner/main/bash/cf.local.iplist" + subnets_default_address = "https://raw.githubusercontent.com/MortezaBashsiz/CFScanner/main/config/cf.local.iplist" + console.log( + f"[blue]Subnets not provided. Default address will be used:\n\"{subnets_default_address}\"[/blue]" ) - + with console.status(f"[green]Retrieving subnets from \"{subnets_default_address}\"[/green]"): + try: + cidr_list = read_cidrs( + "https://raw.githubusercontent.com/MortezaBashsiz/CFScanner/main/config/cf.local.iplist" + ) + except SubnetsReadError as e: + console.log(f"[red]Could not read subnets. {e}[/red]") + exit(1) + except Exception as e: + console.log(f"Unknown error in reading subnets: {e}") + exit(1) try: test_config = TestConfig.from_args(args) except TemplateReadError: - log.error("Could not read template from file.") + console.log( + f"[red]Could not read template from file \"{args.template_path}\"[/red]") exit(1) except BinaryNotFoundError: - log.error("Could not find xray/v2ray binary.", args.binpath) + console.log( + f"[red]Could not find xray/v2ray binary from path \"{args.binpath}\"[/red]") exit(1) except Exception as e: - log.error("Unknown error while reading template.") - log.exception(e) + console.print_exception() exit(1) - n_total_ips = sum(get_num_ips_in_cidr(cidr, sample_size=test_config.sample_size) for cidr in cidr_list) - log.info(f"Starting to scan {n_total_ips} ips...") - - big_ip_list = [ip for cidr in cidr_list for ip in cidr_to_ip_list(cidr, sample_size=test_config.sample_size)] - - with multiprocessing.Pool(processes=threadsCount) as pool: - for res in pool.imap(partial(test_ip, test_config=test_config, config_dir=CONFIGDIR), big_ip_list): - if res: - down_mean_jitter = mean_jitter(res["download"]["latency"]) - up_mean_jitter = mean_jitter( - res["upload"]["latency"]) if test_config.do_upload_test else -1 - mean_down_speed = statistics.mean(res["download"]["speed"]) - mean_up_speed = statistics.mean( - res["upload"]["speed"]) if test_config.do_upload_test else -1 - mean_down_latency = statistics.mean(res["download"]["latency"]) - mean_up_latency = statistics.mean( - res["upload"]["latency"]) if test_config.do_upload_test else -1 - - print_ok(scan_result=res) - - with open(INTERIM_RESULTS_PATH, "a") as outfile: - res_parts = [ - res["ip"], mean_down_speed, mean_up_speed, - mean_down_latency, mean_up_latency, - down_mean_jitter, up_mean_jitter - ] - res_parts += res["download"]["speed"] - res_parts += res["upload"]["speed"] - res_parts += res["download"]["latency"] - res_parts += res["upload"]["latency"] - - outfile.write(",".join(map(str, res_parts)) + "\n") + n_total_ips = sum(get_num_ips_in_cidr( + cidr, + sample_size=test_config.sample_size + ) for cidr in cidr_list) + console.log(f"[blue]Starting to scan {n_total_ips} ips...[/blue]") + + cidr_ip_lists = [ + cidr_to_ip_list( + cidr, + sample_size=test_config.sample_size) + for cidr in cidr_list + ] + big_ip_list = [(ip, cidr) for cidr, ip_list in zip( + cidr_list, cidr_ip_lists) for ip in ip_list] + + cidr_scanned_ips = {cidr: 0 for cidr in cidr_list} + + cidr_prog_tasks = dict() + + with Progress() as progress: + all_ips_task = progress.add_task( + f"all subnets - {n_total_ips} ips", total=n_total_ips) + + with multiprocessing.Pool(processes=threadsCount) as 
pool: + try: + for res in pool.imap(partial(test_ip, test_config=test_config, config_dir=CONFIGDIR), big_ip_list): + progress.update(all_ips_task, advance=1) + if cidr_scanned_ips[res.cidr] == 0: + n_ips_cidr = get_num_ips_in_cidr( + res.cidr, sample_size=test_config.sample_size) + cidr_prog_tasks[res.cidr] = progress.add_task( + f"{res.cidr} - {n_ips_cidr} ips", total=n_ips_cidr) + progress.update(cidr_prog_tasks[res.cidr], advance=1) + + if res.is_ok: + down_mean_jitter = mean_jitter( + res.result["download"]["latency"]) + up_mean_jitter = mean_jitter( + res.result["upload"]["latency"]) if test_config.do_upload_test else -1 + mean_down_speed = statistics.mean( + res.result["download"]["speed"]) + mean_up_speed = statistics.mean( + res.result["upload"]["speed"]) if test_config.do_upload_test else -1 + mean_down_latency = statistics.mean( + res.result["download"]["latency"]) + mean_up_latency = statistics.mean( + res.result["upload"]["latency"]) if test_config.do_upload_test else -1 + + rprint(res.message) + + with open(INTERIM_RESULTS_PATH, "a") as outfile: + res_parts = [ + res.ip, mean_down_speed, mean_up_speed, + mean_down_latency, mean_up_latency, + down_mean_jitter, up_mean_jitter + ] + res_parts += res.result["download"]["speed"] + res_parts += res.result["upload"]["speed"] + res_parts += res.result["download"]["latency"] + res_parts += res.result["upload"]["latency"] + + outfile.write(",".join(map(str, res_parts)) + "\n") + else: + rprint(res.message) + + cidr_scanned_ips[res.cidr] += 1 + if cidr_scanned_ips[res.cidr] == get_num_ips_in_cidr(res.cidr, sample_size=test_config.sample_size): + progress.remove_task(cidr_prog_tasks[res.cidr]) + except StartProxyServiceError as e: + progress.stop() + console.log(f"[red]{e}[/red]") + pool.terminate() diff --git a/python/report/clog.py b/python/report/clog.py deleted file mode 100644 index 1828662c..00000000 --- a/python/report/clog.py +++ /dev/null @@ -1,184 +0,0 @@ -import logging -import os -import sys -from logging import StreamHandler as _SH -from logging.handlers import TimedRotatingFileHandler as _TRFH - -if os.name == "nt": - os.system("color") - -PATH = os.path.dirname(os.path.realpath(__file__)) - -# basic logging configuration -_COLORS = dict( - CRITICAL="\033[95m", - FATAL="\033[95m", - ERROR="\033[91m", - WARNING="\033[33m", - INFO="\x1b[0m", - DEBUG="\033[94m", - SUCCESS="\033[32m", -) - -# add custom class -_SUCCESS = 45 - - -class _CustomLogger(logging.Logger): - """Custom Logger Class""" - - def success(self, msg, *args, **kwargs): - if self.isEnabledFor(_SUCCESS): - self._log(_SUCCESS, msg, args, **kwargs) - - -class _CustomFormat(object): - DATE_FORMAT = "%Y-%m-%d %H:%M:%S" - - @staticmethod - def create(colored=True, show_task_name=False): - task_name = "[%(threadName)-3s] " if show_task_name else "" - color_start, color_end = "", "" - if colored: - color_start = "%(color)s" - color_end = "\033[0m" - - return ( - "%(asctime)s.%(msecs)03d %(name)s {color_start}" - "{task_name}[%(levelname).3s] [%(prefix)-3s] " - "%(message)s{color_end}".format( - task_name=task_name, color_start=color_start, color_end=color_end - ) - ) - - -class _MessageFilter(logging.Filter): - """Setting/Formatting all record arguments and adding color.""" - - def filter(self, record): - record.prefix = "" - record.levelname = record.levelname.replace("Level ", "").lower() - record.color = _COLORS[record.levelname.upper()] - - if record.threadName == "MainThread": - record.threadName = 0 - - if "prefix" in record.args: - prefix = 
record.args.get("prefix") - if prefix: - record.prefix = record.args.get("prefix") - - return True - - -logging.setLoggerClass(_CustomLogger) -logging.addLevelName(_SUCCESS, "SUCCESS") - - -class CLogger(object): - """Logger to import in every project""" - - def __init__( - self, - module, - colored=True, - console_log_level=1, - file_log_level=25, - show_task_name=False, - ): - - module = module.lower() - - self._logger = logging.getLogger(module) - if not self._logger.hasHandlers(): - self.add_handlers( - module, colored, show_task_name, console_log_level, file_log_level - ) - - self._prefix = None - - def add_handlers( - self, module, colored, show_task_name, console_log_level, file_log_level - ): - # create a new logger and add filters and file handler - - console_fmt = _CustomFormat.create(colored, show_task_name) - file_fmt = _CustomFormat.create(False, show_task_name) - date_fmt = _CustomFormat.DATE_FORMAT - - # set console logging - stream_fmt = logging.Formatter(console_fmt, datefmt=date_fmt) - stream_handler = _SH(stream=sys.stdout) - stream_handler.setFormatter(stream_fmt) - stream_handler.setLevel(console_log_level) - - # set file handler and file logging - file_fmt = logging.Formatter(file_fmt, datefmt=date_fmt) - - # create log folder if not exist already - log_dir_path = os.path.join(PATH, "log") - if not os.path.isdir(log_dir_path): - os.mkdir(log_dir_path) - - filename = os.path.join(log_dir_path, f"{module}.log") - file_handler = _TRFH( - filename=filename, - when="D", - interval=1, - backupCount=30, - encoding="utf-8", - delay=False, - ) - file_handler.setFormatter(file_fmt) - file_handler.setLevel(file_log_level) - self._logger.addHandler(file_handler) - self._logger.addHandler(stream_handler) - self._logger.addFilter(_MessageFilter()) - self._logger.setLevel(console_log_level) - - def set_prefix(self, prefix): - self._prefix = prefix - - def info(self, msg: str, prefix: str = None): - if not prefix: - prefix = self._prefix - msg = msg.replace("%", "%%") - self._logger.info(msg, {"prefix": prefix}) - - def warn(self, msg: str, prefix: str = None): - if not prefix: - prefix = self._prefix - msg = msg.replace("%", "%%") - self._logger.warning(msg, {"prefix": prefix}) - - def error(self, msg: str, prefix: str = None): - if not prefix: - prefix = self._prefix - msg = msg.replace("%", "%%") - self._logger.error(msg, {"prefix": prefix}) - - def exception(self, e): - self._logger.exception(e) - - def success(self, msg: str, prefix: str = None): - if not prefix: - prefix = self._prefix - msg = msg.replace("%", "%%") - self._logger.success(msg, {"prefix": prefix}) - - def debug(self, msg: str, prefix: str = None): - if not prefix: - prefix = self._prefix - msg = msg.replace("%", "%%") - self._logger.debug(msg, {"prefix": prefix}) - - -if __name__ == "__main__": - for _ in range(2): - log = CLogger("test-instance") - - log.info("INFO MESSAGE") - log.warn("WARNING MESSAGE", "403") - log.error("ERROR MESSAGE", "TimeoutError") - log.success("SUCCESS MESSAGE", "jones@gmail.com") - log.debug("DEBUG MESSAGE", "a == b") diff --git a/python/report/print.py b/python/report/print.py index 2ab40848..68463553 100644 --- a/python/report/print.py +++ b/python/report/print.py @@ -4,7 +4,7 @@ from statistics import mean -def print_and_kill( +def no_and_kill( ip: str, message: str, process: Popen @@ -16,11 +16,11 @@ def print_and_kill( message (str): the message related to the error process (Popen): the process (xray) to be killed """ - print(f"{Colors.FAIL}NO {Colors.WARNING}{ip:15s} 
{message}{Colors.ENDC}") process.kill() + return f"[bold red]NO[/bold red] [orange]{ip:15s}[/orange] [yellow]{message}[/yellow]" -def print_ok( +def ok_message( scan_result: dict ) -> None: """prints the result if test is ok @@ -34,18 +34,16 @@ def print_ok( mean_up_speed = mean(scan_result["upload"]["speed"]) mean_down_latency = mean(scan_result["download"]["latency"]) mean_up_latency = mean(scan_result["upload"]["latency"]) - print( - f"{Colors.OKGREEN}" - f"OK {scan_result['ip']:15s} " - f"{Colors.OKBLUE}" - f"avg_down_speed: {mean_down_speed:7.4f}mbps " - f"avg_up_speed: {mean_up_speed:7.4f}mbps " - f"avg_down_latency: {mean_down_latency:7.2f}ms " - f"avg_up_latency: {mean_up_latency:7.2f}ms ", - f"avg_down_jitter: {down_mean_jitter:7.2f}ms ", - f"avg_up_jitter: {up_mean_jitter:4.2f}ms" - f"{Colors.ENDC}" - ) + return f"[green]"\ + f"OK [green][blue_violet]{scan_result['ip']:15s}[/blue_violet][blue] "\ + f"avg_down_speed: {mean_down_speed:7.4f}mbps "\ + f"avg_up_speed: {mean_up_speed:7.4f}mbps "\ + f"avg_down_latency: {mean_down_latency:7.2f}ms "\ + f"avg_up_latency: {mean_up_latency:7.2f}ms "\ + f"avg_down_jitter: {down_mean_jitter:7.2f}ms "\ + f"avg_up_jitter: {up_mean_jitter:4.2f}ms"\ + f"[/blue]" + def color_text(text: str, rgb: tuple, bold: bool = False): diff --git a/python/requirements.txt b/python/requirements.txt index d596ff4d..3c6f38d8 100644 --- a/python/requirements.txt +++ b/python/requirements.txt @@ -1,2 +1,3 @@ requests==2.28.2 pysocks==1.7.1 +rich==13.3.3 diff --git a/python/speedtest/conduct.py b/python/speedtest/conduct.py index 656e7c06..7d7dd030 100644 --- a/python/speedtest/conduct.py +++ b/python/speedtest/conduct.py @@ -1,9 +1,10 @@ -import requests +import statistics +import requests from args.testconfig import TestConfig -from report.clog import CLogger -from report.print import print_and_kill +from report.print import no_and_kill, ok_message from utils.decorators import timeout_fun +from utils.exceptions import * from xray.config import create_proxy_config from xray.service import start_proxy_service @@ -11,9 +12,41 @@ from .fronting import fronting_test from .upload import upload_speed_test -log = CLogger("cfscanner-speedtest") +class TestResult: + """class to store test results + """ + + def __init__( + self, + ip, + cidr, + n_tries + ): + self.ip = ip + self.cidr = cidr + + self.is_ok = False + self.n_tries = n_tries + self.message = "" + + self.result = dict( + ip=ip, + success=False, + download=dict( + speed=[-1] * self.n_tries, + latency=[-1] * self.n_tries + ), + upload=dict( + speed=[-1] * self.n_tries, + latency=[-1] * self.n_tries + ) + ) + + def __bool__(self): + return self.is_ok + class _FakeProcess: def __init__(self): @@ -24,25 +57,24 @@ def kill(self): def test_ip( - ip: str, + ip_cidr: tuple, test_config: TestConfig, config_dir: str ): - result = dict( + ip, cidr = ip_cidr + test_result = TestResult( ip=ip, - download=dict( - speed=[-1] * test_config.n_tries, - latency=[-1] * test_config.n_tries - ), - upload=dict( - speed=[-1] * test_config.n_tries, - latency=[-1] * test_config.n_tries - ), + cidr=cidr, + n_tries=test_config.n_tries ) for try_idx in range(test_config.n_tries): - if not fronting_test(ip, timeout=test_config.fronting_timeout): - return False + fronting_result_msg = fronting_test( + ip, timeout=test_config.fronting_timeout) + if "NO" in fronting_result_msg: + test_result.message = fronting_result_msg + test_result.is_ok = False + return test_result if not test_config.novpn: try: @@ -52,13 +84,13 @@ def test_ip( 
config_dir=config_dir ) except Exception as e: - log.error("Could not save proxy (xray/v2ray) config to file", ip) - log.exception(e) - return print_and_kill( + test_result.message = no_and_kill( ip=ip, message="Could not save proxy (xray/v2ray) config to file", process=process ) + test_result.is_ok = False + return test_result if not test_config.novpn: try: @@ -68,10 +100,9 @@ def test_ip( timeout=test_config.startprocess_timeout ) except Exception as e: - message = "Could not start proxy (v2ray/xray) service" - log.error(message, ip) - log.exception(e) - print_and_kill(ip=ip, message=message, process=process) + test_result.is_ok = False + raise StartProxyServiceError(f"Could not start xray service - {ip}") + else: process = _FakeProcess() proxies = None @@ -90,26 +121,46 @@ def timeout_download_fun(): try: dl_speed, dl_latency = timeout_download_fun() except TimeoutError as e: - return print_and_kill(ip=ip, message="download timeout exceeded", process=process) + fail_msg = no_and_kill( + ip=ip, message="download timeout exceeded", process=process) + test_result.message = fail_msg + test_result.is_ok = False + return test_result except (requests.exceptions.ReadTimeout, requests.exceptions.ConnectionError, requests.ConnectTimeout) as e: - return print_and_kill(ip=ip, message="download error", process=process) + fail_msg = no_and_kill( + ip=ip, message="download error", process=process) + test_result.message = fail_msg + test_result.is_ok = False + return test_result except Exception as e: - log.error("Download - unknown error", ip) - log.exception(e) - return print_and_kill(ip=ip, message="download unknown error", process=process) + fail_msg = no_and_kill( + ip=ip, + message="download unknown error", + process=process + ) + test_result.message = fail_msg + test_result.is_ok = False + return test_result if dl_latency <= test_config.max_dl_latency: dl_speed_kBps = dl_speed / 8 * 1000 if dl_speed_kBps >= test_config.min_dl_speed: - result["download"]["speed"][try_idx] = dl_speed - result["download"]["latency"][try_idx] = round( - dl_latency * 1000) + test_result.result["download"]["speed"][try_idx] = dl_speed + test_result.result["download"]["latency"][try_idx] = round( + dl_latency * 1000 + ) else: message = f"download too slow {dl_speed_kBps:.2f} < {test_config.min_dl_speed:.2f} kBps" - return print_and_kill(ip=ip, message=message, process=process) + fail_msg = no_and_kill(ip=ip, message=message, process=process) + test_result.message = fail_msg + test_result.is_ok = False + return test_result else: message = f"high download latency {dl_latency:.4f} s > {test_config.max_dl_latency:.4f} s" - return print_and_kill(ip=ip, message=message, process=process) + fail_msg = no_and_kill(ip=ip, message=message, process=process) + test_result.message = fail_msg + test_result.is_ok = False + return test_result # upload speed test if test_config.do_upload_test: @@ -121,25 +172,48 @@ def timeout_download_fun(): timeout=test_config.max_ul_latency + test_config.max_ul_time ) except requests.exceptions.ReadTimeout: - return print_and_kill(ip, 'upload read timeout', process) + fail_msg = no_and_kill(ip, 'upload read timeout', process) + test_result.message = fail_msg + test_result.is_ok = False + return test_result except requests.exceptions.ConnectTimeout: - return print_and_kill(ip, 'upload connect timeout', process) + fail_msg = no_and_kill(ip, 'upload connect timeout', process) + test_result.message = fail_msg + test_result.is_ok = False + return test_result except requests.exceptions.ConnectionError: - 
return print_and_kill(ip, 'upload connection error', process) + fail_msg = no_and_kill(ip, 'upload connection error', process) + test_result.message = fail_msg + test_result.is_ok = False + return test_result except Exception as e: - log.error("Upload - unknown error", ip) - log.exception(e) - return print_and_kill(ip, 'upload unknown error', process) + fail_msg = no_and_kill(ip, 'upload unknown error', process) + test_result.message = fail_msg + test_result.is_ok = False + return test_result if up_latency > test_config.max_ul_latency: - return print_and_kill(ip, 'upload latency too high', process) + fail_msg = no_and_kill(ip, 'upload latency too high', process) + test_result.message = fail_msg + test_result.is_ok = False + return test_result up_speed_kBps = up_speed / 8 * 1000 if up_speed_kBps >= test_config.min_ul_speed: - result["upload"]["speed"][try_idx] = up_speed - result["upload"]["latency"][try_idx] = round(up_latency * 1000) + test_result.result["upload"]["speed"][try_idx] = up_speed + test_result.result["upload"]["latency"][try_idx] = round( + up_latency * 1000 + ) else: message = f"upload too slow {up_speed_kBps:.2f} kBps < {test_config.min_ul_speed:.2f} kBps" - return print_and_kill(ip, message, process) + fail_msg = no_and_kill(ip, message, process) + test_result.message = fail_msg + test_result.is_ok = False + return test_result process.kill() - return result + + test_ok_msg = ok_message(test_result.result) + + test_result.is_ok = True + test_result.message = test_ok_msg + return test_result diff --git a/python/speedtest/fronting.py b/python/speedtest/fronting.py index 68ec1b90..519f375d 100644 --- a/python/speedtest/fronting.py +++ b/python/speedtest/fronting.py @@ -1,10 +1,5 @@ import requests -from report import Colors -from report.clog import CLogger - -logger = CLogger("fronting") - def fronting_test( ip: str, @@ -25,7 +20,6 @@ def fronting_test( s.get_adapter( 'https://').poolmanager.connection_pool_kw['assert_hostname'] = "speed.cloudflare.com" - success = False try: compatible_ip = f"[{ip}]" if ":" in ip else ip r = s.get( @@ -34,25 +28,16 @@ def fronting_test( headers={"Host": "speed.cloudflare.com"} ) if r.status_code != 200: - print( - f"{Colors.FAIL}NO {Colors.WARNING}{ip:15s} fronting error {r.status_code} {Colors.ENDC}") + return f"[red]NO[/red] [dark_orange3]{ip:15s}[/dark_orange3][yellow] fronting error {r.status_code} [/yellow]" else: success = True except requests.exceptions.ConnectTimeout as e: - print( - f"{Colors.FAIL}NO {Colors.WARNING}{ip:15s} fronting connect timeout{Colors.ENDC}" - ) + return f"[red]NO[/red] [dark_orange3]{ip:15s}[/dark_orange3][yellow] fronting connect timeout[/yellow]" except requests.exceptions.ReadTimeout as e: - print( - f"{Colors.FAIL}NO {Colors.WARNING}{ip:15s} fronting read timeout{Colors.ENDC}" - ) + return f"[red]NO[/red] [dark_orange3]{ip:15s}[/dark_orange3][yellow] fronting read timeout[/yellow]" except requests.exceptions.ConnectionError as e: - print( - f"{Colors.FAIL}NO {Colors.WARNING}{ip:15s} fronting connection error{Colors.ENDC}" - ) + return f"[red]NO[/red] [dark_orange3]{ip:15s}[/dark_orange3][yellow] fronting connection error[/yellow]" except Exception as e: - f"{Colors.FAIL}NO {Colors.WARNING}{ip:15s}fronting Unknown error{Colors.ENDC}" - logger.error(f"Fronting test Unknown error {ip:15}") - logger.exception(e) + return f"[red]NO[/red] [dark_orange3]{ip:15s}[/dark_orange3][yellow] fronting Unknown error[/yellow]" - return success + return "OK" diff --git a/python/subnets/cidr.py b/python/subnets/cidr.py index 
e43cadce..f27552a8 100644 --- a/python/subnets/cidr.py +++ b/python/subnets/cidr.py @@ -7,11 +7,10 @@ from urllib.parse import urlparse import requests - -from report.clog import CLogger +from rich.console import Console from utils.exceptions import * -logger = CLogger("subnets.cidr") +console = Console() def cidr_to_ip_list( @@ -105,9 +104,7 @@ def read_cidrs_from_url( cidr_regex = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}\/[\d]+" cidrs = re.findall(cidr_regex, r.text) except Exception as e: - logger.error(f"Could not read cidrs from url", url) - logger.exception(e) - raise SubnetsReadError(f"Could not read cidrs from url {url}") + raise SubnetsReadError(f"Could not read cidrs from url \"{url}\"") return cidrs @@ -151,8 +148,7 @@ def read_cidrs( elif os.path.isfile(url_or_path): cidrs = read_cidrs_from_file(url_or_path) else: - logger.error( - "url_or_path is neither a valid url or a file path.", url_or_path) raise SubnetsReadError( - f"{url_or_path} is neither a valid url or a file path.") + f"\"{url_or_path}\" is neither a valid url nor a file path." + ) return cidrs diff --git a/python/utils/__init__.py b/python/utils/__init__.py index 07f0bd94..e69de29b 100644 --- a/python/utils/__init__.py +++ b/python/utils/__init__.py @@ -1 +0,0 @@ -from report.clog import CLogger \ No newline at end of file diff --git a/python/utils/exceptions.py b/python/utils/exceptions.py index 224eaf9b..e5297e97 100644 --- a/python/utils/exceptions.py +++ b/python/utils/exceptions.py @@ -16,3 +16,11 @@ class TemplateReadError(Exception): class BinaryNotFoundError(Exception): """Raised when the xray binary file is not found""" pass + + +class BinaryDownloadError(Exception): + """Raised when xray binary could not be downloaded""" + + +class StartProxyServiceError(Exception): + "Raised when the xray binary could not start" diff --git a/python/utils/os.py b/python/utils/os.py index bc4a8b66..7eb84b9f 100644 --- a/python/utils/os.py +++ b/python/utils/os.py @@ -73,4 +73,3 @@ def create_dir(dir_path): """ if not os.path.exists(dir_path): os.makedirs(dir_path) - print(f"Directory created : {dir_path}") diff --git a/python/utils/requests.py b/python/utils/requests.py index 4d7ee77a..b2f9f945 100644 --- a/python/utils/requests.py +++ b/python/utils/requests.py @@ -1,9 +1,8 @@ import requests -from report.clog import CLogger +from rich.console import Console from utils.exceptions import * - -logger = CLogger("requests", file_log_level=1, console_log_level=1) +console = Console() def download_file( @@ -17,16 +16,20 @@ def download_file( Args: url (str): the url to download the file from - savepath (str): the path to save the file to + save_path (str): the path to save the file to timeout (float, optional): timeout for ``requests.get`` note that this not limit the total download time, only the RTT. Defaults to 10. 
""" r = requests.get(url, stream=True, timeout=timeout) try: - with open(save_path, "wb") as zipout: + with open(save_path, "wb") as zip_out: for chunk in r.iter_content(chunk_size=chunk_size): if chunk: - zipout.write(chunk) - except Exception as e: - logger.exception(e) - raise(FileDownloadError("Error downloading file from {url} to {savepath}")) + zip_out.write(chunk) + except Exception: + console.print_exception() + raise ( + FileDownloadError( + f"Error downloading file from {url} to {save_path}" + ) + ) return True diff --git a/python/version.py b/python/version.py index 369b4e07..a8753110 100644 --- a/python/version.py +++ b/python/version.py @@ -1 +1 @@ -VERSION = "1.0.0" \ No newline at end of file +VERSION = "1.2.0" \ No newline at end of file diff --git a/python/xray/binary.py b/python/xray/binary.py index 491c7bda..46e516eb 100644 --- a/python/xray/binary.py +++ b/python/xray/binary.py @@ -2,16 +2,14 @@ import zipfile import requests - -from report.clog import CLogger +from rich.console import Console from utils.decorators import timeout_fun from utils.exceptions import * from utils.requests import download_file -import traceback from . import LATEST_SUPPORTED_VERSION, SUPPORTED -logger = CLogger("xraybinary") +console = Console() PATH = os.path.dirname(os.path.abspath(__file__)) @@ -42,44 +40,42 @@ def download_binary( zipdir = os.path.join(PATH, ".tmp") os.makedirs(zipdir, exist_ok=True) zip_path = os.path.join(zipdir, f"{platform_str}.zip") - bin_fname = f"xray-{'-'.join(system_info)}" - bin_path = os.path.join(bin_dir, bin_fname) + bin_fname = f"xray-{'-'.join(system_info)}" + bin_path = os.path.join(bin_dir, bin_fname) # if windows, add .exe if system_info[0] == "windows": bin_path += ".exe" - + if not os.path.exists(bin_path): - try: - logger.info("Downloading xray...", bin_path) - timeout_fun(timeout=timeout)(download_file)( - zip_url, zip_path, timeout=max_latency - ) - logger.success("Downloaded xray", bin_path) - with zipfile.ZipFile(zip_path, "r") as archive: - if system_info[0] == "windows": - xray_file = archive.read("xray.exe") - else: - xray_file = archive.read("xray") - with open(bin_path, "wb") as binoutfile: - binoutfile.write(xray_file) - os.chmod(bin_path, 0o775) - return bin_path - except FileDownloadError as e: - logger.error( - "Failed to download the release zip file from xtls xray-core github repo", str(system_info)) - logger.exception(e) - return False - except KeyError as e: - logger.error("Failed to get binary from zip file", zip_url) - logger.exception(e) - return False - except Exception as e: - logger.error("Unknown error", str(system_info)) - logger.exception(e) - traceback.print_exc() - return False + with console.status("[bold green]Downloading xray[/bold green]") as console_status: + try: + timeout_fun(timeout=timeout)(download_file)( + zip_url, zip_path, timeout=max_latency + ) + console.log(f"[green]Downloaded xray {bin_path}[green]") + with zipfile.ZipFile(zip_path, "r") as archive: + if system_info[0] == "windows": + xray_file = archive.read("xray.exe") + else: + xray_file = archive.read("xray") + with open(bin_path, "wb") as binoutfile: + binoutfile.write(xray_file) + os.chmod(bin_path, 0o775) + return bin_path + except FileDownloadError as e: + raise BinaryDownloadError( + f"Failed to download the release zip file from xtls xray-core github repo {str(system_info)}") + return False + except KeyError as e: + raise BinaryDownloadError( + f"Failed to get binary from zip file {zip_url}") + return False + except Exception as e: + raise 
BinaryDownloadError( + f"Unknown error - detected system: {str(system_info)}") + return False else: - logger.info("Binary file already exists", bin_path) + console.log(f"[blue]Binary file already exists {bin_path}[/blue]") return bin_path @@ -99,4 +95,3 @@ def get_latest_release() -> dict: raise e return release_info - diff --git a/python/xray/service.py b/python/xray/service.py index 7368f349..3053faf9 100644 --- a/python/xray/service.py +++ b/python/xray/service.py @@ -1,13 +1,9 @@ import json -import os import subprocess from typing import Tuple -from report.clog import CLogger from utils.socket import wait_for_port -logger = CLogger("xray-service") - def start_proxy_service( proxy_conf_path: str, @@ -18,7 +14,7 @@ def start_proxy_service( Args: proxy_conf_path (str): the path to the proxy (v2ray or xray) config json file - binfilepath (str): the path to the xray binary file. Defaults to None. + binary_path (str): the path to the xray binary file. Defaults to None. timeout (int, optional): total time in seconds to wait for the proxy service to start. Defaults to 5. Returns: