diff --git a/clash/clash-darwin-amd b/clash/clash-darwin-amd
index eea48bc11..29abf3949 100644
Binary files a/clash/clash-darwin-amd and b/clash/clash-darwin-amd differ
diff --git a/clash/clash-darwin-arm b/clash/clash-darwin-arm
index 51edaaed9..09573d667 100644
Binary files a/clash/clash-darwin-arm and b/clash/clash-darwin-arm differ
diff --git a/clash/clash-linux-amd b/clash/clash-linux-amd
index 4a9ac4bfa..a2dc24606 100644
Binary files a/clash/clash-linux-amd and b/clash/clash-linux-amd differ
diff --git a/clash/clash-linux-arm b/clash/clash-linux-arm
index a8505222a..259de146d 100644
Binary files a/clash/clash-linux-arm and b/clash/clash-linux-arm differ
diff --git a/clash/clash-windows-amd.exe b/clash/clash-windows-amd.exe
index 9f18410d8..d8072548c 100644
Binary files a/clash/clash-windows-amd.exe and b/clash/clash-windows-amd.exe differ
diff --git a/subscribe/crawl.py b/subscribe/crawl.py
index febd3e528..e129eccc2 100644
--- a/subscribe/crawl.py
+++ b/subscribe/crawl.py
@@ -756,7 +756,7 @@ def search_github_code(page: int, cookie: str, excludes: list = []) -> list[str]
         return []
 
     try:
-        regex = r'…'
[elided: the replacement regex, the rest of this hunk, and the header of the following hunk contained HTML-like markup and were lost in extraction]
     except:
         logger.error(f"[AirPortCollector] occur error when crawl from [{url}], message: \n{traceback.format_exc()}")
 
+    logger.info(f"[AirPortCollector] finished crawl from [{url}], found {len(result)} domains")
     return result
 
 def crawl_maomeng() -> dict:
@@ -1518,7 +1519,7 @@ def get_links(url: str, prefix: str) -> list[str]:
         return {}
 
     separator = r'…'
-    address_regex = r'…前往注册…'
+    address_regex = r'…前往注册…'
     coupon_regex = r"使用优惠码(?:\s+)?(?:…)?([^\r\n\s]+)(?:(?:[\r\n\s]+)?)?0(?:\s+)?元购买"
 
     tasks = [[x, separator, address_regex, coupon_regex] for x in sorted(articles)]
@@ -1545,7 +1546,10 @@ def crawl_jctj(convert: bool = False) -> dict:
         else:
             links = tasks
 
-        return {utils.extract_domain(url=x, include_protocal=True): "" for x in links if x}
+        result = {utils.extract_domain(url=x, include_protocal=True): "" for x in links if x}
+        logger.info(f"[AirPortCollector] finished crawl from [{url}], found {len(result)} domains")
+
+        return result
     except:
         logger.error(f"[AirPortCollector] occur error when crawl from [{url}], message: \n{traceback.format_exc()}")
         return {}
@@ -1597,6 +1601,7 @@ def run_crawl(url: str, separator: str, address_regex: str, coupon_regex: str) -> …
     except:
         logger.error(f"[AirPortCollector] occur error when crawl from [{url}], message: \n{traceback.format_exc()}")
 
+    logger.info(f"[AirPortCollector] finished crawl from [{url}], found {len(result)} domains")
     return result
 
 def extract_backend_url(domain: str, retry: int = 2) -> str: