diff --git a/CHANGELOG.md b/CHANGELOG.md
index 328a5a190e..0f897a0e1d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,13 +2,17 @@

 ## v1.4.7

-### 2024/09/23
+### 2024/09/26

 - 修复部分设备本地运行软件 driver 问题(#335)
 - 修复 driver 模式下新版谷歌浏览器白屏问题
+- 优化对历史结果的测速
+- 优化测速进度条显示
 - Fix some issues with local software driver operation on certain devices (#335)
 - Fix the white screen issue with the new version of Google Chrome in driver mode
+- Optimize speed testing of historical results
+- Optimize the speed test progress bar display

 ## v1.4.6
diff --git a/docs/config.md b/docs/config.md
index 990348d1ca..ef841ec0a2 100644
--- a/docs/config.md
+++ b/docs/config.md
@@ -1,7 +1,7 @@
 | 配置项 | 默认值 | 描述 |
 | ----------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------- |
 | open_update | True | 开启更新,若关闭则只运行结果页面服务 |
-| open_use_old_result | True | 开启使用历史更新结果,合并至本次更新中 |
+| open_use_old_result | True | 开启使用历史更新结果(包含模板与结果文件的接口),合并至本次更新中 |
 | open_driver | True | 开启浏览器运行,若更新无数据可开启此模式,较消耗性能 |
 | open_proxy | False | 开启代理,自动获取免费可用代理,若更新无数据可开启此模式 |
 | source_file | config/demo.txt | 模板文件路径 |
diff --git a/docs/config_en.md b/docs/config_en.md
index d63d53d05a..7b9a11ed12 100644
--- a/docs/config_en.md
+++ b/docs/config_en.md
@@ -1,33 +1,33 @@
-| Configuration Item | Default Value | Description |
-| ----------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------- |
-| open_update | True | Enable updates, if disabled then only the result page service is run |
-| open_use_old_result | True | Enable the use of historical update results and merge them into the current update |
-| open_driver | True | Enable browser execution, If there are no updates, this mode can be enabled, which consumes more performance |
-| open_proxy | False | Enable proxy, automatically obtains free available proxies, If there are no updates, this mode can be enabled |
-| source_file | config/demo.txt | Template file path |
-| final_file | output/result.txt | Generated result file path |
-| open_online_search | False | Enable online search source feature |
-| online_search_page_num | 3 | Page retrieval quantity for online search channels |
-| urls_limit | 15 | Number of interfaces per channel |
-| open_keep_all | False | Retain all search results, retain results with non-template channel names, recommended to be turned on when manually maintaining |
-| open_sort | True | Enable the sorting function (response speed, date, resolution) |
-| open_ffmpeg | True | Enable speed testing using FFmpeg to obtain more accurate speed and resolution information. Manual installation is required in advance. 
 |
-| open_m3u_result | True | Enable the conversion to generate m3u file type result links, supporting the display of channel icons |
-| response_time_weight | 0.5 | Response time weight value (the sum of all weight values should be 1) |
-| resolution_weight | 0.5 | Resolution weight value (the sum of all weight values should be 1) |
-| recent_days | 30 | Retrieve interfaces updated within a recent time range (in days), reducing appropriately can avoid matching issues |
-| ipv_type | ipv4 | The type of interface in the generated result, optional values: ipv4, ipv6, all |
-| domain_blacklist | epg.pw | Interface domain blacklist, used to filter out interfaces with low-quality, ad-inclusive domains |
-| url_keywords_blacklist | | Interface keyword blacklist, used to filter out interfaces containing specific characters |
-| open_subscribe | False | Enable subscription source feature |
-| subscribe_urls | https://m3u.ibert.me/txt/fmml_dv6.txt,
https://m3u.ibert.me/txt/o_cn.txt,
https://m3u.ibert.me/txt/j_iptv.txt,
https://github.moeyy.xyz/https://raw.githubusercontent.com/PizazzGY/TVBox/main/live.txt | Subscription source list |
-| open_multicast | True | Enable multicast source function |
-| open_multicast_tonkiang | True | Enable Tonkiang multicast source work mode |
-| open_multicast_fofa | True | Enable FOFA multicast source work mode |
-| multicast_region_list | all | Multicast source region list, [more regions](../updates/multicast/multicast_map.json, all means all regions) |
-| multicast_page_num | 3 | Number of pages to retrieve for multicast regions |
-| open_hotel | True | Enable hotel source feature |
-| open_hotel_tonkiang | False | Enable Tonkiang hotel source work mode |
-| open_hotel_fofa | True | Enable FOFA、ZoomEye hotel source work mode |
-| hotel_region_list | all | List of hotel source regions, [more regions](../updates/fofa/fofa_map.py), 'all' indicates all regions |
-| hotel_page_num | 3 | Number of pages to retrieve for hotel regions |
+| Configuration Item | Default Value | Description |
+| ----------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------ |
+| open_update | True | Enable updates; if disabled, only the result page service is run |
+| open_use_old_result | True | Enable the use of historical update results (including interfaces from the template and result files) and merge them into the current update |
+| open_driver | True | Enable browser execution; if the update returns no data, enable this mode (consumes more performance) |
+| open_proxy | False | Enable proxy to automatically obtain free available proxies; if the update returns no data, enable this mode |
+| source_file | config/demo.txt | Template file path |
+| final_file | output/result.txt | Generated result file path |
+| open_online_search | False | Enable the online search source feature |
+| online_search_page_num | 3 | Number of pages to retrieve for online search channels |
+| urls_limit | 15 | Number of interfaces per channel |
+| open_keep_all | False | Retain all search results, including results with non-template channel names; recommended when maintaining the template manually |
+| open_sort | True | Enable the sorting function (response speed, date, resolution) |
+| open_ffmpeg | True | Enable speed testing using FFmpeg to obtain more accurate speed and resolution information. Manual installation is required in advance. 
 |
+| open_m3u_result | True | Enable conversion of the result link to the m3u file format, supporting the display of channel icons |
+| response_time_weight | 0.5 | Response time weight value (the sum of all weight values should be 1) |
+| resolution_weight | 0.5 | Resolution weight value (the sum of all weight values should be 1) |
+| recent_days | 30 | Retrieve interfaces updated within a recent time range (in days); reducing this value appropriately can avoid matching issues |
+| ipv_type | ipv4 | The type of interface in the generated result; optional values: ipv4, ipv6, all |
+| domain_blacklist | epg.pw | Interface domain blacklist, used to filter out interfaces with low-quality, ad-heavy domains |
+| url_keywords_blacklist | | Interface keyword blacklist, used to filter out interfaces containing specific characters |
+| open_subscribe | False | Enable the subscription source feature |
+| subscribe_urls | https://m3u.ibert.me/txt/fmml_dv6.txt,
https://m3u.ibert.me/txt/o_cn.txt,
https://m3u.ibert.me/txt/j_iptv.txt,
https://github.moeyy.xyz/https://raw.githubusercontent.com/PizazzGY/TVBox/main/live.txt | Subscription source list |
+| open_multicast | True | Enable the multicast source feature |
+| open_multicast_tonkiang | True | Enable the Tonkiang multicast source work mode |
+| open_multicast_fofa | True | Enable the FOFA multicast source work mode |
+| multicast_region_list | all | Multicast source region list, [more regions](../updates/multicast/multicast_map.json); 'all' indicates all regions |
+| multicast_page_num | 3 | Number of pages to retrieve for multicast regions |
+| open_hotel | True | Enable the hotel source feature |
+| open_hotel_tonkiang | False | Enable the Tonkiang hotel source work mode |
+| open_hotel_fofa | True | Enable the FOFA/ZoomEye hotel source work mode |
+| hotel_region_list | all | List of hotel source regions, [more regions](../updates/fofa/fofa_map.py); 'all' indicates all regions |
+| hotel_page_num | 3 | Number of pages to retrieve for hotel regions |
diff --git a/main.py b/main.py
index b8efed4b7e..45ffbcc15f 100644
--- a/main.py
+++ b/main.py
@@ -15,6 +15,7 @@
     get_ip_address,
     convert_to_m3u,
     get_result_file_content,
+    process_nested_dict,
 )
 from updates.subscribe import get_channels_by_subscribe_urls
 from updates.multicast import get_channels_by_multicast
@@ -30,6 +31,7 @@
 import shutil
 import atexit
 import pickle
+import copy

 app = Flask(__name__)

@@ -121,15 +123,12 @@ def pbar_update(self, name=""):
         )

     def get_urls_len(self, filter=False):
-        def process_cache_url(url):
-            if filter and "$cache:" in url:
-                cache_part = url.split("$cache:", 1)[1]
-                return cache_part.split("?")[0]
-            return url
-
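+        # Dedupe a deep copy so the live channel data keeps every entry; urls
+        # sharing a "$cache:" key are counted once toward the progress total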
+        data = copy.deepcopy(self.channel_data)
+        if filter:
+            process_nested_dict(data, seen=set(), flag="$cache:")
         processed_urls = set(
-            process_cache_url(url_info[0])
-            for channel_obj in self.channel_data.values()
+            url_info[0]
+            for channel_obj in data.values()
             for url_info_list in channel_obj.values()
             for url_info in url_info_list
         )
@@ -146,7 +145,7 @@ async def main(self):
         await self.visit_page(channel_names)
         self.tasks = []
         channel_items_obj_items = self.channel_items.items()
-        self.channel_data = append_total_data(
+        append_total_data(
             channel_items_obj_items,
             self.channel_data,
             self.hotel_fofa_result,
@@ -155,7 +154,7 @@ async def main(self):
             self.subscribe_result,
             self.online_search_result,
         )
-        channel_data_cache = self.channel_data
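+        # Deep-copy the snapshot so the in-place sorting below cannot mutate it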
+        channel_data_cache = copy.deepcopy(self.channel_data)
         self.total = self.get_urls_len(filter=True)
         sort_callback = lambda: self.pbar_update(name="测速")
         open_sort = config.getboolean("Settings", "open_sort")
@@ -190,7 +189,7 @@
             shutil.copy(user_final_file, result_file)
         if config.getboolean("Settings", "open_use_old_result"):
             if open_sort:
-                channel_data_cache = get_channel_data_cache_with_compare(
+                get_channel_data_cache_with_compare(
                     channel_data_cache, self.channel_data
                 )
             with open(resource_path("output/result_cache.pkl"), "wb") as file:
diff --git a/updates/fofa/fofa_hotel_region_result.pkl b/updates/fofa/fofa_hotel_region_result.pkl
index c7726d3cdb..a21326a86b 100644
Binary files a/updates/fofa/fofa_hotel_region_result.pkl and b/updates/fofa/fofa_hotel_region_result.pkl differ
diff --git a/updates/fofa/fofa_multicast_region_result.pkl b/updates/fofa/fofa_multicast_region_result.pkl
index f8e8e3f8ca..29eada5ffd 100644
Binary files a/updates/fofa/fofa_multicast_region_result.pkl and b/updates/fofa/fofa_multicast_region_result.pkl differ
diff --git a/updates/multicast/request.py b/updates/multicast/request.py
index 60bd9a3a8e..3c3e24114f 100644
--- a/updates/multicast/request.py
+++ b/updates/multicast/request.py
@@ -6,7 +6,7 @@
     get_channel_multicast_result,
     get_multicast_fofa_search_urls,
 )
-from utils.tools import get_pbar_remaining, get_soup
+from utils.tools import get_pbar_remaining, get_soup, merge_objects
 from utils.config import config
 from updates.proxy import get_proxy, get_proxy_next
 from updates.fofa import get_channels_by_fofa
@@ -49,9 +49,10 @@ async def get_channels_by_multicast(names, callback=None):
     search_region_type_result = defaultdict(lambda: defaultdict(list))
     if open_multicast_fofa:
         fofa_search_urls = get_multicast_fofa_search_urls()
-        search_region_type_result = await get_channels_by_fofa(
+        fofa_result = await get_channels_by_fofa(
             fofa_search_urls, multicast=True, callback=callback
         )
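+        # Merge the FOFA results into the existing defaultdict instead of replacing it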
+        merge_objects(search_region_type_result, fofa_result)

     def process_channel_by_multicast(region, type):
         nonlocal proxy, open_driver, page_num, start_time
diff --git a/utils/channel.py b/utils/channel.py
index 2a3f896ef6..bbe39a7844 100644
--- a/utils/channel.py
+++ b/utils/channel.py
@@ -3,8 +3,14 @@
     check_url_by_patterns,
     get_total_urls_from_info_list,
     check_ipv6_support,
+    process_nested_dict,
+)
+from utils.speed import (
+    sort_urls_by_speed_and_resolution,
+    is_ffmpeg_installed,
+    format_url,
+    speed_cache,
 )
-from utils.speed import sort_urls_by_speed_and_resolution, is_ffmpeg_installed
 import os
 from collections import defaultdict
 import re
@@ -15,6 +21,7 @@
 import asyncio
 import base64
 import pickle
+import copy

 log_dir = "output"
 log_file = "result_new.log"
@@ -479,18 +486,16 @@ def init_info_data(data, cate, name):
         data[cate] = {}
     if data[cate].get(name) is None:
         data[cate][name] = []
-    return data


 def append_data_to_info_data(info_data, cate, name, data, check=True):
     """
     Append channel data to total info data
     """
-    info_data = init_info_data(info_data, cate, name)
+    init_info_data(info_data, cate, name)
     for url, date, resolution in data:
         if (url and not check) or (url and check and check_url_by_patterns(url)):
             info_data[cate][name].append((url, date, resolution))
-    return info_data


 def append_total_data(*args, **kwargs):
@@ -498,9 +503,9 @@ def append_total_data(*args, **kwargs):
     """
     Append total channel data
     """
     if config.getboolean("Settings", "open_keep_all"):
-        return append_all_method_data_keep_all(*args, **kwargs)
+        append_all_method_data_keep_all(*args, **kwargs)
     else:
-        return append_all_method_data(*args, **kwargs)
+        append_all_method_data(*args, **kwargs)


 def append_all_method_data(
@@ -530,7 +535,7 @@
             ) and config.getboolean("Settings", f"open_hotel") == False:
                 continue
             name_results = get_channel_results_by_name(name, result)
-            data = append_data_to_info_data(
+            append_data_to_info_data(
                 data,
                 cate,
                 name,
@@ -545,7 +550,7 @@
             if total_channel_data_len == 0 or config.getboolean(
                 "Settings", "open_use_old_result"
             ):
-                data = append_data_to_info_data(
+                append_data_to_info_data(
                     data,
                     cate,
                     name,
@@ -557,7 +562,6 @@
                 "total num:",
                 len(data.get(cate, {}).get(name, [])),
             )
-    return data


 def append_all_method_data_keep_all(
@@ -586,18 +590,17 @@
             ) and config.getboolean("Settings", f"open_hotel") == False:
                 continue
             for name, urls in result.items():
-                data = append_data_to_info_data(data, cate, name, urls)
+                append_data_to_info_data(data, cate, name, urls)
                 print(name, f"{method.capitalize()} num:", len(urls))
         if config.getboolean("Settings", "open_use_old_result"):
             old_info_list = channel_obj.get(name, [])
-            data = append_data_to_info_data(
+            append_data_to_info_data(
                 data,
                 cate,
                 name,
                 old_info_list,
             )
             print(name, "using old num:", len(old_info_list))
-    return data


 async def sort_channel_list(
@@ -622,10 +625,7 @@
                     logging.info(
                         f"Name: {name}, URL: {url}, Date: {date}, Resolution: {resolution}, Response Time: {response_time} ms"
                     )
-                data = [
-                    (url, date, resolution)
-                    for (url, date, resolution), _ in sorted_data
-                ]
+                    data.append((url, date, resolution))
     except Exception as e:
         logging.error(f"Error: {e}")
     finally:
@@ -639,16 +639,18 @@ async def process_sort_channel_list(data, callback=None):
     open_ffmpeg = config.getboolean("Settings", "open_ffmpeg")
     ipv_type = config.get("Settings", "ipv_type").lower()
     open_ipv6 = "ipv6" in ipv_type or "all" in ipv_type or "全部" in ipv_type
-    ipv6_proxy = None
-    if open_ipv6:
-        ipv6_proxy = (
-            None if check_ipv6_support() else "http://www.ipv6proxy.net/go.php?u="
-        )
+    ipv6_proxy = (
+        None
+        if not open_ipv6 or check_ipv6_support()
+        else "http://www.ipv6proxy.net/go.php?u="
+    )
     ffmpeg_installed = is_ffmpeg_installed()
     if open_ffmpeg and not ffmpeg_installed:
         print("FFmpeg is not installed, using requests for sorting.")
     is_ffmpeg = open_ffmpeg and ffmpeg_installed
     semaphore = asyncio.Semaphore(3)
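+    # Speed-test each unique url once: dedupe a deep copy by its "$cache:" key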
+    need_sort_data = copy.deepcopy(data)
+    process_nested_dict(need_sort_data, seen=set(), flag="$cache:")
     tasks = [
         asyncio.create_task(
             sort_channel_list(
@@ -661,18 +663,48 @@
                 callback=callback,
             )
         )
-        for cate, channel_obj in data.items()
+        for cate, channel_obj in need_sort_data.items()
         for name, info_list in channel_obj.items()
     ]
     sort_results = await asyncio.gather(*tasks)
-    data = {}
+    sort_data = {}
     for result in sort_results:
         if result:
-            cate = result.get("cate")
-            name = result.get("name")
-            result_data = result.get("data")
-            data = append_data_to_info_data(data, cate, name, result_data, False)
-    return data
+            cate, name, result_data = result["cate"], result["name"], result["data"]
+            append_data_to_info_data(sort_data, cate, name, result_data, False)
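+    # Backfill duplicates skipped by the dedupe above, reusing the response
+    # time and resolution recorded in speed_cache during this run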
+    for cate, obj in data.items():
+        for name, info_list in obj.items():
+            sort_info_list = sort_data.get(cate, {}).get(name, [])
+            sort_urls = {
+                sort_url[0].split("$")[0]
+                for sort_url in sort_info_list
+                if sort_url and sort_url[0]
+            }
+            for url, date, resolution in info_list:
+                url_rsplit = url.rsplit("$cache:", 1)
+                if len(url_rsplit) != 2:
+                    continue
+                url, cache_key = url_rsplit
+                if url in sort_urls or cache_key not in speed_cache:
+                    continue
+                cache = speed_cache[cache_key]
+                if not cache:
+                    continue
+                response_time, resolution = cache
+                if response_time and response_time != float("inf"):
+                    if resolution:
+                        url = format_url(url, resolution)
+                    append_data_to_info_data(
+                        sort_data,
+                        cate,
+                        name,
+                        [(url, date, resolution)],
+                        False,
+                    )
+                    logging.info(
+                        f"Name: {name}, URL: {url}, Date: {date}, Resolution: {resolution}, Response Time: {response_time} ms"
+                    )
+    return sort_data


 def write_channel_to_file(items, data, callback=None):
@@ -752,4 +784,3 @@ def match_url(url, sort_urls):
         data[cate][name] = [
             info for info in data[cate][name] if match_url(info[0], new_urls)
         ]
-    return data
diff --git a/utils/speed.py b/utils/speed.py
index 9127038213..84770d57b2 100644
--- a/utils/speed.py
+++ b/utils/speed.py
@@ -2,7 +2,6 @@
 from time import time
 import asyncio
 import re
-from urllib.parse import quote
 from utils.config import config
 from utils.tools import is_ipv6
 import subprocess
@@ -107,7 +106,7 @@ async def check_stream_speed(url_info):
         if frame is None or frame == float("inf"):
             return float("inf")
         if resolution:
-            url_info[0] = url_info[0] + f"${resolution}"
+            url_info[0] = format_url(url, resolution)
             url_info[2] = resolution
         return (tuple(url_info), frame)
     except Exception as e:
@@ -115,6 +114,15 @@ async def check_stream_speed(url_info):
         return float("inf")


+def format_url(url, info):
+    """
+    Format the url
+    """
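+    # "$" starts the appended info segment; use "|" if the url already has one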
+    separator = "|" if "$" in url else "$"
+    url += f"{separator}{info}"
+    return url
+
+
 speed_cache = {}


@@ -125,36 +133,37 @@ async def get_speed_by_info(
     Get the info with speed
     """
     async with semaphore:
-        url, _, _ = url_info
+        url, _, resolution = url_info
         url_info = list(url_info)
         cache_key = None
         if "$" in url:
-            url, cache_info = url.split("$", 1)
+            url, cache_info = url.rsplit("$", 1)
             if "cache:" in cache_info:
                 cache_key = cache_info.replace("cache:", "")
-        url = quote(url, safe=":/?&=$[]")
+        url_is_ipv6 = is_ipv6(url)
+        if url_is_ipv6:
+            url = format_url(url, "IPv6")
         url_info[0] = url
         if cache_key in speed_cache:
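+            # Cache entries are (speed, resolution) tuples; restore both on a hit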
+            speed = speed_cache[cache_key][0]
+            url_info[2] = speed_cache[cache_key][1]
             return (tuple(url_info), speed) if speed != float("inf") else float("inf")
         try:
-            url_is_ipv6 = is_ipv6(url)
             if ".m3u8" not in url and ffmpeg and not url_is_ipv6:
                 speed = await check_stream_speed(url_info)
                 url_speed = speed[1] if speed != float("inf") else float("inf")
+                resolution = speed[0][2] if speed != float("inf") else None
             else:
                 if ipv6_proxy and url_is_ipv6:
                     url = ipv6_proxy + url
                 url_speed = await get_speed(url)
-                if url_is_ipv6:
-                    url_info[0] = url_info[0] + "$IPv6"
                 speed = (
                     (tuple(url_info), url_speed)
                     if url_speed != float("inf")
                     else float("inf")
                 )
             if cache_key and cache_key not in speed_cache:
-                speed_cache[cache_key] = url_speed
+                speed_cache[cache_key] = (url_speed, resolution)
             return speed
         except Exception:
             return float("inf")
diff --git a/utils/tools.py b/utils/tools.py
index ca0b7ef49a..e6c111f587 100644
--- a/utils/tools.py
+++ b/utils/tools.py
@@ -312,3 +312,27 @@ def get_result_file_content(show_result=False):
         "
{{ content }}
",
         content=content,
     )
+
+
+def remove_duplicates_from_tuple_list(tuple_list, seen, flag=None):
+    """
+    Remove duplicates from tuple list
+    """
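+    # With a flag, dedupe on the part after it (e.g. the "$cache:" key);
+    # otherwise dedupe on the whole first element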
+    unique_list = []
+    for item in tuple_list:
+        part = item[0] if flag is None else item[0].rsplit(flag, 1)[-1]
+        if part not in seen:
+            seen.add(part)
+            unique_list.append(item)
+    return unique_list
+
+
+def process_nested_dict(data, seen, flag=None):
+    """
+    Process nested dict
+    """
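+    # Walk nested dicts and dedupe every list in place, sharing one seen set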
", content=content, ) + + +def remove_duplicates_from_tuple_list(tuple_list, seen, flag=None): + """ + Remove duplicates from tuple list + """ + unique_list = [] + for item in tuple_list: + part = item[0] if flag is None else item[0].rsplit(flag, 1)[-1] + if part not in seen: + seen.add(part) + unique_list.append(item) + return unique_list + + +def process_nested_dict(data, seen, flag=None): + """ + Process nested dict + """ + for key, value in data.items(): + if isinstance(value, dict): + process_nested_dict(value, seen, flag) + elif isinstance(value, list): + data[key] = remove_duplicates_from_tuple_list(value, seen, flag)