Commit
Merge pull request #340 from Guovin/dev
feat:result cache
Guovin authored Sep 23, 2024
2 parents b45cbc4 + b695f6c commit f46490b
Showing 4 changed files with 58 additions and 18 deletions.
.github/workflows/main.yml (2 additions, 0 deletions)
@@ -110,6 +110,8 @@ jobs:
           if [[ -f "result.m3u" ]]; then
             git add -f "result.m3u"
           fi
+          if [[ -f "output/result_cache.pkl" ]]; then
+            git add -f "output/result_cache.pkl"
+          fi
           if [[ -f "output/user_result.log" ]]; then
             git add -f "output/user_result.log"
           elif [[ -f "output/result.log" ]]; then
main.py (14 additions, 3 deletions)
@@ -1,12 +1,13 @@
 import asyncio
-from utils.config import config
+from utils.config import config, resource_path
 from utils.channel import (
     get_channel_items,
     append_total_data,
     process_sort_channel_list,
     write_channel_to_file,
     setup_logging,
     cleanup_logging,
+    get_channel_data_with_cache_compare,
 )
 from utils.tools import (
     update_file,
@@ -28,6 +29,7 @@
 import sys
 import shutil
 import atexit
+import pickle
 
 app = Flask(__name__)
 
@@ -153,9 +155,11 @@ async def main(self):
             self.subscribe_result,
             self.online_search_result,
         )
+        channel_data_with_cache = self.channel_data
         self.total = self.get_urls_len(filter=True)
         sort_callback = lambda: self.pbar_update(name="测速")
-        if config.getboolean("Settings", "open_sort"):
+        open_sort = config.getboolean("Settings", "open_sort")
+        if open_sort:
             self.update_progress(
                 f"正在测速排序, 共{self.total}个接口",
                 0,
@@ -184,7 +188,14 @@ async def main(self):
                 else "result.txt"
             )
             shutil.copy(user_final_file, result_file)
-        if config.getboolean("Settings", "open_sort"):
+        if config.getboolean("Settings", "open_use_old_result"):
+            if open_sort:
+                channel_data_with_cache = get_channel_data_with_cache_compare(
+                    channel_data_with_cache, self.channel_data
+                )
+            with open(resource_path("output/result_cache.pkl"), "wb") as file:
+                pickle.dump(channel_data_with_cache, file)
+        if open_sort:
             user_log_file = "output/" + (
                 "user_result.log"
                 if os.path.exists("config/user_config.ini")
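
The cache written above is read back by get_channel_items in utils/channel.py below. As a minimal round-trip sketch (the helper names and the plain-dict conversion are illustrative assumptions, not part of this commit; note that pickle cannot serialize a defaultdict whose default factory is a lambda, like the one built in get_channel_items):

import pickle

def save_result_cache(channel_data, path="output/result_cache.pkl"):
    # Convert to plain dicts first: pickle rejects lambda default factories.
    plain = {cate: dict(names) for cate, names in channel_data.items()}
    with open(path, "wb") as f:
        pickle.dump(plain, f)

def load_result_cache(path="output/result_cache.pkl"):
    with open(path, "rb") as f:
        return pickle.load(f)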
tkinter_ui/default.py (0 additions, 1 deletion)
@@ -4,7 +4,6 @@
 from tkinter import scrolledtext
 from tkinter import filedialog
 import os
-from utils.channel import get_channel_items


 class DefaultUI:
utils/channel.py (42 additions, 14 deletions)
@@ -14,6 +14,7 @@
 from opencc import OpenCC
 import asyncio
 import base64
+import pickle
 
 log_dir = "output"
 log_file = "result_new.log"
@@ -49,7 +50,7 @@ def cleanup_logging():
         os.remove(log_path)
 
 
-def get_channel_data_from_file(channels=None, file=None, from_result=False):
+def get_channel_data_from_file(channels=None, file=None, use_old=False):
     """
     Get the channel data from the file
     """
@@ -62,17 +63,13 @@ def get_channel_data_from_file(channels=None, file=None, use_old=False):
             # This is a new channel, create a new key in the dictionary.
             current_category = line.split(",")[0]
         else:
-            if from_result and channels.get(current_category) is None:
-                continue
             # This is a url, add it to the list of urls for the current channel.
             match = re.search(pattern, line)
             if match is not None and match.group(1):
                 name = match.group(1).strip()
                 if name not in channels[current_category]:
-                    if from_result:
-                        continue
                     channels[current_category][name] = []
-                if match.group(3):
+                if use_old and match.group(3):
                     url = match.group(3).strip()
                     if url and url not in channels[current_category][name]:
                         channels[current_category][name].append(url)
@@ -84,20 +81,28 @@ def get_channel_items():
     Get the channel items from the source file
     """
     user_source_file = config.get("Settings", "source_file")
-    user_final_file = config.get("Settings", "final_file")
     channels = defaultdict(lambda: defaultdict(list))
+    open_use_old_result = config.getboolean("Settings", "open_use_old_result")
 
     if os.path.exists(resource_path(user_source_file)):
         with open(resource_path(user_source_file), "r", encoding="utf-8") as file:
-            channels = get_channel_data_from_file(channels=channels, file=file)
-
-    if config.getboolean("Settings", "open_use_old_result") and os.path.exists(
-        resource_path(user_final_file)
-    ):
-        with open(resource_path(user_final_file), "r", encoding="utf-8") as file:
             channels = get_channel_data_from_file(
-                channels=channels, file=file, from_result=True
+                channels=channels, file=file, use_old=open_use_old_result
             )
 
+    if open_use_old_result and os.path.exists(resource_path("output/result_cache.pkl")):
+        with open(resource_path("output/result_cache.pkl"), "rb") as file:
+            old_result = pickle.load(file)
+            for cate, data in channels.items():
+                if cate in old_result:
+                    for name, urls in data.items():
+                        if name in old_result[cate]:
+                            old_urls = [
+                                url
+                                for info in old_result[cate][name]
+                                for url, _, _ in info
+                            ]
+                            channels[cate][name] = set(urls + old_urls)
     return channels
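
To make the cache merge concrete, a small worked example under the shape the comprehension implies, where each cached entry is a list of groups of (url, date, resolution) tuples; the sample values are hypothetical:

old_result = {
    "CCTV": {
        "CCTV1": [[("http://a/live", None, "1080p"), ("http://b/live", None, "720p")]]
    }
}
channels = {"CCTV": {"CCTV1": ["http://b/live", "http://c/live"]}}

old_urls = [url for info in old_result["CCTV"]["CCTV1"] for url, _, _ in info]
channels["CCTV"]["CCTV1"] = set(channels["CCTV"]["CCTV1"] + old_urls)
# {"http://a/live", "http://b/live", "http://c/live"}: duplicates collapse,
# but merged entries become sets while untouched entries remain lists.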


@@ -733,3 +738,26 @@ def get_multicast_fofa_search_urls():
         search_url += search_txt
         search_urls.append((search_url, region, type))
     return search_urls
+
+
+def get_channel_data_with_cache_compare(data, new_data):
+    """
+    Compare cached channel data against the new data, keeping only
+    cached URLs that still appear in the new results
+    """
+
+    def match_url(url, sort_urls):
+        url = url.split("$", 1)[0]
+        return url in sort_urls
+
+    for cate, obj in new_data.items():
+        for name, url_info in obj.items():
+            if url_info and cate in data and name in data[cate]:
+                new_urls = {new_url for new_url, _, _ in url_info}
+                data[cate][name] = [
+                    url
+                    for info in data[cate][name]
+                    for url, _, _ in info
+                    if match_url(url, new_urls)
+                ]
+
+    return data
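
A quick usage sketch of the compare step, with shapes inferred from the comprehensions above (cached entries are lists of groups of (url, date, resolution) tuples, new entries are flat lists of such tuples; all values are hypothetical):

cached = {
    "CCTV": {
        "CCTV1": [[("http://a/live$HD", None, "1080p"), ("http://b/live", None, "720p")]]
    }
}
fresh = {"CCTV": {"CCTV1": [("http://a/live", 120, "1080p")]}}

result = get_channel_data_with_cache_compare(cached, fresh)
# result["CCTV"]["CCTV1"] == ["http://a/live$HD"]
# match_url strips everything after "$" before membership testing, so the
# cached "$HD" variant survives while "http://b/live" is dropped.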
