-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathroot_fixer.py
97 lines (75 loc) · 3.08 KB
/
root_fixer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
"""Interactive recovery tool for a broken reMarkable cloud root file.

Scans the local rm_api sync cache for candidate root listings, ranks them
by the newest ``.metadata`` last-modified timestamp they reference, and
lets the user promote a chosen candidate back as the cloud root.

Run directly; expects a ``config.json`` beside it with ``uri`` and
``discovery_uri`` keys.
"""
import json
import os
from slashr import SlashR
from rm_api import API, get_file, get_file_contents, Metadata, update_root, put_file, File, make_hash, \
    DocumentSyncProgress

with open('config.json', 'r', encoding='utf-8') as f:
    config = json.load(f)

api = API(uri=config['uri'], discovery_uri=config['discovery_uri'])
api.debug = True
# Error protection must be off so the API will operate on an already-broken root.
api.ignore_error_protection = True

root = api.get_root()
try:
    # If the current root hash resolves from the server/cache, the root is intact.
    get_file(api, root['hash'])
    print(f"Your current root file hash: {root['hash']}")
    print("Your root file is fine, press enter if you still want to try and find a replacement from cache")
    input("> press enter")
except Exception:
    print("Your root file is fucked!")

# Collect cached entries that look like root listings: every referenced uuid
# must be extension-less (a '.' in any uuid means it's a document bundle,
# not a root — presumably, per rm_api's file layout; confirmed only by this filter).
potential_roots = {}
for cache in os.listdir(api.sync_file_path):
    try:
        _, cached_files = get_file(api, cache, use_cache=True)
    except Exception:
        continue
    if any('.' in entry.uuid for entry in cached_files):
        continue
    potential_roots[cache] = cached_files

if not potential_roots:
    print("No luck! Couldn't find any cached root files")
    raise SystemExit

print(f"Great luck! Found {len(potential_roots)} potential root files, sorting by last modified")

# Date each candidate root by the newest .metadata timestamp among the
# documents it references (0 if nothing readable).
root_last_modified = {}
with SlashR() as sr:
    for i, (hash_of_root, files) in enumerate(potential_roots.items()):
        if not files:
            continue
        sr.print(f"Checking {hash_of_root} {i + 1}/{len(potential_roots)} - ITEMS: {len(files)}")
        last_modified = 0
        for file in files:
            try:
                _, file_root = get_file(api, file.hash, use_cache=True)
            except Exception:
                continue
            for sub_file in file_root:
                if not sub_file.uuid.endswith('.metadata'):
                    continue
                try:
                    metadata_raw = get_file_contents(api, sub_file.hash, use_cache=True)
                except Exception:
                    continue
                metadata = Metadata(metadata_raw, sub_file.hash)
                last_modified = max(last_modified, int(metadata.last_modified))
        root_last_modified[hash_of_root] = last_modified

print("\nNEWEST FIRST, OLDEST LAST")
# Unranked candidates (no readable metadata) sort last via the 0 default.
for hash_of_root, files in sorted(potential_roots.items(),
                                  key=lambda item: root_last_modified.get(item[0], 0),
                                  reverse=True):
    print(f'{hash_of_root} - ITEMS: {len(files)}')

picked_root = input("\nType the hash of the root file you picked: ")
if picked_root not in potential_roots:
    print("That hash seems wrong, run again and make sure to copy paste it with no spaces")
    raise SystemExit

current_root = api.get_root()
files = potential_roots[picked_root]

# Rebuild the root.docSchema payload: a '3' header line (schema version,
# presumably — matches what rm_api emits) followed by one root-line per file.
contents = ('3\n' + '\n'.join(File.to_root_line(file) for file in files)).encode()

if input("also upload this root file? (shouldn't need to) [y/N]").lower().startswith('y'):
    file = File(make_hash(contents), "root.docSchema", len(files), len(contents))
    put_file(api, file, contents, DocumentSyncProgress(file.uuid))

# Point the cloud root at the chosen hash; reuse the current generation so
# the server accepts the update as a successor of the live root.
new_root = {
    "broadcast": True,
    "generation": current_root['generation'],
    'hash': picked_root
}
update_root(api, new_root)