# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import os
import json
import requests.sessions
import requests.cookies
import html.parser
from .requests_toolbelt.multipart import encoder
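# Typical workflow with this module (a usage sketch with placeholder values
# is included at the bottom of this file):
#   Sheepit() -> login() -> request_upload_token() -> upload_file()
#   -> get_upload_progress() -> add_job() -> logout()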
class NetworkException(Exception):
    pass
class LoginException(Exception):
    pass
class UploadException(Exception):
    pass
class Sheepit():
    """ API for managing your SheepIt account
    and uploading projects """
    def __init__(self):
        self.domain = "www.sheepit-renderfarm.com"
        self.session = requests.session()
    def __del__(self):
        self.session.close()
        del self.session
    def login(self, username, password):
        """ Tries to log in with the provided username and password.
        To resume an existing session, use import_session();
        use logout() to log out.
        Raises:
            NetworkException on a failed connection
            LoginException on a wrong username and/or password """
        try:
            r = self.session.post(f"https://{self.domain}/user/authenticate",
                                  data={"login": username,
                                        "password": password,
                                        "do_login": "do_login",
                                        "timezone": "Europe/Berlin",
                                        "account_login": "account_login"},
                                  timeout=5)
        except requests.exceptions.Timeout:
            raise NetworkException("Timed out")
        except requests.exceptions.RequestException:
            raise NetworkException("Failed connecting to the sheepit server")
        if r.text != "OK":
            raise LoginException("Wrong username and/or password")
        return
    def logout(self):
        """ Sends a logout request to the server and
        clears all cookies afterwards.
        Use login() to log in.
        Raises:
            NetworkException on a failed connection;
            cookies will still be cleared """
        try:
            self.session.get(
                f"https://{self.domain}/user/logout", timeout=5)
        except requests.exceptions.Timeout:
            raise NetworkException("Timed out")
        except requests.exceptions.RequestException:
            raise NetworkException("Failed connecting to the sheepit server")
        finally:
            try:
                self.session.cookies.clear(domain=self.domain)
            except KeyError:
                pass
    def get_profile_information(self, username):
        """ Returns a dict with the following profile attributes:
        "Projects created", "Frames ordered", "Rendered frames",
        "Accumulated render", "Rank", "Points", "Team" and "Registration"
        Raises:
            NetworkException on a failed connection """
        r = None
        try:
            r = self.session.get(
                f"https://{self.domain}/user/{username}/profile", timeout=5)
        except requests.exceptions.Timeout:
            raise NetworkException("Timed out")
        except requests.exceptions.RequestException:
            raise NetworkException("Failed connecting to the sheepit server")
        p = ProfileParser()
        p.feed(str(r.text))
        p.close()
        return p.data
    def request_upload_token(self):
        """ Requests an upload token from the server.
        The token should be passed to upload_file() and add_job().
        Raises:
            NetworkException on a failed connection
            UploadException if the maximum number of simultaneous
            projects has been reached """
        try:
            r = self.session.get(f"https://{self.domain}/getstarted",
                                 timeout=5)
        except requests.exceptions.Timeout:
            raise NetworkException("Timed out")
        except requests.exceptions.RequestException:
            raise NetworkException("Failed connecting to the sheepit server")
        p = TokenParser()
        p.feed(str(r.text))
        p.close()
        if p.token == "":
            raise UploadException(
                "Error getting upload token, "
                "maximum number of simultaneous projects reached"
            )
        return p.token
    def upload_file(self, token, path_to_file):
        """ Uploads the selected file to the server.
        Use get_upload_progress() to track the upload progress
        and add_job() to add the uploaded project.
        Raises:
            NetworkException on a failed connection """
        with open(path_to_file, "rb") as f:
            try:
                form = encoder.MultipartEncoder({
                    "UPLOAD_IDENTIFIER": token,
                    "addjob_archive": (os.path.split(path_to_file)[1], f,
                                       "multipart/form-data")
                })
                headers = {"Prefer": "respond-async",
                           "Content-Type": form.content_type}
                r = self.session.post(
                    f"https://{self.domain}/project/internal/upload",
                    data=form, headers=headers)
            except requests.exceptions.RequestException:
                raise NetworkException(
                    "Failed connecting to the sheepit server")
    def get_upload_progress(self, token):
        """ Returns the upload progress as a fraction between 0 and 1.
        Raises:
            NetworkException on a failed connection """
        try:
            r = self.session.post(
                f"https://{self.domain}/project/internal/progress", data={
                    "uid": token
                },
                timeout=5
            )
            progress = json.loads(r.content)
            return int(progress['bytes_processed'])/int(progress['content_length'])
        except requests.exceptions.RequestException:
            raise NetworkException("Failed connecting to the sheepit server")
        except ValueError:
            # No progress information available (yet)
            return None
    def add_job(self, token, animation=True, cpu=True, cuda=False,
                opencl=False, public=True, mp4=False,
                anim_start_frame=None, anim_end_frame=None,
                anim_step_frame=None, still_frame=None, max_ram=None,
                split_tiles=None, split_layers=None, split_by_layers=False):
        """ Adds the previously uploaded file as a render job.
        Use upload_file() to upload the file first.
        Raises:
            NetworkException on a failed connection """
        param_start_frame = 0
        param_end_frame = 0
        param_step_frame = 1
        param_split_tiles = -1
        param_split_layers = None
        if split_by_layers:
            param_split_layers = split_layers
        else:
            param_split_tiles = split_tiles
        if animation:
            param_start_frame = anim_start_frame
            param_end_frame = anim_end_frame
            param_step_frame = anim_step_frame
        else:
            param_start_frame = still_frame
        try:
            r = self.session.get(
                f"https://{self.domain}/project/add")
        except requests.exceptions.RequestException:
            raise NetworkException("Failed connecting to the sheepit server")
        parser = AddJobParser()
        parser.feed(str(r.text))
        parser.close()
        compute_method = 0
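        # compute_method is assembled as a bit field from the requested
        # devices: +1 for CPU, +2 for CUDA, +4 for OpenCL.
        # Eevee projects drop the CPU flag below.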
        if parser.data['addjob_engine_0'] == "BLENDER_EEVEE":
            cpu = False
        if cpu:
            compute_method += 1
        if cuda:
            compute_method += 2
        if opencl:
            compute_method += 4
        settings = {
            "type": "animation" if animation else "singleframe",
            "compute_method": compute_method,
            "executable": parser.data["addjob_exe"],
            "engine": parser.data['addjob_engine_0'],
            "render_on_gpu_headless": "1",
            "public_render": "1" if public else "0",
            "public_thumbnail": "0",
            "generate_mp4": "1" if mp4 else "0",
            "start_frame": param_start_frame,
            "end_frame": param_end_frame,
            "step_frame": param_step_frame,
            "archive": parser.data['addjob_archive_0'],
            "max_ram_optional": "",
            "path": parser.data['addjob_path_0'],
            "framerate": parser.data['addjob_framerate_0'],
            "width": parser.data['addjob_width_0'],
            "height": parser.data['addjob_height_0'],
            "split_tiles": param_split_tiles,
            "exr": "0",
            "cycles_samples": parser.data['addjob_cycles_samples_0'],
            "samples_pixel": parser.data['addjob_samples_pixel_0'],
            "image_extension": parser.data['addjob_image_extension_0'],
            "use_adaptive_sampling": "1"
        }
        if param_split_layers:
            settings["split_samples"] = param_split_layers
        try:
            r = self.session.post(
                f"https://{self.domain}/project/add_internal", data=settings)
        except requests.exceptions.RequestException:
            raise NetworkException("Failed connecting to the sheepit server")
    def import_session(self, cookie_dict):
        """ Imports all cookies from a dictionary.
        Use export_session() to export """
        for name, value in cookie_dict.items():
            self.session.cookies.set_cookie(requests.cookies.create_cookie(
                domain=self.domain,
                name=name,
                value=value
            ))
    def export_session(self):
        """ Exports all cookies as a dictionary.
        Use import_session() to import """
        cookies = dict()
        for cookie in self.session.cookies:
            if cookie.domain == self.domain:
                cookies[cookie.name] = cookie.value
        return cookies
    def is_logged_in(self):
        """ Returns True if logged in.
        Raises:
            NetworkException on a failed connection """
        cookies = self.export_session()
        if not cookies:
            # Return early if there are no cookies
            return False
        try:
            r = self.session.get(
                f"https://{self.domain}/account.php?mode=login", timeout=5)
            # Logged-in users are redirected to the main page
            return r.url == f"https://{self.domain}/"
        except requests.exceptions.RequestException:
            raise NetworkException("Failed connecting to the sheepit server")
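# The helper classes below scrape the information this API needs from the
# website's HTML: ProfileParser walks the <dt>/<dd> pairs on the profile
# page, TokenParser reads the hidden UPLOAD_IDENTIFIER form field, and
# AddJobParser collects the prefilled fields of the "add project" form.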
class ProfileParser(html.parser.HTMLParser):
    """ Parses the user profile page (/user/<username>/profile) """
    def __init__(self):
        html.parser.HTMLParser.__init__(self)
        self.data = {
            "Projects created": None,
            "Frames ordered": None,
            "Rendered frames": None,
            "Accumulated render": None,
            "Rank": None,
            "Points": None,
            "Team": None,
            "Registration": None
        }
        self.in_dt = False
        self.in_dd = False
        self.dt_data = ""
    def handle_starttag(self, tag, attrs):
        # handle title in the description list:
        if tag == "dt":
            self.in_dt = True
        # handle data
        elif tag == "dd" and self.dt_data != "":
            self.in_dd = True
    def handle_data(self, data):
        # handle title in the description list:
        if self.in_dt:
            if data in self.data:
                self.dt_data = data
        # handle data
        elif self.in_dd:
            if data != "":
                self.data[self.dt_data] = data
                self.dt_data = ""
                self.in_dd = False
    def handle_endtag(self, tag):
        # handle title in the description list:
        if tag == "dt":
            self.in_dt = False
        # handle data
        if tag == "dd":
            self.in_dd = False
            self.dt_data = ""
class TokenParser(html.parser.HTMLParser):
    """ Parses the "get started" page to extract an upload token """
    def __init__(self):
        html.parser.HTMLParser.__init__(self)
        self.token = ""
    def handle_starttag(self, tag, attributes):
        if tag == 'input':
            is_token = False
            for name, value in attributes:
                if name == "name" and value == "UPLOAD_IDENTIFIER":
                    is_token = True
            if is_token:
                for name, value in attributes:
                    if name == "value":
                        self.token = value
class AddJobParser(html.parser.HTMLParser):
    """ Parses the step 2 page of the upload process """
    def __init__(self):
        html.parser.HTMLParser.__init__(self)
        self.data = {
            "addjob_engine_0": "",
            "addjob_archive_0": "",
            "addjob_path_0": "",
            "addjob_framerate_0": "",
            "addjob_cycles_samples_0": "",
            "addjob_samples_pixel_0": "",
            "addjob_image_extension_0": "",
            "addjob_width_0": "",
            "addjob_height_0": "",
            "addjob_exe": "",
        }
        self.is_in_addjob = False
    def handle_starttag(self, tag, attributes):
        if tag == 'input':
            is_valid = False
            field_id = ""
            for name, value in attributes:
                if name == "id" and (value in self.data):
                    is_valid = True
                    field_id = value
            if is_valid:
                for name, value in attributes:
                    if name == "value":
                        self.data[field_id] = value
        elif tag == 'select':
            for name, value in attributes:
                if name == "id" and value == "addjob_exe":
                    self.is_in_addjob = True
        elif tag == 'option' and self.is_in_addjob:
            is_default = False
            addjob_exe = ""
            for name, value in attributes:
                if name == "value":
                    is_default = True  # we will use the first option as default
                    addjob_exe = value
            if is_default:
                self.data["addjob_exe"] = addjob_exe
                self.is_in_addjob = False
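# Minimal usage sketch (not part of the library): it shows the intended call
# order, assuming the module is run inside its package (the relative
# requests_toolbelt import above requires that, e.g. python -m <package>.sheepit).
# The username, password and .blend path below are placeholders.
if __name__ == "__main__":
    client = Sheepit()
    try:
        client.login("your_username", "your_password")
        token = client.request_upload_token()
        client.upload_file(token, "/path/to/project.blend")
        print("upload progress:", client.get_upload_progress(token))
        # Add the uploaded archive as an animation job, frames 1-250
        client.add_job(token, animation=True, cpu=True,
                       anim_start_frame=1, anim_end_frame=250,
                       anim_step_frame=1)
        # The cookie dict returned here can be fed back into import_session()
        # by a later process to resume the same login.
        saved_session = client.export_session()
        print("session cookies:", saved_session)
    finally:
        client.logout()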