# -*- coding: utf-8 -*-

# =============================================================================
# Tasks to be callable async
# =============================================================================

tasks = {}

# -----------------------------------------------------------------------------
def gis_download_kml(record_id, filename, session_id_name, session_id,
                     user_id=None):
    """
        Download a KML file
            - will normally be done Asynchronously if there is a worker alive

        @param record_id: id of the record in db.gis_layer_kml
        @param filename: name to save the file as
        @param session_id_name: name of the session
        @param session_id: id of the session
        @param user_id: calling request's auth.user.id or None
    """
    if user_id:
        # Authenticate
        auth.s3_impersonate(user_id)
    # Run the Task & return the result
    result = gis.download_kml(record_id, filename, session_id_name, session_id)
    db.commit()
    return result

tasks["gis_download_kml"] = gis_download_kml

# -----------------------------------------------------------------------------
def gis_update_location_tree(feature, user_id=None):
    """
        Update the Location Tree for a feature
            - will normally be done Asynchronously if there is a worker alive

        @param feature: the feature (in JSON format)
        @param user_id: calling request's auth.user.id or None
    """
    if user_id:
        # Authenticate
        auth.s3_impersonate(user_id)
    # Run the Task & return the result
    feature = json.loads(feature)
    path = gis.update_location_tree(feature)
    db.commit()
    return path

tasks["gis_update_location_tree"] = gis_update_location_tree

# -----------------------------------------------------------------------------
def org_facility_geojson(user_id=None):
    """
        Export GeoJSON[P] of Facility data

        @param user_id: calling request's auth.user.id or None
    """
    if user_id:
        # Authenticate
        auth.s3_impersonate(user_id)
    # Run the Task (no result to return)
    s3db.org_facility_geojson()

tasks["org_facility_geojson"] = org_facility_geojson

# -----------------------------------------------------------------------------
def sync_synchronize(repository_id, user_id=None, manual=False):
    """
        Run all tasks for a repository, to be called from scheduler

        @param repository_id: id of the record in db.sync_repository
        @param user_id: calling request's auth.user.id or None
        @param manual: whether this is a manually-triggered run
    """
    auth.s3_impersonate(user_id)

    rtable = s3db.sync_repository
    query = (rtable.deleted != True) & \
            (rtable.id == repository_id)
    repository = db(query).select(limitby=(0, 1)).first()
    if repository:
        sync = s3base.S3Sync()
        status = sync.get_status()
        if status.running:
            message = "Synchronization already active - skipping run"
            sync.log.write(repository_id=repository.id,
                           resource_name=None,
                           transmission=None,
                           mode=None,
                           action="check",
                           remote=False,
                           result=sync.log.ERROR,
                           message=message)
            db.commit()
            return sync.log.ERROR
        sync.set_status(running=True, manual=manual)
        try:
            sync.synchronize(repository)
        finally:
            sync.set_status(running=False, manual=False)
    db.commit()
    return s3base.S3SyncLog.SUCCESS

tasks["sync_synchronize"] = sync_synchronize

# -----------------------------------------------------------------------------
def maintenance(period="daily"):
    """
        Run all maintenance tasks which should be done daily
            - these are read from the template
    """
    mod = "applications.%s.private.templates.%s.maintenance as maintenance" % \
          (appname, settings.get_template())
    try:
        exec("import %s" % mod)
    except ImportError:
        # No Custom Maintenance available, use the default
        exec("import applications.%s.private.templates.default.maintenance as maintenance" % appname)
    if period == "daily":
        result = maintenance.Daily()()
    else:
        result = "NotImplementedError"
    db.commit()
    return result

tasks["maintenance"] = maintenance

# -----------------------------------------------------------------------------
if settings.has_module("msg"):

    # -------------------------------------------------------------------------
    def msg_process_outbox(contact_method, user_id=None):
        """
            Process Outbox
                - will normally be done Asynchronously if there is a worker alive

            @param contact_method: one from s3msg.MSG_CONTACT_OPTS
            @param user_id: calling request's auth.user.id or None
        """
        if user_id:
            # Authenticate
            auth.s3_impersonate(user_id)
        # Run the Task & return the result
        result = msg.process_outbox(contact_method)
        db.commit()
        return result

    tasks["msg_process_outbox"] = msg_process_outbox

    # -------------------------------------------------------------------------
    def msg_process_inbound_email(username, user_id):
        """
            Poll an inbound email source

            @param username: email address of the email source to read from;
                             this uniquely identifies one inbound email task
            @param user_id: calling request's auth.user.id or None
        """
        # Run the Task & return the result
        result = msg.fetch_inbound_email(username)
        db.commit()
        return result

    tasks["msg_process_inbound_email"] = msg_process_inbound_email

    # -------------------------------------------------------------------------
    def msg_mcommons_inbound_sms(campaign_id, user_id=None):
        """
            Poll an inbound SMS (Mobile Commons) source

            @param campaign_id: account name for the SMS source to read from;
                                this uniquely identifies one inbound SMS task
            @param user_id: calling request's auth.user.id or None
        """
        # Run the Task & return the result
        result = msg.mcommons_inbound_sms(campaign_id)
        db.commit()
        return result

    tasks["msg_mcommons_inbound_sms"] = msg_mcommons_inbound_sms

    # -------------------------------------------------------------------------
    def msg_twilio_inbound_sms(account, user_id=None):
        """
            Poll an inbound SMS (Twilio) source

            @param account: account name for the SMS source to read from;
                            this uniquely identifies one inbound SMS task
            @param user_id: calling request's auth.user.id or None
        """
        # Run the Task & return the result
        result = msg.twilio_inbound_sms(account)
        db.commit()
        return result

    tasks["msg_twilio_inbound_sms"] = msg_twilio_inbound_sms

    # -------------------------------------------------------------------------
    def msg_parse_workflow(workflow, source, user_id):
        """
            Process unparsed messages in the msg_log

            @param workflow: the parsing workflow to apply
            @param source: the message source to parse
            @param user_id: calling request's auth.user.id or None
        """
        # Run the Task & return the result
        result = msg.parse_import(workflow, source)
        db.commit()
        return result

    tasks["msg_parse_workflow"] = msg_parse_workflow

    # -------------------------------------------------------------------------
    def msg_search_subscription_notifications(frequency):
        """
            Search Subscriptions & send Notifications
        """
        # Run the Task & return the result
        result = s3db.msg_search_subscription_notifications(frequency=frequency)
        db.commit()
        return result

    tasks["msg_search_subscription_notifications"] = msg_search_subscription_notifications

# -----------------------------------------------------------------------------
if settings.has_module("req"):

    # -------------------------------------------------------------------------
    def req_add_from_template(req_id, user_id=None):
        """
            Add a Request from a template

            @param req_id: id of the request template
            @param user_id: calling request's auth.user.id or None
        """
        if user_id:
            # Authenticate
            auth.s3_impersonate(user_id)
        # Run the Task & return the result
        result = s3db.req_add_from_template(req_id)
        db.commit()
        return result

    tasks["req_add_from_template"] = req_add_from_template

# -----------------------------------------------------------------------------
if settings.has_module("stats"):

    # -------------------------------------------------------------------------
    def stats_group_clean(user_id=None):
        """
            Update the stats_aggregate table by calculating all the
            stats_group records which have the dirty flag set to True

            @param user_id: calling request's auth.user.id or None
        """
        if user_id:
            # Authenticate
            auth.s3_impersonate(user_id)
        # Run the Task & return the result
        result = s3db.stats_group_clean()
        db.commit()
        return result

    tasks["stats_group_clean"] = stats_group_clean

    # -------------------------------------------------------------------------
    def stats_update_time_aggregate(data_id=None, user_id=None):
        """
            Update the stats_aggregate table for the given stats_data record

            @param data_id: the id of the stats_data record just added
            @param user_id: calling request's auth.user.id or None
        """
        if user_id:
            # Authenticate
            auth.s3_impersonate(user_id)
        # Run the Task & return the result
        result = s3db.stats_update_time_aggregate(data_id)
        db.commit()
        return result

    tasks["stats_update_time_aggregate"] = stats_update_time_aggregate

    # -------------------------------------------------------------------------
    def stats_update_aggregate_location(location_level,
                                        root_location_id,
                                        parameter_id,
                                        start_date,
                                        end_date,
                                        user_id=None):
        """
            Update the stats_aggregate table for the given location and parameter

            @param location_level: the gis level at which the data needs to be accumulated
            @param root_location_id: the id of the location
            @param parameter_id: the parameter for which the stats are being updated
            @param start_date: the start date of the period in question
            @param end_date: the end date of the period in question
            @param user_id: calling request's auth.user.id or None
        """
        if user_id:
            # Authenticate
            auth.s3_impersonate(user_id)
        # Run the Task & return the result
        result = s3db.stats_update_aggregate_location(location_level,
                                                      root_location_id,
                                                      parameter_id,
                                                      start_date,
                                                      end_date,
                                                      )
        db.commit()
        return result

    tasks["stats_update_aggregate_location"] = stats_update_aggregate_location

# -----------------------------------------------------------------------------
# Instantiate the task scheduler with the list of tasks
s3.tasks = tasks
s3task = s3base.S3Task()
current.s3task = s3task
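
# Example (sketch; the async() fallback behaviour is inferred from the task
# docstrings above): controllers can queue any registered task, and S3Task
# runs it inline when no worker is alive:
#
#   result = current.s3task.async("maintenance", vars={"period": "daily"})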

# -----------------------------------------------------------------------------
# Reusable field for scheduler task links
scheduler_task_id = S3ReusableField("scheduler_task_id",
                                    "reference %s" % s3base.S3Task.TASK_TABLENAME,
                                    ondelete="CASCADE")
s3.scheduler_task_id = scheduler_task_id
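
# Example (sketch; "mymodule_job" is a hypothetical table): models can link a
# record to its scheduler entry by including the reusable field in a table
# definition:
#
#   db.define_table("mymodule_job",
#                   scheduler_task_id(),
#                   *s3_meta_fields())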
# END =========================================================================