#!/usr/bin/env python3
# vim: ts=4:sw=4:et:cc=120
#
# python3 wrapper for ACE API calls
import sys
try:
import requests
except ImportError:
print("You need to install the python Requests library (see http://docs.python-requests.org/en/master/)")
sys.exit(1)
try:
import tzlocal
except ImportError:
print("You need to install the tzlocal library (see https://pypi.org/project/tzlocal/)")
sys.exit(1)
import argparse
import copy
import datetime
import inspect
import io
import json
import logging
import os
import os.path
import pickle
import pprint
import shutil
import socket
import tarfile
import tempfile
import traceback
import urllib3
import uuid
import warnings
from configparser import ConfigParser
from zoneinfo import ZoneInfo
# set up the argument parsing as we define the functions
# so we can keep them grouped together
parser = argparse.ArgumentParser(description="ACE API Command Line Wrapper")
# all API commands take the following parameters
def _api_command(parser):
parser.add_argument(
"--remote-host",
required=False,
default="localhost:443",
dest="remote_host",
help="The remote host to connect to in host[:port] format.",
)
parser.add_argument(
"--ssl-verification",
required=False,
default=None,
dest="ssl_verification",
help="Optional path to root CA ssl to load. Defaults to system installed CA.",
)
parser.add_argument(
"-S",
"--disable-ssl-verification",
required=False,
default=False,
dest="disable_ssl_verification",
action="store_true",
help="Do not perform SSL verification.",
)
parser.add_argument(
"--disable-proxy",
required=False,
default=False,
action="store_true",
help="Disables proxy usage by removing the environment variables http_proxy, https_proxy and ftp_proxy.",
)
return parser
subparsers = parser.add_subparsers(dest="cmd")
# ignoring this: /usr/lib/python3/dist-packages/urllib3/connection.py:344:
# SubjectAltNameWarning: Certificate for localhost has no `subjectAltName`,
# falling back to check for a `commonName` for now. This feature is being
# removed by major browsers and deprecated by RFC 2818. (See
# https://github.com/shazow/urllib3/issues/497 for details.)
warnings.simplefilter("ignore", urllib3.exceptions.SecurityWarning)
# get our custom logger we use for this library
log = logging.getLogger(__name__)
# what HTTP method to use
METHOD_GET = "get"
METHOD_POST = "post"
METHOD_PUT = "put"
def set_default_remote_host(remote_host):
"""Sets the default remote host used when no remote host is provided to the API calls.
:param str remote_host: The ACE node you want to work with. Default: localhost
"""
global default_remote_host
default_remote_host = remote_host
def set_default_ssl_ca_path(ssl_verification):
"""Sets the default SSL verification mode. Behavior:
- If set to None (the default) then the system installed CAs are used.
- If set to False, then SSL verification is disabled.
- Else, it is assumed to be a file that contains the CAs to be used to verify the SSL certificates.
:param ssl_verification: see behavior above.
:type ssl_verification: str or None or False
"""
global default_ssl_verification
default_ssl_verification = ssl_verification
# list of api commands
api_commands = []
# list of support commands
support_commands = []
# the default remote host to use when no remote host is provided
default_remote_host = "localhost"
default_ssl_verification = None
# the local timezone
LOCAL_TIMEZONE = ZoneInfo(tzlocal.get_localzone_name())
# the datetime string format we use for this api
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f%z"
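# illustrative example (not executed): what DATETIME_FORMAT produces for a timezone-aware datetime
#
#     >>> datetime.datetime(2021, 3, 17, 12, 0, 5, 123456, tzinfo=ZoneInfo("UTC")).strftime(DATETIME_FORMAT)
#     '2021-03-17T12:00:05.123456+0000'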
def _execute_api_call(
command,
method=METHOD_GET,
remote_host=None,
ssl_verification=None,
disable_ssl_verification=False,
stream=False,
data=None,
files=None,
params=None,
proxies=None,
timeout=None,
):
if remote_host is None:
remote_host = default_remote_host
if ssl_verification is None:
ssl_verification = default_ssl_verification
if disable_ssl_verification:
ssl_verification = False
if method == METHOD_GET:
func = requests.get
elif method == METHOD_PUT:
func = requests.put
else:
func = requests.post
kwargs = {"stream": stream}
if params is not None:
kwargs["params"] = params
if ssl_verification is not None:
kwargs["verify"] = ssl_verification
else:
kwargs["verify"] = False
if data is not None:
kwargs["data"] = data
if files is not None:
kwargs["files"] = files
if proxies is not None:
kwargs["proxies"] = proxies
if timeout is not None:
kwargs["timeout"] = timeout
r = func("https://{}/api/{}".format(remote_host, command), **kwargs)
r.raise_for_status()
return r
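# illustrative example (not executed; ace.example.com is a hypothetical host): a call such as
#
#     _execute_api_call("common/ping", remote_host="ace.example.com:443")
#
# issues GET https://ace.example.com:443/api/common/ping and raises if the server returns an HTTP error status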
def get_supported_api_version(*args, **kwargs):
"""Get the API version for the ACE ecosystem you're working with.
:return: Result dictionary containing the version string.
:rtype: dict
"""
return _execute_api_call("common/get_supported_api_version", *args, **kwargs).json()
def _cli_get_supported_api_version(args):
return get_supported_api_version(remote_host=args.remote_host, ssl_verification=args.ssl_verification)
get_supported_command_parser = _api_command(
subparsers.add_parser(
"get-supported-api-version", help="""Get the API version for the ACE ecosystem you're working with."""
)
)
get_supported_command_parser.set_defaults(func=_cli_get_supported_api_version)
def get_valid_companies(*args, **kwargs):
"""Get a list of the companies supported by this ACE ecosystem.
:return: Result dictionary containing a list of companies.
:rtype: dict
"""
return _execute_api_call("common/get_valid_companies", *args, **kwargs).json()
def _cli_get_valid_companies(args):
return get_valid_companies(remote_host=args.remote_host, ssl_verification=args.ssl_verification)
get_valid_companies_command_parser = _api_command(
subparsers.add_parser(
"get-valid-companies", help="""Get a list of the companies supported by this ACE ecosystem."""
)
)
get_valid_companies_command_parser.set_defaults(func=_cli_get_valid_companies)
def get_valid_observables(*args, **kwargs):
"""Get all of the valid observable types for this ACE ecosystem.
    :return: Result dictionary containing observable names and descriptions.
:rtype: dict
"""
return _execute_api_call("common/get_valid_observables", *args, **kwargs).json()
def _cli_get_valid_observables(args):
return get_valid_observables(remote_host=args.remote_host, ssl_verification=args.ssl_verification)
get_valid_observables_command_parser = _api_command(
subparsers.add_parser(
"get-valid-observables", help="""Get all of the valid observable types for this ACE ecosystem."""
)
)
get_valid_observables_command_parser.set_defaults(func=_cli_get_valid_observables)
def get_valid_directives(*args, **kwargs):
    """Get all of the valid observable directives for this ACE ecosystem.
    :return: Result dictionary of valid directives.
    :rtype: dict
    """
    return _execute_api_call("common/get_valid_directives", *args, **kwargs).json()
def _cli_get_valid_directives(args):
return get_valid_directives(remote_host=args.remote_host, ssl_verification=args.ssl_verification)
get_valid_directives_command_parser = _api_command(
subparsers.add_parser(
"get-valid-directives", help="""Get all of the valid observable types for this ACE ecosystem."""
)
)
get_valid_directives_command_parser.set_defaults(func=_cli_get_valid_directives)
def ping(*args, **kwargs):
"""Connectivity check to the ACE ecosystem."""
return _execute_api_call("common/ping", *args, **kwargs).json()
def _cli_ping(args):
return ping(remote_host=args.remote_host, ssl_verification=args.ssl_verification)
ping_command_parser = _api_command(subparsers.add_parser("ping", help="""Connectivity check to the ACE ecosystem."""))
ping_command_parser.set_defaults(func=_cli_ping)
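# illustrative library usage (hypothetical host): set the module-level defaults, then run a connectivity check
#
#     import ace_api
#     ace_api.set_default_remote_host("ace.example.com")
#     ace_api.set_default_ssl_ca_path("/path/to/ca.pem")  # or False to disable SSL verification
#     result = ace_api.ping()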
# TODO parameters should default to None and let the server decide the default value
def submit(
description,
analysis_mode="analysis",
tool="ace_api",
tool_instance="ace_api:{}".format(socket.getfqdn()),
type="generic",
company_id=None,
event_time=None,
details={},
observables=[],
tags=[],
files=[],
queue="default",
instructions=None,
*args,
**kwargs,
):
"""Submit a request to ACE for analysis and/or correlation.
:param str description: The title of the analysis. This ends up being displayed as the title if it becomes
an alert.
:param str analysis_mode: (optional) The ACE mode this analysis should be put into. 'correlation' will force an alert creation. 'analysis' will only alert if a detection is made. Default: 'analysis'
:param str tool: (optional) The "tool" that is submitting this analysis. Meant for distinguishing your custom hunters and detection tools. Default: 'ace_api'.
:param str tool_instance: (optional) The instance of the tool that is submitting this analysis.
:param str type: (optional) The type of the analysis. Defaults to 'generic'.
:param str company_id: (optional) The ID of a company/organization/group this submission should be associated with.
    :param datetime event_time: (optional) Assign a time to this analysis. Usually the time associated with whatever event triggered the creation of this analysis. Default: now()
:param dict details: (optional) The dict of details for the analysis. This is a free form JSON object and typically
contains detailed information about what is being analyzed or what was found that triggered the analysis.
:param list observables: (optional) A list of observables to add to the request.
    :param list tags: (optional) A list of tags to add to the analysis.
:param list files: (optional) A list of (file_name, file_descriptor) tuples to be included in this ACE request.
    :param str queue: (optional) The queue this analysis should go into if this submission becomes an alert.
:param str instructions: (optional) A free form string value that gives the analyst instructions on what
this alert is about and/or how to analyze the data contained in the
alert.
:return: A result dictionary. If submission was successful, the UUID of the analysis will be contained. Like this:
{'result': {'uuid': '960b0a0f-3ea2-465f-852f-ebccac6ae282'}}
:rtype: dict
"""
# make sure you passed in *something* for the description
assert description
# default event time is now
if event_time is None:
event_time = datetime.datetime.utcnow()
    # convert the event time to UTC and then to the datetime format string ACE expects;
    # a naive datetime is assumed to already be in UTC (the default is datetime.datetime.utcnow())
    if isinstance(event_time, datetime.datetime):
        if event_time.tzinfo is None:
            formatted_event_time = event_time.replace(tzinfo=ZoneInfo("UTC")).strftime(DATETIME_FORMAT)
else:
formatted_event_time = event_time.astimezone(ZoneInfo("UTC")).strftime(DATETIME_FORMAT)
else:
# otherwise we assume the event time is already formatted
formatted_event_time = event_time
# make sure the observables are in the correct format
for o in observables:
assert isinstance(o, dict)
assert "type" in o, "missing type in observable {}".format(o)
assert "value" in o, "missing value in observable {}".format(o)
for key in o.keys():
assert key in [
"type",
"value",
"time",
"tags",
"directives",
"limited_analysis",
], "unknown observable property {} in {}".format(key, o)
# make sure any times are formatted
if "time" in o and isinstance(o["time"], datetime.datetime):
if o["time"].tzinfo is None:
o["time"] = o["time"].replace(tzinfo=LOCAL_TIMEZONE)
o["time"] = o["time"].astimezone(ZoneInfo("UTC")).strftime(DATETIME_FORMAT)
# make sure the tags are strings
for t in tags:
assert isinstance(t, str), "tag {} is not a string".format(t)
# if details is a string interpret it as JSON
# if isinstance(details, str):
# details = json.loads(details)
# make sure each file is a tuple of (str, fp)
_error_message = (
"file parameter {} invalid: each element of the file parameter must be a tuple of "
"(file_name, file_descriptor)"
)
files_params = []
for index, f in enumerate(files):
assert isinstance(f, tuple), _error_message.format(index)
assert len(f) == 2, _error_message.format(index)
assert f[1], _error_message.format(index)
assert isinstance(f[0], str), _error_message.format(index)
files_params.append(("file", (f[0], f[1])))
# OK everything seems legit
return _execute_api_call(
"analysis/submit",
data={
"analysis": json.dumps(
{
"analysis_mode": analysis_mode,
"tool": tool,
"tool_instance": tool_instance,
"type": type,
"company_id": company_id,
"description": description,
"event_time": formatted_event_time,
"details": details,
"observables": observables,
"tags": tags,
"queue": queue,
"instructions": instructions,
}
),
},
files=files_params,
method=METHOD_POST,
*args,
**kwargs,
).json()
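# illustrative example (hypothetical values): submitting an analysis with one observable and one file
#
#     with open("sample.txt", "rb") as fp:
#         result = submit(
#             "Suspicious login from new country",
#             analysis_mode="correlation",
#             observables=[{"type": "ipv4", "value": "1.2.3.4", "tags": ["suspect"]}],
#             files=[("sample.txt", fp)],
#         )
#     analysis_uuid = result["result"]["uuid"]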
def _cli_submit(args):
if args.event_time:
# make sure the time is formatted correctly
datetime.datetime.strptime(args.event_time, DATETIME_FORMAT)
# parse the details JSON
if args.details:
if args.details.startswith("@"):
with open(args.details[1:], "r") as fp:
args.details = json.load(fp)
# parse the observables
observables = []
if args.observables:
for o in args.observables:
o = o.split("/")
_type = o[0]
_value = o[1].strip()
_time = _tags = _directives = _limited_analysis = None
if len(o) > 2:
if o[2].strip():
datetime.datetime.strptime(o[2].strip(), DATETIME_FORMAT)
_time = o[2].strip()
if len(o) > 3:
if o[3].strip():
_tags = [_.strip() for _ in o[3].split(",")]
if len(o) > 4:
if o[4].strip():
_directives = [_.strip() for _ in o[4].split(",")]
if len(o) > 5:
if o[5].strip():
_limited_analysis = [_.strip() for _ in o[5].split(",")]
o = {"type": _type, "value": _value}
if _time:
o["time"] = _time
if _tags:
o["tags"] = _tags
if _directives:
o["directives"] = _directives
if _limited_analysis:
o["limited_analysis"] = _limited_analysis
observables.append(o)
args.observables = observables
files = []
if args.files:
for f in args.files:
if "-->" in f:
source_file, dest_file = f.split("-->")
else:
source_file = f
dest_file = os.path.basename(f)
files.append((dest_file, open(source_file, "rb")))
args.files = files
f_args = [args.description]
f_kwargs = {"remote_host": args.remote_host}
for prop in [
"ssl_verification",
"disable_ssl_verification",
"analysis_mode",
"tool",
"tool_instance",
"type",
"event_time",
"details",
"observables",
"tags",
"files",
"instructions",
]:
if getattr(args, prop) is not None:
f_kwargs[prop] = getattr(args, prop)
return submit(*f_args, **f_kwargs)
submit_command_parser = _api_command(
subparsers.add_parser("submit", help="""Submit a request to ACE for analysis and/or correlation.""")
)
submit_command_parser.add_argument("description", help="The description (title) of the analysis.")
submit_command_parser.add_argument(
"-m",
"--mode",
"--analysis_mode",
dest="analysis_mode",
help="The mode of analysis. Defaults of analysis. Set it to correlation to automatically become an alert.",
)
submit_command_parser.add_argument(
"--tool", help="The name of the tool that generated the analysis request. Defaults to ace_api"
)
submit_command_parser.add_argument(
"--tool-instance", help="The instance of the tool that generated the analysis request. Defaults to ace_api(ipv4)."
)
submit_command_parser.add_argument("--type", help="The type of the analysis. Defaults to generic.")
submit_command_parser.add_argument(
"-t",
"--time",
"--event-time",
dest="event_time",
help="""The time of the event that triggered the analysis, or the source reference time for all analysis.
The expected format is {} (example: {}). Defaults to current time and current time zone.""".format(
DATETIME_FORMAT.replace("%", "%%"),
datetime.datetime.now().replace(tzinfo=LOCAL_TIMEZONE).strftime(DATETIME_FORMAT),
),
)
submit_command_parser.add_argument(
"-d",
"--details",
dest="details",
help="""The free form JSON dict that makes up the details of the analysis.
You can specify @filename to load a given file as a JSON dict.""",
)
submit_command_parser.add_argument(
"-o",
"--observables",
nargs="+",
dest="observables",
help="""Adds the given observable to the analysis in the following format:
    type/value[/time][/tags_csv][/directives_csv][/limited_analysis_csv]
Any times must be in {} format (example: {}).""".format(
DATETIME_FORMAT.replace("%", "%%"),
datetime.datetime.now().replace(tzinfo=LOCAL_TIMEZONE).strftime(DATETIME_FORMAT),
),
)
submit_command_parser.add_argument(
"-T", "--tags", nargs="+", dest="tags", help="""The list of tags to add to the analysis."""
)
submit_command_parser.add_argument(
"-f",
"--files",
nargs="+",
dest="files",
help="""The list of files to add to the analysis.
Each file name can optionally be renamed in the remote submission by using the format
source_path-->dest_path where dest_path is a relative path.""",
)
# TODO add support for queue
submit_command_parser.add_argument(
"--instructions",
help="""A free form string value that gives the analyst instructions on what
this alert is about and/or how to analyze the data contained in the
alert.""",
)
submit_command_parser.set_defaults(func=_cli_submit)
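# illustrative CLI invocation (hypothetical host and values):
#
#     ace_api.py --remote-host ace.example.com submit "Suspicious login" \
#         -m correlation -o ipv4/1.2.3.4 -T phishing -f /tmp/sample.txt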
def resubmit_alert(uuid, *args, **kwargs):
"""Resubmit an alert for analysis. This means the alert will be re-analyzed as-if it was new.
:param str uuid: The uuid of the alert to be resubmitted.
:return: A result dictionary (has 'result' key).
:rtype: dict
"""
return _execute_api_call("analysis/resubmit/{}".format(uuid), *args, **kwargs).json()
def _cli_resubmit_alert(args):
return resubmit_alert(remote_host=args.remote_host, ssl_verification=args.ssl_verification, uuid=args.uuid)
resubmit_command_parser = _api_command(
subparsers.add_parser(
"resubmit",
help="""Resubmit an alert for analysis. This means the alert will be re-analyzed as-if it was new.""",
)
)
resubmit_command_parser.add_argument("uuid", help="The UUID of the analysis/alert to resubmit.")
resubmit_command_parser.set_defaults(func=_cli_resubmit_alert)
def get_analysis(uuid, *args, **kwargs):
"""Get analysis results.
:param str uuid: The UUID of the analysis request.
:return: Result dictionary containing any and all analysis results.
:rtype: dict
"""
return _execute_api_call("analysis/{}".format(uuid), *args, **kwargs).json()
def _cli_get_analysis(args):
return get_analysis(remote_host=args.remote_host, ssl_verification=args.ssl_verification, uuid=args.uuid)
get_analysis_command_parser = _api_command(subparsers.add_parser("get-analysis", help="""Get analysis results."""))
get_analysis_command_parser.add_argument("uuid", help="The UUID of the analysis to get.")
get_analysis_command_parser.set_defaults(func=_cli_get_analysis)
def get_analysis_details(uuid, name, *args, **kwargs):
    """Get the external details of an analysis.
    :param str uuid: The UUID of the analysis.
    :param str name: The name of the details to get.
    :return: Result dictionary containing the requested analysis details.
    :rtype: dict
    """
    return _execute_api_call("analysis/details/{}/{}".format(uuid, name), *args, **kwargs).json()
def _cli_get_analysis_details(args):
return get_analysis_details(
remote_host=args.remote_host, ssl_verification=args.ssl_verification, uuid=args.uuid, name=args.name
)
get_analysis_details_command_parser = _api_command(
subparsers.add_parser("get-analysis-details", help="""Get detailed analysis results.""")
)
get_analysis_details_command_parser.add_argument("uuid", help="The UUID of the analysis to get details from.")
get_analysis_details_command_parser.add_argument("name", help="The name of the details to get.")
get_analysis_details_command_parser.set_defaults(func=_cli_get_analysis_details)
def get_analysis_file(uuid, name, output_file=None, output_fp=None, *args, **kwargs):
    """Download the given file from the given analysis.
    :param str uuid: The UUID of the analysis.
    :param str name: The name of the file to download.
    :param str output_file: (optional) The path of a local file to write the content to. Default: stdout
    :param output_fp: (optional) A file object to write the content to. Default: stdout
    :return: True once the download completes.
    """
if output_file is None and output_fp is None:
output_fp = sys.stdout.buffer
elif output_fp is None:
output_fp = open(output_file, "wb")
r = _execute_api_call("analysis/file/{}/{}".format(uuid, name), stream=True, *args, **kwargs)
size = 0
for chunk in r.iter_content(io.DEFAULT_BUFFER_SIZE):
if chunk:
output_fp.write(chunk)
size += len(chunk)
if output_file is not None:
output_fp.close()
return True
def _cli_get_analysis_file(args):
return get_analysis_file(
remote_host=args.remote_host,
ssl_verification=args.ssl_verification,
uuid=args.uuid,
name=args.name,
output_file=args.output_file,
output_fp=sys.stdout.buffer if args.output_file is None else None,
)
get_analysis_file_command_parser = _api_command(
subparsers.add_parser("get-analysis-file", help="""Download the given file from the given analysis.""")
)
get_analysis_file_command_parser.add_argument("uuid", help="The UUID of the analysis to get the file from.")
get_analysis_file_command_parser.add_argument("name", help="The name of the file to get.")
get_analysis_file_command_parser.add_argument(
"-o",
"--output-file",
help="""The name of the local file to write to. If this option is not specified then the file is written
to standard output.""",
)
get_analysis_file_command_parser.set_defaults(func=_cli_get_analysis_file)
def get_analysis_status(uuid, *args, **kwargs):
"""Get the status of an analysis.
:param str uuid: The analysis UUID.
:return: Result dictionary
"""
return _execute_api_call("analysis/status/{}".format(uuid), *args, **kwargs).json()
def _cli_get_analysis_status(args):
return get_analysis_status(remote_host=args.remote_host, ssl_verification=args.ssl_verification, uuid=args.uuid)
get_analysis_status_command_parser = _api_command(
subparsers.add_parser("get-analysis-status", help="""Get the status of an analysis.""")
)
get_analysis_status_command_parser.add_argument("uuid", help="The UUID of the analysis to get the status from.")
get_analysis_status_command_parser.set_defaults(func=_cli_get_analysis_status)
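# illustrative example (uuid taken from the submit docstring): check on a submission, then pull the results
#
#     status = get_analysis_status("960b0a0f-3ea2-465f-852f-ebccac6ae282")
#     analysis = get_analysis("960b0a0f-3ea2-465f-852f-ebccac6ae282")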
def download(uuid, target_dir, *args, **kwargs):
"""Download everything related to this uuid and write it to target_dir.
:param str uuid: The ACE analysis/alert uuid.
:param str target_dir: The directory you want everything written to.
"""
if not os.path.isdir(target_dir):
os.makedirs(target_dir)
fp, tar_path = tempfile.mkstemp(prefix="download_{}".format(uuid), suffix=".tar")
try:
r = _execute_api_call("engine/download/{}".format(uuid), stream=True, *args, **kwargs)
size = 0
for chunk in r.iter_content(io.DEFAULT_BUFFER_SIZE):
if chunk:
os.write(fp, chunk)
size += len(chunk)
os.close(fp)
        t = tarfile.open(tar_path, "r|")
        t.extractall(path=target_dir)
        t.close()
finally:
try:
os.remove(tar_path)
        except Exception as e:
            sys.stderr.write("unable to delete temporary file {}: {}\n".format(tar_path, e))
def _cli_download(args):
target_dir = args.output_dir
if not target_dir:
target_dir = args.uuid
return download(
remote_host=args.remote_host, ssl_verification=args.ssl_verification, uuid=args.uuid, target_dir=target_dir
)
download_command_parser = _api_command(
subparsers.add_parser("download", help="""Download everything related to this uuid and write it to target_dir.""")
)
download_command_parser.add_argument("uuid", help="The UUID of the analysis/alert to download.")
download_command_parser.add_argument(
"-o",
"--output-dir",
help="""The name of the directory to save the analysis into. Defaults to a new directory created relative to the
current working directory using the UUID as the name.""",
)
download_command_parser.set_defaults(func=_cli_download)
def upload(uuid, source_dir, overwrite=False, sync=True, *args, **kwargs):
"""Upload an ACE analysis/alert directory.
:param str uuid: A new UUID for ACE to use.
:param str source_dir: The directory to upload.
"""
if not os.path.isdir(source_dir):
raise ValueError("{} is not a directory".format(source_dir))
fp, tar_path = tempfile.mkstemp(suffix=".tar", prefix="upload_{}".format(uuid))
try:
tar = tarfile.open(fileobj=os.fdopen(fp, "wb"), mode="w|")
tar.add(source_dir, ".")
tar.close()
with open(tar_path, "rb") as fp:
return _execute_api_call(
"engine/upload/{}".format(uuid),
data={
"upload_modifiers": json.dumps(
{
"overwrite": overwrite,
"sync": sync,
}
)
},
method=METHOD_POST,
files=[("archive", (os.path.basename(tar_path), fp))],
).json()
finally:
try:
os.remove(tar_path)
except Exception as e:
log.warning("unable to remove {}: {}".foramt(tar_path, e))
def clear(uuid, lock_uuid, *args, **kwargs):
    """Clear the given analysis uuid using the given lock uuid. Returns True if the server returned HTTP 200."""
    return _execute_api_call("engine/clear/{}/{}".format(uuid, lock_uuid), *args, **kwargs).status_code == 200
def cloudphish_submit(url, reprocess=False, ignore_filters=False, context={}, *args, **kwargs):
"""Submit a URL for Cloudphish to analyze.
:param str url: The URL
:param bool reprocess: (optional) If True, re-analyze the URL and ignore the cache.
:param bool ignore_filters: (optional) Ignore URL filtering (forces download and analysis.)
    :param dict context: (optional) Additional context to associate with the analysis.
"""
# make sure the following keys are not in the context
for key in ["url", "reprocess", "ignore_filters"]:
if key in context:
raise ValueError("context cannot contain the keys url, reprocess or ignore_filters")
data = {"url": url, "reprocess": "1" if reprocess else "0", "ignore_filters": "1" if ignore_filters else "0"}
data.update(context)
return _execute_api_call("cloudphish/submit", data=data, method=METHOD_POST, *args, **kwargs).json()
def _cli_cloudphish_submit(args):
if args.context:
if args.context.startswith("@"):
with open(args.context[1:], "r") as fp:
args.context = json.load(fp)
return cloudphish_submit(
remote_host=args.remote_host,
ssl_verification=args.ssl_verification,
url=args.url,
reprocess=args.reprocess,
ignore_filters=args.ignore_filters,
context=args.context if args.context else {},
)
cloudphish_submit_command_parser = _api_command(
subparsers.add_parser("cloudphish-submit", help="""Submit a URL for Cloudphish to analyze.""")
)
cloudphish_submit_command_parser.add_argument("url", help="The URL to download and analyze.")
cloudphish_submit_command_parser.add_argument(
"-r",
"--reprocess",
default=False,
action="store_true",
help="Forces cloudphish to re-analyze the given url (bypassing the cache.)",
)
cloudphish_submit_command_parser.add_argument(
"-i",
"--ignore-filters",
default=False,
action="store_true",
help="Forces cloudphish to analyze the given url (bypassing any filtering it does.)",
)
cloudphish_submit_command_parser.add_argument(
"-c",
"--context",
default=None,
help="""Optional additional context to add to the request. This is a free-form JSON dict.
If this parameter starts with a @ then it is taken as the name of a JSON file to load.""",
)
cloudphish_submit_command_parser.set_defaults(func=_cli_cloudphish_submit)
def cloudphish_download(url=None, sha256=None, output_path=None, output_fp=None, *args, **kwargs):
"""Download content from Cloudphish.
    Note: either the url OR the sha256 of the url is expected to be passed.
    :param str url: (optional) The url.
    :param str sha256: (optional) The sha256 of the url.
    :param str output_path: (optional) The path to write the content to. Default: stdout
    :param output_fp: (optional) A file object/buffer to write the content to. Default: stdout
"""
if url is None and sha256 is None:
raise ValueError("you must supply either url or sha256 to cloudphish_download")
if output_path is None and output_fp is None:
output_fp = sys.stdout.buffer
elif output_fp is None:
output_fp = open(output_path, "wb")
params = {}
if url:
params["url"] = url
if sha256:
params["s"] = sha256
r = _execute_api_call("cloudphish/download", params=params, stream=True, *args, **kwargs)
size = 0
for chunk in r.iter_content(io.DEFAULT_BUFFER_SIZE):
if chunk:
output_fp.write(chunk)
size += len(chunk)
if output_path is not None:
output_fp.close()
return True
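# illustrative example (hypothetical URL): save the downloaded content to a local file
#
#     cloudphish_download(url="http://malicious.example.com/payload", output_path="payload.bin")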
def _cli_cloudphish_download(args):
url = args.target
sha256 = None
if args.sha256:
url = None
sha256 = args.target
return cloudphish_download(
remote_host=args.remote_host,
ssl_verification=args.ssl_verification,
url=url,
sha256=sha256,
output_path=args.output_file,
output_fp=sys.stdout.buffer if args.output_file is None else None,
)
cloudphish_download_command_parser = _api_command(
subparsers.add_parser("cloudphish-download", help="""Download content from Cloudphish.""")
)
cloudphish_download_command_parser.add_argument(
"target", help="""The URL (or sha256 if -s is used) to download the contents of."""
)
cloudphish_download_command_parser.add_argument(
"-s", "--sha256", default=False, action="store_true", help="Treat target as the sha256 of the URL."
)
cloudphish_download_command_parser.add_argument(
"-o", "--output-file", help="Save the content to the given file. Defaults to writing the content to stdout."
)
cloudphish_download_command_parser.set_defaults(func=_cli_cloudphish_download)
def cloudphish_clear_alert(url=None, sha256=None, *args, **kwargs):
    """Clear the ALERT status of a URL analyzed by cloudphish.
    Note: pass either the url OR the sha256 of the url.
    :return: True if the server returned HTTP 200.
    """
params = {}
if url is not None:
params["url"] = url
if sha256 is not None:
params["s"] = sha256
return _execute_api_call("cloudphish/clear_alert", params=params, *args, **kwargs).status_code == 200
def _cli_cloudphish_clear_alert(args):
url = args.target
sha256 = None
if args.sha256:
url = None
        sha256 = args.target
return cloudphish_clear_alert(
remote_host=args.remote_host, ssl_verification=args.ssl_verification, url=url, sha256=sha256
)
cloudphish_clear_alert_command_parser = _api_command(
subparsers.add_parser("cloudphish-clear-alert", help="""Clear the ALERT status of a URL analyzed by cloudphish.""")
)
cloudphish_clear_alert_command_parser.add_argument(
"target", help="""The URL (or sha256 if -s is used) to clear the alert for."""
)
cloudphish_clear_alert_command_parser.add_argument(
"-s", "--sha256", default=False, action="store_true", help="Treat target as the sha256 of the URL."
)
cloudphish_clear_alert_command_parser.set_defaults(func=_cli_cloudphish_clear_alert)
#
# supporting backwards compatibility for the old ace_client_lib.client library
#
class AlertSubmitException(Exception):
pass
class Analysis(object):
"""An ACE Analysis object.
:param str description: (optional) A brief description of this analysis data (Why? What? How?).
:param str analysis_mode: (optional) The ACE mode this analysis should be put into. 'correlation' will force an
alert creation. 'analysis' will only alert if a detection is made. Default: 'analysis'
:param str tool: (optional) The "tool" that is submitting this analysis. Meant for distinguishing your custom
hunters and detection tools. Default: 'ace_api'.
:param str tool_instance: (optional) The instance of the tool that is submitting this analysis.
    :param str type: (optional) The type of analysis this is, akin to the focus of the alert. Mainly used internally
        by some ACE modules. Default: 'generic'
    :param datetime event_time: (optional) Assign a time to this analysis. Usually the time associated with whatever
        event triggered the creation of this analysis. Default: now()
:param dict details: (optional) A dictionary of additional details to get added to the alert, think notes and
comments.
:param list observables: (optional) A list of observables to add to the request.
:param list tags: (optional) If this request becomes an Alert, these tags will get added to it.
:param list files: (optional) A list of (file_name, file_descriptor) tuples to be included in this ACE request.
    :param str queue: (optional) The queue this analysis should go into if this submission becomes an alert.
:param str instructions: (optional) A free form string value that gives the analyst instructions on what
this alert is about and/or how to analyze the data contained in the
alert.
"""
def __init__(self, *args, **kwargs):
# these just get passed to ace_api.submit function
self.submit_args = args
self.submit_kwargs = kwargs