diff --git a/AWS/lambda/lambda_function.py b/AWS/lambda/lambda_function.py
index 4dd8532..bcde5b4 100644
--- a/AWS/lambda/lambda_function.py
+++ b/AWS/lambda/lambda_function.py
@@ -48,7 +48,10 @@
 )
 sys.path.insert(1, "/tmp/")
 # FalconPy SDK - QuickScan Pro
-from falconpy import QuickScanPro  # pylint: disable=E0401,wrong-import-position
+from falconpy import (
+    APIHarnessV2,
+    QuickScanPro,
+)  # pylint: disable=E0401,wrong-import-position
 
 # AWS Secret Vars
 SECRET_STORE_NAME = os.environ["SECRET_NAME"]
@@ -83,6 +86,7 @@ def get_secret():
         secret = base64.b64decode(get_secret_value_response["SecretBinary"])
     return secret
 
+
 # Main routine
 def lambda_handler(event, _):
     """Function Handler"""
@@ -91,6 +95,7 @@ def lambda_handler(event, _):
         event["Records"][0]["s3"]["object"]["key"], encoding="utf-8"
     )
     upload_file_size = int(event["Records"][0]["s3"]["object"]["size"])
+    uber, scanner = None, None
    try:
         secret_str = get_secret()
         if secret_str:
@@ -98,17 +103,19 @@ def lambda_handler(event, _):
             falcon_client_id = secrets_dict["FalconClientId"]
             falcon_secret = secrets_dict["FalconSecret"]
             # Connect to the QuickScan Pro API
-            Scanner = QuickScanPro(
-                client_id=falcon_client_id, client_secret=falcon_secret
-            )
+            uber = APIHarnessV2(client_id=falcon_client_id, client_secret=falcon_secret)
+            scanner = QuickScanPro(auth_object=uber)
             if upload_file_size < MAX_FILE_SIZE:
                 # Get the file from S3
                 scan_file = f"/tmp/{key}"
                 s3.download_file(bucket_name, key, scan_file)
                 with open(scan_file, "rb") as upload_file:
-                    response = Scanner.upload_file(file=upload_file.read(), scan=True)
-                    # Upload the file to the CrowdStrike Falcon QuickScan Pro
-                    # response = Scanner.upload_file(file=f'/tmp/{key}', scan=True)
+                    # For now we have to use Uber class to allow sending the correct file name
+                    response = uber.command(
+                        "UploadFileMixin0Mixin94",
+                        files=[("file", (key, upload_file.read()))],
+                        data={"scan": True},
+                    )
                 if response["status_code"] > 201:
                     error_msg = (
                         f"Error uploading object {key} from "
@@ -124,14 +131,14 @@ def lambda_handler(event, _):
                 # Uploaded file unique identifier
                 upload_sha = response["body"]["resources"][0]["sha256"]
                 # Scan request ID, generated when the request for the scan is made
-                scan_id = Scanner.launch_scan(sha256=upload_sha)["body"]["resources"][
+                scan_id = scanner.launch_scan(sha256=upload_sha)["body"]["resources"][
                     0
                 ]["id"]
                 scanning = True
                 # Loop until we get a result or the function times out
                 while scanning:
                     # Retrieve our scan using our scan ID
-                    scan_results = Scanner.get_scan_result(ids=scan_id)
+                    scan_results = scanner.get_scan_result(ids=scan_id)
                     result = None
                     try:
                         if (
@@ -198,7 +205,7 @@ def lambda_handler(event, _):
             log.info(scan_msg)
 
             # Clean up the artifact
-            response = Scanner.delete_file(ids=upload_sha)
+            response = scanner.delete_file(ids=upload_sha)
             if response["status_code"] > 201:
                 log.warning("Could not remove sample (%s) from QuickScan Pro.", key)
 
diff --git a/AWS/on-demand/quickscan_target.py b/AWS/on-demand/quickscan_target.py
index 2470a2f..017c478 100644
--- a/AWS/on-demand/quickscan_target.py
+++ b/AWS/on-demand/quickscan_target.py
@@ -81,7 +81,7 @@ from logging.handlers import RotatingFileHandler
 
 import boto3
 
-from falconpy import OAuth2, QuickScanPro
+from falconpy import APIHarnessV2, QuickScanPro
 
 
 class Analysis:
@@ -171,7 +171,7 @@ def enable_logging(self):
 
     def load_api_config(self):
         """Return an instance of the authentication class"""
-        return OAuth2(
+        return APIHarnessV2(
             client_id=self.config.falcon_client_id,
             client_secret=self.config.falcon_client_secret,
         )
@@ -279,9 +279,24 @@ def process_single_file(self, item, max_file_size):
         s3 = boto3.client("s3")
         s3.download_file(self.config.target_dir, item.key, scan_file)
         with open(scan_file, "rb") as file_data:
+            # Upload file
+            # For now we have to use Uber class to allow sending the correct file name
+            response = self.auth.command(
+                "UploadFileMixin0Mixin94",
+                files=[("file", (filename, file_data))],
+                data={"scan": True},
+            )
+
+            if response["status_code"] >= 300:
+                if "errors" in response["body"]:
+                    self.logger.warning(
+                        "%s. Unable to upload file.",
+                        response["body"]["errors"][0]["message"],
+                    )
+                else:
+                    self.logger.warning("Rate limit exceeded.")
+                return None
 
-            # Upload file
-            response = self.scanner.upload_file(file=file_data, scan=True)
             sha = response["body"]["resources"][0]["sha256"]
             self.logger.info("Uploaded %s to %s", filename, sha)
@@ -392,28 +407,24 @@ def parse_command_line():
         "--region",
         dest="region",
         help="Region the target bucket resides in",
-        required=True
+        required=True,
     )
     parser.add_argument(
         "-t",
         "--target",
         dest="target",
         help="S3 bucket to scan. Bucket must have 's3://' prefix.",
-        required=True
+        required=True,
     )
     parser.add_argument(
-        "-k",
-        "--key",
-        dest="key",
-        help="CrowdStrike Falcon API KEY",
-        required=True
+        "-k", "--key", dest="key", help="CrowdStrike Falcon API KEY", required=True
     )
     parser.add_argument(
         "-s",
         "--secret",
         dest="secret",
         help="CrowdStrike Falcon API SECRET",
-        required=True
+        required=True,
     )
     return parser.parse_args()
diff --git a/Azure/function-app/function_app.py b/Azure/function-app/function_app.py
index 2bb99e2..ff80c0c 100644
--- a/Azure/function-app/function_app.py
+++ b/Azure/function-app/function_app.py
@@ -1,3 +1,5 @@
+# pylint: disable=W1401
+# flake8: noqa
 """CrowdStrike Azure Storage Account Container Protection with QuickScan.
 
 Based on the work of @jshcodes w/ s3-bucket-protection & @carlos.matos w/ cloud-storage-protection
@@ -12,7 +14,7 @@
 import logging
 import azure.functions as func
 import azurefunctions.extensions.bindings.blob as blob
-from falconpy import OAuth2, QuickScanPro
+from falconpy import APIHarnessV2, QuickScanPro
 
 app = func.FunctionApp()
 
@@ -38,12 +40,10 @@
     raise SystemExit("FALCON_CLIENT_SECRET environment variable not set") from exc
 
 # Authenticate to the CrowdStrike Falcon API
-auth = OAuth2(
-    creds={"client_id": client_id, "client_secret": client_secret}, base_url=BASE_URL
-)
+uber = APIHarnessV2(client_id=client_id, client_secret=client_secret, base_url=BASE_URL)
 
 # Connect to the QuickScan Pro API
-Scanner = QuickScanPro(auth_object=auth)
+Scanner = QuickScanPro(auth_object=uber)
 
 
 @app.blob_trigger(
@@ -61,9 +61,11 @@ def container_protection(client: blob.BlobClient):
         # Get the blob file
         blob_data = io.BytesIO(client.download_blob().read())
         # Upload the file to QuickScan Pro
-        response = Scanner.upload_file(
-            file=blob_data,
-            scan=True,
+        # For now we have to use Uber class to allow sending the correct file name
+        response = uber.command(
+            "UploadFileMixin0Mixin94",
+            files=[("file", (file_name, blob_data))],
+            data={"scan": True},
         )
         if response["status_code"] > 201:
             logging.warning(str(response))
@@ -151,7 +153,9 @@ def container_protection(client: blob.BlobClient):
         # Clean up the artifact in QuickScan Pro
         response = Scanner.delete_file(ids=upload_sha)
         if response["status_code"] > 201:
-            logging.warning("Could not remove sample %s from QuickScan Pro.", file_name)
+            logging.warning(
+                "Could not remove sample %s from QuickScan Pro.", file_name
+            )
         else:
             logging.info("Sample %s removed from QuickScan Pro.", file_name)
     except Exception as err:
diff --git a/Azure/on-demand/quickscan_target.py b/Azure/on-demand/quickscan_target.py
index c80b36c..fa46f67 100644
--- a/Azure/on-demand/quickscan_target.py
+++ b/Azure/on-demand/quickscan_target.py
@@ -1,3 +1,5 @@
+# pylint: disable=W1401
+# flake8: noqa
 import os
 import io
 import time
@@ -7,7 +9,7 @@
 from logging.handlers import RotatingFileHandler
 from azure.identity import DefaultAzureCredential
 from azure.storage.blob import BlobServiceClient
-from falconpy import OAuth2, QuickScanPro
+from falconpy import APIHarnessV2, QuickScanPro
 
 logging.getLogger("azure.core.pipeline.policies.http_logging_policy").setLevel(
     logging.WARNING
@@ -72,6 +74,7 @@ def __init__(self):
         self.logger = None
         self.auth = None
         self.scanner = None
+        self.az_container = None
 
     def initialize(self):
         """Initialize the application components"""
@@ -100,7 +103,7 @@ def enable_logging(self):
 
     def load_api_config(self):
         """Return an instance of the authentication class"""
-        return OAuth2(
+        return APIHarnessV2(
             client_id=self.config.falcon_client_id,
             client_secret=self.config.falcon_client_secret,
         )
@@ -120,6 +123,7 @@ def run(self):
             raise
 
     def retrieve_all_items(self, az_container):
+        """Retrieve all items from an Azure container."""
         summaries = []
         page = az_container.list_blobs(name_starts_with=self.config.target_prefix)
 
@@ -143,11 +147,11 @@ def upload_bucket_samples(self):
             )
         except Exception as err:
             self.logger.error(
-                "Unable to connect to container %s. %s", self.config.target_dir, err
+                "Unable to connect to container %s. %s", self.config.target_prefix, err
             )
             raise SystemExit(
                 f"Unable to connect to container {self.config.container_name}. {err}"
-            )
+            ) from err
 
         summaries = self.retrieve_all_items(az_container)
         total_files = len(summaries)
@@ -201,6 +205,7 @@ def upload_bucket_samples(self):
         self.logger.info("Completed processing all %d files", total_files)
 
     def process_single_file(self, item, max_file_size):
+        """Process a single file: upload, scan, and get results."""
         if item.size > max_file_size:
             self.logger.warning(
                 "Skipping %s: File size %d bytes exceeds maximum of %d bytes",
@@ -217,14 +222,22 @@ def process_single_file(self, item, max_file_size):
             )
 
         # Upload file
-        response = self.scanner.upload_file(file=file_data, scan=True)
-        if "errors" in response["body"]:
-            if len(response["body"]["errors"]) > 0:
+        # For now we have to use Uber class to allow sending the correct file name
+        response = self.auth.command(
+            "UploadFileMixin0Mixin94",
+            files=[("file", (filename, file_data))],
+            data={"scan": True},
+        )
+        if response["status_code"] >= 300:
+            if "errors" in response["body"]:
                 self.logger.warning(
-                    "There was an error while uploading %s to be scanned: %s",
-                    filename,
+                    "%s. Unable to upload file.",
                     response["body"]["errors"][0]["message"],
                 )
+            else:
+                self.logger.warning("Rate limit exceeded.")
+            return None
+
         sha = response["body"]["resources"][0]["sha256"]
         self.logger.info("Uploaded %s to %s", filename, sha)
diff --git a/GCP/cloud-function/main.py b/GCP/cloud-function/main.py
index 3c9b5c0..235c0ef 100644
--- a/GCP/cloud-function/main.py
+++ b/GCP/cloud-function/main.py
@@ -37,7 +37,7 @@
 from google.cloud import storage
 
 # FalconPy SDK - Auth, QuickScan Pro
-from falconpy import OAuth2, QuickScanPro  # pylint: disable=E0401
+from falconpy import QuickScanPro, APIHarnessV2  # pylint: disable=E0401
 
 # Maximum file size for scan (256mb)
 MAX_FILE_SIZE = 256 * 1024 * 1024
@@ -70,11 +70,10 @@
     raise SystemExit("FALCON_CLIENT_SECRET environment variable not set") from exc
 
 # Authenticate to the CrowdStrike Falcon API
-auth = OAuth2(
-    creds={"client_id": client_id, "client_secret": client_secret}, base_url=BASE_URL
-)
+uber = APIHarnessV2(client_id=client_id, client_secret=client_secret, base_url=BASE_URL)
+
 # Connect to the QuickScan Pro API
-Scanner = QuickScanPro(auth_object=auth)
+Scanner = QuickScanPro(auth_object=uber)
 
 
 # Main routine
@@ -89,7 +88,12 @@ def cs_bucket_protection(event, _):
         blob = bucket.blob(file_name)
         blob_data = blob.download_as_bytes()
         # Upload the file to the CrowdStrike Falcon QuickScan Pro
-        response = Scanner.upload_file(file=blob_data, scan=True)
+        # For now we have to use Uber class to allow sending the correct file name
+        response = uber.command(
+            "UploadFileMixin0Mixin94",
+            files=[("file", (file_name, blob_data))],
+            data={"scan": True},
+        )
         if response["status_code"] > 201:
             error_msg = (
                 f"Error uploading object {file_name} from "
diff --git a/GCP/on-demand/quickscan_target.py b/GCP/on-demand/quickscan_target.py
index eb94ea4..399af51 100644
--- a/GCP/on-demand/quickscan_target.py
+++ b/GCP/on-demand/quickscan_target.py
@@ -79,7 +79,7 @@
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from logging.handlers import RotatingFileHandler
 from google.cloud import storage
-from falconpy import OAuth2, QuickScanPro
+from falconpy import APIHarnessV2, QuickScanPro
 
 
 class Analysis:
@@ -167,7 +167,7 @@ def enable_logging(self):
 
     def load_api_config(self):
         """Return an instance of the authentication class"""
-        return OAuth2(
+        return APIHarnessV2(
             client_id=self.config.falcon_client_id,
             client_secret=self.config.falcon_client_secret,
         )
@@ -275,7 +275,22 @@ def process_single_file(self, item, max_file_size):
         file_data = item.download_as_bytes()
 
         # Upload file
-        response = self.scanner.upload_file(file=file_data, scan=True)
+        # For now we have to use Uber class to allow sending the correct file name
+        response = self.auth.command(
+            "UploadFileMixin0Mixin94",
+            files=[("file", (filename, file_data))],
+            data={"scan": True},
+        )
+        if response["status_code"] >= 300:
+            if "errors" in response["body"]:
+                self.logger.warning(
+                    "%s. Unable to upload file.",
+                    response["body"]["errors"][0]["message"],
+                )
+            else:
+                self.logger.warning("Rate limit exceeded.")
+            return None
+
         sha = response["body"]["resources"][0]["sha256"]
         self.logger.info("Uploaded %s to %s", filename, sha)
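
The change repeated in every file above is the same: authentication moves to the Uber class (APIHarnessV2) so the upload request can carry the sample's original file name via the UploadFileMixin0Mixin94 operation, while the QuickScanPro service class (sharing the same auth object) is still used for launch_scan, get_scan_result, and delete_file. Below is a minimal standalone sketch of that flow, assuming only the FalconPy classes and calls shown in the diff; the environment-variable credentials and the /tmp/example.bin path are placeholders for illustration, not part of the change.

import os

from falconpy import APIHarnessV2, QuickScanPro

# Uber class handles authentication; the service class reuses the same auth object
uber = APIHarnessV2(
    client_id=os.environ["FALCON_CLIENT_ID"],
    client_secret=os.environ["FALCON_CLIENT_SECRET"],
)
scanner = QuickScanPro(auth_object=uber)

sample_path = "/tmp/example.bin"  # placeholder path
file_name = os.path.basename(sample_path)

with open(sample_path, "rb") as sample:
    # Uber-class upload so the original file name reaches QuickScan Pro
    response = uber.command(
        "UploadFileMixin0Mixin94",
        files=[("file", (file_name, sample.read()))],
        data={"scan": True},
    )

if response["status_code"] >= 300:
    raise SystemExit(f"Upload failed: {response['body']}")

upload_sha = response["body"]["resources"][0]["sha256"]

# Launch the scan and fetch the result with the service class
# (the functions and scripts in the diff poll this until a verdict is returned)
scan_id = scanner.launch_scan(sha256=upload_sha)["body"]["resources"][0]["id"]
result = scanner.get_scan_result(ids=scan_id)
print(result["body"])

# Clean up the uploaded artifact
scanner.delete_file(ids=upload_sha)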