diff --git a/AzureTemplates/Blob storage/azuredeploy.json b/AzureTemplates/Blob storage/azuredeploy.json new file mode 100644 index 0000000..49264e7 --- /dev/null +++ b/AzureTemplates/Blob storage/azuredeploy.json @@ -0,0 +1,147 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "location": { + "type": "string", + "metadata": { + "description": "Location of the container. We recommend to use the same value as the \"Region\" parameter." + } + }, + "Name": { + "type": "String", + "defaultValue": "dashlane-audit-logs-sa", + "allowedValues": ["dashlane-audit-logs-sa"] + }, + "Storage account name": { + "type": "String", + "defaultValue": "" + }, + "Share filename": { + "type": "String", + "defaultValue": "fluentbit-configuration" + }, + "File share path": { + "type": "String", + "defaultValue": "/dashlane", + "allowedValues": ["/dashlane"] + }, + "Dashlane team UUID": { + "type": "secureString", + "metadata": { + "description": "Dashlane's team UUID" + }, + "minLength": 36, + "maxLength": 36 + }, + "Dashlane team access key": { + "type": "secureString", + "metadata": { + "description": "Dashlane's team access key" + }, + "minLength": 16, + "maxLength": 16 + }, + "Dashlane team secret key": { + "type": "secureString", + "metadata": { + "description": "Dashlane's team secret key" + }, + "minLength": 64, + "maxLength": 64 + } + }, + "resources": [ + { + "type": "Microsoft.Storage/storageAccounts", + "apiVersion": "2021-04-01", + "name": "[parameters('Storage account name')]", + "location": "[parameters('location')]", + "kind": "StorageV2", + "sku": { + "name": "Standard_LRS", + "tier": "Standard" + } + }, + { + "type": "Microsoft.Storage/storageAccounts/fileServices/shares", + "apiVersion": "2021-04-01", + "name": "[concat(parameters('Storage account name'), '/default/', parameters('Share filename'))]", + "dependsOn": [ + "[resourceId('Microsoft.Storage/storageAccounts', 
parameters('Storage account name'))]" + ] + }, + { + "type": "Microsoft.ContainerInstance/containerGroups", + "apiVersion": "2021-03-01", + "name": "[parameters('Name')]", + "location": "[parameters('location')]", + "dependsOn": [ + "[resourceId('Microsoft.Storage/storageAccounts/fileServices/shares', parameters('Storage account name'), 'default', parameters('Share filename'))]" + ], + "properties": { + "containers": [ + { + "name": "[parameters('Name')]", + "properties": { + "image": "dashlane/audit-logs", + "environmentVariables": [ + { + "name": "DASHLANE_TEAM_UUID", + "value": "[parameters('Dashlane team UUID')]" + }, + { + "name": "DASHLANE_TEAM_ACCESS_KEY", + "value": "[parameters('Dashlane team access key')]" + }, + { + "name": "DASHLANE_TEAM_SECRET_KEY", + "value": "[parameters('Dashlane team secret key')]" + }, + { + "name": "DASHLANE_CLI_FLUENTBIT_CONF", + "value": "[concat(parameters('File share path'), '/fluent-bit.conf')]" + }, + { + "name": "STORAGE_ACCOUNT_NAME", + "value": "[parameters('Storage account name')]" + }, + { + "name": "ACCESS_KEY", + "value": "[listKeys(parameters('Storage account name'), '2019-06-01').keys[0].value]" + } + + ], + "resources": { + "requests": { + "memoryInGB": 1.5, + "cpu": 1 + } + }, + "volumeMounts": [ + { + "name": "[parameters('Share filename')]", + "mountPath": "[parameters('File share path')]" + } + ] + } + } + ], + "osType": "Linux", + "volumes": [ + { + "name": "[parameters('Share filename')]", + "azureFile": { + "shareName": "[parameters('Share filename')]", + "readOnly": false, + "storageAccountName": "[parameters('Storage account name')]", + "storageAccountKey": "[listKeys(parameters('Storage account name'), '2019-06-01').keys[0].value]" + } + } + ], + "restartPolicy": "Always" + } + } + ], + "outputs": {} +} \ No newline at end of file diff --git a/README.md b/README.md index c70a3e4..720959c 100644 --- a/README.md +++ b/README.md @@ -70,35 +70,44 @@ To send your Dashlane audit logs on Azure in a Log 
Analytics Workspace, you can ### Azure blob storage -If you want to send your logs to an Azure storage account, you need to have the following information: +If you want to send your logs to an Azure storage account, you can use the deployment template we provide in this repository, which will: +- Create a storage account and a file share to upload a custom Fluentbit configuration file +- Create a container instance running the Docker image with your custom file +You will need: - Your Dashlane credentials -- Your storage account name -- Your storage account access key +- A custom Fluentbit configuration file -You can deploy the Dashlane Docker image in a container instance by running this simple command and be able to see the logs in the stdout of the container. -``` -az container create -g $RESOURCE_GORUP --name dashlane-audit-logs --image sgravis/dcli-log-catcher:0.2 -e DASHLANE_TEAM_UUID=XXX DASHLANE_TEAM_ACCESS_KEY=XXX DASHLANE_TEAM_SECRET_KEY=XXX STORAGE_ACCOUNT_KEY=XXX -``` +>**Click on the button to start the deployment** +> [![Deploy to Azure](https://aka.ms/deploytoazurebutton)](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FDashlane%2Fdashlane-audit-logs%2Fmain%2FAzureTemplates%2FBlob%20storage%2Fazuredeploy.json) -As a second step, you need to update your Fluentbit configuration file by adding the following output configuration +Once your container is deployed, copy the following configuration into a file called "fluent-bit.conf". 
``` +[INPUT] + Name stdin + Tag dashlane + +[OUTPUT] + Name stdout + Match * + Format json_lines + [OUTPUT] name azure_blob match * - account_name dashlaneauditlogs - shared_key ${STORAGE_ACCOUNT_KEY} - container_name fluentbit + account_name ${STORAGE_ACCOUNT_NAME} + shared_key ${ACCESS_KEY} + container_name audit-logs auto_create_container on tls on blob_type blockblob ``` - -In this configuration, we are telling Fluentbit to send the logs on a storage account named "dashlaneauditlogs" in the container "fluentbit". Be sure to validate that your Azure configuration matches the Fluentbit output configuration. +Then upload it to the storage account you just created. In the Azure Portal, go to **Storage accounts**, select the one you just created, go to **File shares**, select **fluentbit-configuration** and upload your configuration file. > The "blob_type" configuration specifies to create a blob for every log entry on the storage account, which facilitates the logs manipulation for eventual post-processing treatment. -> To pass your custom configuration file, you can create an Azure file share and use it when you create your container, as described here: https://learn.microsoft.com/en-us/azure/container-instances/container-instances-volume-azure-files + +> The configuration provided above is meant to work out of the box, but can be customized to suit your needs. You can refer to Fluentbit's documentation to see all available options: https://docs.fluentbit.io/manual/pipeline/outputs/azure_blob ## Splunk