Skip to content

Commit

Permalink
elastic module
Browse files Browse the repository at this point in the history
  • Loading branch information
github-actions committed Nov 22, 2024
1 parent 87a1517 commit b3c40d4
Show file tree
Hide file tree
Showing 4 changed files with 171 additions and 12 deletions.
22 changes: 22 additions & 0 deletions bbot/modules/output/elastic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
from .http import HTTP


class Elastic(HTTP):
    """Output module that ships every scan event to an Elasticsearch index.

    Thin configuration subclass of the generic HTTP output module: it only
    overrides metadata and option defaults; all request logic lives in HTTP.
    """

    # Subscribe to every event type so the complete scan output is indexed.
    watched_events = ["*"]

    metadata = {
        "description": "Send scan results to Elasticsearch",
        "created_date": "2022-11-21",
        "author": "@TheTechromancer",
    }

    # Defaults mirror a stock Elasticsearch install; "url" must be set by the user.
    options = {"url": "", "username": "elastic", "password": "changeme", "timeout": 10}

    options_desc = {
        "url": "Elastic URL (e.g. https://localhost:9200/<your_index>/_doc)",
        "username": "Elastic username",
        "password": "Elastic password",
        "timeout": "HTTP timeout",
    }
6 changes: 5 additions & 1 deletion bbot/modules/output/http.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
from bbot.models.pydantic import Event
from bbot.modules.output.base import BaseOutputModule


Expand Down Expand Up @@ -48,12 +49,15 @@ async def setup(self):

async def handle_event(self, event):
while 1:
event_json = event.json()
event_pydantic = Event(**event_json)
event_json = event_pydantic.model_dump(exclude_none=True)
response = await self.helpers.request(
url=self.url,
method=self.method,
auth=self.auth,
headers=self.headers,
json=event.json(),
json=event_json,
)
is_success = False if response is None else response.is_success
if not is_success:
Expand Down
130 changes: 130 additions & 0 deletions bbot/test/test_step_2/module_tests/test_module_elastic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
import time
import httpx
import asyncio

from .base import ModuleTestBase


class TestElastic(ModuleTestBase):
    """Integration test for the `elastic` output module.

    Spins up a disposable Elasticsearch container, runs a scan with the
    elastic output module pointed at it, then verifies every scan event
    landed in the index and round-trips through the pydantic Event model.
    """

    config_overrides = {
        "modules": {
            "elastic": {
                "url": "https://localhost:9200/bbot_test_events/_doc",
                "username": "elastic",
                "password": "bbotislife",
            }
        }
    }
    skip_distro_tests = True

    async def setup_before_prep(self, module_test):
        """Start an Elasticsearch container and block until it accepts requests."""
        # Start Elasticsearch container (detached; --rm removes it on stop).
        # The disk-watermark overrides keep the single-node cluster writable
        # even on CI hosts whose disks are nearly full.
        await asyncio.create_subprocess_exec(
            "docker",
            "run",
            "--name",
            "bbot-test-elastic",
            "--rm",
            "-e",
            "ELASTIC_PASSWORD=bbotislife",
            "-e",
            "cluster.routing.allocation.disk.watermark.low=96%",
            "-e",
            "cluster.routing.allocation.disk.watermark.high=97%",
            "-e",
            "cluster.routing.allocation.disk.watermark.flood_stage=98%",
            "-p",
            "9200:9200",
            "-d",
            "docker.elastic.co/elasticsearch/elasticsearch:8.16.0",
        )

        # Connect to Elasticsearch with retry logic.
        # verify=False because the container serves a self-signed certificate.
        async with httpx.AsyncClient(verify=False) as client:
            while True:
                try:
                    # Attempt a simple operation to confirm the connection
                    response = await client.get(
                        "https://localhost:9200/_cat/health", auth=("elastic", "bbotislife")
                    )
                    response.raise_for_status()
                    break
                except Exception as e:
                    print(f"Connection failed: {e}. Retrying...", flush=True)
                    # BUGFIX: time.sleep() in a coroutine blocks the entire event
                    # loop; asyncio.sleep() yields control while waiting.
                    await asyncio.sleep(0.5)

            # Ensure the index is empty so documents left over from a previous
            # run cannot skew the assertions in check().
            await client.delete("https://localhost:9200/bbot_test_events", auth=("elastic", "bbotislife"))
            print("Elasticsearch index cleaned up", flush=True)

    async def check(self, module_test, events):
        """Verify the indexed documents match the scan's events, then tear down.

        Raises AssertionError if counts, timestamps, the main DNS_NAME event,
        or the full event payloads do not match expectations.
        """
        try:
            from bbot.models.pydantic import Event

            events_json = [e.json() for e in events]
            events_json.sort(key=lambda x: x["timestamp"])

            # Connect to Elasticsearch
            async with httpx.AsyncClient(verify=False) as client:

                # Refresh the index so just-written documents become searchable.
                await client.post("https://localhost:9200/bbot_test_events/_refresh", auth=("elastic", "bbotislife"))

                # Fetch all events from the index (size=100 comfortably covers a test scan).
                response = await client.get(
                    "https://localhost:9200/bbot_test_events/_search?size=100", auth=("elastic", "bbotislife")
                )
                response_json = response.json()
                import json

                print(f"response: {json.dumps(response_json, indent=2)}")
                db_events = [hit["_source"] for hit in response_json["hits"]["hits"]]

                # make sure we have the same number of events
                assert len(events_json) == len(db_events)

                # Timestamps are stored as epoch floats by the output pipeline.
                for db_event in db_events:
                    assert isinstance(db_event["timestamp"], float)
                    assert isinstance(db_event["inserted_at"], float)

                # Convert to Pydantic objects and dump them
                db_events_pydantic = [Event(**e).model_dump(exclude_none=True) for e in db_events]
                db_events_pydantic.sort(key=lambda x: x["timestamp"])

                # Find the main event with type DNS_NAME and data blacklanternsecurity.com
                main_event = next(
                    (
                        e
                        for e in db_events_pydantic
                        if e.get("type") == "DNS_NAME" and e.get("data") == "blacklanternsecurity.com"
                    ),
                    None,
                )
                assert (
                    main_event is not None
                ), "Main event with type DNS_NAME and data blacklanternsecurity.com not found"

                # Ensure it has the reverse_host attribute
                expected_reverse_host = "blacklanternsecurity.com"[::-1]
                assert (
                    main_event.get("reverse_host") == expected_reverse_host
                ), f"reverse_host attribute is not correct, expected {expected_reverse_host}"

                # Events don't match exactly because the elastic ones have reverse_host and inserted_at
                assert events_json != db_events_pydantic
                for db_event in db_events_pydantic:
                    db_event.pop("reverse_host")
                    db_event.pop("inserted_at")
                # They should match after removing reverse_host
                assert events_json == db_events_pydantic, "Events do not match"

        finally:
            # Clean up: delete the test index, then stop the container (--rm removes it).
            async with httpx.AsyncClient(verify=False) as client:
                # NOTE(review): "ignore" is sent here as a URL query parameter, but it
                # is an elasticsearch-py client option, not a REST API parameter --
                # likely a no-op; confirm it's intentional.
                response = await client.delete(
                    "https://localhost:9200/bbot_test_events",
                    auth=("elastic", "bbotislife"),
                    params={"ignore": "400,404"},
                )
                print("Deleted documents from index", flush=True)
            await asyncio.create_subprocess_exec(
                "docker", "stop", "bbot-test-elastic", stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
            )
25 changes: 14 additions & 11 deletions docs/scanning/output.md
Original file line number Diff line number Diff line change
Expand Up @@ -155,27 +155,30 @@ config:

### Elasticsearch

When outputting to Elastic, use the `http` output module with the following settings (replace `<your_index>` with your desired index, e.g. `bbot`):
- Step 1: Spin up a quick Elasticsearch docker image

```bash
docker run -d -p 9200:9200 --name=bbot-elastic -v "$(pwd)/elastic_data:/usr/share/elasticsearch/data" -e ELASTIC_PASSWORD=bbotislife -m 1GB docker.elastic.co/elasticsearch/elasticsearch:8.16.0
```

- Step 2: Execute a scan with `elastic` output module

```bash
# send scan results directly to elasticsearch
bbot -t evilcorp.com -om http -c \
modules.http.url=http://localhost:8000/<your_index>/_doc \
modules.http.siem_friendly=true \
modules.http.username=elastic \
modules.http.password=changeme
# note: you can replace "bbot_events" with your own index name
bbot -t evilcorp.com -om elastic -c \
modules.elastic.url=https://localhost:9200/bbot_events/_doc \
modules.elastic.password=bbotislife
```

Alternatively, via a preset:

```yaml title="elastic_preset.yml"
config:
modules:
http:
url: http://localhost:8000/<your_index>/_doc
siem_friendly: true
username: elastic
password: changeme
elastic:
url: https://localhost:9200/bbot_events/_doc
password: bbotislife
```

### Splunk
Expand Down

0 comments on commit b3c40d4

Please sign in to comment.