Skip to content

Commit

Permalink
Merge branch 'main' into trinity--rate-limit-logs
Browse files Browse the repository at this point in the history
  • Loading branch information
trinity-1686a committed Jan 31, 2024
2 parents 7796542 + a2974e5 commit eebebd6
Show file tree
Hide file tree
Showing 164 changed files with 9,007 additions and 4,200 deletions.
48 changes: 48 additions & 0 deletions config/templates/gh-archive.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# Index config template for the GH Archive dataset (gharchive.org).
# Applied automatically to any index whose id matches `index_id_patterns`.
version: 0.7

template_id: gh-archive

index_id_patterns:
  - gh-archive*

description: Index config template for the GH Archive dataset (gharchive.org)

priority: 0

doc_mapping:
  field_mappings:
    # Event id: raw tokenizer keeps the id as a single exact-match token.
    - name: id
      type: text
      tokenizer: raw
    # Event type (e.g. PushEvent); fast field enables aggregations.
    - name: type
      type: text
      fast: true
      tokenizer: raw
    - name: public
      type: bool
      fast: true
    # Nested event payloads are indexed as dynamic JSON.
    - name: payload
      type: json
      tokenizer: default
    - name: org
      type: json
      tokenizer: default
    - name: repo
      type: json
      tokenizer: default
    - name: actor
      type: json
      tokenizer: default
    - name: other
      type: json
      tokenizer: default
    # Event timestamp; fast field truncated to seconds for range queries.
    - name: created_at
      type: datetime
      fast: true
      input_formats:
        - rfc3339
      fast_precision: seconds
  timestamp_field: created_at

indexing_settings:
  # Commit a new split at most every 10 seconds.
  commit_timeout_secs: 10
36 changes: 36 additions & 0 deletions config/templates/stackoverflow.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Index config template for the Stackoverflow quickstart tutorial
# (quickwit.io/docs/get-started/quickstart).
version: 0.7

template_id: stackoverflow

index_id_patterns:
  - stackoverflow*

description: Index config template for the Stackoverflow tutorial (quickwit.io/docs/get-started/quickstart)

priority: 0

doc_mapping:
  field_mappings:
    # Question title: full-text indexed with positions so phrase queries work;
    # stored so it can be returned in hits.
    - name: title
      type: text
      tokenizer: default
      record: position
      stored: true
    - name: body
      type: text
      tokenizer: default
      record: position
      stored: true
    # Question creation time; fast field truncated to seconds for range queries.
    - name: creationDate
      type: datetime
      fast: true
      input_formats:
        - rfc3339
      fast_precision: seconds
  timestamp_field: creationDate

search_settings:
  # Fields searched when a query does not name a field explicitly.
  default_search_fields: [title, body]

indexing_settings:
  # Commit a new split at most every 10 seconds.
  commit_timeout_secs: 10
7 changes: 5 additions & 2 deletions distribution/lambda/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -67,14 +67,17 @@ deploy-mock-data: package check-env

# address https://github.com/aws/aws-cdk/issues/20060
# `cdk destroy` fails if the asset files referenced by the stack are missing,
# so recreate empty placeholders before destroying.
# NOTE(review): assumes these targets are not declared .PHONY elsewhere in the
# file — duplicate .PHONY declarations are harmless in GNU Make.
.PHONY: before-destroy destroy-hdfs destroy-mock-data

before-destroy:
	mkdir -p cdk.out
	touch $(INDEXER_PACKAGE_PATH)
	touch $(SEARCHER_PACKAGE_PATH)

# Empty the index store bucket first: CloudFormation cannot delete a
# non-empty S3 bucket. --force skips the interactive confirmation prompt.
destroy-hdfs: before-destroy
	python -c 'from cdk import cli; cli.empty_hdfs_bucket()'
	cdk destroy --force -a cdk/app.py HdfsStack

destroy-mock-data: before-destroy
	python -c 'from cdk import cli; cli.empty_mock_data_buckets()'
	cdk destroy --force -a cdk/app.py MockDataStack

clean:
rm -rf cdk.out
Expand Down
37 changes: 30 additions & 7 deletions distribution/lambda/cdk/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,13 +269,16 @@ def get_logs(


def download_logs_to_file(request_id: str, function_name: str, invoke_start: float):
    """Best-effort download of a Lambda invocation's logs to a local file.

    Writes the log lines returned by `get_logs` for `request_id` to
    `lambda.<request_id>.log` in the current directory.

    Args:
        request_id: Lambda invocation request id used to filter log events.
        function_name: Name of the invoked Lambda function.
        invoke_start: Invocation start time as a Unix timestamp in seconds;
            converted to milliseconds before being passed to `get_logs`.

    Failures are printed and swallowed on purpose: missing logs must not
    abort the caller's invoke flow (logs may simply not be available yet).
    """
    try:
        with open(f"lambda.{request_id}.log", "w") as f:
            for log in get_logs(
                function_name,
                request_id,
                int(invoke_start * 1000),
            ):
                f.write(log)
    except Exception as e:
        print(f"Failed to download logs: {e}")


def invoke_mock_data_searcher():
Expand All @@ -288,11 +291,31 @@ def invoke_mock_data_searcher():


def _clean_s3_bucket(bucket_name: str, prefix: str = ""):
    """Delete every object under `prefix` in the given S3 bucket."""
    print(f"Cleaning up bucket {bucket_name}/{prefix}...")
    target_bucket = session.resource("s3").Bucket(bucket_name)
    target_bucket.objects.filter(Prefix=prefix).delete()


def empty_hdfs_bucket():
    """Remove all objects from the HDFS stack's index store bucket."""
    index_store_bucket = _get_cloudformation_output_value(
        app.HDFS_STACK_NAME, hdfs_stack.INDEX_STORE_BUCKET_NAME_EXPORT_NAME
    )
    _clean_s3_bucket(index_store_bucket)


def empty_mock_data_buckets():
    """Remove all objects from the mock data stack's buckets.

    Empties the index store bucket first, then the source bucket — both
    looked up via their CloudFormation output export names.
    """
    for export_name in (
        mock_data_stack.INDEX_STORE_BUCKET_NAME_EXPORT_NAME,
        mock_data_stack.SOURCE_BUCKET_NAME_EXPORT_NAME,
    ):
        _clean_s3_bucket(
            _get_cloudformation_output_value(app.MOCK_DATA_STACK_NAME, export_name)
        )


@cache
def _git_commit():
return subprocess.run(
Expand Down
14 changes: 14 additions & 0 deletions distribution/lambda/cdk/stacks/examples/mock_data_stack.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
from ..services.quickwit_service import QuickwitService

# CloudFormation output export names for this stack's resources; the bucket
# names are looked up by export name from cdk/cli.py to empty the buckets
# before `cdk destroy`.
SEARCHER_FUNCTION_NAME_EXPORT_NAME = "mock-data-searcher-function-name"
INDEX_STORE_BUCKET_NAME_EXPORT_NAME = "mock-data-index-store-bucket-name"
SOURCE_BUCKET_NAME_EXPORT_NAME = "mock-data-source-bucket-name"


class Source(Construct):
Expand Down Expand Up @@ -66,6 +68,12 @@ def __init__(
mock_data_bucket.add_object_created_notification(
aws_s3_notifications.LambdaDestination(qw_svc.indexer.lambda_function)
)
aws_cdk.CfnOutput(
self,
"source-bucket-name",
value=mock_data_bucket.bucket_name,
export_name=SOURCE_BUCKET_NAME_EXPORT_NAME,
)


class SearchAPI(Construct):
Expand Down Expand Up @@ -164,6 +172,12 @@ def __init__(
api_key=search_api_key,
)

aws_cdk.CfnOutput(
self,
"index-store-bucket-name",
value=qw_svc.bucket.bucket_name,
export_name=INDEX_STORE_BUCKET_NAME_EXPORT_NAME,
)
aws_cdk.CfnOutput(
self,
"searcher-function-name",
Expand Down
2 changes: 2 additions & 0 deletions docs/guides/e2e-serverless-aws-lambda.md
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,9 @@ curl -d '{"query":"quantity:>5", "max_hits": 10}' \
--compressed
```

:::note
The index is not created until the first run of the Indexer, so you might need a few minutes before your first search request succeeds. The API Gateway key configuration also takes a minute or two to propagate, so the first requests might receive an authorization error response.
:::

Because the JSON query responses are often quite verbose, the Searcher Lambda always compresses them before sending them on the wire. It is crucial to keep this size low, both to avoid hitting the Lambda payload size limit of 6MB and to avoid egress costs at around $0.10/GB. We do this regardless of the `accept-encoding` request header, which is why the `--compressed` flag must be passed to `curl`.

Expand Down
Loading

0 comments on commit eebebd6

Please sign in to comment.