Skip to content

Commit

Permalink
docs/11792-add-link-to-ssl-online-reference
Browse files Browse the repository at this point in the history
merged main
  • Loading branch information
StacieClark-Elastic committed Jan 30, 2025
2 parents db93255 + 79cff12 commit bb820e5
Show file tree
Hide file tree
Showing 260 changed files with 96,377 additions and 2,739 deletions.
4 changes: 2 additions & 2 deletions .buildkite/hooks/pre-command
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ if [ -n "${ELASTIC_PACKAGE_LINKS_FILE_PATH+x}" ]; then
export ELASTIC_PACKAGE_LINKS_FILE_PATH=${BASE_DIR}/${ELASTIC_PACKAGE_LINKS_FILE_PATH}
fi

if [[ "${BUILDKITE_PIPELINE_SLUG}" == "integrations" && "${BUILDKITE_STEP_KEY}" == "reference-target-branch" ]]; then
if [[ ( "${BUILDKITE_PIPELINE_SLUG}" =~ ^(integrations|integrations-test-stack)$ ) && "${BUILDKITE_STEP_KEY}" == "reference-target-branch" ]]; then
# Get the commit from target branch in the first step (reference-target-branch).
# This step MUST be the first one and not run in parallel with any other step to ensure
# that there is just one value for this variable
Expand All @@ -91,7 +91,7 @@ if [[ "${BUILDKITE_PIPELINE_SLUG}" == "integrations-publish" ]]; then
fi
fi

if [[ "${BUILDKITE_PIPELINE_SLUG}" == "integrations" ]]; then
if [[ "${BUILDKITE_PIPELINE_SLUG}" =~ ^(integrations|integrations-test-stack)$ ]]; then
if [[ "${BUILDKITE_STEP_KEY}" == "test-integrations" ]]; then
BUILDKITE_API_TOKEN=$(retry 5 vault kv get -field buildkite_token "${BUILDKITE_API_TOKEN_PATH}")
export BUILDKITE_API_TOKEN
Expand Down
2 changes: 1 addition & 1 deletion .buildkite/hooks/pre-exit
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ source .buildkite/scripts/common.sh

set -euo pipefail

if [[ "$BUILDKITE_PIPELINE_SLUG" == "integrations" ]]; then
if [[ "$BUILDKITE_PIPELINE_SLUG" =~ ^(integrations|integrations-test-stack)$ ]]; then
# FIXME: update condition depending on the pipeline steps triggered
if [[ "$BUILDKITE_STEP_KEY" =~ ^test-integrations- ]]; then
unset ELASTIC_PACKAGE_AWS_ACCESS_KEY
Expand Down
6 changes: 6 additions & 0 deletions .buildkite/pipeline.yml
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,9 @@ steps:
image: "${LINUX_AGENT_IMAGE}"
cpu: "8"
memory: "4G"
if: |
build.env('BUILDKITE_PULL_REQUEST') != "false" &&
build.env('BUILDKITE_PIPELINE_SLUG') == "integrations"
- label: ":sonarqube: Continuous Code Inspection"
soft_fail: true # FIXME: Coverage is failing, remove this after solving the issue
Expand All @@ -83,6 +86,8 @@ steps:
command: ".buildkite/scripts/run_sonar_scanner.sh"
artifact_paths:
- build/test-coverage/coverage_merged.xml
if: |
build.env('BUILDKITE_PIPELINE_SLUG') == "integrations"
- label: ":junit: Junit annotate"
plugins:
Expand All @@ -108,4 +113,5 @@ steps:
# run this step only when it is triggered by the daily job
if: |
build.source == "trigger_job" &&
build.env('BUILDKITE_PIPELINE_SLUG') == "integrations" &&
build.env('BUILDKITE_TRIGGERED_FROM_BUILD_PIPELINE_SLUG') == "integrations-schedule-daily"
16 changes: 16 additions & 0 deletions .buildkite/pull-requests.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,22 @@
"skip_ci_on_only_changed": ["^.github/", "^docs/"],
"always_require_ci_on_changed": []
},
{
"enabled": true,
"pipelineSlug": "integrations-test-stack",
"allow_org_users": true,
"allowed_repo_permissions": ["admin", "write"],
"allowed_list": [],
"set_commit_status": true,
"build_on_commit": false,
"build_on_comment": true,
"trigger_comment_regex": "^/test stack (7|8|9)\\.\\d+\\.\\d+(-SNAPSHOT)?$",
"always_trigger_comment_regex": "^/test stack (7|8|9)\\.\\d+\\.\\d+(-SNAPSHOT)?$",
"skip_ci_labels": [],
"skip_target_branches": [],
"skip_ci_on_only_changed": [],
"always_require_ci_on_changed": []
},
{
"enabled": false,
"pipelineSlug": "integrations-schedule-daily",
Expand Down
10 changes: 10 additions & 0 deletions .buildkite/scripts/trigger_integrations_in_parallel.sh
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,16 @@ to="$(get_to_changeset)"

echo "[DEBUG] Checking with commits: from: '${from}' to: '${to}'"

# GITHUB_PR_TRIGGER_COMMENT is only populated when the build was started by the
# PR bot from a GitHub comment; default it to "" so later expansions are safe
# under `set -u` in automatically triggered builds.
# This variable does not exist in builds triggered automatically
GITHUB_PR_TRIGGER_COMMENT="${GITHUB_PR_TRIGGER_COMMENT:-""}"

# In the integrations-test-stack pipeline, a PR comment of the form
# "/test stack <version>" selects the Elastic stack version to test against.
if [[ "${BUILDKITE_PIPELINE_SLUG}" == "integrations-test-stack" && "${GITHUB_PR_TRIGGER_COMMENT}" =~ ^/test\ stack ]]; then
echo "--- Stack version set from Github comment"
# Third space-separated field of the comment, e.g. "/test stack 8.12.0" -> "8.12.0".
STACK_VERSION=$(echo "$GITHUB_PR_TRIGGER_COMMENT" | cut -d " " -f 3)
export STACK_VERSION
echo "Use Elastic stack version from Github comment: ${STACK_VERSION}"
fi

packages_to_test=0

for package in ${PACKAGE_LIST}; do
Expand Down
5 changes: 4 additions & 1 deletion .github/CODEOWNERS
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,7 @@
/packages/bbot @elastic/security-service-integrations
/packages/beaconing @elastic/ml-ui @elastic/sec-applied-ml
/packages/beat @elastic/stack-monitoring
/packages/beyondinsight_password_safe @elastic/security-service-integrations
/packages/bitdefender @elastic/security-service-integrations
/packages/bitwarden @elastic/security-service-integrations
/packages/blacklens @elastic/security-service-integrations
Expand Down Expand Up @@ -472,4 +473,6 @@
/packages/o365_metrics/data_stream/teams_user_activity_user_counts @elastic/obs-infraobs-integrations
/packages/o365_metrics/data_stream/teams_user_activity_user_detail @elastic/security-service-integrations
/packages/o365_metrics/data_stream/viva_engage_groups_activity_group_detail @elastic/security-service-integrations
/packages/o365_metrics/data_stream/viva_engage_device_usage_user_counts @elastic/obs-infraobs-integrations
/packages/o365_metrics/data_stream/yammer_device_usage @elastic/obs-infraobs-integrations
/packages/o365_metrics/data_stream/service_health @elastic/obs-infraobs-integrations
46 changes: 46 additions & 0 deletions catalog-info.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,52 @@ spec:
everyone:
access_level: READ_ONLY

---
# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
apiVersion: backstage.io/v1alpha1
kind: Resource
metadata:
name: buildkite-pipeline-integrations-test-stack
description: 'Pipeline to test packages with a given Elastic stack version'
links:
- title: Pipeline
url: https://buildkite.com/elastic/integrations-test-stack

spec:
type: buildkite-pipeline
owner: group:ingest-fp
system: platform-ingest
implementation:
apiVersion: buildkite.elastic.dev/v1
kind: Pipeline
metadata:
name: integrations-test-stack
description: 'Pipeline to test packages with a given Elastic stack version'
spec:
branch_configuration: "main backport-*"
pipeline_file: ".buildkite/pipeline.yml"
provider_settings:
build_pull_request_forks: false
build_pull_requests: true # requires filter_enabled and filter_condition settings as below when used with buildkite-pr-bot
publish_commit_status: true
build_tags: false
build_branches: false
filter_enabled: true
filter_condition: >-
build.pull_request.id == null || (build.creator.name == 'elasticmachine' && build.pull_request.id != null && build.source == 'api')
repository: elastic/integrations
cancel_intermediate_builds: true
cancel_intermediate_builds_branch_filter: '!main !backport-*'
skip_intermediate_builds: true
skip_intermediate_builds_branch_filter: '!main !backport-*'
env:
ELASTIC_PR_COMMENTS_ENABLED: 'true'
teams:
ingest-fp:
access_level: MANAGE_BUILD_AND_READ
everyone:
access_level: READ_ONLY

---
# yaml-language-server: $schema=https://gist.githubusercontent.com/elasticmachine/988b80dae436cafea07d9a4a460a011d/raw/e57ee3bed7a6f73077a3f55a38e76e40ec87a7cf/rre.schema.json
apiVersion: backstage.io/v1alpha1
Expand Down
70 changes: 14 additions & 56 deletions packages/aws_bedrock/_dev/build/docs/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,20 +2,9 @@

## Overview

[Amazon Bedrock](https://docs.aws.amazon.com/bedrock/index.html) is a fully managed
service that makes high-performing foundation models (FMs) from leading AI
startups and Amazon available for your use through a unified API. You can choose
from a wide range of foundation models to find the model that is best suited for
your use case. Amazon Bedrock also offers a broad set of capabilities to build
generative AI applications with security, privacy, and responsible AI. Using
Amazon Bedrock, you can easily experiment with and evaluate top foundation
models for your use cases, privately customize them with your data using
techniques such as fine-tuning and Retrieval Augmented Generation (RAG), and
build agents that execute tasks using your enterprise systems and data sources.

The Amazon Bedrock integration allows you to easily connect your Amazon Bedrock model
invocation logging and runtime metrics to Elastic for seamless collection of
invocation logs and runtime metrics to monitor usage.
[Amazon Bedrock](https://docs.aws.amazon.com/bedrock/index.html) offers a fully managed service that provides access to high-performing foundation models (FMs) from leading AI startups and Amazon through a unified API. You can choose from a wide variety of foundation models to find the one that best fits your specific use case. With Amazon Bedrock, you gain access to robust tools for building generative AI applications with security, privacy, and responsible AI practices. Amazon Bedrock enables you to easily experiment with and evaluate top foundation models, customize them privately with your data using methods like fine-tuning and Retrieval Augmented Generation (RAG), and develop agents that perform tasks by leveraging your enterprise systems and data sources.

The Amazon Bedrock integration enables a seamless connection of your model to Elastic to efficiently collect and monitor invocation logs and runtime metrics.

Elastic Security can leverage this data for security analytics including
correlation, visualization and incident response. With invocation logging, you
Expand All @@ -39,6 +28,8 @@ Data streams:
- `runtime`: Collects Amazon Bedrock runtime metrics such as model invocation
count, invocation latency, input token count, output token count and many
more.
- `guardrails`: Collects Amazon Bedrock Guardrails metrics such as guardrail invocation
count, guardrail invocation latency, text unit utilization count, guardrail policy types associated with interventions and many more.

## Requirements

Expand All @@ -57,43 +48,17 @@ For more details about these requirements, check the [AWS
integration
documentation](https://docs.elastic.co/integrations/aws#requirements).

- Elastic Agent must be installed.
- You can install only one Elastic Agent per host.
- Elastic Agent is required to stream data from the S3 bucket and ship the
data to Elastic, where the events will then be processed via the
* Elastic Agent must be installed. For detailed guidance, follow these [instructions](https://www.elastic.co/guide/en/fleet/current/elastic-agent-installation.html).
* You can install only one Elastic Agent per host.
* Elastic Agent is required to stream data from the S3 bucket and ship the
data to Elastic, where the events will then be processed through the
integration's ingest pipelines.

### Installing and managing an Elastic Agent

To install and manage an Elastic Agent you have the following options:

### Install a Fleet-managed Elastic Agent (recommended)

You install Elastic Agent and use Fleet in Kibana to
define, configure, and manage your agents in a central location. We recommend
using Fleet management because it makes the management and upgrade of your
agents considerably easier.

### Install Elastic Agent in standalone mode (advanced users)

You install Elastic Agent and manually configure the agent
locally on the system where it is installed. You are responsible for managing
and upgrading the agents. This approach is for advanced users only.

### Install Elastic Agent in a containerized environment

You can run Elastic Agent inside a container, either with Fleet Server or
standalone. Docker images for all versions of Elastic Agent are available
from the Elastic Docker registry, and we provide deployment manifests for
running on Kubernetes.

To run Elastic Agent, check these [requirements](https://www.elastic.co/guide/en/fleet/current/elastic-agent-installation.html).

## Setup

To use the Amazon Bedrock model invocation logs, model invocation
logging must be enabled and the logs sent to a log store destination,
either S3 or CloudWatch. For more details check the
either S3 or CloudWatch. For more details, check the
[Amazon Bedrock User Guide](https://docs.aws.amazon.com/bedrock/latest/userguide/model-invocation-logging.html).

1. Set up an [Amazon S3](https://docs.aws.amazon.com/bedrock/latest/userguide/model-invocation-logging.html#setup-s3-destination) or [CloudWatch Logs](https://docs.aws.amazon.com/bedrock/latest/userguide/model-invocation-logging.html#setup-cloudwatch-logs-destination) destination.
Expand All @@ -103,25 +68,18 @@ either S3 or CloudWatch. For more details check the

### Collecting Amazon Bedrock model invocation logs from S3 bucket

When collecting logs from S3 bucket is enabled, you can retrieve logs from S3
objects that are pointed to by S3 notification events read from an SQS queue or
directly polling list of S3 objects in an S3 bucket.
When log collection from an S3 bucket is enabled, you can access logs from S3 objects referenced by S3 notification events received through an SQS queue or by directly polling the list of S3 objects within the bucket.

The use of SQS notification is preferred: polling list of S3 objects is
expensive in terms of performance and costs and should be preferably used only
expensive in terms of performance and costs and should be used only
when no SQS notification can be attached to the S3 buckets. This input
integration also supports S3 notification from SNS to SQS.

SQS notification method is enabled setting `queue_url` configuration value. S3
bucket list polling method is enabled setting `bucket_arn` configuration value
and `number_of_workers` value. Both `queue_url` and `bucket_arn` cannot be set
at the same time and at least one of the two value must be set.
To enable the SQS notification method, set the `queue_url` configuration value. To enable the S3 bucket list polling method, configure both the `bucket_arn` and `number_of_workers` values. Note that `queue_url` and `bucket_arn` cannot be set simultaneously, and at least one of these values must be specified.

### Collecting Amazon Bedrock model invocation logs from CloudWatch

When collecting logs from CloudWatch is enabled, you can retrieve logs from
all log streams in a specific log group. `filterLogEvents` AWS API is used to
list log events from the specified log group.
When CloudWatch log collection is enabled, you can retrieve logs from all log streams within a specified log group. The `filterLogEvents` AWS API is used to list log events from the specified log group.

{{fields "invocation"}}

Expand Down
10 changes: 10 additions & 0 deletions packages/aws_bedrock/changelog.yml
Original file line number Diff line number Diff line change
@@ -1,3 +1,13 @@
- version: "1.0.0"
changes:
- description: Make Amazon Bedrock package GA.
type: enhancement
link: https://github.com/elastic/integrations/pull/12514
- version: "0.22.2"
changes:
- description: Add Guardrails dataset details to the AWS Bedrock integration page.
type: enhancement
link: https://github.com/elastic/integrations/pull/12424
- version: "0.22.1"
changes:
- description: Add minor improvements to the Overview and Guardrails dashboards.
Expand Down
Loading

0 comments on commit bb820e5

Please sign in to comment.