
Commit

Merge pull request #194 from ms-johnalex/refactor-config-openai1
Refactored chat.py to use ChainedTokenCredential
pamelafox authored Oct 11, 2024
2 parents 7d90b98 + e5249cd commit 389cc8c
Showing 17 changed files with 303 additions and 201 deletions.
2 changes: 1 addition & 1 deletion .env.sample
@@ -1,4 +1,4 @@
AZURE_OPENAI_API_VERSION="2024-02-15-preview"
AZURE_OPENAI_ENDPOINT="https://YOUR-ENDPOINT-HERE.openai.azure.com/"
# Name of the Azure OpenAI GPT deployment (different from the model name)
AZURE_OPENAI_CHATGPT_DEPLOYMENT=chatgpt
AZURE_OPENAI_CHAT_DEPLOYMENT=gpt4o-mini
10 changes: 9 additions & 1 deletion README.md
@@ -144,7 +144,7 @@ azd pipeline config

## Development server

In order to run this app, you need to either have an Azure OpenAI account deployed (from the [deploying steps](#deploying)), use a model from [GitHub models](https://github.com/marketplace/models), or use a [local LLM server](/docs/local_ollama.md).
Assuming you've run the steps in [Opening the project](#opening-the-project) and the steps in [Deploying](#deploying), you can now run the Quart app in your development environment:
1. Copy `.env.sample.azure` into `.env`:
Expand All @@ -158,6 +158,14 @@ In order to run this app, you need to either have an Azure OpenAI account deploy
azd env get-value AZURE_OPENAI_ENDPOINT
```
1. Run the development server:
```shell
python -m quart --app src.quartapp run --port 50505 --reload
```
This will start the app on port 50505, and you can access it at `http://localhost:50505`.
## Guidance
### Costs
12 changes: 12 additions & 0 deletions azure.yaml
@@ -12,3 +12,15 @@ services:
    host: containerapp
    docker:
      remoteBuild: true
hooks:
  predown:
    windows:
      shell: pwsh
      run: ./scripts/pre-down.ps1
      continueOnError: false
      interactive: true
    posix:
      shell: sh
      run: ./scripts/pre-down.sh
      continueOnError: false
      interactive: true
2 changes: 1 addition & 1 deletion docs/deploy_existing.md
@@ -27,7 +27,7 @@ If you don't want to deploy a new Azure OpenAI resource and just want to use an

```shell
azd env set CREATE_AZURE_OPENAI false
azd env set AZURE_OPENAI_CHATGPT_DEPLOYMENT gpt-35-turbo
azd env set AZURE_OPENAI_CHAT_DEPLOYMENT gpt-35-turbo
azd env set AZURE_OPENAI_ENDPOINT https://YOUR-ENDPOINT-HERE
```

2 changes: 1 addition & 1 deletion infra/aca.bicep
@@ -18,7 +18,7 @@ resource acaIdentity 'Microsoft.ManagedIdentity/userAssignedIdentities@2023-01-3

var env = [
  {
    name: 'AZURE_OPENAI_CHATGPT_DEPLOYMENT'
    name: 'AZURE_OPENAI_CHAT_DEPLOYMENT'
    value: openAiDeploymentName
  }
  {
6 changes: 5 additions & 1 deletion infra/main.bicep
@@ -160,8 +160,11 @@ module openAiRoleBackend 'core/security/role.bicep' = if (createAzureOpenAi) {
}

output AZURE_LOCATION string = location
output AZURE_TENANT_ID string = tenant().tenantId

output AZURE_OPENAI_CHATGPT_DEPLOYMENT string = openAiDeploymentName
output AZURE_OPENAI_RESOURCE_GROUP string = openAiResourceGroup.name
output AZURE_OPENAI_RESOURCE_NAME string = openAi.outputs.name
output AZURE_OPENAI_CHAT_DEPLOYMENT string = openAiDeploymentName
output AZURE_OPENAI_API_VERSION string = openAiApiVersion
output AZURE_OPENAI_ENDPOINT string = createAzureOpenAi ? openAi.outputs.endpoint : openAiEndpoint

Expand All @@ -173,3 +176,4 @@ output SERVICE_ACA_IMAGE_NAME string = aca.outputs.SERVICE_ACA_IMAGE_NAME
output AZURE_CONTAINER_ENVIRONMENT_NAME string = containerApps.outputs.environmentName
output AZURE_CONTAINER_REGISTRY_ENDPOINT string = containerApps.outputs.registryLoginServer
output AZURE_CONTAINER_REGISTRY_NAME string = containerApps.outputs.registryName

8 changes: 4 additions & 4 deletions infra/main.parameters.json
@@ -12,13 +12,13 @@
      "value": "${AZURE_PRINCIPAL_ID}"
    },
    "openAiDeploymentName": {
      "value": "${AZURE_OPENAI_CHATGPT_DEPLOYMENT=gpt-4o-mini}"
      "value": "${AZURE_OPENAI_CHAT_DEPLOYMENT=gpt-4o-mini}"
    },
    "openAiModelName": {
      "value": "${AZURE_OPENAI_CHATGPT_MODEL=gpt-4o-mini}"
      "value": "${AZURE_OPENAI_CHAT_MODEL=gpt-4o-mini}"
    },
    "openAiModelVersion": {
      "value": "${AZURE_OPENAI_CHATGPT_MODEL_VERSION=2024-07-18}"
      "value": "${AZURE_OPENAI_CHAT_MODEL_VERSION=2024-07-18}"
    },
    "openAiResourceName": {
      "value": "${AZURE_OPENAI_RESOURCE}"
@@ -33,7 +33,7 @@
      "value": "${AZURE_OPENAI_DEPLOYMENT_CAPACITY=30}"
    },
    "openAiDeploymentSkuName": {
      "value": "${AZURE_OPENAI_DEPLOYMENT_SKU_NAME=Standard}"
      "value": "${AZURE_OPENAI_DEPLOYMENT_SKU_NAME=GlobalStandard}"
    },
    "openAiSkuName": {
      "value": "${AZURE_OPENAI_SKU_NAME}"
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -1,10 +1,10 @@
[tool.ruff]
select = ["E", "F", "I", "UP"]
lint.select = ["E", "F", "I", "UP"]
target-version = "py311"
line-length = 120
src = ["src"]

[tool.ruff.isort]
[tool.ruff.lint.isort]
known-first-party = ["quartapp"]

[tool.black]
Binary file modified readme_diagram.png
1 change: 1 addition & 0 deletions requirements-dev.txt
@@ -1,4 +1,5 @@
-r src/requirements.txt
azure-mgmt-cognitiveservices
black
ruff
pre-commit
11 changes: 11 additions & 0 deletions scripts/pre-down.ps1
@@ -0,0 +1,11 @@
# Get the directory of the current script
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition

# Load environment variables from azd env
$subscriptionId = azd env get-value AZURE_SUBSCRIPTION_ID
$resourceName = azd env get-value AZURE_OPENAI_RESOURCE_NAME
$resourceGroup = azd env get-value AZURE_OPENAI_RESOURCE_GROUP
$deploymentName = azd env get-value AZURE_OPENAI_CHAT_DEPLOYMENT

# Run the Python script with the retrieved values
python "$scriptDir/pre-down.py" --subscription-id $subscriptionId --resource-name $resourceName --resource-group $resourceGroup --deployment-name $deploymentName
35 changes: 35 additions & 0 deletions scripts/pre-down.py
@@ -0,0 +1,35 @@
import argparse

import azure.core.exceptions
from azure.identity import DefaultAzureCredential
from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient

# Set up argument parsing
parser = argparse.ArgumentParser(description="Delete an Azure OpenAI deployment.")
parser.add_argument("--resource-name", required=True, help="The name of the Azure OpenAI resource.")
parser.add_argument("--resource-group", required=True, help="The name of the Azure resource group.")
parser.add_argument("--deployment-name", required=True, help="The name of the deployment to delete.")
parser.add_argument("--subscription-id", required=True, help="The Azure subscription ID.")

print("Pre-down OpenAI script starting.")

args = parser.parse_args()

# Authenticate using DefaultAzureCredential
credential = DefaultAzureCredential()

# Initialize the Cognitive Services client
client = CognitiveServicesManagementClient(credential, subscription_id=args.subscription_id)
try:
    # Begin delete the deployment
    poller = client.deployments.begin_delete(
        resource_group_name=args.resource_group, account_name=args.resource_name, deployment_name=args.deployment_name
    )
except azure.core.exceptions.ResourceNotFoundError:
    print(f"Deployment {args.deployment_name} not found.")
    exit(0)

# Wait for the delete operation to complete
poller.result()

print(f"Deployment {args.deployment_name} deleted successfully.")
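Not part of this commit, but for context: the same `azure-mgmt-cognitiveservices` client that `pre-down.py` uses can also confirm the deletion by listing whatever deployments remain on the resource. A minimal sketch, with placeholders standing in for the azd environment values the wrapper scripts pass in:

```python
# Hypothetical follow-up check (not in this PR): list the deployments that remain
# on the Azure OpenAI resource after pre-down.py has run. Values are placeholders.
from azure.identity import DefaultAzureCredential
from azure.mgmt.cognitiveservices import CognitiveServicesManagementClient

client = CognitiveServicesManagementClient(
    DefaultAzureCredential(), subscription_id="<AZURE_SUBSCRIPTION_ID>"
)
remaining = client.deployments.list(
    resource_group_name="<AZURE_OPENAI_RESOURCE_GROUP>", account_name="<AZURE_OPENAI_RESOURCE_NAME>"
)
print("Remaining deployments:", [d.name for d in remaining])
```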
13 changes: 13 additions & 0 deletions scripts/pre-down.sh
@@ -0,0 +1,13 @@
#!/bin/bash

# Get the directory of the current script
script_dir=$(dirname "$0")

# Load environment variables from azd env
subscription_id=$(azd env get-value AZURE_SUBSCRIPTION_ID)
resource_name=$(azd env get-value AZURE_OPENAI_RESOURCE_NAME)
resource_group=$(azd env get-value AZURE_OPENAI_RESOURCE_GROUP)
deployment_name=$(azd env get-value AZURE_OPENAI_CHAT_DEPLOYMENT)

# Run the Python script with the retrieved values
python "$script_dir/pre-down.py" --subscription-id $subscription_id --resource-name $resource_name --resource-group $resource_group --deployment-name $deployment_name
46 changes: 32 additions & 14 deletions src/quartapp/chat.py
@@ -1,7 +1,12 @@
import json
import os

import azure.identity.aio
from azure.identity.aio import (
    AzureDeveloperCliCredential,
    ChainedTokenCredential,
    ManagedIdentityCredential,
    get_bearer_token_provider,
)
from openai import AsyncAzureOpenAI
from quart import (
    Blueprint,
@@ -17,26 +22,39 @@

@bp.before_app_serving
async def configure_openai():
    # Authenticate using the default Azure credential chain
    # See https://docs.microsoft.com/azure/developer/python/azure-sdk-authenticate#defaultazurecredential
    # This will *not* work inside a local Docker container.
    # If using managed user-assigned identity, make sure that AZURE_CLIENT_ID is set
    # to the client ID of the user-assigned identity.
    current_app.logger.info("Using Azure OpenAI with default credential")
    default_credential = azure.identity.aio.DefaultAzureCredential(exclude_shared_token_cache_credential=True)
    token_provider = azure.identity.aio.get_bearer_token_provider(
        default_credential, "https://cognitiveservices.azure.com/.default"
    )

    # Use ManagedIdentityCredential with the client_id for user-assigned managed identities
    user_assigned_managed_identity_credential = ManagedIdentityCredential(client_id=os.getenv("AZURE_CLIENT_ID"))

    # Use AzureDeveloperCliCredential with the current tenant.
    azure_dev_cli_credential = AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"), process_timeout=60)

    # Create a ChainedTokenCredential with ManagedIdentityCredential and AzureDeveloperCliCredential
    # - ManagedIdentityCredential is used for deployment on Azure Container Apps

    # - AzureDeveloperCliCredential is used for local development
    # The order of the credentials is important, as the first valid token is used
    # For more information check out:

    # https://learn.microsoft.com/azure/developer/python/sdk/authentication/credential-chains?tabs=ctc#chainedtokencredential-overview
    azure_credential = ChainedTokenCredential(user_assigned_managed_identity_credential, azure_dev_cli_credential)
    current_app.logger.info("Using Azure OpenAI with credential")

    # Get the token provider for Azure OpenAI based on the selected Azure credential
    token_provider = get_bearer_token_provider(azure_credential, "https://cognitiveservices.azure.com/.default")
    if not os.getenv("AZURE_OPENAI_ENDPOINT"):
        raise ValueError("AZURE_OPENAI_ENDPOINT is required for Azure OpenAI")
    if not os.getenv("AZURE_OPENAI_CHATGPT_DEPLOYMENT"):
        raise ValueError("AZURE_OPENAI_CHATGPT_DEPLOYMENT is required for Azure OpenAI")
    if not os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT"):
        raise ValueError("AZURE_OPENAI_CHAT_DEPLOYMENT is required for Azure OpenAI")

    # Create the Asynchronous Azure OpenAI client
    bp.openai_client = AsyncAzureOpenAI(
        api_version=os.getenv("AZURE_OPENAI_API_VERSION") or "2024-02-15-preview",
        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
        azure_ad_token_provider=token_provider,
    )
    bp.openai_model = os.getenv("AZURE_OPENAI_CHATGPT_DEPLOYMENT")
    # Set the model name to the Azure OpenAI model deployment name
    bp.openai_model = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT")


@bp.after_app_serving
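
To try the new credential chain outside the app, here is a minimal standalone sketch (not part of this commit) that builds the same chain as `configure_openai` and checks that a token can be acquired. It assumes `azure-identity` is installed and reads the same `AZURE_CLIENT_ID` / `AZURE_TENANT_ID` variables the app expects; both may be unset for purely local use.

```python
# Standalone sanity check of the chained-credential pattern adopted in chat.py
# (a sketch, not the app's code). Managed identity is tried first, then azd CLI.
import asyncio
import os

from azure.identity.aio import (
    AzureDeveloperCliCredential,
    ChainedTokenCredential,
    ManagedIdentityCredential,
    get_bearer_token_provider,
)


async def main() -> None:
    chain = ChainedTokenCredential(
        ManagedIdentityCredential(client_id=os.getenv("AZURE_CLIENT_ID")),
        AzureDeveloperCliCredential(tenant_id=os.getenv("AZURE_TENANT_ID"), process_timeout=60),
    )
    token_provider = get_bearer_token_provider(chain, "https://cognitiveservices.azure.com/.default")
    await token_provider()  # raises if no credential in the chain can authenticate
    print("Successfully acquired a token for Azure OpenAI")
    await chain.close()


if __name__ == "__main__":
    asyncio.run(main())
```

Locally this should succeed after `azd auth login`; on Azure Container Apps the managed-identity branch is the one expected to resolve.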

0 comments on commit 389cc8c
