diff --git a/scripts/envs/audio-to-text b/scripts/envs/audio-to-text index cfda4af0..925ea6e5 100644 --- a/scripts/envs/audio-to-text +++ b/scripts/envs/audio-to-text @@ -2,6 +2,7 @@ export APP_NAME="audio-to-text" export APP_DISPLAY_NAME="Audio to Text Application" export APP_DESC="Audio to Text Application example with AI enabled audio transcription" export APP_TAGS='["ai", "whispercpp", "python", "asr"]' +export APP_RUN_COMMAND="streamlit run whisper_client.py" export INIT_CONTAINER="quay.io/redhat-ai-dev/whisper-small:latest" export INIT_CONTAINER_COMMAND="['/usr/bin/install', '/model/model.file', '/shared/']" export MODEL_PATH="/model/model.file" diff --git a/scripts/envs/chatbot b/scripts/envs/chatbot index e2adf4b8..37893b94 100755 --- a/scripts/envs/chatbot +++ b/scripts/envs/chatbot @@ -2,6 +2,7 @@ export APP_NAME="chatbot" export APP_DISPLAY_NAME="Chatbot Application" export APP_DESC="Chatbot Application example with LLM enabled chat applications" export APP_TAGS='["ai", "llamacpp", "vllm", "python"]' +export APP_RUN_COMMAND="streamlit run chatbot_ui.py" export INIT_CONTAINER="quay.io/redhat-ai-dev/granite-7b-lab:latest" export INIT_CONTAINER_COMMAND="['/usr/bin/install', '/model/model.file', '/shared/']" export MODEL_PATH="/model/model.file" diff --git a/scripts/envs/codegen b/scripts/envs/codegen index 79317724..5b6b6494 100755 --- a/scripts/envs/codegen +++ b/scripts/envs/codegen @@ -2,6 +2,7 @@ export APP_NAME="codegen" export APP_DISPLAY_NAME="Code Generation Application" export APP_DESC="Code Generation Application example that generate code in countless programming languages." 
export APP_TAGS='["ai", "llamacpp", "vllm", "python"]' +export APP_RUN_COMMAND="streamlit run codegen-app.py" export INIT_CONTAINER="quay.io/redhat-ai-dev/mistral-7b-code-16k-qlora:latest" export INIT_CONTAINER_COMMAND="['/usr/bin/install', '/model/model.file', '/shared/']" export MODEL_PATH="/model/model.file" diff --git a/scripts/envs/object-detection b/scripts/envs/object-detection index 02655499..697f4c1b 100755 --- a/scripts/envs/object-detection +++ b/scripts/envs/object-detection @@ -2,6 +2,7 @@ export APP_NAME="object-detection" export APP_DISPLAY_NAME="Object Detection Application" export APP_DESC="AI enabled Object Detection Application example using DEtection TRansformer(DETR) model to detect objects in an image" export APP_TAGS='["ai", "detr", "python"]' +export APP_RUN_COMMAND="streamlit run object_detection_client.py" export INIT_CONTAINER="quay.io/redhat-ai-dev/detr-resnet-101:latest" export INIT_CONTAINER_COMMAND="['cp', '-R', '/model/detr-resnet-101', '/shared/detr-resnet-101']" export MODEL_PATH="/model/detr-resnet-101" diff --git a/skeleton/techdoc/docs/.assets/open-terminal.png b/skeleton/techdoc/docs/.assets/open-terminal.png new file mode 100644 index 00000000..b3330cb4 Binary files /dev/null and b/skeleton/techdoc/docs/.assets/open-terminal.png differ diff --git a/skeleton/techdoc/docs/.assets/workbench-name.png b/skeleton/techdoc/docs/.assets/workbench-name.png new file mode 100644 index 00000000..ccbdc160 Binary files /dev/null and b/skeleton/techdoc/docs/.assets/workbench-name.png differ diff --git a/skeleton/techdoc/docs/rhoai.md b/skeleton/techdoc/docs/rhoai.md new file mode 100644 index 00000000..8c4a945b --- /dev/null +++ b/skeleton/techdoc/docs/rhoai.md @@ -0,0 +1,37 @@ +# Running Samples in OpenShift AI + +This document will outline how you can build and run your sample applications within an OpenShift AI workbench. 
+ +## Prerequisites + +- Red Hat OpenShift AI installed, and `Create workbench for OpenShift AI` selected during component creation. +- `oc` cli installed + - `oc` can be downloaded from https://mirror.openshift.com/pub/openshift-v4/clients/ocp/stable/ +- Permissions to run `oc port-forward` on the cluster, specifically an account with the following roles: + - `get`, `create`, and `list` for the `pods/portforward` subresource + +## Running the Sample + +1) Navigate to the OpenShift AI workbench created for your sample application + +2) Go to `File->Open` and select `Terminal` +![image](./.assets/open-terminal.png) + +3) In the terminal, run `cd ${{ values.name }}` to navigate to your sample app's directory + +4) Run `pip install --upgrade -r requirements.txt` to install the dependencies for your application + +5) Run `${{ values.appRunCommand }}` to run the sample in the workbench. + +## Accessing the Sample + +With the sample app now running, the following steps will allow you to access the sample app in your browser: + +1) Navigate back to the OpenShift AI dashboard, and find the name of your workbench. +![image](./.assets/workbench-name.png) + +2) In a terminal window on your machine, run `oc get pods -l app=<workbench-name>`. This will retrieve the name of the pod where the workbench is running. + +3) Run `oc port-forward <pod-name> ${{ values.appPort }}:${{ values.appPort }}` to port forward the sample application's port to your local machine. + +4) Finally, visit `http://localhost:${{ values.appPort }}` in your browser to access the application. 
\ No newline at end of file diff --git a/skeleton/techdoc/mkdocs.yml b/skeleton/techdoc/mkdocs.yml index e63a3f1a..bb44658e 100644 --- a/skeleton/techdoc/mkdocs.yml +++ b/skeleton/techdoc/mkdocs.yml @@ -5,6 +5,7 @@ nav: - Source Component: source-component.md - Pipelines: pipelines.md - GitOps Application: gitops-application.md + - OpenShift AI: rhoai.md plugins: - techdocs-core \ No newline at end of file diff --git a/skeleton/template.yaml b/skeleton/template.yaml index 23b263ce..0e054f4a 100644 --- a/skeleton/template.yaml +++ b/skeleton/template.yaml @@ -205,6 +205,7 @@ spec: srcRepoURL: https://${{ parameters.githubServer if parameters.hostType === 'GitHub' else parameters.gitlabServer }}/${{ parameters.repoOwner }}/${{ parameters.repoName }} appContainer: ${{ 'quay.io/redhat-ai-dev/ai-template-bootstrap-app:latest' if parameters.hostType === 'GitHub' else '${APP_INTERFACE_CONTAINER}' }} appPort: ${APP_PORT} + appRunCommand: "${APP_RUN_COMMAND}" modelServiceContainer: ${MODEL_SERVICE_CONTAINER} modelServicePort: ${MODEL_SERVICE_PORT} # Renders all the template variables into the files and directory names and content, and places the result in the workspace. 
@@ -225,7 +226,7 @@ spec: tags: 'sed.edit.APPTAGS' owner: ${{ parameters.owner }} repoSlug: '${{ parameters.imageOrg }}/${{ parameters.imageName }}' - defaultBranch: ${{ parameters.branch }} + defaultBranch: ${{ parameters.branch }} - id: fetch-github-action name: Fetch GitHub Action action: fetch:plain diff --git a/templates/audio-to-text/content/docs/.assets/open-terminal.png b/templates/audio-to-text/content/docs/.assets/open-terminal.png new file mode 100644 index 00000000..b3330cb4 Binary files /dev/null and b/templates/audio-to-text/content/docs/.assets/open-terminal.png differ diff --git a/templates/audio-to-text/content/docs/.assets/workbench-name.png b/templates/audio-to-text/content/docs/.assets/workbench-name.png new file mode 100644 index 00000000..ccbdc160 Binary files /dev/null and b/templates/audio-to-text/content/docs/.assets/workbench-name.png differ diff --git a/templates/audio-to-text/content/docs/rhoai.md b/templates/audio-to-text/content/docs/rhoai.md new file mode 100644 index 00000000..8c4a945b --- /dev/null +++ b/templates/audio-to-text/content/docs/rhoai.md @@ -0,0 +1,37 @@ +# Running Samples in OpenShift AI + +This document will outline how you can build and run your sample applications within an OpenShift AI workbench. + +## Prerequisites + +- Red Hat OpenShift AI installed, and `Create workbench for OpenShift AI` selected during component creation. 
+- `oc` cli installed + - `oc` can be downloaded from https://mirror.openshift.com/pub/openshift-v4/clients/ocp/stable/ +- Permissions to run `oc port-forward` on the cluster, specifically an account with the following roles: + - `get`, `create`, and `list` for the `pods/portforward` subresource + +## Running the Sample + +1) Navigate to the OpenShift AI workbench created for your sample application + +2) Go to `File->Open` and select `Terminal` +![image](./.assets/open-terminal.png) + +3) In the terminal, run `cd ${{ values.name }}` to navigate to your sample app's directory + +4) Run `pip install --upgrade -r requirements.txt` to install the dependencies for your application + +5) Run `${{ values.appRunCommand }}` to run the sample in the workbench. + +## Accessing the Sample + +With the sample app now running, the following steps will allow you to access the sample app in your browser: + +1) Navigate back to the OpenShift AI dashboard, and find the name of your workbench. +![image](./.assets/workbench-name.png) + +2) In a terminal window on your machine, run `oc get pods -l app=<workbench-name>`. This will retrieve the name of the pod where the workbench is running. + +3) Run `oc port-forward <pod-name> ${{ values.appPort }}:${{ values.appPort }}` to port forward the sample application's port to your local machine. + +4) Finally, visit `http://localhost:${{ values.appPort }}` in your browser to access the application. 
\ No newline at end of file diff --git a/templates/audio-to-text/content/mkdocs.yml b/templates/audio-to-text/content/mkdocs.yml index e63a3f1a..bb44658e 100644 --- a/templates/audio-to-text/content/mkdocs.yml +++ b/templates/audio-to-text/content/mkdocs.yml @@ -5,6 +5,7 @@ nav: - Source Component: source-component.md - Pipelines: pipelines.md - GitOps Application: gitops-application.md + - OpenShift AI: rhoai.md plugins: - techdocs-core \ No newline at end of file diff --git a/templates/audio-to-text/template.yaml b/templates/audio-to-text/template.yaml index 3fa011f7..f046c0ff 100644 --- a/templates/audio-to-text/template.yaml +++ b/templates/audio-to-text/template.yaml @@ -165,6 +165,7 @@ spec: srcRepoURL: https://${{ parameters.githubServer if parameters.hostType === 'GitHub' else parameters.gitlabServer }}/${{ parameters.repoOwner }}/${{ parameters.repoName }} appContainer: ${{ 'quay.io/redhat-ai-dev/ai-template-bootstrap-app:latest' if parameters.hostType === 'GitHub' else 'quay.io/redhat-ai-dev/audio-to-text:latest' }} appPort: 8501 + appRunCommand: "streamlit run whisper_client.py" modelServiceContainer: quay.io/redhat-ai-dev/whispercpp:latest modelServicePort: 8001 # Renders all the template variables into the files and directory names and content, and places the result in the workspace. 
@@ -185,7 +186,7 @@ spec: tags: '["ai", "whispercpp", "python", "asr"]' owner: ${{ parameters.owner }} repoSlug: '${{ parameters.imageOrg }}/${{ parameters.imageName }}' - defaultBranch: ${{ parameters.branch }} + defaultBranch: ${{ parameters.branch }} - id: fetch-github-action name: Fetch GitHub Action action: fetch:plain diff --git a/templates/chatbot/content/docs/.assets/open-terminal.png b/templates/chatbot/content/docs/.assets/open-terminal.png new file mode 100644 index 00000000..b3330cb4 Binary files /dev/null and b/templates/chatbot/content/docs/.assets/open-terminal.png differ diff --git a/templates/chatbot/content/docs/.assets/workbench-name.png b/templates/chatbot/content/docs/.assets/workbench-name.png new file mode 100644 index 00000000..ccbdc160 Binary files /dev/null and b/templates/chatbot/content/docs/.assets/workbench-name.png differ diff --git a/templates/chatbot/content/docs/rhoai.md b/templates/chatbot/content/docs/rhoai.md new file mode 100644 index 00000000..8c4a945b --- /dev/null +++ b/templates/chatbot/content/docs/rhoai.md @@ -0,0 +1,37 @@ +# Running Samples in OpenShift AI + +This document will outline how you can build and run your sample applications within an OpenShift AI workbench. + +## Prerequisites + +- Red Hat OpenShift AI installed, and `Create workbench for OpenShift AI` selected during component creation. 
+- `oc` cli installed + - `oc` can be downloaded from https://mirror.openshift.com/pub/openshift-v4/clients/ocp/stable/ +- Permissions to run `oc port-forward` on the cluster, specifically an account with the following roles: + - `get`, `create`, and `list` for the `pods/portforward` subresource + +## Running the Sample + +1) Navigate to the OpenShift AI workbench created for your sample application + +2) Go to `File->Open` and select `Terminal` +![image](./.assets/open-terminal.png) + +3) In the terminal, run `cd ${{ values.name }}` to navigate to your sample app's directory + +4) Run `pip install --upgrade -r requirements.txt` to install the dependencies for your application + +5) Run `${{ values.appRunCommand }}` to run the sample in the workbench. + +## Accessing the Sample + +With the sample app now running, the following steps will allow you to access the sample app in your browser: + +1) Navigate back to the OpenShift AI dashboard, and find the name of your workbench. +![image](./.assets/workbench-name.png) + +2) In a terminal window on your machine, run `oc get pods -l app=<workbench-name>`. This will retrieve the name of the pod where the workbench is running. + +3) Run `oc port-forward <pod-name> ${{ values.appPort }}:${{ values.appPort }}` to port forward the sample application's port to your local machine. + +4) Finally, visit `http://localhost:${{ values.appPort }}` in your browser to access the application. 
\ No newline at end of file diff --git a/templates/chatbot/content/mkdocs.yml b/templates/chatbot/content/mkdocs.yml index e63a3f1a..bb44658e 100644 --- a/templates/chatbot/content/mkdocs.yml +++ b/templates/chatbot/content/mkdocs.yml @@ -5,6 +5,7 @@ nav: - Source Component: source-component.md - Pipelines: pipelines.md - GitOps Application: gitops-application.md + - OpenShift AI: rhoai.md plugins: - techdocs-core \ No newline at end of file diff --git a/templates/chatbot/template.yaml b/templates/chatbot/template.yaml index 8abb9ffd..1259c24c 100644 --- a/templates/chatbot/template.yaml +++ b/templates/chatbot/template.yaml @@ -179,6 +179,7 @@ spec: srcRepoURL: https://${{ parameters.githubServer if parameters.hostType === 'GitHub' else parameters.gitlabServer }}/${{ parameters.repoOwner }}/${{ parameters.repoName }} appContainer: ${{ 'quay.io/redhat-ai-dev/ai-template-bootstrap-app:latest' if parameters.hostType === 'GitHub' else 'quay.io/redhat-ai-dev/chatbot:latest' }} appPort: 8501 + appRunCommand: "streamlit run chatbot_ui.py" modelServiceContainer: quay.io/ai-lab/llamacpp_python:latest modelServicePort: 8001 # Renders all the template variables into the files and directory names and content, and places the result in the workspace. 
@@ -199,7 +200,7 @@ spec: tags: '["ai", "llamacpp", "vllm", "python"]' owner: ${{ parameters.owner }} repoSlug: '${{ parameters.imageOrg }}/${{ parameters.imageName }}' - defaultBranch: ${{ parameters.branch }} + defaultBranch: ${{ parameters.branch }} - id: fetch-github-action name: Fetch GitHub Action action: fetch:plain diff --git a/templates/codegen/content/docs/.assets/open-terminal.png b/templates/codegen/content/docs/.assets/open-terminal.png new file mode 100644 index 00000000..b3330cb4 Binary files /dev/null and b/templates/codegen/content/docs/.assets/open-terminal.png differ diff --git a/templates/codegen/content/docs/.assets/workbench-name.png b/templates/codegen/content/docs/.assets/workbench-name.png new file mode 100644 index 00000000..ccbdc160 Binary files /dev/null and b/templates/codegen/content/docs/.assets/workbench-name.png differ diff --git a/templates/codegen/content/docs/rhoai.md b/templates/codegen/content/docs/rhoai.md new file mode 100644 index 00000000..8c4a945b --- /dev/null +++ b/templates/codegen/content/docs/rhoai.md @@ -0,0 +1,37 @@ +# Running Samples in OpenShift AI + +This document will outline how you can build and run your sample applications within an OpenShift AI workbench. + +## Prerequisites + +- Red Hat OpenShift AI installed, and `Create workbench for OpenShift AI` selected during component creation. 
+- `oc` cli installed + - `oc` can be downloaded from https://mirror.openshift.com/pub/openshift-v4/clients/ocp/stable/ +- Permissions to run `oc port-forward` on the cluster, specifically an account with the following roles: + - `get`, `create`, and `list` for the `pods/portforward` subresource + +## Running the Sample + +1) Navigate to the OpenShift AI workbench created for your sample application + +2) Go to `File->Open` and select `Terminal` +![image](./.assets/open-terminal.png) + +3) In the terminal, run `cd ${{ values.name }}` to navigate to your sample app's directory + +4) Run `pip install --upgrade -r requirements.txt` to install the dependencies for your application + +5) Run `${{ values.appRunCommand }}` to run the sample in the workbench. + +## Accessing the Sample + +With the sample app now running, the following steps will allow you to access the sample app in your browser: + +1) Navigate back to the OpenShift AI dashboard, and find the name of your workbench. +![image](./.assets/workbench-name.png) + +2) In a terminal window on your machine, run `oc get pods -l app=<workbench-name>`. This will retrieve the name of the pod where the workbench is running. + +3) Run `oc port-forward <pod-name> ${{ values.appPort }}:${{ values.appPort }}` to port forward the sample application's port to your local machine. + +4) Finally, visit `http://localhost:${{ values.appPort }}` in your browser to access the application. 
\ No newline at end of file diff --git a/templates/codegen/content/mkdocs.yml b/templates/codegen/content/mkdocs.yml index e63a3f1a..bb44658e 100644 --- a/templates/codegen/content/mkdocs.yml +++ b/templates/codegen/content/mkdocs.yml @@ -5,6 +5,7 @@ nav: - Source Component: source-component.md - Pipelines: pipelines.md - GitOps Application: gitops-application.md + - OpenShift AI: rhoai.md plugins: - techdocs-core \ No newline at end of file diff --git a/templates/codegen/template.yaml b/templates/codegen/template.yaml index 89b2d70d..c80a03c6 100644 --- a/templates/codegen/template.yaml +++ b/templates/codegen/template.yaml @@ -179,6 +179,7 @@ spec: srcRepoURL: https://${{ parameters.githubServer if parameters.hostType === 'GitHub' else parameters.gitlabServer }}/${{ parameters.repoOwner }}/${{ parameters.repoName }} appContainer: ${{ 'quay.io/redhat-ai-dev/ai-template-bootstrap-app:latest' if parameters.hostType === 'GitHub' else 'quay.io/redhat-ai-dev/codegen:latest' }} appPort: 8501 + appRunCommand: "streamlit run codegen-app.py" modelServiceContainer: quay.io/ai-lab/llamacpp_python:latest modelServicePort: 8001 # Renders all the template variables into the files and directory names and content, and places the result in the workspace. 
@@ -199,7 +200,7 @@ spec: tags: '["ai", "llamacpp", "vllm", "python"]' owner: ${{ parameters.owner }} repoSlug: '${{ parameters.imageOrg }}/${{ parameters.imageName }}' - defaultBranch: ${{ parameters.branch }} + defaultBranch: ${{ parameters.branch }} - id: fetch-github-action name: Fetch GitHub Action action: fetch:plain diff --git a/templates/object-detection/content/docs/.assets/open-terminal.png b/templates/object-detection/content/docs/.assets/open-terminal.png new file mode 100644 index 00000000..b3330cb4 Binary files /dev/null and b/templates/object-detection/content/docs/.assets/open-terminal.png differ diff --git a/templates/object-detection/content/docs/.assets/workbench-name.png b/templates/object-detection/content/docs/.assets/workbench-name.png new file mode 100644 index 00000000..ccbdc160 Binary files /dev/null and b/templates/object-detection/content/docs/.assets/workbench-name.png differ diff --git a/templates/object-detection/content/docs/rhoai.md b/templates/object-detection/content/docs/rhoai.md new file mode 100644 index 00000000..8c4a945b --- /dev/null +++ b/templates/object-detection/content/docs/rhoai.md @@ -0,0 +1,37 @@ +# Running Samples in OpenShift AI + +This document will outline how you can build and run your sample applications within an OpenShift AI workbench. + +## Prerequisites + +- Red Hat OpenShift AI installed, and `Create workbench for OpenShift AI` selected during component creation. 
+- `oc` cli installed + - `oc` can be downloaded from https://mirror.openshift.com/pub/openshift-v4/clients/ocp/stable/ +- Permissions to run `oc port-forward` on the cluster, specifically an account with the following roles: + - `get`, `create`, and `list` for the `pods/portforward` subresource + +## Running the Sample + +1) Navigate to the OpenShift AI workbench created for your sample application + +2) Go to `File->Open` and select `Terminal` +![image](./.assets/open-terminal.png) + +3) In the terminal, run `cd ${{ values.name }}` to navigate to your sample app's directory + +4) Run `pip install --upgrade -r requirements.txt` to install the dependencies for your application + +5) Run `${{ values.appRunCommand }}` to run the sample in the workbench. + +## Accessing the Sample + +With the sample app now running, the following steps will allow you to access the sample app in your browser: + +1) Navigate back to the OpenShift AI dashboard, and find the name of your workbench. +![image](./.assets/workbench-name.png) + +2) In a terminal window on your machine, run `oc get pods -l app=<workbench-name>`. This will retrieve the name of the pod where the workbench is running. + +3) Run `oc port-forward <pod-name> ${{ values.appPort }}:${{ values.appPort }}` to port forward the sample application's port to your local machine. + +4) Finally, visit `http://localhost:${{ values.appPort }}` in your browser to access the application. 
\ No newline at end of file diff --git a/templates/object-detection/content/mkdocs.yml b/templates/object-detection/content/mkdocs.yml index e63a3f1a..bb44658e 100644 --- a/templates/object-detection/content/mkdocs.yml +++ b/templates/object-detection/content/mkdocs.yml @@ -5,6 +5,7 @@ nav: - Source Component: source-component.md - Pipelines: pipelines.md - GitOps Application: gitops-application.md + - OpenShift AI: rhoai.md plugins: - techdocs-core \ No newline at end of file diff --git a/templates/object-detection/template.yaml b/templates/object-detection/template.yaml index 21bee624..e54eb3f2 100644 --- a/templates/object-detection/template.yaml +++ b/templates/object-detection/template.yaml @@ -165,6 +165,7 @@ spec: srcRepoURL: https://${{ parameters.githubServer if parameters.hostType === 'GitHub' else parameters.gitlabServer }}/${{ parameters.repoOwner }}/${{ parameters.repoName }} appContainer: ${{ 'quay.io/redhat-ai-dev/ai-template-bootstrap-app:latest' if parameters.hostType === 'GitHub' else 'quay.io/redhat-ai-dev/object_detection:latest' }} appPort: 8501 + appRunCommand: "streamlit run object_detection_client.py" modelServiceContainer: quay.io/redhat-ai-dev/object_detection_python:latest modelServicePort: 8000 # Renders all the template variables into the files and directory names and content, and places the result in the workspace. @@ -185,7 +186,7 @@ spec: tags: '["ai", "detr", "python"]' owner: ${{ parameters.owner }} repoSlug: '${{ parameters.imageOrg }}/${{ parameters.imageName }}' - defaultBranch: ${{ parameters.branch }} + defaultBranch: ${{ parameters.branch }} - id: fetch-github-action name: Fetch GitHub Action action: fetch:plain