diff --git a/.github/ISSUE_TEMPLATE/internal_techdebt.yml b/.github/ISSUE_TEMPLATE/internal_techdebt.yml new file mode 100644 index 0000000000..a38291e37f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/internal_techdebt.yml @@ -0,0 +1,54 @@ +name: (Internal) Tech Debt Template +description: Intended to help create internal tech debt change. +title: "[Tech Debt]: " +labels: ["kind/tech-debt"] +body: + - type: dropdown + id: deploy-type + attributes: + label: Type + description: What kind of tech debt is it? + multiple: true + options: + - General Tech Debt (eg. Improve the way something exists today) + - Dev Efficiency (eg. CI improvements, issue templates, etc) + - Infrastructure (eg. Add web sockets) + - Quality (eg. tests) + validations: + required: true + - type: input + id: source + attributes: + label: Source + description: Where did you find this issue? + placeholder: main / feature branch x / incubation / etc + validations: + required: false + - type: textarea + id: description + attributes: + label: Description + description: What should we improve? + validations: + required: + true + - type: textarea + id: why + attributes: + label: Why? + description: What value does this bring / what is it fixing? + validations: + required: + true + - type: textarea + id: anything-else + attributes: + label: Anything else? + description: | + Additional information. Suggested topics: + - Initial investigation + - Known impact this will cause + - Anything else you want to add + validations: + required: + false diff --git a/.github/ISSUE_TEMPLATE/internal_tracker.yml b/.github/ISSUE_TEMPLATE/internal_tracker.yml new file mode 100644 index 0000000000..4345f6215c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/internal_tracker.yml @@ -0,0 +1,64 @@ +name: (Internal) Tracker Template +description: Intended to help with a template for tracking larger grouped items. 
+title: "[Tracker]: " +labels: ["tracker"] +body: + - type: textarea + id: description + attributes: + label: Description + description: An introductory description of the larger task + validations: + required: + true + - type: input + id: branch + attributes: + label: Target Branch + description: What is the feature branch to contain this effort? If not known at this time, replace with `TBD` + placeholder: f/ + validations: + required: true + - type: textarea + id: requirements + attributes: + label: Requirements + description: A series of requirements to consider this tracker complete. + placeholder: | + * P0: Show something + * P2: Allow users to change permissions + validations: + required: true + - type: textarea + id: ux-issues + attributes: + label: Itemized UX Issues + description: | + List the tickets that UX will work on. + + Tip: Using a bullet list will help display links to other tickets by unraveling the name and status of that ticket. + placeholder: | + * #1234 + * Design mocks - Ticket TBD + validations: + required: true + - type: textarea + id: dev-issues + attributes: + label: Itemized Dev Issues + description: | + List the tickets that Development will work on. If unknown at this time, add `TBD` + + Tip: Using a bullet list will help display links to other tickets by unraveling the name and status of that ticket. + placeholder: | + * #1234 + * Implement Table Page - Ticket TBD + validations: + required: true + - type: textarea + id: artifacts + attributes: + label: Related artifacts + description: Any additional artifacts that will help with the tracker goals + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/internal_ux.yml b/.github/ISSUE_TEMPLATE/internal_ux.yml new file mode 100644 index 0000000000..7107cd476c --- /dev/null +++ b/.github/ISSUE_TEMPLATE/internal_ux.yml @@ -0,0 +1,56 @@ +name: (Internal) UX Template +description: Intended to help ux create internal flows. 
+title: "[UX]: " +labels: ["kind/ux"] +body: + - type: textarea + id: description + attributes: + label: Description + description: An introductory description of the task + validations: + required: + true + - type: textarea + id: goals + attributes: + label: Goals + description: | + An itemized list of goals to complete for this ticket. + + If you don't have specific goals, this could also include the user story or jobs to be done. + placeholder: | + * Research... + * Design... + validations: + required: false + - type: textarea + id: output + attributes: + label: Expected Output + description: What would be considered the end result? + validations: + required: false + - type: textarea + id: related-issues + attributes: + label: Related Issues + description: | + Any related issues that might be useful to mention as it relates to this ticket's goals, expectations, or follow ups. + + Tip: Using a bullet list will help display links to other tickets by unraveling the name and status of that ticket. + placeholder: | + * #1234 + * Create figma designs - Ticket TBD + validations: + required: false + - type: textarea + id: artifacts + attributes: + label: Completed artifacts + description: | + Any artifacts you want to easily note as results of the effort. + + Typically this is left empty at the start. Also useful to include useful links or information you would like to share for additional context. 
+ validations: + required: false diff --git a/.github/workflows/auto-add-issues-to-project.yaml b/.github/workflows/auto-add-issues-to-project.yaml new file mode 100644 index 0000000000..8ad02516ca --- /dev/null +++ b/.github/workflows/auto-add-issues-to-project.yaml @@ -0,0 +1,24 @@ +name: Auto Add Issues to Tracking boards +on: + issues: + types: + - opened +jobs: + add-to-project: + name: Add issue to projects + runs-on: ubuntu-latest + steps: + - name: Generate github-app token + id: app-token + uses: getsentry/action-github-app-token@v2 + with: + app_id: ${{ secrets.DEVOPS_APP_ID }} + private_key: ${{ secrets.DEVOPS_APP_PRIVATE_KEY }} + - uses: actions/add-to-project@v0.5.0 + with: + project-url: https://github.com/orgs/opendatahub-io/projects/40 + github-token: ${{ steps.app-token.outputs.token }} + - uses: actions/add-to-project@v0.5.0 + with: + project-url: https://github.com/orgs/opendatahub-io/projects/45 + github-token: ${{ steps.app-token.outputs.token }} diff --git a/.github/workflows/pr-close-image-delete.yml b/.github/workflows/pr-close-image-delete.yml index a4026c45cd..e1b5589089 100644 --- a/.github/workflows/pr-close-image-delete.yml +++ b/.github/workflows/pr-close-image-delete.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Git checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: '0' - name: Install skopeo diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 97ad7b786d..d95072290d 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -10,7 +10,7 @@ jobs: matrix: node-version: [18.x] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Node.js ${{ matrix.node-version }} uses: actions/setup-node@v3.8.1 with: diff --git a/.github/workflows/quay-tag.yml b/.github/workflows/quay-tag.yml index 2905f5026b..aad1257d9f 100644 --- a/.github/workflows/quay-tag.yml +++ b/.github/workflows/quay-tag.yml @@ 
-16,7 +16,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Git checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: '0' - name: Install skopeo diff --git a/.github/workflows/vuln_scan.yml b/.github/workflows/vuln_scan.yml index 910dc4a876..30405e857c 100644 --- a/.github/workflows/vuln_scan.yml +++ b/.github/workflows/vuln_scan.yml @@ -8,7 +8,7 @@ jobs: name: Scan Files runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Run Trivy vulnerability scanner for filesystem uses: aquasecurity/trivy-action@0.11.2 with: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1de6b600eb..feb94a38f5 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,10 +1,12 @@ +[dev setup documentation]: docs/dev-setup.md#requirements + # Contributing Contributing encompasses repository specific requirements. ## Requirements -You can check the requirements of ODH in the [README section](./README.md#requirements). +To review the ODH requirements, please refer to the [dev setup documentation]. ## Writing code diff --git a/README.md b/README.md index 623b5398f0..ca373ab0b0 100644 --- a/README.md +++ b/README.md @@ -1,107 +1,17 @@ -# Open Data Hub Dashboard - -A dashboard for Open Data Hub components. 
- -- Shows what's installed -- Show's what's available for installation -- Links to component UIs -- Links to component documentation - -## Requirements - -ODH requires the following to run: - -- [NodeJS and NPM](https://nodejs.org/) - - Node recommended version -> `18.16.0` - - NPM recommended version -> `9.6.7` -- [OpenShift CLI](https://docs.openshift.com/container-platform/4.12/cli_reference/openshift_cli/getting-started-cli.html) -- [kustomize](https://github.com/kubernetes-sigs/kustomize) -- [podman](https://github.com/containers/podman) -- Have access to [Quay.io](https://quay.io/) - -### Additional tooling requirements - -- [OpenShift CLI, the "oc" command](https://docs.openshift.com/container-platform/4.12/cli_reference/openshift_cli/getting-started-cli.html) -- [Podman](https://github.com/containers/podman) -- [Quay.io](https://quay.io/) - -## Development - - 1. Clone the repository - - ``` bash - git clone https://github.com/opendatahub-io/odh-dashboard - ``` - - 2. Within the repo context, install project dependencies - - ```bash - cd odh-dashboard && npm install - ``` - -### Build project - - ```bash - npm run build - ``` - -### Serve development content - -This is the default context for running a local UI. Make sure you build the project using the instructions above prior to running the command below. - - ```bash - npm run start - ``` +[Dev setup & Requirements]: docs/dev-setup.md +[Dashboard documentation]: docs/README.md -For in-depth local run guidance review the [contribution guidelines](./CONTRIBUTING.md#Serving%20Content). - -### Testing - -Run the tests. - - ```bash - npm run test - ``` - -Run storybook a11y tests and interaction tests. 
- - ```bash - npm run storybook - npm run test:storybook - ``` - -For in-depth testing guidance review the [contribution guidelines](./CONTRIBUTING.md#Testing) - -## Deploying the ODH Dashbard - -### Official Image Builds - -odh-dashboard images are automatically built and pushed to [quay.io](https://quay.io/repository/opendatahub/odh-dashboard) after every commit to the `main` branch. The image tag name format for each image is `main-`. - -Example: The `main` branch is updated with commit `f76e3952834f453b1d085e8627f9c17297c2f64c`. The CI system will automatically build an odh-dashboard image based on that code and push the new image to `odh-dashboard:main-f76e395` and updated `odh-dashboard:main` to point to the same image hash. - -The [nightly](https://quay.io/opendatahub/odh-dashboard:nightly) tag is a floating tag that is updated nightly and points to the most recent `main-` commit from the previous day. - -### Deploy using kustomize - -The [manifests](./manifests) folder contains a [kustomize](https://kustomize.io) manifest that can be used with `kustomize build`. - -### Deploy using a kfdef +# Open Data Hub Dashboard -The [manifests/kfdef](./manifests/kfdef) folder contains an example kfdef to deploy ODH Dashboard with the Notebook Controller backend is located in [odh-dashboard-kfnbc-test.yaml](manifests/kfdef/odh-dashboard-kfnbc-test.yaml). +A dashboard for Open Data Hub components, featuring user flows to navigate and interact with the various component parts of the stack. ## Contributing Contributing encompasses [repository specific requirements](./CONTRIBUTING.md). -### Triaging - -For information on how we triage tickets, see our [triage.md](./docs/triaging.md). - ## Documentation -You can find more information about this project in the [document section](./docs/README.md). - -## Releases +Read more about the Dashboard in one of our documentation links. 
-For more information on how, when, and what we do for releases, see our [releases.md](./docs/releases.md). +* [Dev setup & Requirements] +* [Dashboard documentation] diff --git a/backend/.eslintrc b/backend/.eslintrc index a0d326e003..0d6208eadf 100755 --- a/backend/.eslintrc +++ b/backend/.eslintrc @@ -14,9 +14,7 @@ "tsconfigRootDir": "." }, // includes the typescript specific rules found here: https://github.com/typescript-eslint/typescript-eslint/tree/master/packages/eslint-plugin#supported-rules - "plugins": [ - "@typescript-eslint" - ], + "plugins": ["@typescript-eslint"], "extends": [ "eslint:recommended", "plugin:@typescript-eslint/recommended", @@ -32,9 +30,16 @@ "process": "readonly", "document": "readonly" }, - "settings": { - }, + "settings": {}, "rules": { + "eqeqeq": ["error", "always", { "null": "ignore" }], + "no-restricted-properties": [ + "error", + { + "property": "toString", + "message": "e.toString() should be fastify.log.error(e, 'your string'). Other use-cases should avoid obj.toString() on principle. Craft the string you want instead." 
+ } + ], "@typescript-eslint/explicit-function-return-type": "off", "@typescript-eslint/interface-name-prefix": "off", "@typescript-eslint/no-var-requires": "off", diff --git a/backend/src/plugins/kube.ts b/backend/src/plugins/kube.ts index ee1d78c87f..60610f7c61 100644 --- a/backend/src/plugins/kube.ts +++ b/backend/src/plugins/kube.ts @@ -4,7 +4,7 @@ import { FastifyInstance } from 'fastify'; import * as jsYaml from 'js-yaml'; import * as k8s from '@kubernetes/client-node'; import { DEV_MODE } from '../utils/constants'; -import { cleanupDSPSuffix, initializeWatchedResources } from '../utils/resourceUtils'; +import { cleanupGPU, initializeWatchedResources } from '../utils/resourceUtils'; import { User } from '@kubernetes/client-node/dist/config_types'; const CONSOLE_CONFIG_YAML_FIELD = 'console-config.yaml'; @@ -46,7 +46,10 @@ export default fp(async (fastify: FastifyInstance) => { ); clusterID = (clusterVersion.body as { spec: { clusterID: string } }).spec.clusterID; } catch (e) { - fastify.log.error(`Failed to retrieve cluster id: ${e.response?.body?.message || e.message}.`); + fastify.log.error( + e, + `Failed to retrieve cluster id: ${e.response?.body?.message || e.message}.`, + ); } let clusterBranding = 'okd'; try { @@ -82,12 +85,10 @@ export default fp(async (fastify: FastifyInstance) => { // Initialize the watching of resources initializeWatchedResources(fastify); - // TODO: Delete this code in the future once we have no customers using RHODS 1.19 / ODH 2.4.0 - // Cleanup for display name suffix of [DSP] - cleanupDSPSuffix(fastify).catch((e) => + cleanupGPU(fastify).catch((e) => fastify.log.error( - `Unable to fully cleanup project display name suffixes - Some projects may not appear in the dashboard UI. ${ - e.response?.body?.message || e.message + `Unable to fully convert GPU to use accelerator profiles. 
${ + e.response?.body?.message || e.message || e }`, ), ); diff --git a/backend/src/routes/api/accelerators/acceleratorUtils.ts b/backend/src/routes/api/accelerators/acceleratorUtils.ts new file mode 100644 index 0000000000..d80b12b42a --- /dev/null +++ b/backend/src/routes/api/accelerators/acceleratorUtils.ts @@ -0,0 +1,71 @@ +import { AcceleratorInfo, KubeFastifyInstance } from '../../../types'; + +const RESOURCE_TYPES = [ + 'cpu', + 'memory', + 'pods', + 'ephemeral-storage', + 'hugepages-1Gi', + 'hugepages-2Mi', + 'attachable-volumes-aws-ebs', +]; + +const getIdentifiersFromResources = (resources: { [key: string]: string } = {}) => { + return Object.entries(resources) + .filter(([key]) => !RESOURCE_TYPES.includes(key)) + .reduce<{ [key: string]: number }>((identifiers, [key, value]) => { + identifiers[key] = isNaN(parseInt(value)) ? 0 : parseInt(value); + return identifiers; + }, {}); +}; + +export const getAcceleratorNumbers = async ( + fastify: KubeFastifyInstance, +): Promise => + fastify.kube.coreV1Api + .listNode() + .then((res) => + res.body.items.reduce( + (info, node) => { + // reduce resources down to just the accelerators and their counts + const allocatable = getIdentifiersFromResources(node.status.allocatable); + const capacity = getIdentifiersFromResources(node.status.capacity); + + // update the max count for each accelerator + Object.entries(allocatable).forEach( + ([key, value]) => (info.available[key] = Math.max(info.available[key] ?? 0, value)), + ); + + // update the total count for each accelerator + Object.entries(capacity).forEach( + ([key, value]) => (info.total[key] = (info.total[key] ?? 0) + value), + ); + + // update the allocated count for each accelerator + Object.entries(capacity).forEach( + ([key, value]) => + (info.allocated[key] = (info.allocated[key] ?? 0) + value - (allocatable[key] ?? 
0)), + ); + + // if any accelerators are available, the cluster is configured + const configured = + info.configured || Object.values(info.available).some((value) => value > 0); + + return { + total: info.total, + available: info.available, + allocated: info.allocated, + configured, + }; + }, + { configured: false, available: {}, total: {}, allocated: {} }, + ), + ) + .catch((e) => { + fastify.log.error( + `A ${e.statusCode} error occurred when listing cluster nodes: ${ + e.response?.body?.message || e.statusMessage + }`, + ); + return { configured: false, available: {}, total: {}, allocated: {} }; + }); diff --git a/backend/src/routes/api/accelerators/index.ts b/backend/src/routes/api/accelerators/index.ts new file mode 100644 index 0000000000..16d651ad6d --- /dev/null +++ b/backend/src/routes/api/accelerators/index.ts @@ -0,0 +1,11 @@ +import { KubeFastifyInstance, OauthFastifyRequest } from '../../../types'; +import { getAcceleratorNumbers } from './acceleratorUtils'; +import { logRequestDetails } from '../../../utils/fileUtils'; + +export default async (fastify: KubeFastifyInstance): Promise => { + fastify.get('/', async (request: OauthFastifyRequest) => { + logRequestDetails(fastify, request); + + return getAcceleratorNumbers(fastify); + }); +}; diff --git a/backend/src/routes/api/cluster-settings/clusterSettingsUtils.ts b/backend/src/routes/api/cluster-settings/clusterSettingsUtils.ts index 57692c5ca0..4fab3ccd15 100644 --- a/backend/src/routes/api/cluster-settings/clusterSettingsUtils.ts +++ b/backend/src/routes/api/cluster-settings/clusterSettingsUtils.ts @@ -111,9 +111,7 @@ export const updateClusterSettings = async ( } return { success: true, error: null }; } catch (e) { - fastify.log.error( - 'Setting cluster settings error: ' + e.toString() + e.response?.body?.message, - ); + fastify.log.error(e, 'Setting cluster settings error: ' + e.response?.body?.message); if (e.response?.statusCode !== 404) { return { success: false, error: 'Unable to update 
cluster settings. ' + e.message }; } @@ -137,7 +135,7 @@ export const getClusterSettings = async ( clusterSettings.userTrackingEnabled = segmentEnabledRes.body.data.segmentKeyEnabled === 'true'; } catch (e) { - fastify.log.error('Error retrieving segment key enabled: ' + e.toString()); + fastify.log.error(e, 'Error retrieving segment key enabled.'); } } if (isJupyterEnabled) { @@ -165,7 +163,7 @@ export const getClusterSettings = async ( if (e.statusCode === 404) { fastify.log.warn('Notebook controller culling config not found, culling disabled...'); } else { - fastify.log.error('Error getting notebook controller culling settings: ' + e.toString()); + fastify.log.error(e, 'Error getting notebook controller culling settings.'); throw e; } }); @@ -175,7 +173,7 @@ export const getClusterSettings = async ( clusterSettings.pvcSize = pvcSize; clusterSettings.cullerTimeout = cullerTimeout; } catch (e) { - fastify.log.error('Error retrieving cluster settings: ' + e.toString()); + fastify.log.error(e, 'Error retrieving cluster settings.'); } } diff --git a/backend/src/routes/api/dev-impersonate/index.ts b/backend/src/routes/api/dev-impersonate/index.ts index d5f4a2e9f8..778add7672 100644 --- a/backend/src/routes/api/dev-impersonate/index.ts +++ b/backend/src/routes/api/dev-impersonate/index.ts @@ -23,6 +23,8 @@ export default async (fastify: KubeFastifyInstance): Promise => { url, { headers: { + // This usage of toString is fine for internal dev flows + // eslint-disable-next-line no-restricted-properties Authorization: `Basic ${Buffer.from( `${DEV_IMPERSONATE_USER}:${DEV_IMPERSONATE_PASSWORD}`, ).toString('base64')}`, diff --git a/backend/src/routes/api/dsc/index.ts b/backend/src/routes/api/dsc/index.ts new file mode 100644 index 0000000000..bae485b6db --- /dev/null +++ b/backend/src/routes/api/dsc/index.ts @@ -0,0 +1,12 @@ +import { KubeFastifyInstance } from '../../../types'; +import { secureRoute } from '../../../utils/route-security'; +import { getClusterStatus } from 
'../../../utils/dsc'; + +module.exports = async (fastify: KubeFastifyInstance) => { + fastify.get( + '/status', + secureRoute(fastify)(async () => { + return getClusterStatus(fastify); + }), + ); +}; diff --git a/backend/src/routes/api/gpu/gpuUtils.ts b/backend/src/routes/api/gpu/gpuUtils.ts index e70cad3ece..d1bf2f736b 100644 --- a/backend/src/routes/api/gpu/gpuUtils.ts +++ b/backend/src/routes/api/gpu/gpuUtils.ts @@ -16,6 +16,9 @@ const storage: { lastFetch: number; lastValue: GPUInfo } = { lastFetch: 0, }; +/** + * @deprecated - use getAcceleratorNumbers instead + */ export const getGPUNumber = async (fastify: KubeFastifyInstance): Promise => { if (storage.lastFetch >= Date.now() - 30_000) { fastify.log.info(`Returning cached gpu value (${JSON.stringify(storage)})`); @@ -34,7 +37,7 @@ export const getGPUNumber = async (fastify: KubeFastifyInstance): Promise => { fastify.get('/', async (request: OauthFastifyRequest) => { logRequestDetails(fastify, request); diff --git a/backend/src/routes/api/groups-config/groupsConfigUtil.ts b/backend/src/routes/api/groups-config/groupsConfigUtil.ts index f5b082caae..44b20e348b 100644 --- a/backend/src/routes/api/groups-config/groupsConfigUtil.ts +++ b/backend/src/routes/api/groups-config/groupsConfigUtil.ts @@ -16,19 +16,12 @@ const SYSTEM_AUTHENTICATED = 'system:authenticated'; export const getGroupsConfig = async (fastify: KubeFastifyInstance): Promise => { const customObjectsApi = fastify.kube.customObjectsApi; - try { - const groupsCluster = await getAllGroups(customObjectsApi); - const groupsData = getGroupsCR(); - const groupsProcessed = processGroupData(groupsData); - const groupsConfigProcessed = processGroupConfig(groupsProcessed, groupsCluster); - await removeDeletedGroups(fastify, groupsData, groupsConfigProcessed.groupsCRData); - - return groupsConfigProcessed.groupsConfig; - } catch (e) { - fastify.log.error('Error retrieving group configuration: ' + e.toString()); - const error = createError(500, 'Error retrieving 
group configuration'); - throw error; - } + const groupsCluster = await getAllGroups(customObjectsApi); + const groupsData = getGroupsCR(); + const groupsProcessed = processGroupData(groupsData); + const groupsConfigProcessed = processGroupConfig(fastify, groupsProcessed, groupsCluster); + await removeDeletedGroups(fastify, groupsData, groupsConfigProcessed.groupsCRData); + return groupsConfigProcessed.groupsConfig; }; const transformGroupsConfig = (groupStatus: GroupStatus[]): string[] => { @@ -38,7 +31,7 @@ const transformGroupsConfig = (groupStatus: GroupStatus[]): string[] => { export const updateGroupsConfig = async ( fastify: KubeFastifyInstance, request: FastifyRequest<{ Body: GroupsConfig }>, -): Promise<{ success: GroupsConfig | null; error: string | null }> => { +): Promise => { const customObjectsApi = fastify.kube.customObjectsApi; const { namespace } = fastify.kube; @@ -58,26 +51,17 @@ export const updateGroupsConfig = async ( const error = createError(403, 'Error, groups cannot be empty'); throw error; } - try { - const dataUpdated: GroupsConfigBody = { - adminGroups: adminConfig.join(','), - allowedGroups: allowedConfig.join(','), - }; - - const groupsData = await updateGroupsCR(fastify, dataUpdated); - const groupsProcessed = processGroupData(groupsData); - const groupsCluster = await getAllGroups(customObjectsApi); - const updatedConfig = processGroupConfig(groupsProcessed, groupsCluster); - await removeDeletedGroups(fastify, groupsData, updatedConfig.groupsCRData); - return { - success: updatedConfig.groupsConfig, - error: null, - }; - } catch (e) { - fastify.log.error('Error updating group configuration' + e.toString()); - const error = createError(500, 'Error updating group configuration'); - throw error; - } + const dataUpdated: GroupsConfigBody = { + adminGroups: adminConfig.join(','), + allowedGroups: allowedConfig.join(','), + }; + + const groupsData = await updateGroupsCR(fastify, dataUpdated); + const groupsProcessed = 
processGroupData(groupsData); + const groupsCluster = await getAllGroups(customObjectsApi); + const updatedConfig = processGroupConfig(fastify, groupsProcessed, groupsCluster); + await removeDeletedGroups(fastify, groupsData, updatedConfig.groupsCRData); + return updatedConfig.groupsConfig; }; const processGroupData = (groupsData: GroupsConfigBody): GroupsConfigBodyList => { @@ -105,6 +89,7 @@ const mapListToGroupStatus = * @returns Processed object with the groups, removing missing groups that might be selected */ const processGroupConfig = ( + fastify: KubeFastifyInstance, groupsDataList: GroupsConfigBodyList, groups: string[], ): { groupsConfig: GroupsConfig; groupsCRData: GroupsConfigBody } => { @@ -120,9 +105,14 @@ const processGroupConfig = ( const groupsConfig: GroupsConfig = { adminGroups: adminGroupsConfig, allowedGroups: allowedGroupsConfig, - errorAdmin: getError(groupsDataList.adminGroups, (group) => !groups.includes(group)), + errorAdmin: getError( + fastify, + groupsDataList.adminGroups.filter((group) => group), + (group) => !groups.includes(group), + ), errorUser: getError( - groupsDataList.allowedGroups, + fastify, + groupsDataList.allowedGroups.filter((group) => group), (group) => !groups.includes(group) && group !== SYSTEM_AUTHENTICATED, ), }; @@ -137,13 +127,26 @@ const processGroupConfig = ( return { groupsConfig, groupsCRData: updatedBody }; }; -const getError = (array: string[], predicate: (group: string) => boolean): string | undefined => { +const getError = ( + fastify: KubeFastifyInstance, + array: string[], + predicate: (group: string) => boolean, +): string | undefined => { + let error; + if (array.length === 0) { + error = 'No group is set in the group config, please set one or more group.'; + fastify.log.error(error); + return error; + } + const missingItems = array.filter(predicate); if (missingItems.length === 0) return undefined; - return `The group${missingItems.length === 1 ? 
'' : 's'} ${missingItems.join( + error = `The group${missingItems.length === 1 ? '' : 's'} ${missingItems.join( ', ', )} no longer exists in OpenShift and has been removed from the selected group list.`; + fastify.log.error(error); + return error; }; /** diff --git a/backend/src/routes/api/groups-config/index.ts b/backend/src/routes/api/groups-config/index.ts index e2b07c58ac..9b70b5e59e 100644 --- a/backend/src/routes/api/groups-config/index.ts +++ b/backend/src/routes/api/groups-config/index.ts @@ -6,8 +6,13 @@ import { secureAdminRoute } from '../../../utils/route-security'; export default async (fastify: FastifyInstance): Promise => { fastify.get( '/', - secureAdminRoute(fastify)(async () => { - return getGroupsConfig(fastify); + secureAdminRoute(fastify)(async (request, reply) => { + return getGroupsConfig(fastify).catch((e) => { + fastify.log.error( + `Error retrieving group configuration, ${e.response?.body?.message || e.message}`, + ); + reply.status(500).send({ message: e.response?.body?.message || e.message }); + }); }), ); diff --git a/backend/src/routes/api/images/imageUtils.ts b/backend/src/routes/api/images/imageUtils.ts index 4101960e09..29c0337b0a 100644 --- a/backend/src/routes/api/images/imageUtils.ts +++ b/backend/src/routes/api/images/imageUtils.ts @@ -319,7 +319,7 @@ export const postImage = async ( return { success: true, error: null }; } catch (e) { if (e.response?.statusCode !== 404) { - fastify.log.error('Unable to add notebook image: ' + e.toString()); + fastify.log.error(e, 'Unable to add notebook image'); return { success: false, error: 'Unable to add notebook image: ' + e.message }; } } @@ -348,7 +348,7 @@ export const deleteImage = async ( return { success: true, error: null }; } catch (e) { if (e.response?.statusCode !== 404) { - fastify.log.error('Unable to delete notebook image: ' + e.toString()); + fastify.log.error(e, 'Unable to delete notebook image.'); return { success: false, error: 'Unable to delete notebook image: ' + 
e.message }; } } @@ -375,7 +375,7 @@ export const updateImage = async ( ); if (validName.length > 0) { - fastify.log.error('Duplicate name unable to add notebook image'); + fastify.log.error('Duplicate name unable to add notebook image.'); return { success: false, error: 'Unable to add notebook image: ' + body.name }; } @@ -439,7 +439,7 @@ export const updateImage = async ( return { success: true, error: null }; } catch (e) { if (e.response?.statusCode !== 404) { - fastify.log.error('Unable to update notebook image: ' + e.toString()); + fastify.log.error(e, 'Unable to update notebook image.'); return { success: false, error: 'Unable to update notebook image: ' + e.message }; } } diff --git a/backend/src/routes/api/segment-key/segmentKeyUtils.ts b/backend/src/routes/api/segment-key/segmentKeyUtils.ts index b4c02729a9..8289a4c5c4 100644 --- a/backend/src/routes/api/segment-key/segmentKeyUtils.ts +++ b/backend/src/routes/api/segment-key/segmentKeyUtils.ts @@ -11,7 +11,7 @@ export const getSegmentKey = async (fastify: KubeFastifyInstance): Promise { +app.listen({ port: PORT, host: IP }, (err) => { if (err) { app.log.error(err); process.exit(1); // eslint-disable-line diff --git a/backend/src/types.ts b/backend/src/types.ts index f0815db3eb..1eb8da42d8 100644 --- a/backend/src/types.ts +++ b/backend/src/types.ts @@ -744,6 +744,14 @@ export type GPUInfo = { available: number; autoscalers: gpuScale[]; }; + +export type AcceleratorInfo = { + configured: boolean; + available: { [key: string]: number }; + total: { [key: string]: number }; + allocated: { [key: string]: number }; +}; + export type EnvironmentVariable = EitherNotBoth< { value: string | number }, { valueFrom: Record } @@ -794,12 +802,17 @@ export type NotebookData = { notebookSizeName: string; imageName: string; imageTagName: string; - gpus: number; + accelerator: AcceleratorState; envVars: EnvVarReducedTypeKeyValues; state: NotebookState; username?: string; }; +export type AcceleratorState = { + accelerator?: 
AcceleratorKind; + count: number; +}; + export const LIMIT_NOTEBOOK_IMAGE_GPU = 'nvidia.com/gpu'; type DisplayNameAnnotations = Partial<{ @@ -857,19 +870,20 @@ export type SupportedModelFormats = { autoSelect?: boolean; }; -export type GPUCount = string | number; +export enum ContainerResourceAttributes { + CPU = 'cpu', + MEMORY = 'memory', +} export type ContainerResources = { requests?: { - cpu?: string; + cpu?: string | number; memory?: string; - 'nvidia.com/gpu'?: GPUCount; - }; + } & Record; limits?: { - cpu?: string; + cpu?: string | number; memory?: string; - 'nvidia.com/gpu'?: GPUCount; - }; + } & Record; }; export type ServingRuntime = K8sResourceCommon & { @@ -897,3 +911,52 @@ export type ServingRuntime = K8sResourceCommon & { volumes?: Volume[]; }; }; + +export type AcceleratorKind = K8sResourceCommon & { + metadata: { + name: string; + annotations?: Partial<{ + 'opendatahub.io/modified-date': string; + }>; + }; + spec: { + displayName: string; + enabled: boolean; + identifier: string; + description?: string; + tolerations?: NotebookToleration[]; + }; +}; + +export enum KnownLabels { + DASHBOARD_RESOURCE = 'opendatahub.io/dashboard', + PROJECT_SHARING = 'opendatahub.io/project-sharing', + MODEL_SERVING_PROJECT = 'modelmesh-enabled', + DATA_CONNECTION_AWS = 'opendatahub.io/managed', +} + +type ComponentNames = + | 'codeflare' + | 'data-science-pipelines-operator' + | 'kserve' + | 'model-mesh' + // Bug: https://github.com/opendatahub-io/opendatahub-operator/issues/641 + | 'odh-dashboard' + | 'ray' + | 'workbenches'; + +export type DataScienceClusterKindStatus = { + conditions: []; + installedComponents: { [key in ComponentNames]: boolean }; + phase?: string; +}; + +export type DataScienceClusterKind = K8sResourceCommon & { + spec: unknown; // we should never need to look into this + status: DataScienceClusterKindStatus; +}; + +export type DataScienceClusterList = { + kind: 'DataScienceClusterList'; + items: DataScienceClusterKind[]; +}; diff --git 
a/backend/src/utils/adminUtils.ts b/backend/src/utils/adminUtils.ts index da2a96d6d8..66daf92dc5 100644 --- a/backend/src/utils/adminUtils.ts +++ b/backend/src/utils/adminUtils.ts @@ -52,7 +52,7 @@ export const getGroupsConfig = async ( return await checkUserInGroups(fastify, customObjectApi, adminGroupsList, username); } } catch (e) { - fastify.log.error(e.toString()); + fastify.log.error(e, 'Error getting groups config'); return false; } }; @@ -84,7 +84,7 @@ export const isUserAllowed = async ( ); } } catch (e) { - fastify.log.error(e.toString()); + fastify.log.error(e, 'Error determining isUserAllowed.'); return false; } }; @@ -142,7 +142,7 @@ const checkUserInGroups = async ( return true; } } catch (e) { - fastify.log.error(e.toString()); + fastify.log.error(e, 'Error checking if user is in group.'); } } return false; diff --git a/backend/src/utils/constants.ts b/backend/src/utils/constants.ts index 1a6e736c70..1699c0eac1 100644 --- a/backend/src/utils/constants.ts +++ b/backend/src/utils/constants.ts @@ -1,6 +1,6 @@ import * as path from 'path'; import './dotenv'; -import { DashboardConfig, NotebookSize } from '../types'; +import { DashboardConfig, KnownLabels, NotebookSize } from '../types'; export const PORT = Number(process.env.PORT) || Number(process.env.BACKEND_PORT) || 8080; export const IP = process.env.IP || '0.0.0.0'; @@ -131,3 +131,8 @@ export const DEFAULT_NOTEBOOK_SIZES: NotebookSize[] = [ }, }, ]; + +export const imageUrlRegex = + /^([\w.\-_]+((?::\d+|)(?=\/[a-z0-9._-]+\/[a-z0-9._-]+))|)(?:\/|)([a-z0-9.\-_]+(?:\/[a-z0-9.\-_]+|))(?::([\w.\-_]{1,127})|)/; + +export const LABEL_SELECTOR_DASHBOARD_RESOURCE = `${KnownLabels.DASHBOARD_RESOURCE}=true`; diff --git a/backend/src/utils/dsc.ts b/backend/src/utils/dsc.ts new file mode 100644 index 0000000000..eae01bc1e1 --- /dev/null +++ b/backend/src/utils/dsc.ts @@ -0,0 +1,26 @@ +import { + DataScienceClusterKind, + DataScienceClusterKindStatus, + DataScienceClusterList, + KubeFastifyInstance, +} from 
'../types'; +import { createCustomError } from './requestUtils'; + +export const getClusterStatus = async ( + fastify: KubeFastifyInstance, +): Promise => { + const result: DataScienceClusterKind | null = await fastify.kube.customObjectsApi + .listClusterCustomObject('datasciencecluster.opendatahub.io', 'v1', 'datascienceclusters') + .then((res) => (res.body as DataScienceClusterList).items[0]) + .catch((e) => { + fastify.log.error(`Failure to fetch dsc: ${e.response.body}`); + return null; + }); + + if (!result) { + // May not be using v2 Operator + throw createCustomError('DSC Unavailable', 'Unable to get status', 404); + } + + return result.status; +}; diff --git a/backend/src/utils/groupsUtils.ts b/backend/src/utils/groupsUtils.ts index cca1ad8077..2cfc266b7a 100644 --- a/backend/src/utils/groupsUtils.ts +++ b/backend/src/utils/groupsUtils.ts @@ -16,7 +16,7 @@ export class MissingGroupError extends Error { } export const getGroupsCR = (): GroupsConfigBody => { - if (typeof getDashboardConfig().spec.groupsConfig !== 'undefined') { + if (getDashboardConfig().spec.groupsConfig) { return getDashboardConfig().spec.groupsConfig; } throw new Error(`Failed to retrieve Dashboard CR groups configuration`); diff --git a/backend/src/utils/httpUtils.ts b/backend/src/utils/httpUtils.ts index 6b7ae051c2..014e7354b1 100644 --- a/backend/src/utils/httpUtils.ts +++ b/backend/src/utils/httpUtils.ts @@ -55,7 +55,7 @@ export const proxyCall = ( requestOptions.headers = { ...requestOptions.headers, 'Content-Type': contentType, - 'Content-Length': requestData.length, + 'Content-Length': Buffer.byteLength(requestData, 'utf8'), }; } diff --git a/backend/src/utils/notebookUtils.ts b/backend/src/utils/notebookUtils.ts index 769efce71d..af3c9e2703 100644 --- a/backend/src/utils/notebookUtils.ts +++ b/backend/src/utils/notebookUtils.ts @@ -1,10 +1,10 @@ import { getDashboardConfig } from './resourceUtils'; import { + ContainerResourceAttributes, EnvironmentVariable, ImageInfo, ImageTag, 
KubeFastifyInstance, - LIMIT_NOTEBOOK_IMAGE_GPU, Notebook, NotebookAffinity, NotebookData, @@ -156,7 +156,7 @@ export const assembleNotebook = async ( envName: string, tolerationSettings: NotebookTolerationSettings, ): Promise => { - const { notebookSizeName, imageName, imageTagName, gpus, envVars } = data; + const { notebookSizeName, imageName, imageTagName, accelerator, envVars } = data; const notebookSize = getNotebookSize(notebookSizeName); @@ -191,40 +191,35 @@ export const assembleNotebook = async ( const resources: NotebookResources = { ...notebookSize.resources }; const tolerations: NotebookToleration[] = []; - let affinity: NotebookAffinity = {}; - if (gpus > 0) { + const affinity: NotebookAffinity = {}; + if (accelerator.count > 0 && accelerator.accelerator) { if (!resources.limits) { resources.limits = {}; } if (!resources.requests) { resources.requests = {}; } - resources.limits[LIMIT_NOTEBOOK_IMAGE_GPU] = gpus; - resources.requests[LIMIT_NOTEBOOK_IMAGE_GPU] = gpus; - tolerations.push({ - effect: 'NoSchedule', - key: LIMIT_NOTEBOOK_IMAGE_GPU, - operator: 'Exists', - }); + resources.limits[accelerator.accelerator.spec.identifier] = accelerator.count; + resources.requests[accelerator.accelerator.spec.identifier] = accelerator.count; } else { - affinity = { - nodeAffinity: { - preferredDuringSchedulingIgnoredDuringExecution: [ - { - preference: { - matchExpressions: [ - { - key: 'nvidia.com/gpu.present', - operator: 'NotIn', - values: ['true'], - }, - ], - }, - weight: 1, - }, - ], - }, - }; + // step type down to string to avoid type errors + const containerResourceKeys: string[] = Object.values(ContainerResourceAttributes); + + Object.keys(resources.limits || {}).forEach((key) => { + if (!containerResourceKeys.includes(key)) { + delete resources.limits?.[key]; + } + }); + + Object.keys(resources.requests || {}).forEach((key) => { + if (!containerResourceKeys.includes(key)) { + delete resources.requests?.[key]; + } + }); + } + + if 
(accelerator.accelerator?.spec.tolerations) { + tolerations.push(...accelerator.accelerator.spec.tolerations); } if (tolerationSettings?.enabled) { @@ -272,6 +267,7 @@ export const assembleNotebook = async ( 'notebooks.opendatahub.io/last-image-selection': imageSelection, 'opendatahub.io/username': username, 'kubeflow-resource-stopped': null, + 'opendatahub.io/accelerator-name': accelerator.accelerator?.metadata.name || '', }, name: name, namespace: namespace, diff --git a/backend/src/utils/prometheusUtils.ts b/backend/src/utils/prometheusUtils.ts index 141f3025de..bee83cb6b6 100644 --- a/backend/src/utils/prometheusUtils.ts +++ b/backend/src/utils/prometheusUtils.ts @@ -41,7 +41,7 @@ const callPrometheus = async ( fastify.log.info('Successful response from Prometheus.'); return { code: 200, response: parsedData }; } catch (e) { - const errorMessage = e.message || e.toString(); + const errorMessage = e.message || 'Unknown reason.'; fastify.log.error(`Failure parsing the response from Prometheus. 
${errorMessage}`); if (errorMessage.includes('Unexpected token < in JSON')) { throw { code: 422, response: 'Unprocessable prometheus response' }; diff --git a/backend/src/utils/resourceUtils.ts b/backend/src/utils/resourceUtils.ts index 44cbeec139..3fe6353169 100644 --- a/backend/src/utils/resourceUtils.ts +++ b/backend/src/utils/resourceUtils.ts @@ -2,6 +2,7 @@ import * as _ from 'lodash'; import createError from 'http-errors'; import { PatchUtils, V1ConfigMap, V1Namespace, V1NamespaceList } from '@kubernetes/client-node'; import { + AcceleratorKind, BUILD_PHASE, BuildKind, BuildStatus, @@ -33,6 +34,7 @@ import { getRouteForClusterId, } from './componentUtils'; import { createCustomError } from './requestUtils'; +import { getAcceleratorNumbers } from '../routes/api/accelerators/acceleratorUtils'; const dashboardConfigMapName = 'odh-dashboard-config'; const consoleLinksGroup = 'console.openshift.io'; @@ -631,6 +633,126 @@ export const getConsoleLinks = (): ConsoleLinkKind[] => { return consoleLinksWatcher.getResources(); }; +/** + * Converts GPU usage to use accelerator by adding an accelerator profile CRD to the cluster if GPU usage is detected + */ +export const cleanupGPU = async (fastify: KubeFastifyInstance): Promise => { + // When we startup — in kube.ts we can handle a migration (catch ALL promise errors — exit gracefully and use fastify logging) + // Check for migration-gpu-status configmap in dashboard namespace — if found, exit early + const CONFIG_MAP_NAME = 'migration-gpu-status'; + + const continueProcessing = await fastify.kube.coreV1Api + .readNamespacedConfigMap(CONFIG_MAP_NAME, fastify.kube.namespace) + .then(() => { + // Found configmap, not continuing + fastify.log.info(`GPU migration already completed, skipping`); + return false; + }) + .catch((e) => { + if (e.statusCode === 404) { + // No config saying we have already migrated gpus, continue + return true; + } else { + throw `fetching gpu migration configmap had a ${e.statusCode} error: ${ + 
e.response?.body?.message || e?.response?.statusMessage + }`; + } + }); + + if (continueProcessing) { + // Read existing AcceleratorProfiles + const acceleratorProfilesResponse = await fastify.kube.customObjectsApi + .listNamespacedCustomObject( + 'dashboard.opendatahub.io', + 'v1', + fastify.kube.namespace, + 'acceleratorprofiles', + ) + .catch((e) => { + console.log(e); + // If error shows up — CRD may not be installed, exit early + throw `A ${e.statusCode} error occurred when trying to fetch accelerator profiles: ${ + e.response?.body?.message || e?.response?.statusMessage + }`; + }); + + const acceleratorProfiles = ( + acceleratorProfilesResponse?.body as { + items: AcceleratorKind[]; + } + )?.items; + + // If not error and no profiles detected: + if ( + acceleratorProfiles && + Array.isArray(acceleratorProfiles) && + acceleratorProfiles.length === 0 + ) { + // if gpu detected on cluster, create our default migrated-gpu + const acceleratorDetected = await getAcceleratorNumbers(fastify); + + if (acceleratorDetected.configured) { + const payload: AcceleratorKind = { + kind: 'AcceleratorProfile', + apiVersion: 'dashboard.opendatahub.io/v1', + metadata: { + name: 'migrated-gpu', + namespace: fastify.kube.namespace, + }, + spec: { + displayName: 'NVIDIA GPU', + identifier: 'nvidia.com/gpu', + enabled: true, + tolerations: [ + { + effect: 'NoSchedule', + key: 'nvidia.com/gpu', + operator: 'Exists', + }, + ], + }, + }; + + try { + await fastify.kube.customObjectsApi.createNamespacedCustomObject( + 'dashboard.opendatahub.io', + 'v1', + fastify.kube.namespace, + 'acceleratorprofiles', + payload, + ); + } catch (e) { + // If bad detection — exit early and dont create config + throw `A ${ + e.statusCode + } error occurred when trying to add migrated-gpu accelerator profile: ${ + e.response?.body?.message || e?.response?.statusMessage + }`; + } + } + } + + // Create configmap to flag operation as successful + const configMap = { + metadata: { + name: CONFIG_MAP_NAME, + 
namespace: fastify.kube.namespace, + }, + data: { + migratedCompleted: 'true', + }, + }; + + await fastify.kube.coreV1Api + .createNamespacedConfigMap(fastify.kube.namespace, configMap) + .then(() => fastify.log.info('Successfully migrated GPUs to accelerator profiles')) + .catch((e) => { + throw `A ${e.statusCode} error occurred when trying to create gpu migration configmap: ${ + e.response?.body?.message || e?.response?.statusMessage + }`; + }); + } +}; /** * @deprecated - Look to remove asap (see comments below) * Converts namespaces that have a display-name annotation suffixed with `[DSP]` over to using a label. diff --git a/docs/README.md b/docs/README.md index 27bc4a227a..6e688e4eee 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,11 +1,38 @@ +[Dev setup & Requirements]: dev-setup.md +[Dashboard Configuration]: dashboard-config.md +[Settings Panel]: admin-dashboard.md +[User Interaction]: user-interaction.md +[Bring Your Own Notebook]: byon.md +[process flows docs]: process-definition/README.md +[Architecture]: architecture.md +[SDK]: SDK.md +[Old custom APIs]: apis.md +[releases]: release-steps.md + # Dashboard Documentation -This project is divided into several sections, each one describing features of the dashboard. +This is the general documentation of the Dashboard component. + +## Configuration & Features + +> Note: We are working to detail out our feature flows to help with internal documentation efforts. + +* [Dashboard Configuration] +* [Settings Panel] +* [User Interaction] +* [Bring Your Own Notebook] + +## Developer Readmes + +* [Dev setup & Requirements] +* [Architecture] +* [SDK] + * [Old custom APIs] - Deprecated, moving to SDK + +## Process Flows + +Read more on how we do things in our [process flows docs]. 
+ +### Releases -* [ODH Dashboard Architecture](architecture.md) -* [Dashboard Configuration](dashboard_config.md) -* [Settings Panel](admin_dashboard.md) -* [User Interaction](user_interaction.md) -* [Bring Your Own Notebook](byon.md) -* [SDK](SDK.md) - * [Custom APIs](apis.md) -- Deprecated, moving to SDK +For more information on how, when, and what we do for releases, see our [releases] documentation. diff --git a/docs/SDK.md b/docs/SDK.md index 2c4e050901..d50a987704 100644 --- a/docs/SDK.md +++ b/docs/SDK.md @@ -1,12 +1,20 @@ +[k8s pass through API]: ../backend/src/routes/api/k8s/pass-through.ts +[CONTRIBUTING]: ../CONTRIBUTING.md +[@openshift/dynamic-plugin-sdk]: https://www.npmjs.com/package/@openshift/dynamic-plugin-sdk +[@openshift/dynamic-plugin-sdk-utils]: https://www.npmjs.com/package/@openshift/dynamic-plugin-sdk-utils +[@openshift/dynamic-plugin-sdk-webpack]: https://www.npmjs.com/package/@openshift/dynamic-plugin-sdk-webpack +[jsonpatch]: https://jsonpatch.com/ +[jsonpatch operations]: https://jsonpatch.com/#operations + # OpenShift Plugin SDK -[@openshift/dynamic-plugin-sdk](https://www.npmjs.com/package/@openshift/dynamic-plugin-sdk) is an SDK package for making k8s related calls. +[@openshift/dynamic-plugin-sdk] is an SDK package for making k8s related calls. 
It is technically split into three parts: -- [@openshift/dynamic-plugin-sdk](https://www.npmjs.com/package/@openshift/dynamic-plugin-sdk) -- Core set of tools to build a plugin, we don't use this a lot today, but this is needed to bootstrap our frontend for later inclusion on other systems -- [@openshift/dynamic-plugin-sdk-utils](https://www.npmjs.com/package/@openshift/dynamic-plugin-sdk-utils) -- This will be where we will pull the most of our content from, it has all the k8s utilities we should need -- [@openshift/dynamic-plugin-sdk-webpack](https://www.npmjs.com/package/@openshift/dynamic-plugin-sdk-webpack) -- Simply just as it states, a webpack set of tools to help configure out webpack to package up to be an SDK +- [@openshift/dynamic-plugin-sdk] - Core set of tools to build a plugin, we don't use this a lot today, but this is needed to bootstrap our frontend for later inclusion on other systems +- [@openshift/dynamic-plugin-sdk-utils] - This is where we will pull most of our content from, it has all the k8s utilities we should need +- [@openshift/dynamic-plugin-sdk-webpack] - Simply just as it states, a webpack set of tools to help configure out webpack to package up to be an SDK As of today (2022-08-31), we are not looking to be a plugin. We just want to start moving that direction and making use of the utilities so we don't need custom backend to make API calls. @@ -22,26 +30,26 @@ Instead of using our custom NodeJS backend, we'll want to migrate to using one o All these are heavily typed and are Generics`*`, so you should be able to infer from the types what is needed. But to help that along, here are a couple helpful notes: -- `model` -- Models are simply just a collection of properties that describes a K8s Resource Type. K8s ones like `ConfigMap` & `Pod` to CRDs like `Notebook` and `OdhDashboardConfig` -- we will create our list of these under `frontend/src/models` -- check it out for examples. 
-- `queryOptions` -- These are just simply additional options to help target your call at something; typically you'll be setting `name` and `ns` (aka `namespace`) +- `model` - Models are simply just a collection of properties that describes a K8s Resource Type. K8s ones like `ConfigMap` & `Pod` to CRDs like `Notebook` and `OdhDashboardConfig` - we will create our list of these under `frontend/src/models` - check it out for examples. +- `queryOptions` - These are just simply additional options to help target your call at something; typically you'll be setting `name` and `ns` (aka `namespace`) `*` A couple notes about the Generic nature of these functions -1. You can type the response as well as your input -- aim to do this always for better typing, best have TS doing as much as possible +1. You can type the response as well as your input - aim to do this always for better typing, best have TS doing as much as possible 2. Our old types may not be 1:1 compatible with other resource types we have. Likely all you will need to do is convert over to the `K8sResourceCommon` type from the SDK instead of using ours ### Pass Through API -We have set up a pass through API that will effectively take the path built by the SDK's utilities during one of the k8s{CRUD}Resource calls noted above. This API will use the token of your user provided by our OAuth container and send your request off to a kube instance. We will give up on custom error handling and let the client deal with the error from k8s. +We have set up a pass through API that will effectively take the path built by the SDK's utilities during one of the k8s{CRUD}Resource calls noted above. This API will use the token of your user provided by our OAuth container and send your request off to a kube instance. We will give up on custom error handling and let the client deal with the error from k8s. -See the [k8s pass through API](../backend/src/routes/api/k8s/pass-through.ts) here. +See the [k8s pass through API] here. 
### Pass Through Impersonate User Dev Mode In order to check regular user permissions without disabling the rest of the backend functionality in `dev mode`, you can add the `DEV_IMPERSONATE_USER` and `DEV_IMPERSONATE_PASSWORD` environment variables to your local setup with valid k8s username and password in your cluster. This will bypass the regular pass-through flow and will add that specific headers to the calls. The steps to impersonate another user are listed as follows: -1. Create a new env variable in your `.env.local` file with this format `DEV_IMPERSONATE_USER=` and `DEV_IMPERSONATE_PASSWORD=` -2. Run the dev server for ODH dashboard. If you don't know how to run a local dev server, please refer to [CONTRIBUTING](../CONTRIBUTING.md) +1. Create a new env variable in your `.env.local` file with this format `DEV_IMPERSONATE_USER=` and `DEV_IMPERSONATE_PASSWORD=` +2. Run the dev server for ODH dashboard. If you don't know how to run a local dev server, please refer to [CONTRIBUTING] 3. Click on the username on the top right corner to open the dropdown menu, and choose `Start impersonate`, then the page will refresh and you will be impersonating as the user you set up in step 1 4. To stop impersonating, click on the `Stop impersonate` button in the header toolbar @@ -49,7 +57,7 @@ NOTE: You may not be able to read data from some Prometheus applications when im ## Patches -Patches are based on [jsonpatch](https://jsonpatch.com/). For those who are unaware of the details let's do a quick breakdown on how they work. When making a `k8sPatchResource` call, it will ask for `Patches[]`. A `Patch` is just simply a straight forward operation on the existing resource. +Patches are based on [jsonpatch]. For those who are unaware of the details let's do a quick breakdown on how they work. When making a `k8sPatchResource` call, it will ask for `Patches[]`. A `Patch` is just simply a straight forward operation on the existing resource. 
Say you wanted to update a `ConfigMap` to have a new property: @@ -64,10 +72,10 @@ k8sPatchResource({ ``` - `op` is what kind of operation to apply to this path - - 'add' -- adds a new item - - 'replace' -- updates an existing item - - 'remove' -- removes the item (you'd omit `value` naturally as there is no value for removing) - - There are other operations you can do as well ([see the docs](https://jsonpatch.com/#operations)) + - 'add' - adds a new item + - 'replace' - updates an existing item + - 'remove' - removes the item (you'd omit `value` naturally as there is no value for removing) + - There are other operations you can do as well (see the [jsonpatch operations]) - `path` is the path from the root of the k8s object using `/` as a deliminator, include the key you want to modify - Unless it is a complex object you're adding, you'll likely specify `value` as a string/number diff --git a/docs/admin_dashboard.md b/docs/admin-dashboard.md similarity index 100% rename from docs/admin_dashboard.md rename to docs/admin-dashboard.md diff --git a/docs/apis.md b/docs/apis.md index d83d1267a4..55cadc3b80 100644 --- a/docs/apis.md +++ b/docs/apis.md @@ -1,5 +1,11 @@ + +[architectural documentation]: architecture.md#custom-backend-business-logic + # Backend APIs -The backend for the dashboard is node REST server that performs k8s calls on behalf of the frontend. The following are a list of current apis and there functionality. + +> Note: These flows are deprecated in the [architectural documentation] + +The backend for the dashboard is node REST server that performs k8s calls on behalf of the frontend. The following are a list of current apis and their functionality. ## Endpoints and methods @@ -39,11 +45,11 @@ The backend for the dashboard is node REST server that performs k8s calls on beh ### /notebook/{notebook} -**GET** - Retrieves a specific notebook by it's id. 
This returns a single [notebook object](https://github.com/opendatahub-io/odh-dashboard/blob/bf49dc23cd4b5477111ad4590e401a423186fa54/backend/src/types.ts#L259) +**GET** - Retrieves a specific notebook by its id. This returns a single [notebook object](https://github.com/opendatahub-io/odh-dashboard/blob/bf49dc23cd4b5477111ad4590e401a423186fa54/backend/src/types.ts#L259) -**DELETE** - Deletes a specific notebook image using it's notebook id. +**DELETE** - Deletes a specific notebook image using its notebook id. -**PUT** - Updates a specific notebook by it's id. The payload should be the updated fields as shown in a [notebook object](https://github.com/opendatahub-io/odh-dashboard/blob/bf49dc23cd4b5477111ad4590e401a423186fa54/backend/src/types.ts#L259) +**PUT** - Updates a specific notebook by its id. The payload should be the updated fields as shown in a [notebook object](https://github.com/opendatahub-io/odh-dashboard/blob/bf49dc23cd4b5477111ad4590e401a423186fa54/backend/src/types.ts#L259) ### /quickstarts @@ -63,4 +69,4 @@ The backend for the dashboard is node REST server that performs k8s calls on beh ### /validate-isv/results -**GET** - Retrieves the current status of an ISV. \ No newline at end of file +**GET** - Retrieves the current status of an ISV. 
diff --git a/docs/architecture.md b/docs/architecture.md index 564a3af313..57a47d2955 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -1,14 +1,33 @@ +[Dashboard Deployment `containers`]: ../manifests/base/deployment.yaml +[OpenShift OAuth Proxy repo]: https://github.com/openshift/oauth-proxy +[OpenShift SDK]: https://github.com/openshift/dynamic-plugin-sdk +[SDK tidbits]: SDK.md +[Playwright API]: https://playwright.dev/docs/locators +[VSCode plugin]: https://marketplace.visualstudio.com/items?itemName=ms-playwright.playwright +[teardown]: https://playwright.dev/docs/api-testing#setup-and-teardown + # ODH Dashboard Architecture +Main topics: + +- [Overall Architecture](#overview) +- [Client Structure](#client-structure) +- [Test Infrastructure](#testing-infrastructure) + ## Overview ![Overview](./meta/arch-overview.png) -> **OpenShift OAuth Proxy** is not part of the Dashboard (see [Dashboard Deployment `containers`](https://github.com/opendatahub-io/odh-dashboard/blob/main/manifests/base/deployment.yaml)) but useful to understand its role. See [OpenShift OAuth Proxy repo](https://github.com/openshift/oauth-proxy) for more information. It is typically in the containers of all services we talk to from the proxy API, as well as the Dashboard itself when the client calls the NodeJS server (in the pod). +> **OpenShift OAuth Proxy** is not part of the Dashboard (see [Dashboard Deployment `containers`]) but useful to understand its role. See [OpenShift OAuth Proxy repo] for more information. It is typically in the containers of all services we talk to from the proxy API, as well as the Dashboard itself when the client calls the NodeJS server (in the pod). The main focus point here is that there are 5 kinds of calls from the client. These are split into two different types. 
The difference in these two types is one takes your request and does it on your behalf (via the service account), and the other does it as you (through direct calls with your bearer token). -### Custom Business Logic Backend +Additional topics in this section: + +- [Custom Backend Business Logic](#custom-backend-business-logic) +- [Proxy/Pass-through based efforts](#proxypass-through-based-efforts) + +### Custom Backend Business Logic > **Note**: this functionality is deprecated @@ -32,17 +51,17 @@ Notes: These are made using your user's k8s permissions. It consumes your OpenShift OAuth Proxy value during the call and makes the call directly to the endpoint of the caller's choosing. This functionality eliminates CORS issues when talking to services that are in OpenShift Console. - Proxy call - - These are always POST calls due to the nature of how the HTTP spec works -- we want to send metadata to the endpoint and GETs are not used for this + - These are always POST calls due to the nature of how the HTTP spec works - we want to send metadata to the endpoint and GETs are not used for this - The call is made from the client with the target OpenShift Route path, request params, and the actual method you want to use - K8s call - - Powered by the [OpenShift SDK](https://github.com/openshift/dynamic-plugin-sdk) + - Powered by the [OpenShift SDK] - These are crafted with the k8sResource calls and makes use of a known model system - Each call takes a `ResourceModel` and additional params like namespace (if applicable) & name (if applicable) and the SDK crafts a url to the endpoint - Our server will take this call and proxy it onto the k8s API server - - See [SDK tidbits](./SDK.md) + - See [SDK tidbits] - Prometheus call - Uses known prometheus endpoints to get data - - Somewhat deprecated -- it could probably be reworked to use the Proxy call endpoint, but serves as isolated functionality + - Somewhat deprecated - it could probably be reworked to use the Proxy call 
endpoint, but serves as isolated functionality These all share the same underlying proxy to an endpoint call, they just structure the data differently before calling. @@ -52,51 +71,60 @@ These all share the same underlying proxy to an endpoint call, they just structu When building new client features, there are a few things worth noting about the current direction of the codebase. +Topics in this section: + +- [Coding Structure](#coding-structure) + ### Coding Structure -> **Note**: Some folders do not fully comply with this design -- there is a cleanup effort underway -- do not let that dissuade you from following the correct architecture choices noted down below when adding new features / code. +> **Note**: Some folders do not fully comply with this design - there is a cleanup effort underway - do not let that dissuade you from following the correct architecture choices noted down below when adding new features / code. -- `/src/api` -- The current API +- `/src/api` - The current API - Allowed imports: `/src/utilities` - This content should be isolated to the main effort to make the call - There is some work here to clean up a little of the data, but effectively this folder should only talk about the very specific call structure needed to do a basic action (aka, less business logic - more functionality); eg, "Get Route" not "Get Notebook Route" or "Get DS Projects Notebook Route" - If it's a cornerstone of data and has a specific core use-case, separate the logic; eg "Get Projects" & "Get DS Projects" are both in here -- `/src/api/types/k8s.ts` -- These are the types that are for k8s API data and responses - - The k8s calls are usually the source of these types -- any resource that is `K8sResourceCommon` is a `${Name}Kind` as it has a `.kind` parameter of the name -- `/src/components` -- All generic components (not tied to a feature or functionality) +- `/src/api/types/k8s.ts` - These are the types that are for k8s API data and responses + - The k8s calls are 
usually the source of these types - any resource that is `K8sResourceCommon` is a `${Name}Kind` as it has a `.kind` parameter of the name +- `/src/components` - All generic components (not tied to a feature or functionality) - Allowed imports: `/src/utilities` - These components should not contain any application data concepts - no external side effects - eg. "IndentSection" - a component for indenting content -- `/src/components/pf-overrides` -- All PatternFly component overrides +- `/src/components/pf-overrides` - All PatternFly component overrides - Allowed imports: `/src/utilities` - When a PatternFly component has an issue that has not yet landed, or will not land, in official PatternFly, create an override component that should be used instead. - eg. "Table" - a component to build tables and fixes layout -- `/src/concepts` -- Sharable code and logic for multiple areas of the application; Read as "This is conceptually about X resource, doesn't care where it is mounted" +- `/src/concepts` - Sharable code and logic for multiple areas of the application; Read as "This is conceptually about X resource, doesn't care where it is mounted" - Disallowed imports: `/src/pages` - eg. Reading project details / shared context - - eg. Shared conceptual logic -- "Components and utilities to parse and read Notebook resources" -- `/src/pages` -- All specific view/route logic; Read as "This is tied to the nav item for this page" + - eg. 
Shared conceptual logic - "Components and utilities to parse and read Notebook resources" +- `/src/pages` - All specific view/route logic; Read as "This is tied to the nav item for this page" - Allowed imports: `*` - Should contain constants, utilities, hooks, and anything else that is specifically needed for that area of the application -- `/src/utilities` -- All generic utilities and hook-utilities not tied to a feature or functionality +- `/src/utilities` - All generic utilities and hook-utilities not tied to a feature or functionality - Allowed imports: `none` - These utilities should not contain any application data concepts - eg. `useFetchState` - the generic fetch and store data hook - eg. `time`, `string`, etc - generic utilities for manipulation of not feature-related data -- `/src/types.ts` -- The generic types +- `/src/types.ts` - The generic types - Allowed to be imported everywhere - Should not contain any application data concepts - - _This will have duplicates from the old design (used in `/src/services`) -- they will likely duplicate some effort in `/src/k8sTypes.ts`_ -- `/src/typeHelpers.ts` -- All TypeScript type utilities + - _This will have duplicates from the old design (used in `/src/services`) - they will likely duplicate some effort in `/src/k8sTypes.ts`_ +- `/src/typeHelpers.ts` - All TypeScript type utilities - Allowed to be imported everywhere -> **Note**: if the folder was not mentioned above, it is deprecated and should be avoided when modifying code (within' reason). +> **Note**: if the folder was not mentioned above, it is deprecated and should be avoided when modifying code (within reason). -### Testing Structure +## Testing Infrastructure ![testing structure](./meta/testing.png) -#### Test Structure +Topics in this section: + +- [Test Structure](#test-structure) +- [Testing Types](#testing-types) + +### Test Structure Tests can be divided into the following categories: unit, integration, accessibility, and end to end testing. 
To keep organized of the different types of tests, there will be a test folder at the root of the frontend project with the following structure. @@ -115,9 +143,16 @@ Unit tests are co-located in a `__tests__` directory adjacent to the target sour /targetFile.spec.ts ``` -#### Testing Types +### Testing Types + +Sub topics: + +- [Unit Testing](#unit-testing) +- [Integration Testing](#integration-testing) +- [Accessibility Testing](#accessibility-testing) +- [E2E Testing](#e2e-testing) -##### Unit Testing +#### Unit Testing Unit tests cover util functions and other non React based functions. Use Jest to test each function using `describe` to group together the utils file and the specific function. Then each test is described using `it`. @@ -140,11 +175,11 @@ describe('Project view screen utils', () => { }); ``` -##### Integration Testing +#### Integration Testing Integration tests will be conducted on specific screens in the app using storybook stories and the playwright testing framework. -###### Test setup steps +**Test setup steps** 1. Identify view components that don't require any props and create a story around the entire screen component that handles the business logic. 2. Create a default export story configuration for the selected component and its child stories. Specify the API mocks using the `msw` parameter. 
@@ -196,9 +231,7 @@ export const EditModel = { import { test, expect } from '@playwright/test'; test('Create project', async ({ page }) => { - await page.goto( - './iframe.html?id=tests-stories-pages-projects-projectview--create-project&viewMode=story', - ); + await page.goto(navigateToStory('projects-projectview', 'create-project')); // wait for page to load await page.waitForSelector('text=Create data science project'); @@ -217,9 +250,7 @@ test('Create project', async ({ page }) => { To run storybook UI: `cd ./frontend && npm run storybook` ```ts -await page.goto( - './iframe.html?id=tests-stories-pages-projects-projectview--create-project&viewMode=story', -); +await page.goto(navigateToStory('projects-projectview', 'create-project')); ``` 6. Wait for the page to load and the story to settle before performing any assertions or actions. Use `page.waitForSelector()` to wait for a specific element to appear as an indication of the story being loaded. @@ -238,9 +269,9 @@ await page.getByLabel('Description').fill('Test project description'); await expect(page.getByRole('button', { name: 'Create' })).toBeEnabled(); ``` -Note: Adjust the selectors in the code according to the specific component and testing requirements. Use the [Playwright API](https://playwright.dev/docs/locators) to find the appropriate selectors. +Note: Adjust the selectors in the code according to the specific component and testing requirements. Use the [Playwright API] to find the appropriate selectors. -1. To execute these tests you can run: +8. To execute these tests you can run: ```bash npm run test:integration @@ -248,7 +279,7 @@ npm run test:integration This will either attach to an already running instance of storybook UI, or start up a new instance at port `6006.` -##### Accessibility Testing +#### Accessibility Testing Accessibility testing is covered automatically by an a11y storybook plugin. Tests will fail if there is an error determined by a11y. 
Run these tests with @@ -257,11 +288,11 @@ cd ./frontend && npm run storybook npm run test:accessibility ``` -##### E2E Testing +#### E2E Testing -For end to end testing we will use Playwright just as we did with e2e testing. These are probably the easiest tests to write as there is a handy [VSCode plugin](https://marketplace.visualstudio.com/items?itemName=ms-playwright.playwright) to generate these tests for us. An e2e test covers a wider range of use cases and is not restricted to one screen or page. These tests should be grouped by user stories. Each file is an area to test such as pipelines or projects, and each test is a user story. +For end to end testing we will use Playwright just as we did with e2e testing. These are probably the easiest tests to write as there is a handy [VSCode plugin] to generate these tests for us. An e2e test covers a wider range of use cases and is not restricted to one screen or page. These tests should be grouped by user stories. Each file is an area to test such as pipelines or projects, and each test is a user story. -For example, a possible e2e test might be under `projects.spec.ts` and a test called “Create, edit, and destroy a project” which would create a project, edit the project, and then delete the project. This test is nice because it does the cleanup for us. But there may be times when this is not the case, so make sure to [teardown](https://playwright.dev/docs/api-testing#setup-and-teardown) any hanging resources. +For example, a possible e2e test might be under `projects.spec.ts` and a test called “Create, edit, and destroy a project” which would create a project, edit the project, and then delete the project. This test is nice because it does the cleanup for us. But there may be times when this is not the case, so make sure to [teardown] any hanging resources. 
_Example_ diff --git a/docs/byon.md b/docs/byon.md index f560333a4b..3f55bb6ea8 100644 --- a/docs/byon.md +++ b/docs/byon.md @@ -1,16 +1,19 @@ + + # Bring Your Own Notebook ODH provides several out-of-the-box notebook images that automatically include packages to make it easy for users to get started with common components. However, in many/most instances, users need a specific set of packages/libraries with specific versions depending on the projects. That is why ODH Dashboard has the ability to import existing notebook images to spawn as custom notebooks. ## Enabling BYON -To enable this feature, you need to have first enable the [admin panel](admin_dashboard.md) and then the flag `disableBYONImageStream` in the [dashboard configuration](dashboard_config.md) turned to `false`. +To enable this feature, you need to have first enabled the [admin panel](admin-dashboard.md) and then the flag `disableBYONImageStream` in the [dashboard configuration](dashboard-config.md) turned to `false`. Once you have completed both steps you will see a section called `Notebook Images` inside the `Settings` panel. ## Minimum requirements for BYON For image to be spawneable via JupyterHub Spawner, it is required to meet the following criteria: + * It needs to include Python runtime, >= 3.8. * Python packages `jupyterhub` and `jupyterlab` need to be installed. * Environment variable `HOME` is set and points to a writable directory for every user. diff --git a/docs/dashboard_config.md b/docs/dashboard-config.md similarity index 96% rename from docs/dashboard_config.md rename to docs/dashboard-config.md index c0cb9891c9..cf20d49269 100644 --- a/docs/dashboard_config.md +++ b/docs/dashboard-config.md @@ -1,3 +1,5 @@ + + # Dashboard Config By default the ODH Dashboard comes with a set of core features enabled that are design to work for most scenarios. The dashboard can be configured from its OdhDashboard CR, `odh-dashboard-config`. 
@@ -87,7 +89,6 @@ The `notebookController` field controls the Notebook Controller options such as ```yaml notebookController: enabled: true - gpuSetting: autodetect pvcSize: 20Gi notebookNamespace: odh-notebooks notebookTolerationSettings: @@ -108,7 +109,7 @@ New annotations we created are: | `notebooks.opendatahub.io/last-image-selection` | The last image the user selected (on create notebook) | | `notebooks.opendatahub.io/last-size-selection` | The last notebook size the user selected (on create notebook) | -`*` - We need the original user's name (we translate their name to kube safe characters for notebook name and for the label) for some functionality. If this is omitted from the Notebook (or they don't have one yet) we try to make a validation against the current logged in user. This will work most of the time (and we assume logged in user when they don't have a Notebook), if this fails because you're an Admin and we don't have this state, we consider this an invalid state -- should be rare though as it requires the subset of users that are Admins to have a bad-state Notebook they are trying to impersonate (to start or view that users Notebook information). +`*` - We need the original user's name (we translate their name to kube safe characters for notebook name and for the label) for some functionality. If this is omitted from the Notebook (or they don't have one yet) we try to make a validation against the current logged in user. This will work most of the time (and we assume logged in user when they don't have a Notebook), if this fails because you're an Admin and we don't have this state, we consider this an invalid state - should be rare though as it requires the subset of users that are Admins to have a bad-state Notebook they are trying to impersonate (to start or view that users Notebook information). 
### Serving Runtime Template Order diff --git a/docs/dev-setup.md b/docs/dev-setup.md new file mode 100644 index 0000000000..847b513951 --- /dev/null +++ b/docs/dev-setup.md @@ -0,0 +1,84 @@ +# Dev Setup + +## Requirements + +ODH requires the following to run: + +- [NodeJS and NPM](https://nodejs.org/) + - Node recommended version -> `18.16.0` + - NPM recommended version -> `9.6.7` +- [OpenShift CLI](https://docs.openshift.com/container-platform/4.12/cli_reference/openshift_cli/getting-started-cli.html) +- [kustomize](https://github.com/kubernetes-sigs/kustomize) + +### Additional tooling + +- [Podman](https://github.com/containers/podman) +- [Quay.io](https://quay.io/) + +## Development + +1. Clone the repository + ``` bash + git clone https://github.com/opendatahub-io/odh-dashboard + ``` +2. Within the repo context, install project dependencies + ```bash + cd odh-dashboard && npm install + ``` + +### Build project + +```bash +npm run build +``` + +### Serve development content + +This is the default context for running a local UI. Make sure you build the project using the instructions above prior to running the command below. + +> Note: You must be logged-in with `oc` before you can start the backend. + +> Note: The CLI logged-in user will need to be a `cluster-admin` level user on the cluster to mimic the Dashboard Service Account level of permissions. You could also bind the [cluster role](../manifests/base/cluster-role.yaml) to your user as we do with the service account [binding](../manifests/base/cluster-role-binding.yaml). + +```bash +npm run start +``` + +For in-depth local run guidance review the [contribution guidelines](./CONTRIBUTING.md#Serving%20Content). + +### Testing + +Run the tests. + + ```bash + npm run test + ``` + +Run storybook a11y tests and interaction tests. 
+ + ```bash + npm run storybook + npm run test:storybook + ``` + +For in-depth testing guidance review the [contribution guidelines](./CONTRIBUTING.md#Testing) + +## Deploying the ODH Dashboard + +### Official Image Builds + +odh-dashboard images are automatically built and pushed to [quay.io](https://quay.io/repository/opendatahub/odh-dashboard) after every commit to the `main` branch. The image tag name format for each image is `main-<commit hash>`. + +Example: The `main` branch is updated with commit `f76e3952834f453b1d085e8627f9c17297c2f64c`. The CI system will automatically build an odh-dashboard image based on that code and push the new image to `odh-dashboard:main-f76e395` and update `odh-dashboard:main` to point to the same image hash. + +The [nightly](https://quay.io/opendatahub/odh-dashboard:nightly) tag is a floating tag that is updated nightly and points to the most recent `main-<commit hash>` commit from the previous day. + +### Deploy using kustomize + +The [manifests](./manifests) folder contains a [kustomize](https://kustomize.io) manifest that can be used with `kustomize build`. + +### Deploy using a kfdef + +> Note: This flow is deprecated, deploy v2 [Operator](https://github.com/opendatahub-io/opendatahub-operator) with their custom CR. + +The [manifests/kfdef](./manifests/kfdef) folder contains an example kfdef to deploy ODH Dashboard with the Notebook Controller backend, located in [odh-dashboard-kfnbc-test.yaml](manifests/kfdef/odh-dashboard-kfnbc-test.yaml). 
diff --git a/docs/process-definition/README.md b/docs/process-definition/README.md new file mode 100644 index 0000000000..93cae1a7de --- /dev/null +++ b/docs/process-definition/README.md @@ -0,0 +1,54 @@ +[Detailing out our branches]: branches.md +[Triage Team]: triage.md +[incubation]: incubation.md +[Standard Issue Flow]: flow-standard.md +[Feature Flow]: flow-feature.md +[Tracker Flow]: flow-feature.md#tracker-flow +[Story Flow]: flow-feature.md#story-flow +[Tech Debt]: tech-debt.md +[UX Flow]: flow-ux.md +[templates]: https://github.com/opendatahub-io/odh-dashboard/issues/new/choose +[ODH Dashboard Planning]: https://github.com/orgs/opendatahub-io/projects/24 + +# Process Flows Defined + +> Note: Process flows revolve around the [ODH Dashboard Planning] board's statuses. + +These flows are around our process for how to manage our tickets and keep things consistent and aligned. Allowing for multiple individuals and teams to collaborate effectively without a lot of slowdown for conversations on "what do I do here?" - If the process has gaps, we will improve it as they are identified. + +![fullFlow.png](meta%2FfullFlow.png) + +> Note: There is a lot of detail in here, this is all the flows together. See each flow individually below. + +- [Standard Issue Flow] +- [Feature Flow] + - [Tracker Flow] + - [Story Flow] +- [UX Flow] + +There are also some additional resources that may be helpful: + +- [Detailing out our branches] +- [Triage Team] +- [Incubation] +- [Tech Debt] + +## Issue Templates + +The issue [templates] we use have a flow or a use-case behind it. To help sync the discussion on process with these templates, see below how that mapping works. 
+ +Users, designers, and developers of the Dashboard will often find themselves interacting with the first two non-internal flows to help identify bugs & to request new features: + +- "Bug Report" is used in the [Standard Issue Flow] +- "Feature Request" is used in the [Standard Issue Flow] + +Whereas the other templates are used in internal flows: + +> **What is an internal template?**
+> It is just a template structure we built to help unify the organization of common flows. They can be created by anyone, but usually have mixed results if not participating in said flows. + +- "(Internal) Feature Story Template" is used in the [Story Flow] +- "(Internal) Tech Debt Template" use-case is described in [Tech Debt] +- "(Internal) Test Template" use-case is described in [Tech Debt] +- "(Internal) Tracker Template" is used in the [Feature Flow] +- "(Internal) UX Template" is used in the [UX Flow] diff --git a/docs/process-definition/advisors.md b/docs/process-definition/advisors.md new file mode 100644 index 0000000000..78eaa3bd34 --- /dev/null +++ b/docs/process-definition/advisors.md @@ -0,0 +1,17 @@ +[endpoints]: ../../backend/src/routes/api +[OWNERS]: ../../OWNERS +[core flows]: ./README.md +[incubation]: incubation.md#main-stability + +# Dashboard Advisors + +Those considered "Advisors" of the Dashboard are also [OWNERS] file `approvers`. + +These are the major responsibilities that fall upon the Advisors. + +1. They are the `approvers` of all PRs that go into the Dashboard - no code change can be done without an Advisor approval +2. They help direct the path of the Dashboard code... they are intended to answer questions like this: + - Do we want a community logged feature; does it add the value / user flows we want to showcase? (UX will be consulted as needed) + - Is a feature ready to merge into `main`? (see [incubation] for more details) +3. They strive to align the team on the same [core flows] +4. 
Bring stability to our API; CRD definition & internal [endpoints] diff --git a/docs/process-definition/branches.md b/docs/process-definition/branches.md new file mode 100644 index 0000000000..889edfb78f --- /dev/null +++ b/docs/process-definition/branches.md @@ -0,0 +1,95 @@ +[test infrastructure]: ../architecture.md#testing-infrastructure +[release documentation]: releases.md +[incubation]: incubation.md +[bugs and small feature requests]: flow-standard.md +[Dashboard Advisors]: advisors.md +[tracker]: https://github.com/opendatahub-io/odh-dashboard/issues?q=is%3Aopen+is%3Aissue+label%3Atracker + +# Branches + +There are really two types of branches. + +- Fork Branch + - When you create a fork of our repo, these are the branches you push to your fork (often times called `origin`) + - These are often the source of PRs (with a few exceptions, see below) +- Upstream Branch + - Feature branches + - Core branches (like `main` and `incubation`) + - [Bot branches](#bot-branches) + +Every _new_ commit needs to come from a fork through a PR. We don't allow for pushing new content directly through our flows. New docs file, new code change, and even fixing a typo needs a PR from your fork to get into our repository. + +With that said, there are really 3 types of flows that utilize both fork branches and Upstream branches. + +![branchesComplex.png](meta%2FbranchesComplex.png) + +Notes: + +- `main` this is our base branch, all code stems from here +- `incubation` this is our "bleeding edge" of our feature set, this came from `main` but [cannot ever return to it](#never-returns-to-main) +- Feature branches are the main other flow, they start from `main` and will eventually return to `main` - often times with a stay in `incubation` + +There is only ever 1 `main` and 1 `incubation` branch. Feature branches start with `f/`, and can be as many as needed. + +Read more on git tags & releases in our [release documentation]. 
+ +## Main + +> aka "The Stable Branch" + +`main` is our most stable and tested code. This flow should be as stable as we are today. We look to only push [bugs and small feature requests] into this branch. Everything else goes to [feature branches](#feature-branches), so it can make it into `incubation` and get closer to stability before merging into `main`. + +When a feature branch merges into `main` it can be officially considered _done_ and up to our standards for stable. + +## Incubation + +> Note: This branch is an amalgamation of several feature branches, source code here is often struggling to co-exist among the features. Conflicts are normal when merging into it. + +This branch has a unique flow to it. We have documented more about it in the [incubation] readme. + +This branch holds the most bleeding edge of our code. All our features completed to this point are in this branch. This branch is what we take to ODH releases and encourage testing on to help build the best flows we can have. Internal nightly builds consist of this branch too. + +`main` is often merged into this branch to keep it updated with the stable fixes. + +### Never returns to main + +Once the second feature merged into `incubation` it was not likely to ever see a merge back into `main`. As it is unlikely for `incubation` to be so close to `main` that it can directly sync back into it. If we do merge `incubation` into `main` it would take all the features not ready for a stable release and put them in it. This would be counter productive to this system of incubation. + +## Feature Branches + +> Note: Always prefix your feature branch with `f/`. This helps with organization and showcases it is a feature branch. 
+ +Feature branches have two main criteria: + +- A [tracker] issue backs it, notes its relationship, and the contents within +- A set of related issues; sometimes it is a singular goal + - Determining what goes into each feature branch is handled by the [Dashboard Advisors] + +### How to determine it is stable + +This is a two step process - understanding Stability & Completing the Checklist. + +#### What does it mean to be stable? + +Breaking code is never the goal - but feature branches are there for that very specific reason. You isolate your code to work on something. At any given state on a feature branch the code could: not work, work but cause issues, have bugs that if a certain flow is followed it will crash the UI. Feature branches live and breathe with the feature and often times need that flexibility to build up brand-new flows or to adjust from previous ones. + +Stable code has been tested and proven to support the majority of flows and situations. + +The feature is considered stable when it matches the [stability checklist](#stability-checklist). + +#### Stability Checklist + +It needs to follow the following criteria: + +- [ ] Tested by QE + major bugs are fixed +- [ ] Tested by BU + flows are uninterrupted & improved by the change +- [ ] Has tests associated to the effort (does not need to be 100%) + - QE has a test layer (this is extra) + - We have [test infrastructure] that needs to be in place +- [ ] [Dashboard Advisors] need to sign off and do the merge to `main` + +## Bot Branches + +These are our dependabot branches. They are extra branches not described in the diagram above. Essentially they are created in the upstream repo and merge into `main`. + +Each branch and subsequent related PR are informing us of a dependency upgrade among the wider open community. We use these PRs to make sure we are not falling too far behind on our dependency versions. 
diff --git a/docs/process-definition/flow-feature.md b/docs/process-definition/flow-feature.md new file mode 100644 index 0000000000..31eed3b03f --- /dev/null +++ b/docs/process-definition/flow-feature.md @@ -0,0 +1,78 @@ +[SMEs]: ../smes.md#dashboard-feature-areas +[incubation]: incubation.md +[branches]: branches.md +[stability & merging back to `main`]: branches.md#how-to-determine-it-is-stable +[development based effort]: tech-debt.md +[Advisors]: advisors.md +[UX Flow]: flow-ux.md + +# Feature Process Flow + +When we need to create a larger group of work, we'll need an issue called a "Tracker". Trackers offer up a grouping above a single issue, as to serve as a tracker of the issues related to a goal. All tracker work is tied to a feature branch `f/...` - this is to help with [incubation]. + +Often times this is a new feature to the Dashboard, including issues broken down into bite-sized development tasks. These may come in issues like "create the table", "add filters to the table", "create empty state", etc. These issues also contain UX related endeavours, such as a design task to determine the UX flow. + +## The Overall Flow + +![featureFlow.png](meta%2FfeatureFlow.png) + +There are three types of flows part of this process: + +- The [Tracker Flow](#tracker-flow) +- The [Story Flow](#story-flow) +- The [UX Flow] (not detailed above) + +### Tracker Flow + +Every feature needs a big tracking issue that helps drive unified goals and a status of where we are at with the feature. 
+ +![featureTrackerFlow.png](meta%2FfeatureTrackerFlow.png) + +Notes: + +- Dev or UI create the tracker as needed (typically it's the [Advisors] or a [Feature Lead](#feature-leads)) +- Once it gets into the `Trackers` status we can start +- Design effort happens through the [UX Flow] +- Development effort happens through the [Story flow](#story-flow) +- Typically all stories & ux tickets are closed before a feature is ready for [incubation] +- Post incubation leads us to merge back into `main` + - Read more about [branches] + - Read more about [stability & merging back to `main`] + +### Story Flow + +The Dev "bite-sized" efforts to completing the larger task. + +![featureStoryFlow.png](meta%2FfeatureStoryFlow.png) + +Notes: + +- Stories are created and moved to `Dev To Do` + +- They are picked up by the feature lead or others that are helping out +- The work is completed and merged into the feature branch associated with the [Tracker](#tracker-flow) +- Stories are then manually closed (until we can get automation to do this) + +When creating Stories, it is good to note some edges of the effort: + +- Stories are created usually in decently large number depending on the size of the feature +- Each story has an objective and drives home a specific part +- Stories can be partial aspects of pages, but are typically sized in such a way they can be worked on concurrently + - Stories can be as simple as "build table", "add filtering to the table", and "add actions to the table" + - These are dependent on each other, but once the table is done they can be concurrent +- Stories are created to usually fit the [UX Flow] + +## Feature Leads + +The leaders and to be [SMEs] of the feature. They determine the flow needed to complete the objective. + +There is always at least 1 Lead, most times 2. There is not really a limit to the number of leads, but it is typically 1 or 2 main, and then backups/additional resources helping out. 
+ +If there is 1 lead, it is typically a [development based effort](tech-debt.md). + +If there are 2 leads, it is often 1 UX and 1 UI lead. + +- The UX Lead would be in charge of figuring out the flow and the eventual UX designs that drive the UI feature + - This is done through the [UX Flow] +- The UI Lead would be in charge with determining the technical requirements and working with the UX Lead to make sure the flow can work out when we go to development + - If a new tech is involved, the UI Lead would be in charge of understanding the impact + - UI Leads do not have to be [Advisors], but if it is not an Advisor, there will be one that shadows to assist with the architecture diff --git a/docs/process-definition/flow-standard.md b/docs/process-definition/flow-standard.md new file mode 100644 index 0000000000..3fe83c1d3e --- /dev/null +++ b/docs/process-definition/flow-standard.md @@ -0,0 +1,30 @@ +[triage]: triage.md +[branches]: branches.md +[stable branch]: branches.md#main +[feature flow]: flow-feature.md +[incubation flow]: incubation.md +[Advisors]: advisors.md +[templates]: https://github.com/opendatahub-io/odh-dashboard/issues/new/choose + +# Standard Issue Flow + +This is the flow most issues will go through. Did you find a bug? Want a new feature? Maybe an addition to an existing feature? These will make easy flows to follow. + +> Note: We will often times look at [minor vs larger features](#minor-vs-larger-features) and adjust created issues as we see fit. This isn't needed to be thought of beforehand, just insight. 
+ +![standardFlow.png](meta%2FstandardFlow.png) + +Notes: + +- Issues are created through our [templates] and then waits for the [triage] team +- Once we review the issue, if UX is needed the flow switches into the UX flow and waits for that to finish the needed design efforts before we can continue + - Once UX is done, the flow sits in a `Dev Ready` status awaiting the [Advisors] to review the results and hand it off to the Dev team +- Eventually or if it doesn't need UX the flow will fall into the `Dev To Do` bucket, and we'll pick it up organically based on availability and priority and set into `Dev In Progress` +- Once the solution has been applied, the following runs out the process: + - A PR will be created reviewed by maintainers of ODH Dashboard + - [Advisors] will also review and be in charge of approving the solution + - The merge bot then will merge the PR into `main` (see more about [branches]) which will auto-close the issue + +## Minor vs Larger Features + +Features come in all sizes and can be sometimes problematic to the stability of the Dashboard. To avoid instability in our [stable branch] (`main`) we will often send the request through our [feature flow]. This will make the solution pass through our [incubation flow]. It is slower, but has a greater chance of applying the right solution before we merge it into `main`. ODH will get the feature for awhile while we verify the solution and stability of the flows. diff --git a/docs/process-definition/flow-ux.md b/docs/process-definition/flow-ux.md new file mode 100644 index 0000000000..150f968782 --- /dev/null +++ b/docs/process-definition/flow-ux.md @@ -0,0 +1,34 @@ +[bug and feature request flow]: flow-standard.md +[larger feature efforts]: flow-feature.md#the-overall-flow +[triage]: triage.md +[internal UX template]: https://github.com/opendatahub-io/odh-dashboard/issues/new/choose + +# User Experience (UX) Flow + +The UX team performs several investigations and design efforts for the UI dev team. 
We use these flows to work through how we shape the product to use-cases and design the new addition. + +There are three types of UX flows today: + +1. The [bug and feature request flow] when it needs UX attention before development can act on it + * The ticket gets attention from the UX team after normal [triage] + * When the UX team has made a decision it will return to developers + * Decision-making can be made through collaboration with SMEs or external individuals that have domain knowledge +2. [UX feature design effort](#ux-only-flow) for [larger feature efforts] +3. [An internal UX flow](#ux-only-flow) + +## UX Only Flow + +This flow is used when there is no reliance on a direct developer follow up. For those flows, see the [bug and feature request flow]. + +![uxInternalFlow.png](meta%2FuxInternalFlow.png) + +This flow assists in both internal flows and flows to fit the [larger feature efforts]. + +A larger feature effort flow: + +- UX creates an issue via their [internal UX template] + - If this is for feature work, you'll need to do one more step and link it to your related feature Tracker + > Note: If the issue is not mentioned in the tracker, artifacts associated to this effort will likely be hidden from easy access. 
+- Change the status from `Untriaged` to `UX Backlog` and skip the [triage] team + - If this is accidentally missed, the Triage team will move it along +- Process continues as the UX flows need until the ticket is completed and then it is closed diff --git a/docs/process-definition/incubation.md b/docs/process-definition/incubation.md new file mode 100644 index 0000000000..244deeec56 --- /dev/null +++ b/docs/process-definition/incubation.md @@ -0,0 +1,72 @@ +[branches]: branches.md +[branches stable]: branches.md#how-to-determine-it-is-stable +[incubation releases]: releases.md#release-of-incubation +[triage]: triage.md +[tracker]: https://github.com/opendatahub-io/odh-dashboard/issues?q=is%3Aopen+is%3Aissue+label%3Atracker +[templates]: https://github.com/opendatahub-io/odh-dashboard/issues/new/choose + +# Incubation + +Incubation is a term we use for flows that are not quite fully stable. They have been worked on by the developers and have yielded a flow that may not have all the edge cases figured out, or simply may just not be fully vetted for the flow. Essentially, we are looking for feedback on how the functionality works and if you find any use-cases you might be experiencing that does not quite work with the incubated features. 
+ +## Branching Strategy + +There is a lot to discuss here, more detailed information can be found in the [branches] readme, but simply put, the flow follows this pattern: + +![branchesBasic.png](meta%2FbranchesBasic.png) + +- Features are created off the state of `main` +- Code is added to the feature branches through PRs +- The feature branches merge into `incubation` when they are ready +- The feature branches get updates through testing / incubation efforts and merge these new changes into `incubation` +- When they are finished incubation, signed off on by quality teams, business units, they merge back to `main` and become part of the stable product + +> Note: This process is simplified for the discussion of incubation, more details can be found on the [incubation releases] & [branches] readmes. + +## Branch Stability + +In a nutshell: + +- `main` - stable +- `incubation` - semi-stable +- feature branches - not reliably stable + +### `main` Stability + +`main` should strive to always be stable and avoid regressions. Naturally this is easier said than done, but we are continually improving the process to make this more reality than goal. With the help of `incubation`, this is easier today than it was in the past. + +### `incubation` Stability + +Incubation strives to be as stable as `main` but it suffers from multiple streams colliding (see [branches] for more info on this). A break in `incubation` is still very important as it is what we release to the ODH community. How we approach this can be found in more detail in the [addressing issues](#addressing-issues) section. + +As feature branches merge into `incubation`, this is when stability becomes far more important. Read more on how we consider [branches stable]. + +## Addressing issues + +### Trackers evolve with the changes in incubation + +We'll look to improve our [tracker] list of items as we fix issues and address concerns to help track the effort for future reflection. 
This is not required if you are reporting an issue; the [triage] team will handle this. + +### Reporting Issues + +This can be in the form of a bug or a request for an addition to the feature incubating. Both of these flows can be found in our [templates]. + +Logging notes: + +- Report as many as you want / per idea + - Avoid doubling down on an issue, but if easier, we can split them at [triage] +- Bug report issue flow has an option for you to say ODH release or incubation branch if you want +- Feature Requests do not - but that's okay, effectively you're still asking for "something", just detail out your request + +### Developer Investigation Of Issues + +Incubation will have issues occasionally despite best efforts to avoid this. Here are some known flows for how we will address this. + +1. Track the source of the issue + * Was it from `main` merging in and not being compatible with the state of `incubation`? + * Was this from a feature branch? + * Could this not be from any source and was a failure of merge conflicts? +2. Once the issue is traced, we have some paths forward + * From `main`? This is critical - we need to address this asap for [main stability](#main-stability) + * From a feature branch? Fix it there, merge back into `incubation` when ready + * From a merge conflict resolution? 
(TODO) diff --git a/docs/process-definition/meta/ImagesSource.drawio b/docs/process-definition/meta/ImagesSource.drawio new file mode 100644 index 0000000000..22b06bb056 --- /dev/null +++ b/docs/process-definition/meta/ImagesSource.drawio @@ -0,0 +1,1537 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/process-definition/meta/branchesBasic.png b/docs/process-definition/meta/branchesBasic.png new file mode 100644 index 0000000000..7ae8ef1601 Binary files /dev/null and b/docs/process-definition/meta/branchesBasic.png differ diff --git a/docs/process-definition/meta/branchesComplex.png b/docs/process-definition/meta/branchesComplex.png new file mode 100644 index 0000000000..c2409e234f Binary files /dev/null and b/docs/process-definition/meta/branchesComplex.png differ diff --git a/docs/process-definition/meta/featureFlow.png b/docs/process-definition/meta/featureFlow.png new file mode 100644 index 0000000000..696977838a Binary files /dev/null 
and b/docs/process-definition/meta/featureFlow.png differ diff --git a/docs/process-definition/meta/featureStoryFlow.png b/docs/process-definition/meta/featureStoryFlow.png new file mode 100644 index 0000000000..0cdb1a51b2 Binary files /dev/null and b/docs/process-definition/meta/featureStoryFlow.png differ diff --git a/docs/process-definition/meta/featureTrackerFlow.png b/docs/process-definition/meta/featureTrackerFlow.png new file mode 100644 index 0000000000..e9c3fd8158 Binary files /dev/null and b/docs/process-definition/meta/featureTrackerFlow.png differ diff --git a/docs/process-definition/meta/fullFlow.png b/docs/process-definition/meta/fullFlow.png new file mode 100644 index 0000000000..999d0f452a Binary files /dev/null and b/docs/process-definition/meta/fullFlow.png differ diff --git a/docs/process-definition/meta/standardFlow.png b/docs/process-definition/meta/standardFlow.png new file mode 100644 index 0000000000..310be4ecd0 Binary files /dev/null and b/docs/process-definition/meta/standardFlow.png differ diff --git a/docs/process-definition/meta/uxInternalFlow.png b/docs/process-definition/meta/uxInternalFlow.png new file mode 100644 index 0000000000..47d4c6487f Binary files /dev/null and b/docs/process-definition/meta/uxInternalFlow.png differ diff --git a/docs/process-definition/releases.md b/docs/process-definition/releases.md new file mode 100644 index 0000000000..4f18b3982e --- /dev/null +++ b/docs/process-definition/releases.md @@ -0,0 +1,36 @@ +[release-steps]: ../release-steps.md +[incubation]: incubation.md +[Release Notes]: https://github.com/opendatahub-io/odh-dashboard/releases + +# Release Documentation + +This describes how and why we do releases. To do an actual release, you'll want to read the [release-steps]. + +![branchesComplex.png](meta%2FbranchesComplex.png) + +This diagram showcases the flow between features & incubation. It ends with the git tags we use to denote releases. 
+ +## Git Tags & Release Notes + +Our release git tags are showcased as [Release Notes] on the release page. + +There are two types of releases: + +1. [Main & Stable releases](#release-of-main) - these are denoted by `vX.YY.Z` +2. [Incubation releases](#release-of-incubation) - these are denoted in two ways: + 1. Typically following a `main` release `vX.YY.Z-incubation` + 2. Adhoc (unofficial) release `incubation-YYYY-MM-DD` - these releases are infrequent and are not an up-to-date representation of our stable branch, these do not get release notes + +## Official Release Types + +### Release of `main` + +Every 3 weeks we release on a Friday. This is useful for any downstream products (like RHODS) to use our stable code. + +ODH & other "bleeding edge" implementations may want to consider using `incubation`. Read more on [incubation]. + +### Release of `incubation` + +We take the contents of the `main` release and merge it into `incubation` and fork a branch. This allows us to continue to provide the latest stable fixes along with all that we have in [incubation]. + +> Note: The rare unofficial & adhoc `incubation` release is often for a particular customer and aimed around a feature branch. These adhoc releases are not guaranteed for long term release stability. The next official `incubation` release will close this gap. diff --git a/docs/process-definition/tech-debt.md b/docs/process-definition/tech-debt.md new file mode 100644 index 0000000000..342cec0eb6 --- /dev/null +++ b/docs/process-definition/tech-debt.md @@ -0,0 +1,10 @@ +[Advisors]: advisors.md +[the board]: https://github.com/orgs/opendatahub-io/projects/24/views/28 + +# Tech Debt Flow + +> This is a work in progress (WIP) flow. See [the board] for the latest status. + +We are addressing our Tech Debt through a series of meetings for priority and organization. This is our first step in managing our backlog in a way that the developer team can participate. + +This effort is driven by the [Advisors]. 
diff --git a/docs/process-definition/triage.md b/docs/process-definition/triage.md new file mode 100644 index 0000000000..69dd175639 --- /dev/null +++ b/docs/process-definition/triage.md @@ -0,0 +1,13 @@ +[larger flow]: flow-feature.md +[standard issue flow]: flow-standard.md +[triage steps]: https://github.com/opendatahub-io/odh-dashboard/wiki/Triaging + +# Triage + +## Triage Team + +The triage team consists of all the developers associated with the Dashboard. We triage any `Untriaged` issues and determine if it needs UX, if it's a part of a [larger flow], or if it's following the [standard issue flow]. + +## Triage Steps + +The [triage steps] are recorded in the repo wiki for easy editing and adjusting. diff --git a/docs/release-steps.md b/docs/release-steps.md new file mode 100644 index 0000000000..16344c175f --- /dev/null +++ b/docs/release-steps.md @@ -0,0 +1,99 @@ +[base/kustomization.yaml]: ../manifests/base/kustomization.yaml +[quay repo]: https://quay.io/repository/opendatahub/odh-dashboard?tab=tags +[drafting a new release]: https://github.com/opendatahub-io/odh-dashboard/releases/new +[semver]: https://semver.org/ + +# Releases + +There are two types of releases in the Dashboard, and they usually happen together one after another. + +1. `main` (stable branch) Release - good for adoption of downstream components +2. `incubation` (bleeding edge) Release - good for ODH & community involvement + +## `main` Release + +* Start by [drafting a new release] +* Pick a new [release version](#version-numbers) + * `vX.YY.Z` based on what is included (eg.
`v1.23.4`) + * If you do not have enough information to make the call on minor or patch version updating, you may need to pick one and wait until the release notes are generated +* Set the release title to the same name as the tag +* Click the `Generate release notes` button to get the full list of PRs merged into `main` + * Scan the notes it generated for any gaps in your Release + * If you were waiting on full determination of the release number, make that call now + * Verify the tag & the title of the release; these should match +* Add a header section at the top of the readme called `## Notable Changes` + * Add notes on what can be considered a high level set of changes, don't be too specific on changes and no need to catch every change - just notable ones + * See previous releases for examples +* Mark the release as the `latest` and submit the release + +## `incubation` Release + +> Note: This is a two-day process until we can get the CI working for `incubation`. + +### Day 1 + +> Note: The day of a `main` release is ideal, but it can use the tag if more commits occur in between. Ideally follow the exact release to maintain purity of the release. + +* First make sure [main is released](#main-release) +* Merge the `main` release content into `incubation` +* Wait for the `nightly` build to re-trigger + +### Day 2 + +> Note: Technically can be any day after the first day. Ideally avoid subsequent `main` merges for purity of following up the release. + +1. Go to our [quay repo] & "Add New Tag" off of the latest `nightly` build + * `vX.YY.Z-incubation` naming convention (eg. `v1.23.4-incubation`) + * Get the latest sha digest value for this release +2. Create a branch off the latest of `incubation` + * `vX.YY.Z-incubation-release` naming convention + * Add a commit to this branch, modifying the [base/kustomization.yaml] value for `images.odh-dashboard` to specify the latest sha digest value +3.
Start [drafting a new release], this release will not make use of the generate release notes feature as we want a high level breakdown. + * Versioning will match the quay image name you generated in the first step + * Be sure to disable the `Set as the latest release` setting as we use the stable releases as our latest release + * Template for the release (review the `TODOs` within): + ```markdown + [incubation]: https://github.com/opendatahub-io/odh-dashboard/tree/main/docs/process-definition/incubation.md + [question-based issue]: https://github.com/opendatahub-io/odh-dashboard/issues/new?assignees=&labels=kind%2Fquestion + + [Release Notes]: https://github.com/opendatahub-io/odh-dashboard/releases/tag/vX.YY.Z + + ## Features Incubating + + These are based on the latest release to `main` ([Release Notes]) + + + * Feature A - #12345 + * Feature B - #12346 + + ## What's Incubation? + + Read more about [incubation] in our docs. + + If you have any questions regarding `incubation`, please log a [question-based issue]. We look forward to improving the flow and adding more clarity. + ``` + +## Version Numbers + +Since this is primarily a frontend repo, versioning will need to be aligned to what we do as a UI application. + +We will release when the ODH Dashboard... +- has meaningful content merged since last release +- is in a stable state +- is requested to be released as a ready-state for downstream / other components + +Version naming is done in a [semver] structure `vMajor.Minor.Patch` +- Increment `Major` if we have... + - taken the code in a major architecturally different direction and could have side effects for features +- Increment `Minor` if we have... + - made a notable existing feature change + - a new feature being advertised to be turned on + - a new feature in alpha state (advertise-able but may not be fully feature complete or completely stable) +- Increment `Patch` if we have... 
+ - general bug fixes + - internal refactors (that do not change the feature set) + - new features that are not ready to be turned on (*must* be fully behind a feature flag) +- Note: We try to follow [semver] based on the above information, so if incrementing... + - `Major`, set `Minor` and `Patch` to `0` + - `Minor`, set `Patch` to `0` & leave `Major` as the last value + - `Patch`, leave `Major` and `Minor` at the last value diff --git a/docs/releases.md b/docs/releases.md deleted file mode 100644 index 73974b2f95..0000000000 --- a/docs/releases.md +++ /dev/null @@ -1,83 +0,0 @@ -# Releases - -Releases in ODH Dashboard are done when needed. We release typically every 3 weeks, but we hope to move to a weekly release schedule on Fridays. - -## Determining Release Content - -As an open-source application contributing (see our [CONTRIBUTING.md](../CONTRIBUTING.md)) is available to anyone who would like to participate in asking for features, reporting issues, and even in contributing fixes for them. - -We work hard to provide an application that is applicable on its own and customizable for downstream consumers. This usually leads us to creating features and addressing bugs that are reported outside our repo and brought into it from the outside source. This does not mean we don't intend to support open-source requests for features and bug fixes as they come up. - -Determining the content is often based on importance and ease to integrate new features and bug fixes into the on-going growth of our application. There isn't a clear order to what we address each release but the goal is to create `Current Release` Milestones (see our [Milestone page](https://github.com/opendatahub-io/odh-dashboard/milestones) for active goals). If you do not see your issue within' this milestone, you'll likely not make it into the next release (which should be dated on each milestone). You however can request it on your issue if you see an importance or need for it. 
- -## Version Numbers - -Since this is primarily a frontend repo, versioning will need to be aligned to what we do as a UI application. - -We will release when the ODH Dashboard... -- has meaningful content merged since last release -- is in a stable state -- is requested to be released as a ready-state for downstream / other components - -Version naming is done in a [semver](https://semver.org/) structure `vMajor.Minor.Patch` -- Increment `Major` if we have... - - taken the code in a major architecturally different direction and could have side-effects for features -- Increment `Minor` if we have... - - made a notable existing feature change - - a new feature being advertised to be turned on - - a new feature in alpha state (advertise-able but may not be fully feature complete or completely stable) -- Increment `Patch` if we have... - - general bug fixes - - internal refactors (that do not change the feature set) - - new features that are not ready to be turned on (*must* be fully behind a feature flag) -- Note: We try to follow [semver](https://semver.org/) based on the above information, so if incrementing... - - `Major`, set `Minor` and `Patch` to `0` - - `Minor`, set `Patch` to `0` & leave `Major` as the last value - - `Patch`, leave `Major` and `Minor` at the last value - -## Release Steps - -Releases have multiple steps, this documentation here is so we can be aligned on what we need to do and stay consistent between releases. - -Before getting started, you'll want to open two more tabs to these pages: -- [Release page](https://github.com/opendatahub-io/odh-dashboard/releases) -- [Milestone list](https://github.com/opendatahub-io/odh-dashboard/milestones) & select the `Current Release` Milestone - -Once we reach a date in which we want to do a release (see other sections for more information), we'll want to do the following: - -1. 
Set up the Release notes from the **Release page** - - First start by drafting a new release (button should be in the top right of the page, baring permissions) - - Choose a tag at the top of the forum -- if you have already created a tag, select it, if not type a new version that matches the current Release (eg. `v1.2.3`) - - Set the release title to the same name as the tag - - Click the `Generate release notes` button - - Scan the notes it generated for any gaps in your Release - - Match it up with the `Current Release` milestone (see the **Milestone list**) and verify it has all the needed items - - Add a section at the top of the readme called `Notable Changes` - - Add notes on what can be considered a high level set of changes, don't be too specific on changes and no need to catch every change -- just notable ones - - See previous releases for examples - - Keep this page open as you proceed through the next steps -- if you have to stop at any point mark this release as a `draft` so you can return to it in the future -2. Make sure the `Current Release` Milestone is ready to go (see the **Milestone list**) - - If the contents of the Milestone are all merged, we are ready, move to step 3 - - If the contents of the Milestone are not all merged, determine if we need any of those items in this release - - If not, move them out to a new Milestone or remove them completely from the Milestone - - If yes, merge the content as needed and then restart the release steps -3. Rename the Milestone to the next release (eg. `v1.2.3`) & mark the Milestone closed -4. Update the `Upcoming Release` milestone name and description - - Rename it to `Current Release` as to align with the next set of work we are working on - - Set the description note the current release will be after the version we just renamed to help tie the two together. Eg. "Follows up `v1.2.3`" -5. 
Create a new `Upcoming Release` milestone, so we can plan two releases out - - Set the date of the new Milestone out 3 weeks from the last release (we may release earlier, but we try not to release later) -- if this changes we will update the information in the related milestones - - Set the description to note that it will follow the `Current Release` so that it's more dynamic and less tied to a release number -6. Mark the Release notes document you have as a full release and publish it -7. Update the ODH Manifest with the release details so the Operator can collect our latest changes on release - - Navigate to the [odh-manifests](https://github.com/opendatahub-io/odh-manifests) - - Focusing on the `odh-dashboard` folder, we'll need to copy some files over to track the latest changes of this release - - Test the latest version of the quay image ([our quay repo](https://quay.io/repository/opendatahub/odh-dashboard?tab=tags)) on a cluster to make sure the pods can come up and the Dashboard is accessible - - Create a PR to include the following: - - Switch to the `incubation` branch in `odh-dashboard` - - First delete everything in the folder -- git will do the diff of what changed for us - - Copy all the child folders in the [manifest folder](../manifests) - - Exclude the `overlays` folder as this is for internal testing purposes - - Copy the OWNERS file into the root of the odh-dashboard manifest folder - - Update the `./base/kustomization.yaml` so that the `odh-dashboard` images section has the `newTag` equal to the current release version (aka the tag we created earlier) - - Update the top row of the component versions table on the root readme to have the latest release version (aka the tag we created earlier) diff --git a/docs/smes.md b/docs/smes.md index da59e636cb..0f8124321f 100644 --- a/docs/smes.md +++ b/docs/smes.md @@ -1,3 +1,19 @@ + +[andrewballantyne]: https://github.com/andrewballantyne +[Gkrumbach07]: https://github.com/Gkrumbach07 +[lucferbux]: 
https://github.com/lucferbux +[alexcreasy]: https://github.com/alexcreasy +[DaoDaoNoCode]: https://github.com/DaoDaoNoCode + + +[kywalker-rh]: https://github.com/kywalker-rh +[kaedward]: https://github.com/kaedward +[xianli123]: https://github.com/xianli123 +[vconzola]: https://github.com/vconzola +[yannnz]: https://github.com/yannnz +[simrandhaliw]: https://github.com/simrandhaliw +[yih-wang]: https://github.com/yih-wang + # Subject Matter Experts (SMEs) A given subject matter expert is not necessarily the most knowledgeable in the area, but they are the one who probably knows the most about it when it was originally done or has a responsibility to expand their knowledge to know about the area going forward. Contacting them first will help with delegation of responsibilities at the Dashboard level. @@ -5,11 +21,13 @@ A given subject matter expert is not necessarily the most knowledgeable in the a This will detail out former (or current) feature leads, area leads (has a responsibility to understand the area), as well as any other notable position in relation to the area. If you need to talk to someone or ping someone for a review, this information should help you determine who. Below there will be some terms like “previous” and “backup”, these are for additional context. The way you can read each are as follows: + - **previous** – the initial SME in the area. 
If you need legacy context, this person may be able to help - **backup** – a good person to lean on if there is a need for any 2nd opinions, for bouncing ideas off of, or any larger discussion about direction - **and** – Ping both during conversations – could be onboarding, could be a need to share information, best get both people involved at the same time ## General Dashboard ownership + - Infrastructure / direction - Architect: `Andrew` ([andrewballantyne]) - General UX: `Kyle` ([kywalker-rh]) @@ -20,6 +38,7 @@ Below there will be some terms like “previous” and “backup”, these are f - Area lead: `Lucas` ([lucferbux]) ## Dashboard feature areas + - Data Science Projects - Feature lead: `Andrew` ([andrewballantyne]) - UX: `Kyle` ([kywalker-rh]) **and** `Kun` ([xianli123]) @@ -50,19 +69,3 @@ Below there will be some terms like “previous” and “backup”, these are f - Edge - Feature lead: TBD - UX: `Vince` ([vconzola]) - - -[andrewballantyne]: https://github.com/andrewballantyne -[Gkrumbach07]: https://github.com/Gkrumbach07 -[lucferbux]: https://github.com/lucferbux -[alexcreasy]: https://github.com/alexcreasy -[DaoDaoNoCode]: https://github.com/DaoDaoNoCode - - -[kywalker-rh]: https://github.com/kywalker-rh -[kaedward]: https://github.com/kaedward -[xianli123]: https://github.com/xianli123 -[vconzola]: https://github.com/vconzola -[yannnz]: https://github.com/yannnz -[simrandhaliw]: https://github.com/simrandhaliw -[yih-wang]: https://github.com/yih-wang diff --git a/docs/triaging.md b/docs/triaging.md deleted file mode 100644 index b1e1318963..0000000000 --- a/docs/triaging.md +++ /dev/null @@ -1,66 +0,0 @@ -# Triaging - -Primarily all tickets are triaged and added to our [ODH Dashboard Planning project view](https://github.com/orgs/opendatahub-io/projects/24) to help keep track of everything. 
- -## Triage Steps - -Each ticket that is logged should come with an `untriaged` label ([live filter](https://github.com/opendatahub-io/odh-dashboard/issues?q=is%3Aissue+is%3Aopen+label%3Auntriaged)). This means they need to be triaged to figure out what part of the product they will be part of, how important the change is, and what kind of issue it is. - -Remove the `untriaged` label once you cover the following steps. - -### (1) Labels - -We typically have 3-5 labels per ticket. They consistent of the type of ticket (kind), the part of product (feature) and the need of the change (priority). - -#### What Kind of Ticket - -Typically this is decided at the time of creation, but `kind/*` labels are to specify what impact they have on the product. - -- `kind/bug` is for when we failed to make a flow work correctly -- this could be classified through one of these ways: - - A UX bug -- a flow that "works" but is not sound for the user - - A Functionality bug -- the feature does not work as originally intended - - A Performance bug -- notable performance issues that prevent the user from having a nominal experience using the app -- `kind/enhancement` is for when the product could be expanded into a new area or add new functionality that does exist currently - -If the ticket does not match the given section, we should override it and change the ticket that was logged. - -There is also `kind/documentation` that we can add to sub-qualify the kind of ticket. Typically documentation issues are also either a bug or an enhancement -- although we usually do not have a lot of internal documentation. - -#### What Part of the Product - -Every ticket is part of some part of the product. We treat each of these areas even after the feature to help keep track of tickets. 
- -- `feature/*` labels tracks our major features -- can match multiple of these if needed (see the description of the label for more information) -- `infrastructure` label is for when it does not match one of our features or mainly deals with an infrastructure based item (dependencies, react-router, etc) - -#### What Priority is the Issue - -Every ticket should have a priority -- basic rules for priority are our immediate understanding of the need. This can evolve over time but basically should cover the importance to the user or a downstream consumer of ODH. - -Take a look at [the priority labels](https://github.com/opendatahub-io/odh-dashboard/labels?q=priority) to read the description. - -#### Tracking For UX - -We have a UX team that helps us make clean and clear user interface and user experience decisions. They should be able to quickly find issues as we go through. Naturally we loop them in when we do major UX/UI changes, but for smaller things, it's easier for them to be able to track it themselves. - -To that extent, we want to use the `visual changes` label to track any issue that will change the flow for the user (or fix a flow). Here are some broad strokes to when we want to label the issue with this label: - -- If there is a UI shift -- new section, moving something around (indenting, etc) -- If there is a change to the UX -- the user couldn't do something before, now they can -- If we add new UI - new modals, new actions to get to modals, etc - -Under the hood changes to resource creation that doesn't impact the user's ability to use it, should not have this label to avoid clutter. Obviously any internal code changes to improve things or clean up data that doesn't impact the user -- that also shouldn't have this label. - -#### Other labels - -Any other labels are used for additional filtering of the ticket. They are optional and should be used when necessary by reading the description and applying to that use-case. 
- -### (2) Project - -All tickets belong to the [ODH Dashboard Planning](https://github.com/orgs/opendatahub-io/projects/24) project board. This helps us track the issues and flag them for others to find and work on. Typically all PRs are associated to this one project. - -Note: PRs themselves do not belong to the project -- in the case of no ticket, you should create a ticket and add it to the board for the user. - -### (3) Release - -See the [release planning](./releases.md) document for more information. But basically this field is filled in when we have a desire to complete the ticket within' that release -- which is typically 3 weeks long; see the release milestone for the deadline. diff --git a/docs/user_interaction.md b/docs/user-interaction.md similarity index 93% rename from docs/user_interaction.md rename to docs/user-interaction.md index 85bce81990..027d84b6c6 100644 --- a/docs/user_interaction.md +++ b/docs/user-interaction.md @@ -1,6 +1,8 @@ +[Segment]: https://segment.com/ + # User Interaction -Right now, we are using [Segment](https://segment.com/) as the user interaction tracker for the Dashboard. Segment is a user-interaction telemetry tracker that provides several features, such as multi-platform support, data normalization, and source integration, it can connect several clients with different analytic destinations. +Right now, we are using [Segment] as the user interaction tracker for the Dashboard. Segment is a user-interaction telemetry tracker that provides several features, such as multi-platform support, data normalization, and source integration, it can connect several clients with different analytic destinations. 
## How does it work diff --git a/frontend/.eslintrc b/frontend/.eslintrc index 372e05133a..723794959b 100755 --- a/frontend/.eslintrc +++ b/frontend/.eslintrc @@ -56,6 +56,7 @@ "curly": "error", "camelcase": "warn", "no-else-return": "error", + "eqeqeq": ["error", "always", { "null": "ignore" }], "no-restricted-imports": [ "error", { @@ -63,6 +64,14 @@ { "group": ["~/api/**"], "message": "Read from '~/api' instead." + }, + { + "group": ["~/components/table/**", "!~/components/table/useTableColumnSort"], + "message": "Read from '~/components/table' instead." + }, + { + "group": ["~/components/table/useTableColumnSort"], + "message": "The data will be sorted in the table, don't use this hook outside of '~/components/table' repo. For more information, please check the props of the Table component." } ] } diff --git a/frontend/jest.config.js b/frontend/jest.config.js index 68a37f4e48..4a65fc93d5 100644 --- a/frontend/jest.config.js +++ b/frontend/jest.config.js @@ -30,4 +30,6 @@ module.exports = { // A list of paths to snapshot serializer modules Jest should use for snapshot testing snapshotSerializers: [], + + setupFilesAfterEnv: ['/src/__tests__/unit/jest.setup.ts'], }; diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 1e407212c1..39c7cf06d4 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -17006,9 +17006,9 @@ } }, "node_modules/graphql": { - "version": "16.6.0", - "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.6.0.tgz", - "integrity": "sha512-KPIBPDlW7NxrbT/eh4qPXz5FiFdL5UbaA0XUNz2Rp3Z3hqBSkbj0GVjwFDztsWVauZUWsbKHgMg++sk8UX0bkw==", + "version": "16.8.1", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.1.tgz", + "integrity": "sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw==", "optional": true, "engines": { "node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0" @@ -25317,9 +25317,9 @@ } }, "node_modules/postcss": { - "version": "8.4.23", - 
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.23.tgz", - "integrity": "sha512-bQ3qMcpF6A/YjR55xtoTr0jGOlnPOKAIMdOWiv0EIT6HVPEaJiJB4NLljSbiHoC2RX7DN5Uvjtpbg1NPdwv1oA==", + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", "devOptional": true, "funding": [ { diff --git a/frontend/src/__mocks__/mockInferenceServiceK8sResource.ts b/frontend/src/__mocks__/mockInferenceServiceK8sResource.ts index f47a9d78c5..723b5dc3e3 100644 --- a/frontend/src/__mocks__/mockInferenceServiceK8sResource.ts +++ b/frontend/src/__mocks__/mockInferenceServiceK8sResource.ts @@ -9,6 +9,28 @@ type MockResourceConfigType = { secretName?: string; }; +export const mockInferenceServicek8sError = () => ({ + kind: 'Status', + apiVersion: 'v1', + metadata: {}, + status: 'Failure', + message: + 'InferenceService.serving.kserve.io "trigger-error" is invalid: [metadata.name: Invalid value: "trigger-error": is invalid, metadata.labels: Invalid value: "trigger-error": must have proper format]', + reason: 'Invalid', + details: { + name: 'trigger-error', + group: 'serving.kserve.io', + kind: 'InferenceService', + causes: [ + { + reason: 'FieldValueInvalid', + message: 'Invalid value: "trigger-error": must have proper format', + field: 'metadata.name', + }, + ], + }, +}); + export const mockInferenceServiceK8sResource = ({ name = 'test-inference-service', namespace = 'test-project', diff --git a/frontend/src/__tests__/integration/hooks/useFetchState.spec.ts b/frontend/src/__tests__/integration/hooks/useFetchState.spec.ts index 940f18088e..eae447c826 100644 --- a/frontend/src/__tests__/integration/hooks/useFetchState.spec.ts +++ b/frontend/src/__tests__/integration/hooks/useFetchState.spec.ts @@ -1,9 +1,8 @@ import { test, expect } from '@playwright/test'; +import { navigateToStory } from '~/__tests__/integration/utils'; test('Success', 
async ({ page }) => { - await page.goto( - './iframe.html?args=&id=tests-integration-hooks-usefetchstate--success&viewMode=story', - ); + await page.goto(navigateToStory('hooks-usefetchstate', 'success')); // wait 2 seconds to settle await new Promise((resolve) => setTimeout(resolve, 2000)); @@ -16,9 +15,7 @@ test('Success', async ({ page }) => { }); test('Failure', async ({ page }) => { - await page.goto( - './iframe.html?args=&id=tests-integration-hooks-usefetchstate--failure&viewMode=story', - ); + await page.goto(navigateToStory('hooks-usefetchstate', 'failure')); // wait 2 seconds to settle await new Promise((resolve) => setTimeout(resolve, 2000)); @@ -34,9 +31,7 @@ test('Failure', async ({ page }) => { }); test('Stable', async ({ page }) => { - await page.goto( - './iframe.html?args=&id=tests-integration-hooks-usefetchstate--stable&viewMode=story', - ); + await page.goto(navigateToStory('hooks-usefetchstate', 'stable')); // wait 2 seconds to settle await new Promise((resolve) => setTimeout(resolve, 2000)); @@ -76,9 +71,7 @@ test('Stable', async ({ page }) => { }); test('Refresh rate', async ({ page }) => { - await page.goto( - './iframe.html?args=&id=tests-integration-hooks-usefetchstate--refresh-rate&viewMode=story', - ); + await page.goto(navigateToStory('hooks-usefetchstate', 'refresh-rate')); // wait 2 seconds to settle await new Promise((resolve) => setTimeout(resolve, 2000)); diff --git a/frontend/src/__tests__/integration/pages/clusterSettings/ClusterSettings.spec.ts b/frontend/src/__tests__/integration/pages/clusterSettings/ClusterSettings.spec.ts index 0a68a8faca..b4b861499a 100644 --- a/frontend/src/__tests__/integration/pages/clusterSettings/ClusterSettings.spec.ts +++ b/frontend/src/__tests__/integration/pages/clusterSettings/ClusterSettings.spec.ts @@ -1,9 +1,8 @@ import { test, expect } from '@playwright/test'; +import { navigateToStory } from '~/__tests__/integration/utils'; test('Cluster settings', async ({ page }) => { - await page.goto( - 
'./iframe.html?args=&id=tests-integration-pages-clustersettings-clustersettings--default&viewMode=story', - ); + await page.goto(navigateToStory('pages-clustersettings-clustersettings', 'default')); // wait for page to load await page.waitForSelector('text=Save changes'); const submitButton = page.locator('[data-id="submit-cluster-settings"]'); diff --git a/frontend/src/__tests__/integration/pages/modelServing/ModelServingGlobal.spec.ts b/frontend/src/__tests__/integration/pages/modelServing/ModelServingGlobal.spec.ts index 9b21197786..b79b6d0605 100644 --- a/frontend/src/__tests__/integration/pages/modelServing/ModelServingGlobal.spec.ts +++ b/frontend/src/__tests__/integration/pages/modelServing/ModelServingGlobal.spec.ts @@ -1,10 +1,36 @@ import { test, expect } from '@playwright/test'; +import { navigateToStory } from '~/__tests__/integration/utils'; -test('Delete model', async ({ page }) => { +test('Empty State No Serving Runtime', async ({ page }) => { + await page.goto( + navigateToStory('pages-modelserving-modelservingglobal', 'empty-state-no-serving-runtime'), + ); + + // wait for page to load + await page.waitForSelector('text=No deployed models yet'); + + // Test that the button is enabled + await expect(page.getByRole('button', { name: 'Go to the Projects page' })).toBeTruthy(); +}); + +test('Empty State No Inference Service', async ({ page }) => { await page.goto( - './iframe.html?args=&id=tests-integration-pages-modelserving-modelservingglobal--delete-model&viewMode=story', + navigateToStory('pages-modelserving-modelservingglobal', 'empty-state-no-inference-service'), ); + // wait for page to load + await page.waitForSelector('text=No deployed models'); + + // Test that the button is enabled + await page.getByRole('button', { name: 'Deploy model' }).click(); + + // test that you can not submit on empty + await expect(await page.getByRole('button', { name: 'Deploy' })).toBeDisabled(); +}); + +test('Delete model', async ({ page }) => { + await 
page.goto(navigateToStory('pages-modelserving-modelservingglobal', 'delete-model')); + // wait for page to load await page.waitForSelector('text=Delete deployed model?'); @@ -19,9 +45,7 @@ test('Delete model', async ({ page }) => { }); test('Edit model', async ({ page }) => { - await page.goto( - './iframe.html?args=&id=tests-integration-pages-modelserving-modelservingglobal--edit-model&viewMode=story', - ); + await page.goto(navigateToStory('pages-modelserving-modelservingglobal', 'edit-model')); // wait for page to load await page.waitForSelector('text=Deploy model'); @@ -30,7 +54,8 @@ test('Edit model', async ({ page }) => { await await page.getByLabel('Model Name *').fill(''); await await page.getByLabel('Path').fill(''); await expect(await page.getByRole('button', { name: 'Deploy', exact: true })).toBeDisabled(); - + await await page.getByLabel('Path').fill('/'); + await expect(await page.getByRole('button', { name: 'Deploy', exact: true })).toBeDisabled(); // test that you can update the name to a different name await await page.getByLabel('Model Name *').fill('Updated Model Name'); await await page.getByLabel('Path').fill('test-model/'); @@ -40,6 +65,8 @@ test('Edit model', async ({ page }) => { await page.getByText('New data connection').click(); await await page.getByLabel('Path').fill(''); await expect(await page.getByRole('button', { name: 'Deploy', exact: true })).toBeDisabled(); + await await page.getByLabel('Path').fill('/'); + await expect(await page.getByRole('button', { name: 'Deploy', exact: true })).toBeDisabled(); // test that adding required values validates submit await page.getByRole('textbox', { name: 'Field list Name' }).fill('Test Name'); @@ -53,9 +80,7 @@ test('Edit model', async ({ page }) => { }); test('Create model', async ({ page }) => { - await page.goto( - './iframe.html?args=&id=tests-integration-pages-modelserving-modelservingglobal--deploy-model&viewMode=story', - ); + await 
page.goto(navigateToStory('pages-modelserving-modelservingglobal', 'deploy-model')); // wait for page to load await page.waitForSelector('text=Deploy model'); @@ -83,6 +108,8 @@ test('Create model', async ({ page }) => { await page.getByText('New data connection').click(); await page.getByLabel('Path').fill(''); await expect(await page.getByRole('button', { name: 'Deploy' })).toBeDisabled(); + await page.getByLabel('Path').fill('/'); + await expect(await page.getByRole('button', { name: 'Deploy' })).toBeDisabled(); await page.getByRole('textbox', { name: 'Field list Name' }).fill('Test Name'); await page.getByRole('textbox', { name: 'Field list AWS_ACCESS_KEY_ID' }).fill('test-key'); await page @@ -92,3 +119,44 @@ test('Create model', async ({ page }) => { await page.getByLabel('Path').fill('test-model/'); await expect(await page.getByRole('button', { name: 'Deploy' })).toBeEnabled(); }); + +test('Create model error', async ({ page }) => { + await page.goto(navigateToStory('pages-modelserving-modelservingglobal', 'deploy-model')); + + // wait for page to load + await page.waitForSelector('text=Deploy model'); + + // test that you can not submit on empty + await expect(await page.getByRole('button', { name: 'Deploy' })).toBeDisabled(); + + // test filling in minimum required fields + await page.locator('#existing-project-selection').click(); + await page.getByRole('option', { name: 'Test Project' }).click(); + await page.getByLabel('Model Name *').fill('trigger-error'); + await page.locator('#inference-service-model-selection').click(); + await page.getByRole('option', { name: 'ovms' }).click(); + await expect(page.getByText('Model framework (name - version)')).toBeTruthy(); + await page.locator('#inference-service-framework-selection').click(); + await page.getByRole('option', { name: 'onnx - 1' }).click(); + await expect(await page.getByRole('button', { name: 'Deploy' })).toBeDisabled(); + await page + .getByRole('group', { name: 'Model location' }) + 
.getByRole('button', { name: 'Options menu' }) + .click(); + await page.getByRole('option', { name: 'Test Secret' }).click(); + await page.getByLabel('Path').fill('test-model/'); + await expect(await page.getByRole('button', { name: 'Deploy' })).toBeEnabled(); + await page.getByLabel('Path').fill('test-model/'); + await expect(await page.getByRole('button', { name: 'Deploy' })).toBeEnabled(); + + // Submit and check the invalid error message + await page.getByRole('button', { name: 'Deploy' }).click(); + await page.waitForSelector('text=Error creating model server'); + + // Close the modal + await page.getByRole('button', { name: 'Cancel' }).click(); + + // Check that the error message is gone + await page.getByRole('button', { name: 'Deploy model' }).click(); + expect(await page.isVisible('text=Error creating model server')).toBeFalsy(); +}); diff --git a/frontend/src/__tests__/integration/pages/modelServing/ModelServingGlobal.stories.tsx b/frontend/src/__tests__/integration/pages/modelServing/ModelServingGlobal.stories.tsx index 5861976db9..348d608ea5 100644 --- a/frontend/src/__tests__/integration/pages/modelServing/ModelServingGlobal.stories.tsx +++ b/frontend/src/__tests__/integration/pages/modelServing/ModelServingGlobal.stories.tsx @@ -8,7 +8,10 @@ import { Route, Routes } from 'react-router-dom'; import { mockK8sResourceList } from '~/__mocks__/mockK8sResourceList'; import { mockProjectK8sResource } from '~/__mocks__/mockProjectK8sResource'; import { mockServingRuntimeK8sResource } from '~/__mocks__/mockServingRuntimeK8sResource'; -import { mockInferenceServiceK8sResource } from '~/__mocks__/mockInferenceServiceK8sResource'; +import { + mockInferenceServiceK8sResource, + mockInferenceServicek8sError, +} from '~/__mocks__/mockInferenceServiceK8sResource'; import { mockSecretK8sResource } from '~/__mocks__/mockSecretK8sResource'; import ModelServingContextProvider from '~/pages/modelServing/ModelServingContext'; import ModelServingGlobal from 
'~/pages/modelServing/screens/global/ModelServingGlobal'; @@ -38,6 +41,15 @@ export default { rest.get('/api/k8s/apis/project.openshift.io/v1/projects', (req, res, ctx) => res(ctx.json(mockK8sResourceList([mockProjectK8sResource({})]))), ), + rest.post( + 'api/k8s/apis/serving.kserve.io/v1beta1/namespaces/test-project/inferenceservices/test', + (req, res, ctx) => res(ctx.json(mockInferenceServiceK8sResource({}))), + ), + rest.post( + 'api/k8s/apis/serving.kserve.io/v1beta1/namespaces/test-project/inferenceservices/trigger-error', + (req, res, ctx) => + res(ctx.status(422, 'Unprocessable Entity'), ctx.json(mockInferenceServicek8sError())), + ), ], }, }, @@ -51,6 +63,51 @@ const Template: StoryFn = (args) => ( ); +export const EmptyStateNoServingRuntime: StoryObj = { + render: Template, + + parameters: { + msw: { + handlers: [ + rest.get( + 'api/k8s/apis/serving.kserve.io/v1alpha1/namespaces/test-project/servingruntimes', + (req, res, ctx) => res(ctx.json(mockK8sResourceList([]))), + ), + rest.get( + 'api/k8s/apis/serving.kserve.io/v1beta1/namespaces/test-project/inferenceservices', + (req, res, ctx) => res(ctx.json(mockK8sResourceList([]))), + ), + rest.get('/api/k8s/apis/project.openshift.io/v1/projects', (req, res, ctx) => + res(ctx.json(mockK8sResourceList([mockProjectK8sResource({})]))), + ), + ], + }, + }, +}; + +export const EmptyStateNoInferenceServices: StoryObj = { + render: Template, + + parameters: { + msw: { + handlers: [ + rest.get( + 'api/k8s/apis/serving.kserve.io/v1alpha1/namespaces/test-project/servingruntimes', + (req, res, ctx) => + res(ctx.json(mockK8sResourceList([mockServingRuntimeK8sResource({})]))), + ), + rest.get( + 'api/k8s/apis/serving.kserve.io/v1beta1/namespaces/test-project/inferenceservices', + (req, res, ctx) => res(ctx.json(mockK8sResourceList([]))), + ), + rest.get('/api/k8s/apis/project.openshift.io/v1/projects', (req, res, ctx) => + res(ctx.json(mockK8sResourceList([mockProjectK8sResource({})]))), + ), + ], + }, + }, +}; + export 
const EditModel: StoryObj = { render: Template, diff --git a/frontend/src/__tests__/integration/pages/modelServing/ServingRuntimeList.spec.ts b/frontend/src/__tests__/integration/pages/modelServing/ServingRuntimeList.spec.ts index 486bc8233c..585f794d0d 100644 --- a/frontend/src/__tests__/integration/pages/modelServing/ServingRuntimeList.spec.ts +++ b/frontend/src/__tests__/integration/pages/modelServing/ServingRuntimeList.spec.ts @@ -1,9 +1,8 @@ import { test, expect } from '@playwright/test'; +import { navigateToStory } from '~/__tests__/integration/utils'; test('Deploy model', async ({ page }) => { - await page.goto( - './iframe.html?args=&id=tests-integration-pages-modelserving-servingruntimelist--deploy-model&viewMode=story', - ); + await page.goto(navigateToStory('pages-modelserving-servingruntimelist', 'deploy-model')); // wait for page to load await page.waitForSelector('text=Deploy model'); @@ -38,7 +37,7 @@ test('Deploy model', async ({ page }) => { test('Legacy Serving Runtime', async ({ page }) => { await page.goto( - './iframe.html?args=&id=tests-integration-pages-modelserving-servingruntimelist--list-available-models&viewMode=story', + navigateToStory('pages-modelserving-servingruntimelist', 'list-available-models'), ); // wait for page to load diff --git a/frontend/src/__tests__/integration/pages/projects/ProjectDetails.spec.ts b/frontend/src/__tests__/integration/pages/projects/ProjectDetails.spec.ts index bb17017d8e..a89ca6bf86 100644 --- a/frontend/src/__tests__/integration/pages/projects/ProjectDetails.spec.ts +++ b/frontend/src/__tests__/integration/pages/projects/ProjectDetails.spec.ts @@ -1,9 +1,8 @@ import { test, expect } from '@playwright/test'; +import { navigateToStory } from '~/__tests__/integration/utils'; test('Empty project', async ({ page }) => { - await page.goto( - './iframe.html?args=&id=tests-integration-pages-projects-projectdetails--empty-details-page&viewMode=story', - ); + await 
page.goto(navigateToStory('pages-projects-projectdetails', 'empty-details-page')); // wait for page to load await page.waitForSelector('text=No model servers'); @@ -16,9 +15,7 @@ test('Empty project', async ({ page }) => { }); test('Non-empty project', async ({ page }) => { - await page.goto( - './iframe.html?id=tests-integration-pages-projects-projectdetails--default&viewMode=story', - ); + await page.goto(navigateToStory('pages-projects-projectdetails', 'default')); // wait for page to load await page.waitForSelector('text=Test Notebook'); diff --git a/frontend/src/__tests__/integration/pages/projects/ProjectView.spec.ts b/frontend/src/__tests__/integration/pages/projects/ProjectView.spec.ts index 7f4800427b..3ded1a4dd2 100644 --- a/frontend/src/__tests__/integration/pages/projects/ProjectView.spec.ts +++ b/frontend/src/__tests__/integration/pages/projects/ProjectView.spec.ts @@ -1,9 +1,8 @@ import { test, expect } from '@playwright/test'; +import { navigateToStory } from '~/__tests__/integration/utils'; test('Create project', async ({ page }) => { - await page.goto( - './iframe.html?id=tests-integration-pages-projects-projectview--create-project&viewMode=story', - ); + await page.goto(navigateToStory('pages-projects-projectview', 'create-project')); // wait for page to load await page.waitForSelector('text=Create data science project'); @@ -52,9 +51,7 @@ test('Create project', async ({ page }) => { }); test('Edit project', async ({ page }) => { - await page.goto( - './iframe.html?id=tests-integration-pages-projects-projectview--edit-project&viewMode=story', - ); + await page.goto(navigateToStory('pages-projects-projectview', 'edit-project')); // wait for page to load await page.waitForSelector('text=Edit data science project'); @@ -71,9 +68,7 @@ test('Edit project', async ({ page }) => { }); test('Delete project', async ({ page }) => { - await page.goto( - './iframe.html?id=tests-integration-pages-projects-projectview--delete-project&viewMode=story', - ); + 
await page.goto(navigateToStory('pages-projects-projectview', 'delete-project')); // wait for page to load await page.waitForSelector('text=Delete project?'); diff --git a/frontend/src/__tests__/integration/utils.ts b/frontend/src/__tests__/integration/utils.ts new file mode 100644 index 0000000000..6c7a2a8f22 --- /dev/null +++ b/frontend/src/__tests__/integration/utils.ts @@ -0,0 +1,2 @@ +export const navigateToStory = (folder: string, storyId: string) => + `./iframe.html?args=&id=tests-integration-${folder}--${storyId}&viewMode=story`; diff --git a/frontend/src/__tests__/unit/jest.d.ts b/frontend/src/__tests__/unit/jest.d.ts new file mode 100644 index 0000000000..3a99872fcf --- /dev/null +++ b/frontend/src/__tests__/unit/jest.d.ts @@ -0,0 +1,29 @@ +declare namespace jest { + interface Expect { + isIdentityEqual(expected: T): T; + } + + interface Matchers { + hookToBe(expected: unknown): R; + hookToStrictEqual(expected: unknown): R; + hookToHaveUpdateCount(expected: number): R; + hookToBeStable< + V extends T extends Pick< + import('~/__tests__/unit/testUtils/hooks').RenderHookResultExt< + infer Result, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + any + >, + 'result' + > + ? 
import('~/__tests__/unit/testUtils/hooks').BooleanValues + : never, + >( + expected?: V, + ): R; + } + + interface Expect { + isIdentityEqual(expected: unknown): AsymmetricMatcher; + } +} diff --git a/frontend/src/__tests__/unit/jest.setup.ts b/frontend/src/__tests__/unit/jest.setup.ts new file mode 100644 index 0000000000..7a2b431ba4 --- /dev/null +++ b/frontend/src/__tests__/unit/jest.setup.ts @@ -0,0 +1,62 @@ +import { JestAssertionError } from 'expect'; +import { + BooleanValues, + RenderHookResultExt, + createComparativeValue, +} from '~/__tests__/unit/testUtils/hooks'; + +const tryExpect = (expectFn: () => void) => { + try { + expectFn(); + } catch (e) { + const { matcherResult } = e as JestAssertionError; + if (matcherResult) { + return { ...matcherResult, message: () => matcherResult.message }; + } + throw e; + } + return { + pass: true, + message: () => '', + }; +}; + +expect.extend({ + // custom asymmetric matchers + + /** + * Checks that a value is what you expect. + * It uses Object.is to check strict equality. + * + * Usage: + * expect.isIdentifyEqual(...) 
+ */ + isIdentityEqual: (actual, expected) => ({ + pass: Object.is(actual, expected), + message: () => `expected ${actual} to be identity equal to ${expected}`, + }), + + // hook related custom matchers + hookToBe: (actual: RenderHookResultExt, expected) => + tryExpect(() => expect(actual.result.current).toBe(expected)), + + hookToStrictEqual: (actual: RenderHookResultExt, expected) => + tryExpect(() => expect(actual.result.current).toStrictEqual(expected)), + + hookToHaveUpdateCount: (actual: RenderHookResultExt, expected: number) => + tryExpect(() => expect(actual.getUpdateCount()).toBe(expected)), + + hookToBeStable: (actual: RenderHookResultExt, expected?: BooleanValues) => { + if (actual.getUpdateCount() <= 1) { + throw new Error('Cannot assert stability as the hook has not run at least 2 times.'); + } + if (typeof expected === 'undefined') { + return tryExpect(() => expect(actual.result.current).toBe(actual.getPreviousResult())); + } + return tryExpect(() => + expect(actual.result.current).toStrictEqual( + createComparativeValue(actual.getPreviousResult(), expected), + ), + ); + }, +}); diff --git a/frontend/src/__tests__/unit/testUtils/hooks.spec.ts b/frontend/src/__tests__/unit/testUtils/hooks.spec.ts index dac0f2897a..9b698ea1e0 100644 --- a/frontend/src/__tests__/unit/testUtils/hooks.spec.ts +++ b/frontend/src/__tests__/unit/testUtils/hooks.spec.ts @@ -1,5 +1,5 @@ import * as React from 'react'; -import { expectHook, renderHook, standardUseFetchState, testHook } from './hooks'; +import { createComparativeValue, renderHook, standardUseFetchState, testHook } from './hooks'; const useSayHello = (who: string, showCount = false) => { const countRef = React.useRef(0); @@ -18,34 +18,33 @@ const useSayHelloDelayed = (who: string, delay = 0) => { describe('hook test utils', () => { it('simple testHook', () => { - const renderResult = testHook((who: string) => `Hello ${who}!`, 'world'); - expectHook(renderResult).toBe('Hello world!').toHaveUpdateCount(1); + const 
renderResult = testHook((who: string) => `Hello ${who}!`)('world'); + expect(renderResult).hookToBe('Hello world!'); + expect(renderResult).hookToHaveUpdateCount(1); renderResult.rerender('world'); - expectHook(renderResult).toBe('Hello world!').toBeStable().toHaveUpdateCount(2); + expect(renderResult).hookToBe('Hello world!'); + expect(renderResult).hookToBeStable(); + expect(renderResult).hookToHaveUpdateCount(2); }); it('use testHook for rendering', () => { - const renderResult = testHook(useSayHello, 'world'); - expectHook(renderResult) - .toHaveUpdateCount(1) - .toBe('Hello world!') - .toStrictEqual('Hello world!'); + const renderResult = testHook(useSayHello)('world'); + expect(renderResult).hookToHaveUpdateCount(1); + expect(renderResult).hookToBe('Hello world!'); + expect(renderResult).hookToStrictEqual('Hello world!'); renderResult.rerender('world', false); - expectHook(renderResult) - .toHaveUpdateCount(2) - .toBe('Hello world!') - .toStrictEqual('Hello world!') - .toBeStable(); + expect(renderResult).hookToHaveUpdateCount(2); + expect(renderResult).hookToBe('Hello world!'); + expect(renderResult).hookToStrictEqual('Hello world!'); + expect(renderResult).hookToBeStable(); renderResult.rerender('world', true); - expectHook(renderResult) - .toHaveUpdateCount(3) - .toBe('Hello world! x3') - .toStrictEqual('Hello world! x3') - .toBeStable(false); + expect(renderResult).hookToHaveUpdateCount(3); + expect(renderResult).hookToBe('Hello world! x3'); + expect(renderResult).hookToStrictEqual('Hello world! 
x3'); }); it('use renderHook for rendering', () => { @@ -59,50 +58,47 @@ describe('hook test utils', () => { }, }); - expectHook(renderResult) - .toHaveUpdateCount(1) - .toBe('Hello world!') - .toStrictEqual('Hello world!'); + expect(renderResult).hookToHaveUpdateCount(1); + expect(renderResult).hookToBe('Hello world!'); + expect(renderResult).hookToStrictEqual('Hello world!'); renderResult.rerender({ who: 'world', }); - expectHook(renderResult) - .toHaveUpdateCount(2) - .toBe('Hello world!') - .toStrictEqual('Hello world!') - .toBeStable(); + expect(renderResult).hookToHaveUpdateCount(2); + expect(renderResult).hookToBe('Hello world!'); + expect(renderResult).hookToStrictEqual('Hello world!'); renderResult.rerender({ who: 'world', showCount: true }); - expectHook(renderResult) - .toHaveUpdateCount(3) - .toBe('Hello world! x3') - .toStrictEqual('Hello world! x3') - .toBeStable(false); + expect(renderResult).hookToHaveUpdateCount(3); + expect(renderResult).hookToBe('Hello world! x3'); + expect(renderResult).hookToStrictEqual('Hello world! 
x3'); }); it('should use waitForNextUpdate for async update testing', async () => { - const renderResult = testHook(useSayHelloDelayed, 'world'); - expectHook(renderResult).toHaveUpdateCount(1).toBe(''); + const renderResult = testHook(useSayHelloDelayed)('world'); + expect(renderResult).hookToHaveUpdateCount(1); + expect(renderResult).hookToBe(''); await renderResult.waitForNextUpdate(); - expectHook(renderResult).toHaveUpdateCount(2).toBe('Hello world!'); + expect(renderResult).hookToHaveUpdateCount(2); + expect(renderResult).hookToBe('Hello world!'); }); it('should throw error if waitForNextUpdate times out', async () => { const renderResult = renderHook(() => useSayHelloDelayed('', 20)); await expect(renderResult.waitForNextUpdate({ timeout: 10, interval: 5 })).rejects.toThrow(); - expectHook(renderResult).toHaveUpdateCount(1); + expect(renderResult).hookToHaveUpdateCount(1); // unmount to test waiting for an update that will never happen renderResult.unmount(); await expect(renderResult.waitForNextUpdate({ timeout: 50, interval: 10 })).rejects.toThrow(); - expectHook(renderResult).toHaveUpdateCount(1); + expect(renderResult).hookToHaveUpdateCount(1); }); it('should not throw if waitForNextUpdate timeout is sufficient', async () => { @@ -112,43 +108,47 @@ describe('hook test utils', () => { renderResult.waitForNextUpdate({ timeout: 50, interval: 10 }), ).resolves.not.toThrow(); - expectHook(renderResult).toHaveUpdateCount(2); + expect(renderResult).hookToHaveUpdateCount(2); }); it('should assert stability of results using isStable', () => { let testValue = 'test'; const renderResult = renderHook(() => testValue); - expectHook(renderResult).toHaveUpdateCount(1); + expect(renderResult).hookToHaveUpdateCount(1); renderResult.rerender(); - expectHook(renderResult).toHaveUpdateCount(2).toBeStable(true); + expect(renderResult).hookToHaveUpdateCount(2); + expect(renderResult).hookToBeStable(); testValue = 'new'; renderResult.rerender(); - 
expectHook(renderResult).toHaveUpdateCount(3).toBeStable(false); + expect(renderResult).hookToHaveUpdateCount(3); renderResult.rerender(); - expectHook(renderResult).toHaveUpdateCount(4).toBeStable(true); + expect(renderResult).hookToHaveUpdateCount(4); + expect(renderResult).hookToBeStable(); }); - it('should assert stability of results using isStableArray', () => { - let testValue = 'test'; + it(`should assert stability of result using isStable 'array'`, () => { + let testValue = ['test']; // explicitly returns a new array each render to show the difference between `isStable` and `isStableArray` - const renderResult = renderHook(() => [testValue]); - expectHook(renderResult).toHaveUpdateCount(1); + const renderResult = renderHook(() => testValue); + expect(renderResult).hookToHaveUpdateCount(1); renderResult.rerender(); - expectHook(renderResult).toHaveUpdateCount(2).toBeStable(false); - expectHook(renderResult).toHaveUpdateCount(2).toBeStable([true]); + expect(renderResult).hookToHaveUpdateCount(2); + expect(renderResult).hookToBeStable(); + expect(renderResult).hookToBeStable([true]); - testValue = 'new'; + testValue = ['new']; renderResult.rerender(); - expectHook(renderResult).toHaveUpdateCount(3).toBeStable(false); - expectHook(renderResult).toHaveUpdateCount(3).toBeStable([false]); + expect(renderResult).hookToHaveUpdateCount(3); + expect(renderResult).hookToBeStable([false]); renderResult.rerender(); - expectHook(renderResult).toHaveUpdateCount(4).toBeStable(false); - expectHook(renderResult).toHaveUpdateCount(4).toBeStable([true]); + expect(renderResult).hookToHaveUpdateCount(4); + expect(renderResult).hookToBeStable(); + expect(renderResult).hookToBeStable([true]); }); it('standardUseFetchState should return an array matching the state of useFetchState', () => { @@ -160,4 +160,102 @@ describe('hook test utils', () => { standardUseFetchState('test', false, new Error('error')), ); }); + + describe('createComparativeValue', () => { + it('should extract 
array values according to the boolean object', () => { + expect([1, 2, 3]).toStrictEqual(createComparativeValue([1, 2, 3], [true, true, true])); + expect([1, 2, 3]).toStrictEqual(createComparativeValue([1, 2, 3], [true, true, false])); + expect([1, 2, 3]).toStrictEqual(createComparativeValue([1, 2, 4], [true, true, false])); + expect([1, 2, 3]).toStrictEqual(createComparativeValue([1, 2, 4], [true, true])); + expect([1, 2, 3]).not.toStrictEqual(createComparativeValue([1, 4, 3], [true, true, true])); + }); + + it('should extract object values according to the boolean object', () => { + expect({ a: 1, b: 2, c: 3 }).toStrictEqual( + createComparativeValue({ a: 1, b: 2, c: 3 }, { a: true, b: true, c: true }), + ); + expect({ a: 1, b: 2, c: 3 }).toStrictEqual( + createComparativeValue({ a: 1, b: 2, c: 3 }, { a: true, b: true, c: true }), + ); + expect({ a: 1, b: 2, c: 3 }).toStrictEqual( + createComparativeValue({ a: 1, b: 2, c: 4 }, { a: true, b: true, c: false }), + ); + expect({ a: 1, b: 2, c: 3 }).toStrictEqual( + createComparativeValue({ a: 1, b: 2, c: 4 }, { a: true, b: true }), + ); + expect({ a: 1, b: 2, c: 3 }).not.toStrictEqual( + createComparativeValue({ a: 1, b: 4, c: 3 }, { a: true, b: true, c: true }), + ); + }); + + it('should extract nested values', () => { + const testValue = { + a: 1, + b: { + c: 2, + d: [{ e: 3 }, 'f', {}], + }, + }; + expect(testValue).toStrictEqual( + createComparativeValue( + { a: 10, b: { c: 2, d: [null, 'f'] } }, + { + b: { + c: true, + d: [false, true], + }, + }, + ), + ); + }); + + it('should extract objects for identity comparisons', () => { + const obj = {}; + const array: string[] = []; + const testValue = { + a: obj, + b: array, + c: { + d: obj, + e: array, + }, + }; + + expect(testValue).not.toStrictEqual( + createComparativeValue( + { + a: {}, + b: [], + c: { + d: {}, + e: [], + }, + }, + { + a: true, + b: true, + c: { d: true, e: true }, + }, + ), + ); + + expect(testValue).toStrictEqual( + createComparativeValue( + { + 
a: obj, + b: array, + c: { + d: obj, + e: array, + }, + }, + { + a: true, + b: true, + c: { d: true, e: true }, + }, + ), + ); + }); + }); }); diff --git a/frontend/src/__tests__/unit/testUtils/hooks.ts b/frontend/src/__tests__/unit/testUtils/hooks.ts index 4f13d7ee67..98e5136f3b 100644 --- a/frontend/src/__tests__/unit/testUtils/hooks.ts +++ b/frontend/src/__tests__/unit/testUtils/hooks.ts @@ -7,50 +7,25 @@ import { } from '@testing-library/react'; import { queries, Queries } from '@testing-library/dom'; -/** - * Set of helper functions used to perform assertions on the hook result. - */ -export type RenderHookResultExpect = { - /** - * Check that a value is what you expect. It uses `Object.is` to check strict equality. - * Don't use `toBe` with floating-point numbers. - */ - toBe: (expected: Result) => RenderHookResultExpect; - - /** - * Check that the result has the same types as well as structure. - */ - toStrictEqual: (expected: Result) => RenderHookResultExpect; - - /** - * Check the stability of the result. - * If the expected value is a boolean array, uses `isStableArray` for comparison, otherwise uses `isStable`. - * - * Stability is checked against the previous update. - */ - toBeStable: (expected?: boolean | boolean[]) => RenderHookResultExpect; - - /** - * Check the update count is the expected number. - * Update count increases every time the hook is called. - */ - toHaveUpdateCount: (expected: number) => RenderHookResultExpect; -}; +export type BooleanValues = T extends + | boolean + | number + | string + | null + | undefined + // eslint-disable-next-line @typescript-eslint/ban-types + | Function + ? boolean | undefined + : boolean | undefined | { [K in keyof T]?: BooleanValues }; /** * Extension of RTL RenderHookResult providing functions used query the current state of the result. */ export type RenderHookResultExt = RenderHookResult & { /** - * Returns `true` if the previous result is equal to the current result. Uses `Object.is` for comparison. 
+ * Returns the previous result. */ - isStable: () => boolean; - - /** - * Returns `true` if the previous result array items are equal to the current result array items. Uses `Object.is` for comparison. - * The equality of the array instances is not checked. - */ - isStableArray: () => boolean[]; + getPreviousResult: () => Result; /** * Get the update count for how many times the hook has been rendered. @@ -67,61 +42,6 @@ export type RenderHookResultExt = RenderHookResult waitForNextUpdate: (options?: Pick) => Promise; }; -/** - * Helper function that wraps a render result and provides a small set of jest Matcher equivalent functions that act directly on the result. - * - * ``` - * expectHook(renderResult).toBeStable().toHaveUpdateCount(2); - * ``` - * Equivalent to: - * ``` - * expect(renderResult.isStable()).toBe(true); - * expect(renderResult.getUpdateCount()).toBe(2); - * ``` - * - * See `RenderHookResultExpect` - */ -export const expectHook = ( - renderResult: Pick< - RenderHookResultExt, - 'result' | 'getUpdateCount' | 'isStableArray' | 'isStable' - >, -): RenderHookResultExpect => { - const expectUtil: RenderHookResultExpect = { - toBe: (expected) => { - expect(renderResult.result.current).toBe(expected); - return expectUtil; - }, - - toStrictEqual: (expected) => { - expect(renderResult.result.current).toStrictEqual(expected); - return expectUtil; - }, - - toBeStable: (expected = true) => { - if (renderResult.getUpdateCount() > 1) { - if (Array.isArray(expected)) { - expect(renderResult.isStableArray()).toStrictEqual(expected); - } else { - expect(renderResult.isStable()).toBe(expected); - } - } else { - // eslint-disable-next-line no-console - console.warn( - 'expectHook#toBeStable cannot assert stability as the hook has not run at least 2 times.', - ); - } - return expectUtil; - }, - - toHaveUpdateCount: (expected) => { - expect(renderResult.getUpdateCount()).toBe(expected); - return expectUtil; - }, - }; - return expectUtil; -}; - /** * Wrapper on top of 
RTL `renderHook` returning a result that implements the `RenderHookResultExt` interface. * @@ -131,9 +51,9 @@ export const expectHook = ( * * ``` * const renderResult = renderHook(({ who }: { who: string }) => useSayHello(who), { initialProps: { who: 'world' }}); - * expectHook(renderResult).toBe('Hello world!'); + * expect(renderResult).hookToBe('Hello world!'); * renderResult.rerender({ who: 'there' }); - * expectHook(renderResult).toBe('Hello there!'); + * expect(renderResult).hookToBe('Hello there!'); * ``` */ export const renderHook = < @@ -160,28 +80,8 @@ export const renderHook = < const renderResultExt: RenderHookResultExt = { ...renderResult, - isStable: () => (updateCount > 1 ? Object.is(renderResult.result.current, prevResult) : false), - - isStableArray: () => { - // prefill return array with `false` - const stable: boolean[] = Array( - Math.max( - Array.isArray(prevResult) ? prevResult?.length : 0, - Array.isArray(renderResult.result.current) ? renderResult.result.current.length : 0, - ), - ).fill(false); - - if ( - updateCount > 1 && - Array.isArray(prevResult) && - Array.isArray(renderResult.result.current) - ) { - renderResult.result.current.forEach((v, i) => { - stable[i] = Object.is(v, (prevResult as unknown[])[i]); - }); - } - return stable; - }, + getPreviousResult: () => + updateCount > 1 ? (prevResult as Result) : renderResult.result.current, getUpdateCount: () => updateCount, @@ -204,30 +104,34 @@ export const renderHook = < * Prefer this method of testing over `renderHook` for simplicity. 
* * ``` - * const renderResult = testHook(useSayHello, 'world'); + * const renderResult = testHook(useSayHello)('world'); * expectHook(renderResult).toBe('Hello world!'); * renderResult.rerender('there'); * expectHook(renderResult).toBe('Hello there!'); * ``` */ - -export const testHook = Result, P extends unknown[]>( - hook: (...params: P) => Result, - ...initialParams: Parameters -) => { - type Params = Parameters; - const renderResult = renderHook(({ $params }: { $params: Params }) => hook(...$params), { - initialProps: { - $params: initialParams, - }, - }); - - return { - ...renderResult, - - rerender: (...params: Params) => renderResult.rerender({ $params: params }), +export const testHook = + (hook: (...params: P) => Result) => + // not ideal to nest functions in terms of API but cannot find a better way to infer P from hook and not initialParams + ( + ...initialParams: P + ): Omit, 'rerender'> & { + rerender: (...params: typeof initialParams) => void; + } => { + const renderResult = renderHook( + ({ $params }) => hook(...$params), + { + initialProps: { + $params: initialParams, + }, + }, + ); + + return { + ...renderResult, + rerender: (...params) => renderResult.rerender({ $params: params }), + }; }; -}; /** * A helper function for asserting the return value of hooks based on `useFetchState`. @@ -251,3 +155,49 @@ export const standardUseFetchState = ( loadError: Error | undefined, refresh: () => Promise, ] => [data, loaded, error, expect.any(Function)]; + +/** + * Extracts a subset of values from the source that can be used to compare equality. + * + * Recursively traverses the `booleanTarget`. For every property or array index equal to `true`, + * adds the value of the source to the result wrapped in custom matcher `expect.isIdentityEqual`. + * If the entry is `false` or `undefined`, adds matcher `expect.anything()` to the result. 
+ */ +export const createComparativeValue = (source: T, booleanTarget: BooleanValues) => + createComparativeValueRecursive(source, booleanTarget); + +const createComparativeValueRecursive = ( + source: unknown, + // eslint-disable-next-line @typescript-eslint/ban-types + booleanTarget: boolean | string | number | Function | BooleanValues, +) => { + if (typeof booleanTarget === 'boolean') { + return booleanTarget ? expect.isIdentityEqual(source) : expect.anything(); + } + if (Array.isArray(booleanTarget)) { + if (Array.isArray(source)) { + return expect.arrayContaining( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + booleanTarget.map((b, i): any => + b == null ? expect.anything() : createComparativeValueRecursive(source[i], b), + ), + ); + } + return undefined; + } + if ( + source == null || + typeof source === 'string' || + typeof source === 'number' || + typeof source === 'function' + ) { + return source; + } + const obj: { [k: string]: unknown } = {}; + const btObj = booleanTarget as { [k: string]: unknown }; + Object.keys(btObj).forEach((key) => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + obj[key] = createComparativeValueRecursive((source as any)[key] as unknown, btObj[key] as any); + }); + return expect.objectContaining(obj); +}; diff --git a/frontend/src/api/index.ts b/frontend/src/api/index.ts index 89261907fa..9d8db270cf 100644 --- a/frontend/src/api/index.ts +++ b/frontend/src/api/index.ts @@ -17,6 +17,7 @@ export * from './k8s/users'; export * from './k8s/groups'; export * from './k8s/templates'; export * from './k8s/dashboardConfig'; +export * from './k8s/accelerators'; // Pipelines uses special redirected API export * from './pipelines/custom'; diff --git a/frontend/src/api/k8s/accelerators.ts b/frontend/src/api/k8s/accelerators.ts new file mode 100644 index 0000000000..de5d47d1e6 --- /dev/null +++ b/frontend/src/api/k8s/accelerators.ts @@ -0,0 +1,11 @@ +import { k8sListResource } from 
'@openshift/dynamic-plugin-sdk-utils'; +import { AcceleratorKind } from '~/k8sTypes'; +import { AcceleratorModel } from '~/api/models'; + +export const listAccelerators = async (namespace: string): Promise => + k8sListResource({ + model: AcceleratorModel, + queryOptions: { + ns: namespace, + }, + }).then((listResource) => listResource.items); diff --git a/frontend/src/api/k8s/notebooks.ts b/frontend/src/api/k8s/notebooks.ts index d7a86ba07e..3fd58721a8 100644 --- a/frontend/src/api/k8s/notebooks.ts +++ b/frontend/src/api/k8s/notebooks.ts @@ -17,14 +17,13 @@ import { translateDisplayNameForK8s } from '~/pages/projects/utils'; import { getTolerationPatch, TolerationChanges } from '~/utilities/tolerations'; import { applyK8sAPIOptions } from '~/api/apiMergeUtils'; import { + createElyraServiceAccountRoleBinding, ELYRA_VOLUME_NAME, - generateElyraServiceAccountRoleBinding, getElyraVolume, getElyraVolumeMount, getPipelineVolumeMountPatch, getPipelineVolumePatch, } from '~/concepts/pipelines/elyra/utils'; -import { createRoleBinding } from '~/api'; import { Volume, VolumeMount } from '~/types'; import { assemblePodSpecOptions, getshmVolume, getshmVolumeMount } from './utils'; @@ -40,11 +39,13 @@ const assembleNotebook = ( description, notebookSize, envFrom, - gpus, + accelerator, image, volumes: formVolumes, volumeMounts: formVolumeMounts, tolerationSettings, + existingTolerations, + existingResources, } = data; const notebookId = overrideNotebookId || translateDisplayNameForK8s(notebookName); const imageUrl = `${image.imageStream?.status?.dockerImageRepository}:${image.imageVersion?.name}`; @@ -52,8 +53,11 @@ const assembleNotebook = ( const { affinity, tolerations, resources } = assemblePodSpecOptions( notebookSize.resources, - gpus, + accelerator, tolerationSettings, + existingTolerations, + undefined, + existingResources, ); const translatedUsername = usernameTranslate(username); @@ -101,6 +105,7 @@ const assembleNotebook = ( 
'notebooks.opendatahub.io/last-image-selection': imageSelection, 'notebooks.opendatahub.io/inject-oauth': 'true', 'opendatahub.io/username': username, + 'opendatahub.io/accelerator-name': accelerator.accelerator?.metadata.name || '', }, name: notebookId, namespace: projectName, @@ -207,8 +212,7 @@ export const stopNotebook = (name: string, namespace: string): Promise => { @@ -223,18 +227,12 @@ export const startNotebook = async ( if (enablePipelines) { patches.push(getPipelineVolumePatch()); patches.push(getPipelineVolumeMountPatch()); - await createRoleBinding(generateElyraServiceAccountRoleBinding(name, namespace)).catch((e) => { - // This is not ideal, but it shouldn't impact the starting of the notebook. Let us log it, and mute the error - // eslint-disable-next-line no-console - console.error( - `Could not patch rolebinding to service account for notebook, ${name}; Reason ${e.message}`, - ); - }); + await createElyraServiceAccountRoleBinding(notebook); } return k8sPatchResource({ model: NotebookModel, - queryOptions: { name, ns: namespace }, + queryOptions: { name: notebook.metadata.name, ns: notebook.metadata.namespace }, patches, }); }; @@ -252,9 +250,9 @@ export const createNotebook = ( }); if (canEnablePipelines) { - return createRoleBinding( - generateElyraServiceAccountRoleBinding(notebook.metadata.name, notebook.metadata.namespace), - ).then(() => notebookPromise); + return notebookPromise.then((notebook) => + createElyraServiceAccountRoleBinding(notebook).then(() => notebook), + ); } return notebookPromise; @@ -273,7 +271,7 @@ export const updateNotebook = ( // clean the envFrom array in case of merging the old value again container.envFrom = []; - // clean the resources, affinity and tolerations for GPU + // clean the resources, affinity and tolerations for accelerator oldNotebook.spec.template.spec.tolerations = []; oldNotebook.spec.template.spec.affinity = {}; container.resources = {}; diff --git a/frontend/src/api/k8s/roleBindings.ts 
b/frontend/src/api/k8s/roleBindings.ts index bdb129193f..16c83997a5 100644 --- a/frontend/src/api/k8s/roleBindings.ts +++ b/frontend/src/api/k8s/roleBindings.ts @@ -1,4 +1,5 @@ import { + OwnerReference, k8sCreateResource, k8sDeleteResource, k8sGetResource, @@ -162,3 +163,20 @@ export const patchRoleBindingName = ( }, ], }); + +export const patchRoleBindingOwnerRef = ( + rbName: string, + namespace: string, + ownerReferences: OwnerReference[], +): Promise => + k8sPatchResource({ + model: RoleBindingModel, + queryOptions: { name: rbName, ns: namespace }, + patches: [ + { + op: 'replace', + path: '/metadata/ownerReferences', + value: ownerReferences, + }, + ], + }); diff --git a/frontend/src/api/k8s/servingRuntimes.ts b/frontend/src/api/k8s/servingRuntimes.ts index a8b4699cac..f1e757d932 100644 --- a/frontend/src/api/k8s/servingRuntimes.ts +++ b/frontend/src/api/k8s/servingRuntimes.ts @@ -7,12 +7,13 @@ import { k8sUpdateResource, } from '@openshift/dynamic-plugin-sdk-utils'; import { ServingRuntimeModel } from '~/api/models'; -import { K8sAPIOptions, ServingRuntimeKind } from '~/k8sTypes'; +import { K8sAPIOptions, ServingContainer, ServingRuntimeKind } from '~/k8sTypes'; import { CreatingServingRuntimeObject } from '~/pages/modelServing/screens/types'; import { ContainerResources } from '~/types'; import { getModelServingRuntimeName } from '~/pages/modelServing/utils'; import { getDisplayNameFromK8sResource, translateDisplayNameForK8s } from '~/pages/projects/utils'; import { applyK8sAPIOptions } from '~/api/apiMergeUtils'; +import { AcceleratorState } from '~/utilities/useAcceleratorState'; import { getModelServingProjects } from './projects'; import { assemblePodSpecOptions, getshmVolume, getshmVolumeMount } from './utils'; @@ -22,8 +23,9 @@ const assembleServingRuntime = ( servingRuntime: ServingRuntimeKind, isCustomServingRuntimesEnabled: boolean, isEditing?: boolean, + acceleratorState?: AcceleratorState, ): ServingRuntimeKind => { - const { name: displayName, 
numReplicas, modelSize, externalRoute, tokenAuth, gpus } = data; + const { name: displayName, numReplicas, modelSize, externalRoute, tokenAuth } = data; const createName = isCustomServingRuntimesEnabled ? translateDisplayNameForK8s(displayName) : getModelServingRuntimeName(namespace); @@ -50,6 +52,7 @@ const assembleServingRuntime = ( }), ...(isCustomServingRuntimesEnabled && { 'opendatahub.io/template-display-name': getDisplayNameFromK8sResource(servingRuntime), + 'opendatahub.io/accelerator-name': acceleratorState?.accelerator?.metadata.name || '', }), }, }; @@ -60,6 +63,7 @@ const assembleServingRuntime = ( ...updatedServingRuntime.metadata.annotations, 'enable-route': externalRoute ? 'true' : 'false', 'enable-auth': tokenAuth ? 'true' : 'false', + 'opendatahub.io/accelerator-name': acceleratorState?.accelerator?.metadata.name || '', ...(isCustomServingRuntimesEnabled && { 'openshift.io/display-name': displayName.trim() }), }, }; @@ -77,7 +81,32 @@ const assembleServingRuntime = ( }, }; - const { affinity, tolerations, resources } = assemblePodSpecOptions(resourceSettings, gpus); + const { affinity, tolerations, resources } = assemblePodSpecOptions( + resourceSettings, + acceleratorState, + undefined, + servingRuntime.spec.tolerations, + undefined, + updatedServingRuntime.spec.containers[0]?.resources, + ); + + updatedServingRuntime.spec.containers = servingRuntime.spec.containers.map( + (container): ServingContainer => { + const volumeMounts = container.volumeMounts || []; + if (!volumeMounts.find((volumeMount) => volumeMount.mountPath === '/dev/shm')) { + volumeMounts.push(getshmVolumeMount()); + } + + return { + ...container, + resources, + affinity, + volumeMounts, + }; + }, + ); + + servingRuntime.spec.tolerations = tolerations; const volumes = updatedServingRuntime.spec.volumes || []; if (!volumes.find((volume) => volume.name === 'shm')) { @@ -86,21 +115,6 @@ const assembleServingRuntime = ( updatedServingRuntime.spec.volumes = volumes; - 
updatedServingRuntime.spec.containers = servingRuntime.spec.containers.map((container) => { - const volumeMounts = container.volumeMounts || []; - if (!volumeMounts.find((volumeMount) => volumeMount.mountPath === '/dev/shm')) { - volumeMounts.push(getshmVolumeMount()); - } - - return { - ...container, - resources, - affinity, - tolerations, - volumeMounts, - }; - }); - return updatedServingRuntime; }; @@ -143,18 +157,22 @@ export const getServingRuntime = (name: string, namespace: string): Promise => { +export const updateServingRuntime = (options: { + data: CreatingServingRuntimeObject; + existingData: ServingRuntimeKind; + isCustomServingRuntimesEnabled: boolean; + opts?: K8sAPIOptions; + acceleratorState?: AcceleratorState; +}): Promise => { + const { data, existingData, isCustomServingRuntimesEnabled, opts, acceleratorState } = options; + const updatedServingRuntime = assembleServingRuntime( data, existingData.metadata.namespace, existingData, isCustomServingRuntimesEnabled, true, + acceleratorState, ); return k8sUpdateResource( @@ -165,18 +183,29 @@ export const updateServingRuntime = ( ); }; -export const createServingRuntime = ( - data: CreatingServingRuntimeObject, - namespace: string, - servingRuntime: ServingRuntimeKind, - isCustomServingRuntimesEnabled: boolean, - opts?: K8sAPIOptions, -): Promise => { +export const createServingRuntime = (options: { + data: CreatingServingRuntimeObject; + namespace: string; + servingRuntime: ServingRuntimeKind; + isCustomServingRuntimesEnabled: boolean; + opts?: K8sAPIOptions; + acceleratorState?: AcceleratorState; +}): Promise => { + const { + data, + namespace, + servingRuntime, + isCustomServingRuntimesEnabled, + opts, + acceleratorState, + } = options; const assembledServingRuntime = assembleServingRuntime( data, namespace, servingRuntime, isCustomServingRuntimesEnabled, + false, + acceleratorState, ); return k8sCreateResource( diff --git a/frontend/src/api/k8s/utils.ts b/frontend/src/api/k8s/utils.ts index 
ce2867007c..68b0fc9f44 100644 --- a/frontend/src/api/k8s/utils.ts +++ b/frontend/src/api/k8s/utils.ts @@ -3,57 +3,55 @@ import { ContainerResources, PodToleration, TolerationSettings, - ContainerResourceAttributes, VolumeMount, Volume, } from '~/types'; import { determineTolerations } from '~/utilities/tolerations'; +import { AcceleratorState } from '~/utilities/useAcceleratorState'; export const assemblePodSpecOptions = ( resourceSettings: ContainerResources, - gpus: number, + accelerator?: AcceleratorState, tolerationSettings?: TolerationSettings, + existingTolerations?: PodToleration[], affinitySettings?: PodAffinity, + existingResources?: ContainerResources, ): { affinity: PodAffinity; tolerations: PodToleration[]; resources: ContainerResources; } => { - let affinity: PodAffinity = structuredClone(affinitySettings || {}); - const resources = structuredClone(resourceSettings); - if (gpus > 0) { - if (!resources.limits) { - resources.limits = {}; + const affinity: PodAffinity = structuredClone(affinitySettings || {}); + let resources: ContainerResources = { + limits: { ...existingResources?.limits, ...resourceSettings?.limits }, + requests: { ...existingResources?.requests, ...resourceSettings?.requests }, + }; + + if (accelerator?.additionalOptions?.useExisting && !accelerator.useExisting) { + resources = structuredClone(resourceSettings); + } + + // Clear the last accelerator from the resources + if (accelerator?.initialAccelerator) { + if (resources.limits) { + delete resources.limits[accelerator.initialAccelerator.spec.identifier]; + } + if (resources.requests) { + delete resources.requests[accelerator.initialAccelerator.spec.identifier]; + } + } + + // Add back the new accelerator to the resources if count > 0 + if (accelerator?.accelerator && accelerator.count > 0) { + if (resources.limits) { + resources.limits[accelerator.accelerator.spec.identifier] = accelerator.count; } - if (!resources.requests) { - resources.requests = {}; + if (resources.requests) { 
+ resources.requests[accelerator.accelerator.spec.identifier] = accelerator.count; } - resources.limits[ContainerResourceAttributes.NVIDIA_GPU] = gpus; - resources.requests[ContainerResourceAttributes.NVIDIA_GPU] = gpus; - } else { - delete resources.limits?.[ContainerResourceAttributes.NVIDIA_GPU]; - delete resources.requests?.[ContainerResourceAttributes.NVIDIA_GPU]; - affinity = { - nodeAffinity: { - preferredDuringSchedulingIgnoredDuringExecution: [ - { - preference: { - matchExpressions: [ - { - key: 'nvidia.com/gpu.present', - operator: 'NotIn', - values: ['true'], - }, - ], - }, - weight: 1, - }, - ], - }, - }; } - const tolerations = determineTolerations(gpus > 0, tolerationSettings); + const tolerations = determineTolerations(tolerationSettings, accelerator, existingTolerations); return { affinity, tolerations, resources }; }; diff --git a/frontend/src/api/models/openShift.ts b/frontend/src/api/models/openShift.ts index 543ff77a38..41763ed491 100644 --- a/frontend/src/api/models/openShift.ts +++ b/frontend/src/api/models/openShift.ts @@ -55,3 +55,10 @@ export const TemplateModel: K8sModelCommon = { kind: 'Template', plural: 'templates', }; + +export const AcceleratorModel: K8sModelCommon = { + apiVersion: 'v1', + apiGroup: 'dashboard.opendatahub.io', + kind: 'AcceleratorProfile', + plural: 'acceleratorprofiles', +}; diff --git a/frontend/src/app/App.tsx b/frontend/src/app/App.tsx index c42d128ec2..2d8e49dfdf 100644 --- a/frontend/src/app/App.tsx +++ b/frontend/src/app/App.tsx @@ -15,7 +15,7 @@ import ErrorBoundary from '~/components/error/ErrorBoundary'; import ToastNotifications from '~/components/ToastNotifications'; import { useWatchBuildStatus } from '~/utilities/useWatchBuildStatus'; import { useUser } from '~/redux/selectors'; -import { DASHBOARD_MAIN_CONTAINER_SELECTOR } from '~/utilities/const'; +import { DASHBOARD_MAIN_CONTAINER_ID } from '~/utilities/const'; import useDetectUser from '~/utilities/useDetectUser'; import ProjectsContextProvider 
from '~/concepts/projects/ProjectsContext'; import Header from './Header'; @@ -96,7 +96,7 @@ const App: React.FC = () => { sidebar={isAllowed ? : undefined} notificationDrawer={ setNotificationsOpen(false)} />} isNotificationDrawerExpanded={notificationsOpen} - mainContainerId={DASHBOARD_MAIN_CONTAINER_SELECTOR} + mainContainerId={DASHBOARD_MAIN_CONTAINER_ID} > diff --git a/frontend/src/components/CPUField.tsx b/frontend/src/components/CPUField.tsx index 89dcccadda..8cf8edea67 100644 --- a/frontend/src/components/CPUField.tsx +++ b/frontend/src/components/CPUField.tsx @@ -4,11 +4,11 @@ import ValueUnitField from './ValueUnitField'; type CPUFieldProps = { onChange: (newValue: string) => void; - value?: string; + value?: string | number; }; const CPUField: React.FC = ({ onChange, value = '1' }) => ( - + ); export default CPUField; diff --git a/frontend/src/components/GenericSidebar.tsx b/frontend/src/components/GenericSidebar.tsx index 2a4a268bde..b6ab8ca090 100644 --- a/frontend/src/components/GenericSidebar.tsx +++ b/frontend/src/components/GenericSidebar.tsx @@ -6,7 +6,7 @@ import { SidebarContent, SidebarPanel, } from '@patternfly/react-core'; -import { DASHBOARD_MAIN_CONTAINER_SELECTOR } from '~/utilities/const'; +import { DASHBOARD_SCROLL_CONTAINER_SELECTOR } from '~/utilities/const'; type GenericSidebarProps = { sections: string[]; @@ -20,7 +20,7 @@ const GenericSidebar: React.FC = ({ children, sections, titles, - scrollableSelector = `#${DASHBOARD_MAIN_CONTAINER_SELECTOR}`, + scrollableSelector = DASHBOARD_SCROLL_CONTAINER_SELECTOR, maxWidth, }) => ( diff --git a/frontend/src/components/ResourceNameTooltip.tsx b/frontend/src/components/ResourceNameTooltip.tsx index b7554f6ec2..a43f541dcb 100644 --- a/frontend/src/components/ResourceNameTooltip.tsx +++ b/frontend/src/components/ResourceNameTooltip.tsx @@ -1,17 +1,18 @@ import * as React from 'react'; import { + ClipboardCopy, DescriptionList, DescriptionListDescription, DescriptionListGroup, 
DescriptionListTerm, - Icon, + Popover, Stack, StackItem, - Tooltip, } from '@patternfly/react-core'; import { OutlinedQuestionCircleIcon } from '@patternfly/react-icons'; import { K8sResourceCommon } from '@openshift/dynamic-plugin-sdk-utils'; import '~/pages/notebookController/NotebookController.scss'; +import DashboardPopupIconButton from '~/concepts/dashboard/DashboardPopupIconButton'; type ResourceNameTooltipProps = { resource: K8sResourceCommon; @@ -23,10 +24,10 @@ const ResourceNameTooltip: React.FC = ({ children, res {children}{' '} {resource.metadata?.name && (
- Resource names and types are used to find your resources in OpenShift. @@ -36,7 +37,9 @@ const ResourceNameTooltip: React.FC = ({ children, res Resource name - {resource.metadata.name} + + {resource.metadata?.name} + @@ -48,10 +51,8 @@ const ResourceNameTooltip: React.FC = ({ children, res } > - - - - + } aria-label="More info" /> +
)} diff --git a/frontend/src/components/SimpleDropdownSelect.scss b/frontend/src/components/SimpleDropdownSelect.scss new file mode 100644 index 0000000000..bcb8baf49f --- /dev/null +++ b/frontend/src/components/SimpleDropdownSelect.scss @@ -0,0 +1,3 @@ +.full-width { + width: 100%; +} \ No newline at end of file diff --git a/frontend/src/components/SimpleDropdownSelect.tsx b/frontend/src/components/SimpleDropdownSelect.tsx index c1d0775549..fad00f220d 100644 --- a/frontend/src/components/SimpleDropdownSelect.tsx +++ b/frontend/src/components/SimpleDropdownSelect.tsx @@ -1,11 +1,22 @@ import * as React from 'react'; import { Dropdown, DropdownItem, DropdownToggle } from '@patternfly/react-core'; +import './SimpleDropdownSelect.scss'; + +export type SimpleDropdownOption = { + key: string; + label: React.ReactNode; + description?: React.ReactNode; + selectedLabel?: React.ReactNode; + isPlaceholder?: boolean; +}; type SimpleDropdownProps = { - options: { key: string; label: React.ReactNode }[]; + options: SimpleDropdownOption[]; value: string; placeholder?: string; - onChange: (key: string) => void; + onChange: (key: string, isPlaceholder: boolean) => void; + isFullWidth?: boolean; + isDisabled?: boolean; } & Omit, 'isOpen' | 'toggle' | 'dropdownItems' | 'onChange'>; const SimpleDropdownSelect: React.FC = ({ @@ -13,30 +24,43 @@ const SimpleDropdownSelect: React.FC = ({ options, placeholder = 'Select...', value, + isFullWidth, + isDisabled, ...props }) => { const [open, setOpen] = React.useState(false); + const selectedOption = options.find(({ key }) => key === value); + const selectedLabel = selectedOption?.selectedLabel ?? selectedOption?.label ?? placeholder; + return ( setOpen(!open)}> - <>{options.find(({ key }) => key === value)?.label ?? 
placeholder} + setOpen(!open)} + > + <>{selectedLabel} } - dropdownItems={options.map(({ key, label }) => ( - { - onChange(key); - setOpen(false); - }} - > - {label} - - ))} + dropdownItems={options + .sort((a, b) => (a.isPlaceholder === b.isPlaceholder ? 0 : a.isPlaceholder ? -1 : 1)) + .map(({ key, label, description, isPlaceholder }) => ( + { + onChange(key, !!isPlaceholder); + setOpen(false); + }} + > + {label} + + ))} /> ); }; diff --git a/frontend/src/components/table/Table.tsx b/frontend/src/components/table/Table.tsx index 1f7f608da5..697f267d1c 100644 --- a/frontend/src/components/table/Table.tsx +++ b/frontend/src/components/table/Table.tsx @@ -17,9 +17,10 @@ import { Td, TbodyProps, } from '@patternfly/react-table'; -import useTableColumnSort, { SortableData } from '~/components/table/useTableColumnSort'; -import { CHECKBOX_FIELD_ID } from '~/components/table/const'; import { EitherNotBoth } from '~/typeHelpers'; +import useTableColumnSort from './useTableColumnSort'; +import { CHECKBOX_FIELD_ID } from './const'; +import { SortableData } from './types'; type TableProps = { data: DataType[]; diff --git a/frontend/src/components/table/const.ts b/frontend/src/components/table/const.ts index 5b1b730708..dda019bd42 100644 --- a/frontend/src/components/table/const.ts +++ b/frontend/src/components/table/const.ts @@ -1,4 +1,4 @@ -import { SortableData } from '~/components/table/useTableColumnSort'; +import { SortableData } from './types'; export const CHECKBOX_FIELD_ID = 'checkbox'; export const KEBAB_FIELD_ID = 'kebab'; diff --git a/frontend/src/components/table/index.ts b/frontend/src/components/table/index.ts new file mode 100644 index 0000000000..e83ffad4a0 --- /dev/null +++ b/frontend/src/components/table/index.ts @@ -0,0 +1,8 @@ +export * from './types'; +export * from './const'; + +export { default as Table } from './Table'; +export { default as useCheckboxTable } from './useCheckboxTable'; + +export { default as TableRowTitleDescription } from 
'./TableRowTitleDescription'; +export { default as CheckboxTd } from './CheckboxTd'; diff --git a/frontend/src/components/table/types.ts b/frontend/src/components/table/types.ts new file mode 100644 index 0000000000..566475574b --- /dev/null +++ b/frontend/src/components/table/types.ts @@ -0,0 +1,17 @@ +import { ThProps } from '@patternfly/react-table'; + +export type GetColumnSort = (columnIndex: number) => ThProps['sort']; + +export type SortableData = { + label: string; + field: string; + width?: ThProps['width']; + /** + * Set to false to disable sort. + * Set to true to handle string and number fields automatically (everything else is equal). + * Pass a function that will get the two results and what field needs to be matched. + * Assume ASC -- the result will be inverted internally if needed. + */ + sortable: boolean | ((a: T, b: T, keyField: string) => number); + info?: ThProps['info']; +}; diff --git a/frontend/src/components/table/useCheckboxTable.ts b/frontend/src/components/table/useCheckboxTable.ts index 6111c49d97..e3c399aecf 100644 --- a/frontend/src/components/table/useCheckboxTable.ts +++ b/frontend/src/components/table/useCheckboxTable.ts @@ -1,7 +1,7 @@ import * as React from 'react'; import { xor } from 'lodash'; -import Table from '~/components/table/Table'; import { useDeepCompareMemoize } from '~/utilities/useDeepCompareMemoize'; +import type Table from './Table'; type UseCheckboxTable = { selections: string[]; diff --git a/frontend/src/components/table/useTableColumnSort.ts b/frontend/src/components/table/useTableColumnSort.ts index 759363b594..f725d0fbd4 100644 --- a/frontend/src/components/table/useTableColumnSort.ts +++ b/frontend/src/components/table/useTableColumnSort.ts @@ -1,21 +1,6 @@ import * as React from 'react'; import { ThProps } from '@patternfly/react-table'; - -export type GetColumnSort = (columnIndex: number) => ThProps['sort']; - -export type SortableData = { - label: string; - field: string; - width?: ThProps['width']; - 
/** - * Set to false to disable sort. - * Set to true to handle string and number fields automatically (everything else is equal). - * Pass a function that will get the two results and what field needs to be matched. - * Assume ASC -- the result will be inverted internally if needed. - */ - sortable: boolean | ((a: T, b: T, keyField: string) => number); - info?: ThProps['info']; -}; +import { GetColumnSort, SortableData } from './types'; /** * Using PF Composable Tables, this utility will help with handling sort logic. diff --git a/frontend/src/concepts/dashboard/DashboardPopupIconButton.tsx b/frontend/src/concepts/dashboard/DashboardPopupIconButton.tsx new file mode 100644 index 0000000000..461e2a40cb --- /dev/null +++ b/frontend/src/concepts/dashboard/DashboardPopupIconButton.tsx @@ -0,0 +1,19 @@ +import React from 'react'; +import { Button, ButtonProps, Icon } from '@patternfly/react-core'; + +type DashboardPopupIconButtonProps = Omit & { + icon: React.ReactNode; +}; + +/** + * Overriding PF's button styles to allow for a11y in opening tooltips or popovers on a single item + */ +const DashboardPopupIconButton = ({ icon, ...props }: DashboardPopupIconButtonProps) => ( + +); + +export default DashboardPopupIconButton; diff --git a/frontend/src/concepts/pipelines/content/pipelinesDetails/pipeline/PipelineServerActions.tsx b/frontend/src/concepts/pipelines/content/pipelinesDetails/pipeline/PipelineServerActions.tsx new file mode 100644 index 0000000000..689921b116 --- /dev/null +++ b/frontend/src/concepts/pipelines/content/pipelinesDetails/pipeline/PipelineServerActions.tsx @@ -0,0 +1,84 @@ +import * as React from 'react'; +import { + Dropdown, + DropdownItem, + DropdownSeparator, + DropdownToggle, + KebabToggle, + Tooltip, +} from '@patternfly/react-core'; +import { DeleteServerModal, ViewServerModal } from '~/concepts/pipelines/context'; + +type PipelineServerActionsProps = { + variant?: 'kebab' | 'dropdown'; + isDisabled: boolean; +}; + +const 
PipelineServerActions: React.FC = ({ variant, isDisabled }) => { + const [open, setOpen] = React.useState(false); + const [deleteOpen, setDeleteOpen] = React.useState(false); + const [viewOpen, setViewOpen] = React.useState(false); + + const DropdownComponent = ( + setOpen(false)} + toggle={ + variant === 'kebab' ? ( + setOpen(!open)} /> + ) : ( + setOpen(!open)} + > + Pipeline server actions + + ) + } + isOpen={open} + position="right" + isPlain={variant === 'kebab'} + dropdownItems={[ + setViewOpen(true)}> + View pipeline server configuration + , + , + { + setDeleteOpen(true); + }} + key="delete-server" + > + Delete pipeline server + , + ]} + /> + ); + + if (isDisabled) { + return ( + + {DropdownComponent} + + ); + } + + return ( + <> + {DropdownComponent} + { + setDeleteOpen(false); + }} + /> + setViewOpen(false)} /> + + ); +}; + +export default PipelineServerActions; diff --git a/frontend/src/concepts/pipelines/content/pipelinesDetails/pipelineRun/PipelineRunDetailsActions.tsx b/frontend/src/concepts/pipelines/content/pipelinesDetails/pipelineRun/PipelineRunDetailsActions.tsx index c799cb4ad0..9a218b9fdb 100644 --- a/frontend/src/concepts/pipelines/content/pipelinesDetails/pipelineRun/PipelineRunDetailsActions.tsx +++ b/frontend/src/concepts/pipelines/content/pipelinesDetails/pipelineRun/PipelineRunDetailsActions.tsx @@ -39,13 +39,13 @@ const PipelineRunDetailsActions: React.FC = ({ onDe .catch((e) => notification.error('Unable to stop pipeline run', e.message)) } > - Stop run + Stop , navigate(`/pipelineRuns/${namespace}/pipelineRun/clone/${run.id}`)} > - Duplicate run + Duplicate , , onDelete()}> diff --git a/frontend/src/concepts/pipelines/content/tables/columns.ts b/frontend/src/concepts/pipelines/content/tables/columns.ts index 801234bbbc..a184465a39 100644 --- a/frontend/src/concepts/pipelines/content/tables/columns.ts +++ b/frontend/src/concepts/pipelines/content/tables/columns.ts @@ -1,4 +1,9 @@ -import { SortableData } from 
'~/components/table/useTableColumnSort'; +import { + SortableData, + checkboxTableColumn, + expandTableColumn, + kebabTableColumn, +} from '~/components/table'; import { PipelineKF, PipelineRunJobKF, @@ -12,7 +17,6 @@ import { getScheduledStateWeight, getStatusWeight, } from '~/concepts/pipelines/content/tables/utils'; -import { checkboxTableColumn, expandTableColumn, kebabTableColumn } from '~/components/table/const'; export const pipelineColumns: SortableData[] = [ expandTableColumn(), diff --git a/frontend/src/concepts/pipelines/content/tables/pipeline/PipelinesTable.tsx b/frontend/src/concepts/pipelines/content/tables/pipeline/PipelinesTable.tsx index 0a542c771d..06be4fbd5e 100644 --- a/frontend/src/concepts/pipelines/content/tables/pipeline/PipelinesTable.tsx +++ b/frontend/src/concepts/pipelines/content/tables/pipeline/PipelinesTable.tsx @@ -1,7 +1,7 @@ import * as React from 'react'; import { TableVariant } from '@patternfly/react-table'; import { PipelineKF } from '~/concepts/pipelines/kfTypes'; -import Table from '~/components/table/Table'; +import { Table } from '~/components/table'; import PipelinesTableRow from '~/concepts/pipelines/content/tables/pipeline/PipelinesTableRow'; import { FetchStateRefreshPromise } from '~/utilities/useFetchState'; import { pipelineColumns } from '~/concepts/pipelines/content/tables/columns'; diff --git a/frontend/src/concepts/pipelines/content/tables/pipeline/PipelinesTableRow.tsx b/frontend/src/concepts/pipelines/content/tables/pipeline/PipelinesTableRow.tsx index 1ad301a8b3..f83540ddcc 100644 --- a/frontend/src/concepts/pipelines/content/tables/pipeline/PipelinesTableRow.tsx +++ b/frontend/src/concepts/pipelines/content/tables/pipeline/PipelinesTableRow.tsx @@ -6,7 +6,7 @@ import { useNavigate } from 'react-router-dom'; import { PipelineKF } from '~/concepts/pipelines/kfTypes'; import { relativeTime } from '~/utilities/time'; import usePipelineRunsForPipeline from 
'~/concepts/pipelines/apiHooks/usePipelineRunsForPipeline'; -import TableRowTitleDescription from '~/components/table/TableRowTitleDescription'; +import { TableRowTitleDescription } from '~/components/table'; import { usePipelinesAPI } from '~/concepts/pipelines/context'; import PipelinesTableExpandedRow from '~/concepts/pipelines/content/tables/pipeline/PipelinesTableExpandedRow'; import { getLastRun } from '~/concepts/pipelines/content/tables/utils'; diff --git a/frontend/src/concepts/pipelines/content/tables/pipelineRun/PipelineRunTable.tsx b/frontend/src/concepts/pipelines/content/tables/pipelineRun/PipelineRunTable.tsx index e323982143..6a02b20482 100644 --- a/frontend/src/concepts/pipelines/content/tables/pipelineRun/PipelineRunTable.tsx +++ b/frontend/src/concepts/pipelines/content/tables/pipelineRun/PipelineRunTable.tsx @@ -1,10 +1,9 @@ import * as React from 'react'; import { TableVariant } from '@patternfly/react-table'; -import Table from '~/components/table/Table'; import { PipelineCoreResourceKF, PipelineRunKF } from '~/concepts/pipelines/kfTypes'; import { pipelineRunColumns } from '~/concepts/pipelines/content/tables/columns'; import PipelineRunTableRow from '~/concepts/pipelines/content/tables/pipelineRun/PipelineRunTableRow'; -import useCheckboxTable from '~/components/table/useCheckboxTable'; +import { useCheckboxTable, Table } from '~/components/table'; import EmptyTableView from '~/concepts/pipelines/content/tables/EmptyTableView'; import usePipelineRunFilter from '~/concepts/pipelines/content/tables/pipelineRun/usePipelineRunFilter'; import PipelineRunTableToolbar from '~/concepts/pipelines/content/tables/pipelineRun/PipelineRunTableToolbar'; diff --git a/frontend/src/concepts/pipelines/content/tables/pipelineRun/PipelineRunTableRow.tsx b/frontend/src/concepts/pipelines/content/tables/pipelineRun/PipelineRunTableRow.tsx index 69791b702d..b542a54982 100644 --- a/frontend/src/concepts/pipelines/content/tables/pipelineRun/PipelineRunTableRow.tsx 
+++ b/frontend/src/concepts/pipelines/content/tables/pipelineRun/PipelineRunTableRow.tsx @@ -1,9 +1,9 @@ import * as React from 'react'; import { ActionsColumn, Td, Tr } from '@patternfly/react-table'; -import { Link } from 'react-router-dom'; +import { Link, useNavigate } from 'react-router-dom'; import { Skeleton } from '@patternfly/react-core'; -import { PipelineRunKF } from '~/concepts/pipelines/kfTypes'; -import TableRowTitleDescription from '~/components/table/TableRowTitleDescription'; +import { PipelineRunKF, PipelineRunStatusesKF } from '~/concepts/pipelines/kfTypes'; +import { TableRowTitleDescription, CheckboxTd } from '~/components/table'; import { RunCreated, RunDuration, @@ -12,8 +12,8 @@ import { RunStatus, } from '~/concepts/pipelines/content/tables/renderUtils'; import { usePipelinesAPI } from '~/concepts/pipelines/context'; -import CheckboxTd from '~/components/table/CheckboxTd'; import { GetJobInformation } from '~/concepts/pipelines/content/tables/pipelineRun/useJobRelatedInformation'; +import useNotification from '~/utilities/useNotification'; type PipelineRunTableRowProps = { isChecked: boolean; @@ -30,8 +30,10 @@ const PipelineRunTableRow: React.FC = ({ run, getJobInformation, }) => { - const { namespace } = usePipelinesAPI(); + const { namespace, api, refreshAllAPI } = usePipelinesAPI(); const { loading, data } = getJobInformation(run); + const notification = useNotification(); + const navigate = useNavigate(); const loadingState = ; @@ -79,6 +81,25 @@ const PipelineRunTableRow: React.FC = ({ { + api + .stopPipelineRun({}, run.id) + .then(refreshAllAPI) + .catch((e) => notification.error('Unable to stop pipeline run', e.message)); + }, + }, + { + title: 'Duplicate', + onClick: () => { + navigate(`/pipelineRuns/${namespace}/pipelineRun/clone/${run.id}`); + }, + }, + { + isSeparator: true, + }, { title: 'Delete', onClick: () => { diff --git a/frontend/src/concepts/pipelines/content/tables/pipelineRunJob/PipelineRunJobTable.tsx 
b/frontend/src/concepts/pipelines/content/tables/pipelineRunJob/PipelineRunJobTable.tsx index 94c4938ab3..62b553251b 100644 --- a/frontend/src/concepts/pipelines/content/tables/pipelineRunJob/PipelineRunJobTable.tsx +++ b/frontend/src/concepts/pipelines/content/tables/pipelineRunJob/PipelineRunJobTable.tsx @@ -1,9 +1,8 @@ import * as React from 'react'; import { TableVariant } from '@patternfly/react-table'; -import Table from '~/components/table/Table'; import { PipelineCoreResourceKF, PipelineRunJobKF } from '~/concepts/pipelines/kfTypes'; import { pipelineRunJobColumns } from '~/concepts/pipelines/content/tables/columns'; -import useCheckboxTable from '~/components/table/useCheckboxTable'; +import { useCheckboxTable, Table } from '~/components/table'; import PipelineRunJobTableRow from '~/concepts/pipelines/content/tables/pipelineRunJob/PipelineRunJobTableRow'; import PipelineRunJobTableToolbar from '~/concepts/pipelines/content/tables/pipelineRunJob/PipelineRunJobTableToolbar'; import usePipelineRunJobFilter from '~/concepts/pipelines/content/tables/pipelineRunJob/usePipelineRunJobFilter'; diff --git a/frontend/src/concepts/pipelines/content/tables/pipelineRunJob/PipelineRunJobTableRow.tsx b/frontend/src/concepts/pipelines/content/tables/pipelineRunJob/PipelineRunJobTableRow.tsx index 03f1ff4de5..c75c9e35f2 100644 --- a/frontend/src/concepts/pipelines/content/tables/pipelineRunJob/PipelineRunJobTableRow.tsx +++ b/frontend/src/concepts/pipelines/content/tables/pipelineRunJob/PipelineRunJobTableRow.tsx @@ -2,7 +2,7 @@ import * as React from 'react'; import { ActionsColumn, Td, Tr } from '@patternfly/react-table'; import { useNavigate } from 'react-router-dom'; import { PipelineRunJobKF } from '~/concepts/pipelines/kfTypes'; -import TableRowTitleDescription from '~/components/table/TableRowTitleDescription'; +import { TableRowTitleDescription, CheckboxTd } from '~/components/table'; import { RunJobScheduled, RunJobStatus, @@ -11,7 +11,6 @@ import { 
CoreResourcePipeline, } from '~/concepts/pipelines/content/tables/renderUtils'; import { usePipelinesAPI } from '~/concepts/pipelines/context'; -import CheckboxTd from '~/components/table/CheckboxTd'; type PipelineRunJobTableRowProps = { isChecked: boolean; diff --git a/frontend/src/concepts/pipelines/elyra/utils.ts b/frontend/src/concepts/pipelines/elyra/utils.ts index 028819fbde..bcd430d0cb 100644 --- a/frontend/src/concepts/pipelines/elyra/utils.ts +++ b/frontend/src/concepts/pipelines/elyra/utils.ts @@ -10,9 +10,13 @@ import { import { AWS_KEYS } from '~/pages/projects/dataConnections/const'; import { Volume, VolumeMount } from '~/types'; import { RUNTIME_MOUNT_PATH } from '~/pages/projects/pvc/const'; +import { createRoleBinding, getRoleBinding, patchRoleBindingOwnerRef } from '~/api'; export const ELYRA_VOLUME_NAME = 'elyra-dsp-details'; +export const getElyraServiceAccountRoleBindingName = (notebookName: string) => + `elyra-pipelines-${notebookName}`; + export const getElyraVolumeMount = (): VolumeMount => ({ name: ELYRA_VOLUME_NAME, mountPath: RUNTIME_MOUNT_PATH, @@ -25,6 +29,13 @@ export const getElyraVolume = (): Volume => ({ }, }); +export const getElyraRoleBindingOwnerRef = (notebookName: string, ownerUid: string) => ({ + apiVersion: 'kubeflow.org/v1beta1', + kind: 'Notebook', + name: notebookName, + uid: ownerUid, +}); + export const getPipelineVolumePatch = (): Patch => ({ path: '/spec/template/spec/volumes/-', op: 'add', @@ -83,15 +94,17 @@ export const generateElyraSecret = ( export const generateElyraServiceAccountRoleBinding = ( notebookName: string, namespace: string, + ownerUid: string, ): RoleBindingKind => ({ apiVersion: 'rbac.authorization.k8s.io/v1', kind: 'RoleBinding', metadata: { - name: `elyra-pipelines-${notebookName}`, + name: getElyraServiceAccountRoleBindingName(notebookName), namespace, labels: { [KnownLabels.DASHBOARD_RESOURCE]: 'true', }, + ownerReferences: [getElyraRoleBindingOwnerRef(notebookName, ownerUid)], }, roleRef: { 
apiGroup: 'rbac.authorization.k8s.io', @@ -105,3 +118,56 @@ export const generateElyraServiceAccountRoleBinding = ( }, ], }); + +export const createElyraServiceAccountRoleBinding = async ( + notebook: NotebookKind, +): Promise => { + const notebookName = notebook.metadata.name; + const namespace = notebook.metadata.namespace; + const notebookUid = notebook.metadata.uid; + + // Check if rolebinding is already exists for backward compatibility + const roleBinding = await getRoleBinding( + namespace, + getElyraServiceAccountRoleBindingName(notebookName), + ).catch((e) => { + // 404 is not an error + if (e.statusObject?.code !== 404) { + // eslint-disable-next-line no-console + console.error( + `Could not get rolebinding to service account for notebook, ${notebookName}; Reason ${e.message}`, + ); + } + return undefined; + }); + + if (notebookUid) { + if (roleBinding) { + const ownerReferences = roleBinding.metadata.ownerReferences || []; + if (!ownerReferences.find((ownerReference) => ownerReference.uid === notebookUid)) { + ownerReferences.push(getElyraRoleBindingOwnerRef(notebookName, notebookUid)); + } + return patchRoleBindingOwnerRef( + roleBinding.metadata.name, + roleBinding.metadata.namespace, + ownerReferences, + ).catch((e) => { + // This is not ideal, but it shouldn't impact the starting of the notebook. 
Let us log it, and mute the error + // eslint-disable-next-line no-console + console.error( + `Could not patch rolebinding to service account for notebook, ${notebookName}; Reason ${e.message}`, + ); + }); + } + return createRoleBinding( + generateElyraServiceAccountRoleBinding(notebookName, namespace, notebookUid), + ).catch((e) => { + // eslint-disable-next-line no-console + console.error( + `Could not create rolebinding to service account for notebook, ${notebookName}; Reason ${e.message}`, + ); + }); + } + + return undefined; +}; diff --git a/frontend/src/k8sTypes.ts b/frontend/src/k8sTypes.ts index 2795ef2eed..bf50b108f1 100644 --- a/frontend/src/k8sTypes.ts +++ b/frontend/src/k8sTypes.ts @@ -44,7 +44,10 @@ type DisplayNameAnnotations = Partial<{ export type K8sDSGResource = K8sResourceCommon & { metadata: { - annotations?: DisplayNameAnnotations; + annotations?: DisplayNameAnnotations & + Partial<{ + 'opendatahub.io/recommended-accelerators': string; + }>; name: string; }; }; @@ -70,6 +73,7 @@ export type NotebookAnnotations = Partial<{ 'opendatahub.io/username': string; // the untranslated username behind the notebook 'notebooks.opendatahub.io/last-image-selection': string; // the last image they selected 'notebooks.opendatahub.io/last-size-selection': string; // the last notebook size they selected + 'opendatahub.io/accelerator-name': string; // the accelerator attached to the notebook }>; export type DashboardLabels = { @@ -92,6 +96,8 @@ export type ServingRuntimeAnnotations = Partial<{ 'opendatahub.io/template-name': string; 'opendatahub.io/template-display-name': string; 'opendatahub.io/disable-gpu': string; + 'opendatahub.io/recommended-accelerators': string; + 'opendatahub.io/accelerator-name': string; 'enable-route': string; 'enable-auth': string; }>; @@ -310,6 +316,15 @@ export type ServiceAccountKind = K8sResourceCommon & { }[]; }; +export type ServingContainer = { + args: string[]; + image: string; + name: string; + affinity?: PodAffinity; + 
resources: ContainerResources; + volumeMounts?: VolumeMount[]; +}; + export type ServingRuntimeKind = K8sResourceCommon & { metadata: { annotations?: DisplayNameAnnotations & ServingRuntimeAnnotations; @@ -323,15 +338,10 @@ export type ServingRuntimeKind = K8sResourceCommon & { memBufferBytes?: number; modelLoadingTimeoutMillis?: number; }; - containers: { - args: string[]; - image: string; - name: string; - resources: ContainerResources; - volumeMounts?: VolumeMount[]; - }[]; + containers: ServingContainer[]; supportedModelFormats: SupportedModelFormats[]; replicas: number; + tolerations?: PodToleration[]; volumes?: Volume[]; }; }; @@ -732,3 +742,47 @@ export type DashboardConfigKind = K8sResourceCommon & { templateDisablement?: string[]; }; }; + +export type AcceleratorKind = K8sResourceCommon & { + metadata: { + name: string; + annotations?: Partial<{ + 'opendatahub.io/modified-date': string; + }>; + }; + spec: { + displayName: string; + enabled: boolean; + identifier: string; + description?: string; + tolerations?: PodToleration[]; + }; +}; + +// In the SDK TResource extends from K8sResourceCommon, but both kind and apiVersion are mandatory +export type K8sResourceListResult> = { + apiVersion: string; + kind: string; + items: TResource[]; + metadata: { + resourceVersion: string; + continue: string; + }; +}; + +type ComponentNames = + | 'codeflare' + | 'data-science-pipelines-operator' + | 'kserve' + | 'model-mesh' + // Bug: https://github.com/opendatahub-io/opendatahub-operator/issues/641 + | 'odh-dashboard' + | 'ray' + | 'workbenches'; + +/** We don't need or should ever get the full kind, this is the status section */ +export type DataScienceClusterKindStatus = { + conditions: K8sCondition[]; + installedComponents: { [key in ComponentNames]: boolean }; + phase?: string; +}; diff --git a/frontend/src/pages/BYONImages/BYONImageModal/DisplayedContentTable.tsx b/frontend/src/pages/BYONImages/BYONImageModal/DisplayedContentTable.tsx index 39f1a3fceb..23bec52f5d 
100644 --- a/frontend/src/pages/BYONImages/BYONImageModal/DisplayedContentTable.tsx +++ b/frontend/src/pages/BYONImages/BYONImageModal/DisplayedContentTable.tsx @@ -1,7 +1,7 @@ import * as React from 'react'; import { Button, Panel, PanelFooter, PanelHeader, PanelMainBody } from '@patternfly/react-core'; import { PlusCircleIcon } from '@patternfly/react-icons'; -import Table from '~/components/table/Table'; +import { Table } from '~/components/table'; import { BYONImagePackage } from '~/types'; import { DisplayedContentTab } from './ManageBYONImageModal'; import { getColumns } from './tableData'; diff --git a/frontend/src/pages/BYONImages/BYONImageModal/tableData.tsx b/frontend/src/pages/BYONImages/BYONImageModal/tableData.tsx index cc73a0010e..40e16f6bf9 100644 --- a/frontend/src/pages/BYONImages/BYONImageModal/tableData.tsx +++ b/frontend/src/pages/BYONImages/BYONImageModal/tableData.tsx @@ -1,4 +1,4 @@ -import { SortableData } from '~/components/table/useTableColumnSort'; +import { SortableData } from '~/components/table'; import { BYONImagePackage } from '~/types'; import { DisplayedContentTab } from './ManageBYONImageModal'; diff --git a/frontend/src/pages/BYONImages/BYONImagesTable.tsx b/frontend/src/pages/BYONImages/BYONImagesTable.tsx index 9906d36061..3f59e33498 100644 --- a/frontend/src/pages/BYONImages/BYONImagesTable.tsx +++ b/frontend/src/pages/BYONImages/BYONImagesTable.tsx @@ -1,7 +1,7 @@ import React from 'react'; import { ToolbarItem } from '@patternfly/react-core'; import { BYONImage } from '~/types'; -import Table from '~/components/table/Table'; +import { Table } from '~/components/table'; import DashboardSearchField, { SearchType } from '~/concepts/dashboard/DashboardSearchField'; import DashboardEmptyTableView from '~/concepts/dashboard/DashboardEmptyTableView'; import ManageBYONImageModal from './BYONImageModal/ManageBYONImageModal'; diff --git a/frontend/src/pages/BYONImages/tableData.tsx b/frontend/src/pages/BYONImages/tableData.tsx index 
1719c64dc5..9766ba576a 100644 --- a/frontend/src/pages/BYONImages/tableData.tsx +++ b/frontend/src/pages/BYONImages/tableData.tsx @@ -1,4 +1,4 @@ -import { SortableData } from '~/components/table/useTableColumnSort'; +import { SortableData } from '~/components/table'; import { BYONImage } from '~/types'; import { getEnabledStatus } from './utils'; diff --git a/frontend/src/pages/learningCenter/CategoryFilters.tsx b/frontend/src/pages/learningCenter/CategoryFilters.tsx index 9a02bd40d1..286f64d49d 100644 --- a/frontend/src/pages/learningCenter/CategoryFilters.tsx +++ b/frontend/src/pages/learningCenter/CategoryFilters.tsx @@ -56,7 +56,7 @@ const CategoryFilters: React.FC = ({ docApps, favorites }) key={category} title={category} shown - active={category === categoryQuery || (!categoryQuery && category == ALL_ITEMS)} + active={category === categoryQuery || (!categoryQuery && category === ALL_ITEMS)} onActivate={() => onSelectCategory(category)} tabIndex={-1} /> diff --git a/frontend/src/pages/modelServing/__tests__/utils.spec.ts b/frontend/src/pages/modelServing/__tests__/utils.spec.ts new file mode 100644 index 0000000000..c6ac177ce3 --- /dev/null +++ b/frontend/src/pages/modelServing/__tests__/utils.spec.ts @@ -0,0 +1,52 @@ +import { resourcesArePositive } from '~/pages/modelServing/utils'; +import { ContainerResources } from '~/types'; + +describe('resourcesArePositive', () => { + it('should return true for undefined limits and request', () => { + const resources: ContainerResources = { + limits: undefined, + requests: undefined, + }; + expect(resourcesArePositive(resources)).toBe(true); + }); + + it('should return false for resources with zero limits and requests', () => { + const resources: ContainerResources = { + limits: { cpu: 0, memory: '0Gi' }, + requests: { cpu: 0, memory: '0Gi' }, + }; + expect(resourcesArePositive(resources)).toBe(false); + }); + + it('should return false for resources with negative limits and requests', () => { + const resources: 
ContainerResources = { + limits: { cpu: '-1', memory: '-1Mi' }, + requests: { cpu: '-1', memory: '-1Mi' }, + }; + expect(resourcesArePositive(resources)).toBe(false); + }); + + it('should return true for resources with positive limits and requests', () => { + const resources: ContainerResources = { + limits: { cpu: '1', memory: '1Gi' }, + requests: { cpu: '1', memory: '1Gi' }, + }; + expect(resourcesArePositive(resources)).toBe(true); + }); + + it('should return true for resources with positive limits and undefined requests', () => { + const resources: ContainerResources = { + limits: { cpu: 1, memory: '1Gi' }, + requests: undefined, + }; + expect(resourcesArePositive(resources)).toBe(true); + }); + + it('should return true for resources with undefined limits and positive requests', () => { + const resources: ContainerResources = { + limits: undefined, + requests: { cpu: 1, memory: '1Gi' }, + }; + expect(resourcesArePositive(resources)).toBe(true); + }); +}); diff --git a/frontend/src/pages/modelServing/customServingRuntimes/CustomServingRuntimeListView.tsx b/frontend/src/pages/modelServing/customServingRuntimes/CustomServingRuntimeListView.tsx index 99c78fd583..c8fba8316a 100644 --- a/frontend/src/pages/modelServing/customServingRuntimes/CustomServingRuntimeListView.tsx +++ b/frontend/src/pages/modelServing/customServingRuntimes/CustomServingRuntimeListView.tsx @@ -4,7 +4,7 @@ import { Button, ToolbarItem } from '@patternfly/react-core'; import { TemplateKind } from '~/k8sTypes'; import { useDashboardNamespace } from '~/redux/selectors'; import useNotification from '~/utilities/useNotification'; -import Table from '~/components/table/Table'; +import { Table } from '~/components/table'; import useDraggableTable from '~/utilities/useDraggableTable'; import { patchDashboardConfigTemplateOrderBackend } from '~/services/dashboardService'; import { getServingRuntimeNameFromTemplate, getSortedTemplates } from './utils'; diff --git 
a/frontend/src/pages/modelServing/customServingRuntimes/CustomServingRuntimeTableRow.tsx b/frontend/src/pages/modelServing/customServingRuntimes/CustomServingRuntimeTableRow.tsx index 7ee5b7dd71..4a991b205e 100644 --- a/frontend/src/pages/modelServing/customServingRuntimes/CustomServingRuntimeTableRow.tsx +++ b/frontend/src/pages/modelServing/customServingRuntimes/CustomServingRuntimeTableRow.tsx @@ -35,7 +35,7 @@ const CustomServingRuntimeTableRow: React.FC }} /> - + {getServingRuntimeDisplayNameFromTemplate(template)} {templateOOTB && } @@ -62,6 +62,16 @@ const CustomServingRuntimeTableRow: React.FC onClick: () => navigate(`/servingRuntimes/editServingRuntime/${servingRuntimeName}`), }, + { + title: 'Duplicate', + onClick: () => + navigate('/servingRuntimes/addServingRuntime', { + state: { template: template }, + }), + }, + { + isSeparator: true, + }, { title: 'Delete', onClick: () => onDeleteTemplate(template), diff --git a/frontend/src/pages/modelServing/customServingRuntimes/templatedData.tsx b/frontend/src/pages/modelServing/customServingRuntimes/templatedData.tsx index 726e2fea18..921d4eef68 100644 --- a/frontend/src/pages/modelServing/customServingRuntimes/templatedData.tsx +++ b/frontend/src/pages/modelServing/customServingRuntimes/templatedData.tsx @@ -1,4 +1,4 @@ -import { SortableData } from '~/components/table/useTableColumnSort'; +import { SortableData } from '~/components/table'; import { TemplateKind } from '~/k8sTypes'; export const columns: SortableData[] = [ diff --git a/frontend/src/pages/modelServing/screens/global/EmptyModelServing.tsx b/frontend/src/pages/modelServing/screens/global/EmptyModelServing.tsx index 4bd7471bd9..d1f1954348 100644 --- a/frontend/src/pages/modelServing/screens/global/EmptyModelServing.tsx +++ b/frontend/src/pages/modelServing/screens/global/EmptyModelServing.tsx @@ -1,6 +1,14 @@ import * as React from 'react'; -import { Button, EmptyState, EmptyStateBody, EmptyStateIcon, Title } from '@patternfly/react-core'; -import 
{ PlusCircleIcon } from '@patternfly/react-icons'; +import { + Button, + EmptyState, + EmptyStateBody, + EmptyStateIcon, + EmptyStateVariant, + EmptyStateSecondaryActions, + Title, +} from '@patternfly/react-core'; +import { PlusCircleIcon, WrenchIcon } from '@patternfly/react-icons'; import { useNavigate } from 'react-router-dom'; import { ModelServingContext } from '~/pages/modelServing/ModelServingContext'; import ServeModelButton from './ServeModelButton'; @@ -13,17 +21,20 @@ const EmptyModelServing: React.FC = () => { if (servingRuntimes.length === 0) { return ( - - + + - No model servers + No deployed models yet - Before deploying a model, you must first configure a model server. + To get started, deploy a model from the Models and model servers section + of a project. - + + + ); } @@ -32,7 +43,7 @@ const EmptyModelServing: React.FC = () => { - No deployed models. + No deployed models To get started, use existing model servers to serve a model. diff --git a/frontend/src/pages/modelServing/screens/global/InferenceServiceTable.tsx b/frontend/src/pages/modelServing/screens/global/InferenceServiceTable.tsx index d2926ae9cb..e6ec893a3f 100644 --- a/frontend/src/pages/modelServing/screens/global/InferenceServiceTable.tsx +++ b/frontend/src/pages/modelServing/screens/global/InferenceServiceTable.tsx @@ -1,8 +1,7 @@ import * as React from 'react'; import { Button } from '@patternfly/react-core'; import ManageInferenceServiceModal from '~/pages/modelServing/screens/projects/InferenceServiceModal/ManageInferenceServiceModal'; -import Table from '~/components/table/Table'; - +import { Table } from '~/components/table'; import { InferenceServiceKind, ServingRuntimeKind } from '~/k8sTypes'; import { ProjectsContext } from '~/concepts/projects/ProjectsContext'; import InferenceServiceTableRow from './InferenceServiceTableRow'; diff --git a/frontend/src/pages/modelServing/screens/global/data.ts b/frontend/src/pages/modelServing/screens/global/data.ts index 
436c7aa63a..50a9ec2ca0 100644 --- a/frontend/src/pages/modelServing/screens/global/data.ts +++ b/frontend/src/pages/modelServing/screens/global/data.ts @@ -1,5 +1,5 @@ import { InferenceServiceKind, ProjectKind, SecretKind } from '~/k8sTypes'; -import { SortableData } from '~/components/table/useTableColumnSort'; +import { SortableData } from '~/components/table'; import { getProjectDisplayName } from '~/pages/projects/utils'; import { getInferenceServiceDisplayName, getTokenDisplayName } from './utils'; diff --git a/frontend/src/pages/modelServing/screens/projects/InferenceServiceModal/ManageInferenceServiceModal.tsx b/frontend/src/pages/modelServing/screens/projects/InferenceServiceModal/ManageInferenceServiceModal.tsx index 79e5da78fd..763db81cb9 100644 --- a/frontend/src/pages/modelServing/screens/projects/InferenceServiceModal/ManageInferenceServiceModal.tsx +++ b/frontend/src/pages/modelServing/screens/projects/InferenceServiceModal/ManageInferenceServiceModal.tsx @@ -78,6 +78,7 @@ const ManageInferenceServiceModal: React.FC = const onBeforeClose = (submitted: boolean) => { onClose(submitted); + setError(undefined); setActionInProgress(false); resetData(); }; @@ -99,27 +100,12 @@ const ManageInferenceServiceModal: React.FC = ), ); - const cleanFormData = () => { - const cleanedStorageFolderPath = createData.storage.path.replace(/^\/+/, ''); - - return { - ...createData, - storage: { - ...createData.storage, - path: cleanedStorageFolderPath === '' ? 
'/' : cleanedStorageFolderPath, - }, - }; - }; - const createModel = (): Promise => { - // clean data - const cleanedFormData = cleanFormData(); - - if (cleanedFormData.storage.type === InferenceServiceStorageType.EXISTING_STORAGE) { - return createInferenceService(cleanedFormData); + if (createData.storage.type === InferenceServiceStorageType.EXISTING_STORAGE) { + return createInferenceService(createData); } return createAWSSecret().then((secret) => - createInferenceService(cleanedFormData, secret.metadata.name), + createInferenceService(createData, secret.metadata.name), ); }; @@ -128,14 +114,11 @@ const ManageInferenceServiceModal: React.FC = return Promise.reject(new Error('No model to update')); } - // clean data - const cleanedFormData = cleanFormData(); - - if (cleanedFormData.storage.type === InferenceServiceStorageType.EXISTING_STORAGE) { - return updateInferenceService(cleanedFormData, editInfo); + if (createData.storage.type === InferenceServiceStorageType.EXISTING_STORAGE) { + return updateInferenceService(createData, editInfo); } return createAWSSecret().then((secret) => - updateInferenceService(cleanedFormData, editInfo, secret.metadata.name), + updateInferenceService(createData, editInfo, secret.metadata.name), ); }; diff --git a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeDetails.tsx b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeDetails.tsx index f50ab5d8ce..53d3099e33 100644 --- a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeDetails.tsx +++ b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeDetails.tsx @@ -8,10 +8,10 @@ import { List, ListItem, } from '@patternfly/react-core'; -import { ServingRuntimeKind } from '~/k8sTypes'; import { AppContext } from '~/app/AppContext'; -import { ContainerResourceAttributes } from '~/types'; +import { ServingRuntimeKind } from '~/k8sTypes'; import { getServingRuntimeSizes } from './utils'; +import useServingAccelerator from 
'./useServingAccelerator'; type ServingRuntimeDetailsProps = { obj: ServingRuntimeKind; @@ -22,6 +22,7 @@ const ServingRuntimeDetails: React.FC = ({ obj }) => const container = obj.spec.containers[0]; // can we assume the first container? const sizes = getServingRuntimeSizes(dashboardConfig); const size = sizes.find((size) => _.isEqual(size.resources, container.resources)); + const [accelerator] = useServingAccelerator(obj); return ( @@ -44,11 +45,21 @@ const ServingRuntimeDetails: React.FC = ({ obj }) => - Number of GPUs + Accelerator - {container.resources.limits?.[ContainerResourceAttributes.NVIDIA_GPU] || 0} + {accelerator.accelerator + ? accelerator.accelerator.spec.displayName + : accelerator.useExisting + ? 'Unknown' + : 'None'} + {!accelerator.useExisting && ( + + Number of accelerators + {accelerator.count} + + )} ); }; diff --git a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ManageServingRuntimeModal.tsx b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ManageServingRuntimeModal.tsx index a9ae72648b..ec99407ebf 100644 --- a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ManageServingRuntimeModal.tsx +++ b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ManageServingRuntimeModal.tsx @@ -36,6 +36,7 @@ import { import useCustomServingRuntimesEnabled from '~/pages/modelServing/customServingRuntimes/useCustomServingRuntimesEnabled'; import { getServingRuntimeFromName } from '~/pages/modelServing/customServingRuntimes/utils'; import { translateDisplayNameForK8s } from '~/pages/projects/utils'; +import useServingAccelerator from '~/pages/modelServing/screens/projects/useServingAccelerator'; import ServingRuntimeReplicaSection from './ServingRuntimeReplicaSection'; import ServingRuntimeSizeSection from './ServingRuntimeSizeSection'; import ServingRuntimeTokenSection from './ServingRuntimeTokenSection'; @@ -69,6 +70,9 @@ const ManageServingRuntimeModal: React.FC = ({ editInfo, 
}) => { const [createData, setCreateData, resetData, sizes] = useCreateServingRuntimeObject(editInfo); + const [acceleratorState, setAcceleratorState, resetAcceleratorData] = useServingAccelerator( + editInfo?.servingRuntime, + ); const [actionInProgress, setActionInProgress] = React.useState(false); const [error, setError] = React.useState(); @@ -106,6 +110,7 @@ const ManageServingRuntimeModal: React.FC = ({ setError(undefined); setActionInProgress(false); resetData(); + resetAcceleratorData(); }; const setErrorModal = (error: Error) => { @@ -127,33 +132,39 @@ const ManageServingRuntimeModal: React.FC = ({ } const servingRuntimeData = { ...createData, - gpus: isGpuDisabled(servingRuntimeSelected) ? 0 : createData.gpus, + existingTolerations: servingRuntimeSelected.spec.tolerations || [], }; const servingRuntimeName = translateDisplayNameForK8s(servingRuntimeData.name); const createRolebinding = servingRuntimeData.tokenAuth && allowCreate; + const accelerator = isGpuDisabled(servingRuntimeSelected) + ? { count: 0, accelerators: [], useExisting: false } + : acceleratorState; + Promise.all([ ...(editInfo?.servingRuntime ? [ - updateServingRuntime( - servingRuntimeData, - editInfo?.servingRuntime, - customServingRuntimesEnabled, - { + updateServingRuntime({ + data: servingRuntimeData, + existingData: editInfo?.servingRuntime, + isCustomServingRuntimesEnabled: customServingRuntimesEnabled, + opts: { dryRun: true, }, - ), + acceleratorState: accelerator, + }), ] : [ - createServingRuntime( - servingRuntimeData, + createServingRuntime({ + data: servingRuntimeData, namespace, - servingRuntimeSelected, - customServingRuntimesEnabled, - { + servingRuntime: servingRuntimeSelected, + isCustomServingRuntimesEnabled: customServingRuntimesEnabled, + opts: { dryRun: true, }, - ), + acceleratorState: accelerator, + }), ]), setUpTokenAuth( servingRuntimeData, @@ -173,19 +184,22 @@ const ManageServingRuntimeModal: React.FC = ({ : []), ...(editInfo?.servingRuntime ? 
[ - updateServingRuntime( - servingRuntimeData, - editInfo?.servingRuntime, - customServingRuntimesEnabled, - ), + updateServingRuntime({ + data: servingRuntimeData, + existingData: editInfo?.servingRuntime, + isCustomServingRuntimesEnabled: customServingRuntimesEnabled, + + acceleratorState: accelerator, + }), ] : [ - createServingRuntime( - servingRuntimeData, + createServingRuntime({ + data: servingRuntimeData, namespace, - servingRuntimeSelected, - customServingRuntimesEnabled, - ), + servingRuntime: servingRuntimeSelected, + isCustomServingRuntimesEnabled: customServingRuntimesEnabled, + acceleratorState: accelerator, + }), ]), setUpTokenAuth( servingRuntimeData, @@ -244,6 +258,7 @@ const ManageServingRuntimeModal: React.FC = ({ setData={setCreateData} templates={servingRuntimeTemplates || []} isEditing={!!editInfo} + acceleratorState={acceleratorState} /> @@ -254,6 +269,8 @@ const ManageServingRuntimeModal: React.FC = ({ setData={setCreateData} sizes={sizes} servingRuntimeSelected={servingRuntimeSelected} + acceleratorState={acceleratorState} + setAcceleratorState={setAcceleratorState} /> diff --git a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ServingRuntimeSizeSection.tsx b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ServingRuntimeSizeSection.tsx index bd04dad8d4..54079330e2 100644 --- a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ServingRuntimeSizeSection.tsx +++ b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ServingRuntimeSizeSection.tsx @@ -2,7 +2,6 @@ import * as React from 'react'; import { FormGroup, FormSection, - NumberInput, Select, SelectOption, Stack, @@ -13,9 +12,11 @@ import { CreatingServingRuntimeObject, ServingRuntimeSize, } from '~/pages/modelServing/screens/types'; -import useGPUSetting from '~/pages/notebookController/screens/server/useGPUSetting'; import { ServingRuntimeKind } from '~/k8sTypes'; import { isGpuDisabled } from 
'~/pages/modelServing/screens/projects/utils'; +import AcceleratorSelectField from '~/pages/notebookController/screens/server/AcceleratorSelectField'; +import { getCompatibleAcceleratorIdentifiers } from '~/pages/projects/screens/spawner/spawnerUtils'; +import { AcceleratorState } from '~/utilities/useAcceleratorState'; import ServingRuntimeSizeExpandedField from './ServingRuntimeSizeExpandedField'; type ServingRuntimeSizeSectionProps = { @@ -23,6 +24,8 @@ type ServingRuntimeSizeSectionProps = { setData: UpdateObjectAtPropAndValue; sizes: ServingRuntimeSize[]; servingRuntimeSelected?: ServingRuntimeKind; + acceleratorState: AcceleratorState; + setAcceleratorState: UpdateObjectAtPropAndValue; }; const ServingRuntimeSizeSection: React.FC = ({ @@ -30,9 +33,19 @@ const ServingRuntimeSizeSection: React.FC = ({ setData, sizes, servingRuntimeSelected, + acceleratorState, + setAcceleratorState, }) => { const [sizeDropdownOpen, setSizeDropdownOpen] = React.useState(false); - const { available: gpuAvailable, count: gpuCount } = useGPUSetting('autodetect'); + const [supportedAccelerators, setSupportedAccelerators] = React.useState(); + + React.useEffect(() => { + if (servingRuntimeSelected) { + setSupportedAccelerators(getCompatibleAcceleratorIdentifiers(servingRuntimeSelected)); + } else { + setSupportedAccelerators(undefined); + } + }, [servingRuntimeSelected]); const gpuDisabled = servingRuntimeSelected ? 
isGpuDisabled(servingRuntimeSelected) : false; @@ -88,25 +101,13 @@ const ServingRuntimeSizeSection: React.FC = ({ )} - {gpuAvailable && !gpuDisabled && ( - - ) => { - const target = event.currentTarget; - setData('gpus', parseInt(target.value) || 0); - }} - onBlur={(event: React.FormEvent) => { - const target = event.currentTarget; - const gpuInput = parseInt(target.value) || 0; - setData('gpus', Math.max(0, Math.min(gpuCount, gpuInput))); - }} - onMinus={() => setData('gpus', data.gpus - 1)} - onPlus={() => setData('gpus', data.gpus + 1)} + {!gpuDisabled && ( + + )} diff --git a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ServingRuntimeTemplateSection.tsx b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ServingRuntimeTemplateSection.tsx index 0b8f9ddd46..2cee7c6af1 100644 --- a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ServingRuntimeTemplateSection.tsx +++ b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeModal/ServingRuntimeTemplateSection.tsx @@ -1,5 +1,5 @@ import * as React from 'react'; -import { FormGroup, Select, SelectOption, StackItem, TextInput } from '@patternfly/react-core'; +import { FormGroup, Label, Split, SplitItem, StackItem, TextInput } from '@patternfly/react-core'; import { UpdateObjectAtPropAndValue } from '~/pages/projects/types'; import { CreatingServingRuntimeObject } from '~/pages/modelServing/screens/types'; import { TemplateKind } from '~/k8sTypes'; @@ -7,12 +7,16 @@ import { getServingRuntimeDisplayNameFromTemplate, getServingRuntimeNameFromTemplate, } from '~/pages/modelServing/customServingRuntimes/utils'; +import { isCompatibleWithAccelerator } from '~/pages/projects/screens/spawner/spawnerUtils'; +import SimpleDropdownSelect from '~/components/SimpleDropdownSelect'; +import { AcceleratorState } from '~/utilities/useAcceleratorState'; type ServingRuntimeTemplateSectionProps = { data: CreatingServingRuntimeObject; setData: 
UpdateObjectAtPropAndValue; templates: TemplateKind[]; isEditing?: boolean; + acceleratorState: AcceleratorState; }; const ServingRuntimeTemplateSection: React.FC = ({ @@ -20,17 +24,24 @@ const ServingRuntimeTemplateSection: React.FC { - const [isOpen, setOpen] = React.useState(false); - - const options = templates.map((template) => ( - - {getServingRuntimeDisplayNameFromTemplate(template)} - - )); + const options = templates.map((template) => ({ + key: getServingRuntimeNameFromTemplate(template), + selectedLabel: getServingRuntimeDisplayNameFromTemplate(template), + label: ( + + {getServingRuntimeDisplayNameFromTemplate(template)} + + + {isCompatibleWithAccelerator( + acceleratorState.accelerator?.spec.identifier, + template.objects[0], + ) && } + + + ), + })); return ( <> @@ -46,22 +57,20 @@ const ServingRuntimeTemplateSection: React.FC - + id="serving-runtime-template-selection" + aria-label="Select a template" + options={options} + placeholder={ + isEditing || templates.length === 0 ? data.servingRuntimeTemplateName : 'Select one' + } + value={data.servingRuntimeTemplateName ?? 
''} + onChange={(name) => { + setData('servingRuntimeTemplateName', name); + }} + /> diff --git a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeTable.tsx b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeTable.tsx index cae3343da8..7a9b4db89b 100644 --- a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeTable.tsx +++ b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeTable.tsx @@ -1,5 +1,5 @@ import * as React from 'react'; -import Table from '~/components/table/Table'; +import { Table } from '~/components/table'; import { AccessReviewResourceAttributes, ServingRuntimeKind } from '~/k8sTypes'; import { ProjectDetailsContext } from '~/pages/projects/ProjectDetailsContext'; import { useAccessReview } from '~/api'; diff --git a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeTokensTable.tsx b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeTokensTable.tsx index 642f5ec0ee..fc5e2c2449 100644 --- a/frontend/src/pages/modelServing/screens/projects/ServingRuntimeTokensTable.tsx +++ b/frontend/src/pages/modelServing/screens/projects/ServingRuntimeTokensTable.tsx @@ -1,6 +1,6 @@ import * as React from 'react'; import { HelperText, HelperTextItem } from '@patternfly/react-core'; -import Table from '~/components/table/Table'; +import { Table } from '~/components/table'; import { ProjectDetailsContext } from '~/pages/projects/ProjectDetailsContext'; import { tokenColumns } from '~/pages/modelServing/screens/global/data'; import { ServingRuntimeKind } from '~/k8sTypes'; diff --git a/frontend/src/pages/modelServing/screens/projects/data.ts b/frontend/src/pages/modelServing/screens/projects/data.ts index e648a74b7c..a493fcc4a5 100644 --- a/frontend/src/pages/modelServing/screens/projects/data.ts +++ b/frontend/src/pages/modelServing/screens/projects/data.ts @@ -1,5 +1,5 @@ import { ServingRuntimeKind } from '~/k8sTypes'; -import { SortableData } from '~/components/table/useTableColumnSort'; 
+import { SortableData } from '~/components/table'; export const columns: SortableData[] = [ { diff --git a/frontend/src/pages/modelServing/screens/projects/useServingAccelerator.ts b/frontend/src/pages/modelServing/screens/projects/useServingAccelerator.ts new file mode 100644 index 0000000000..fa20a86e55 --- /dev/null +++ b/frontend/src/pages/modelServing/screens/projects/useServingAccelerator.ts @@ -0,0 +1,15 @@ +import { ServingRuntimeKind } from '~/k8sTypes'; +import useAcceleratorState, { AcceleratorState } from '~/utilities/useAcceleratorState'; +import { GenericObjectState } from '~/utilities/useGenericObjectState'; + +const useServingAccelerator = ( + servingRuntime?: ServingRuntimeKind | null, +): GenericObjectState => { + const acceleratorName = servingRuntime?.metadata.annotations?.['opendatahub.io/accelerator-name']; + const resources = servingRuntime?.spec.containers[0].resources; + const tolerations = servingRuntime?.spec.tolerations; + + return useAcceleratorState(resources, tolerations, acceleratorName); +}; + +export default useServingAccelerator; diff --git a/frontend/src/pages/modelServing/screens/projects/utils.ts b/frontend/src/pages/modelServing/screens/projects/utils.ts index 616789f8c1..a800389c3e 100644 --- a/frontend/src/pages/modelServing/screens/projects/utils.ts +++ b/frontend/src/pages/modelServing/screens/projects/utils.ts @@ -8,7 +8,7 @@ import { InferenceServiceStorageType, ServingRuntimeSize, } from '~/pages/modelServing/screens/types'; -import { ContainerResourceAttributes, DashboardConfig } from '~/types'; +import { DashboardConfig } from '~/types'; import { DEFAULT_MODEL_SERVER_SIZES } from '~/pages/modelServing/screens/const'; import { useAppContext } from '~/app/AppContext'; import { useDeepCompareMemoize } from '~/utilities/useDeepCompareMemoize'; @@ -61,7 +61,6 @@ export const useCreateServingRuntimeObject = (existingData?: { servingRuntimeTemplateName: '', numReplicas: 1, modelSize: sizes[0], - gpus: 0, externalRoute: 
false, tokenAuth: false, tokens: [], @@ -82,11 +81,6 @@ export const useCreateServingRuntimeObject = (existingData?: { const existingResources = existingData?.servingRuntime?.spec?.containers[0]?.resources || sizes[0].resources; - const existingGpus = - existingData?.servingRuntime?.spec?.containers[0]?.resources?.requests?.[ - ContainerResourceAttributes.NVIDIA_GPU - ] || 0; - const existingExternalRoute = existingData?.servingRuntime?.metadata.annotations?.['enable-route'] === 'true'; const existingTokenAuth = @@ -118,10 +112,6 @@ export const useCreateServingRuntimeObject = (existingData?: { resources: existingResources, }, ); - setCreateData( - 'gpus', - typeof existingGpus == 'string' ? parseInt(existingGpus) : existingGpus, - ); setCreateData('externalRoute', existingExternalRoute); setCreateData('tokenAuth', existingTokenAuth); setCreateData('tokens', existingTokens); @@ -131,7 +121,6 @@ export const useCreateServingRuntimeObject = (existingData?: { existingServingRuntimeTemplateName, existingNumReplicas, existingResources, - existingGpus, existingExternalRoute, existingTokenAuth, existingTokens, diff --git a/frontend/src/pages/modelServing/screens/types.ts b/frontend/src/pages/modelServing/screens/types.ts index 7b66c456c5..05b720e68d 100644 --- a/frontend/src/pages/modelServing/screens/types.ts +++ b/frontend/src/pages/modelServing/screens/types.ts @@ -34,7 +34,6 @@ export type CreatingServingRuntimeObject = { servingRuntimeTemplateName: string; numReplicas: number; modelSize: ServingRuntimeSize; - gpus: number; externalRoute: boolean; tokenAuth: boolean; tokens: ServingRuntimeToken[]; diff --git a/frontend/src/pages/modelServing/utils.ts b/frontend/src/pages/modelServing/utils.ts index c5498607a8..1b0b42503c 100644 --- a/frontend/src/pages/modelServing/utils.ts +++ b/frontend/src/pages/modelServing/utils.ts @@ -22,8 +22,8 @@ export const getModelServiceAccountName = (name: string): string => `${name}-sa` export const getModelRoleBinding = (name: string): 
string => `${name}-view`; -const isValidCpuOrMemoryValue = (value?: string) => - value === undefined ? true : parseInt(value) > 0; +const isValidCpuOrMemoryValue = (value?: string | number) => + value === undefined ? true : parseInt(String(value)) > 0; export const resourcesArePositive = (resources: ContainerResources): boolean => isValidCpuOrMemoryValue(resources.limits?.cpu) && diff --git a/frontend/src/pages/notebookController/screens/admin/NotebookAdminControl.tsx b/frontend/src/pages/notebookController/screens/admin/NotebookAdminControl.tsx index 53737e3b91..29cb293254 100644 --- a/frontend/src/pages/notebookController/screens/admin/NotebookAdminControl.tsx +++ b/frontend/src/pages/notebookController/screens/admin/NotebookAdminControl.tsx @@ -1,27 +1,21 @@ import * as React from 'react'; import { Alert, Stack, StackItem, Title } from '@patternfly/react-core'; import { Td, Tr } from '@patternfly/react-table'; -import Table from '~/components/table/Table'; - -import useTableColumnSort from '~/components/table/useTableColumnSort'; +import { Table } from '~/components/table'; import ExternalLink from '~/components/ExternalLink'; import ApplicationsPage from '~/pages/ApplicationsPage'; import StopServerModal from '~/pages/notebookController/screens/server/StopServerModal'; import { Notebook } from '~/types'; -import { columns } from './const'; +import { columns } from './data'; import StopAllServersButton from './StopAllServersButton'; import UserTableCellTransform from './UserTableCellTransform'; import useAdminUsers from './useAdminUsers'; -import { AdminViewUserData } from './types'; import { NotebookAdminContext } from './NotebookAdminContext'; const NotebookAdminControl: React.FC = () => { - const [unsortedUsers, loaded, loadError] = useAdminUsers(); - const { transformData } = useTableColumnSort(columns, 0); + const [users, loaded, loadError] = useAdminUsers(); const { serverStatuses, setServerStatuses } = React.useContext(NotebookAdminContext); - const users 
= transformData(unsortedUsers); - const onNotebooksStop = React.useCallback( (didStop: boolean) => { if (didStop) { diff --git a/frontend/src/pages/notebookController/screens/admin/const.ts b/frontend/src/pages/notebookController/screens/admin/data.ts similarity index 90% rename from frontend/src/pages/notebookController/screens/admin/const.ts rename to frontend/src/pages/notebookController/screens/admin/data.ts index 4c8e266613..e09e5d8885 100644 --- a/frontend/src/pages/notebookController/screens/admin/const.ts +++ b/frontend/src/pages/notebookController/screens/admin/data.ts @@ -1,4 +1,4 @@ -import { SortableData } from '~/components/table/useTableColumnSort'; +import { SortableData } from '~/components/table'; import { AdminViewUserData } from './types'; export const columns: SortableData[] = [ diff --git a/frontend/src/pages/notebookController/screens/server/AcceleratorSelectField.tsx b/frontend/src/pages/notebookController/screens/server/AcceleratorSelectField.tsx new file mode 100644 index 0000000000..36096a998b --- /dev/null +++ b/frontend/src/pages/notebookController/screens/server/AcceleratorSelectField.tsx @@ -0,0 +1,224 @@ +import * as React from 'react'; +import { + Alert, + AlertVariant, + FormGroup, + InputGroup, + Label, + NumberInput, + Split, + SplitItem, + Stack, + StackItem, +} from '@patternfly/react-core'; +import { isHTMLInputElement } from '~/utilities/utils'; +import { AcceleratorKind } from '~/k8sTypes'; +import SimpleDropdownSelect, { SimpleDropdownOption } from '~/components/SimpleDropdownSelect'; +import { UpdateObjectAtPropAndValue } from '~/pages/projects/types'; +import { AcceleratorState } from '~/utilities/useAcceleratorState'; +import useAcceleratorCounts from './useAcceleratorCounts'; + +type AcceleratorSelectFieldProps = { + acceleratorState: AcceleratorState; + setAcceleratorState: UpdateObjectAtPropAndValue; + supportedAccelerators?: string[]; + resourceDisplayName?: string; +}; + +const AcceleratorSelectField: React.FC = ({ + 
acceleratorState, + setAcceleratorState, + supportedAccelerators, + resourceDisplayName = 'image', +}) => { + const [detectedAcceleratorInfo] = useAcceleratorCounts(); + + const { + accelerator, + count: acceleratorCount, + accelerators, + useExisting, + additionalOptions, + } = acceleratorState; + + const generateAcceleratorCountWarning = (newSize: number) => { + if (!accelerator) { + return ''; + } + + const identifier = accelerator?.spec.identifier; + + const detectedAcceleratorCount = Object.entries(detectedAcceleratorInfo.available).find( + ([id]) => identifier === id, + )?.[1]; + + if (detectedAcceleratorCount === undefined) { + return `No accelerator detected with the identifier ${identifier}.`; + } else if (newSize > detectedAcceleratorCount) { + return `Only ${detectedAcceleratorCount} accelerator${ + detectedAcceleratorCount > 1 ? 's' : '' + } detected.`; + } + + return ''; + }; + + const acceleratorCountWarning = generateAcceleratorCountWarning(acceleratorCount); + + const isAcceleratorSupported = (accelerator: AcceleratorKind) => + supportedAccelerators?.includes(accelerator.spec.identifier); + + const enabledAccelerators = accelerators.filter((ac) => ac.spec.enabled); + + const formatOption = (ac: AcceleratorKind): SimpleDropdownOption => { + const displayName = `${ac.spec.displayName}${!ac.spec.enabled ? 
' (disabled)' : ''}`; + + return { + key: ac.metadata.name, + selectedLabel: displayName, + description: ac.spec.description, + label: ( + + {displayName} + + + {isAcceleratorSupported(ac) && ( + + )} + + + ), + }; + }; + + const options: SimpleDropdownOption[] = enabledAccelerators + .sort((a, b) => { + const aSupported = isAcceleratorSupported(a); + const bSupported = isAcceleratorSupported(b); + if (aSupported && !bSupported) { + return -1; + } + if (!aSupported && bSupported) { + return 1; + } + return 0; + }) + .map((ac) => formatOption(ac)); + + let acceleratorAlertMessage: { title: string; variant: AlertVariant } | null = null; + if (accelerator && supportedAccelerators !== undefined) { + if (supportedAccelerators?.length === 0) { + acceleratorAlertMessage = { + title: `The ${resourceDisplayName} you have selected doesn't support the selected accelerator. It is recommended to use a compatible ${resourceDisplayName} for optimal performance.`, + variant: AlertVariant.info, + }; + } else if (!isAcceleratorSupported(accelerator)) { + acceleratorAlertMessage = { + title: `The ${resourceDisplayName} you have selected is not compatible with the selected accelerator`, + variant: AlertVariant.warning, + }; + } + } + + // add none option + options.push({ + key: '', + label: 'None', + isPlaceholder: true, + }); + + if (additionalOptions?.useExisting) { + options.push({ + key: 'use-existing', + label: 'Existing settings', + description: 'Use the existing accelerator settings from the notebook server', + }); + } else if (additionalOptions?.useDisabled) { + options.push(formatOption(additionalOptions?.useDisabled)); + } + + const onStep = (step: number) => { + setAcceleratorState('count', Math.max(acceleratorCount + step, 1)); + }; + + // if there is more than a none option, show the dropdown + if (options.length === 1) { + return null; + } + + return ( + + + + { + if (isPlaceholder) { + // none + setAcceleratorState('useExisting', false); + 
setAcceleratorState('accelerator', undefined); + setAcceleratorState('count', 0); + } else if (key === 'use-existing') { + // use existing settings + setAcceleratorState('useExisting', true); + setAcceleratorState('accelerator', undefined); + setAcceleratorState('count', 0); + } else { + // normal flow + setAcceleratorState('count', 1); + setAcceleratorState('useExisting', false); + setAcceleratorState( + 'accelerator', + accelerators.find((ac) => ac.metadata.name === key), + ); + } + }} + /> + + + {acceleratorAlertMessage && ( + + + + )} + {accelerator && ( + + + + onStep(1)} + onMinus={() => onStep(-1)} + onChange={(event) => { + if (isHTMLInputElement(event.target)) { + const newSize = Number(event.target.value); + setAcceleratorState('count', Math.max(newSize, 1)); + } + }} + /> + + + + )} + {acceleratorCountWarning && ( + + + + )} + + ); +}; + +export default AcceleratorSelectField; diff --git a/frontend/src/pages/notebookController/screens/server/NotebookServerDetails.tsx b/frontend/src/pages/notebookController/screens/server/NotebookServerDetails.tsx index e111770c84..edd6ebc690 100644 --- a/frontend/src/pages/notebookController/screens/server/NotebookServerDetails.tsx +++ b/frontend/src/pages/notebookController/screens/server/NotebookServerDetails.tsx @@ -16,11 +16,11 @@ import { getDescriptionForTag, getImageTagByContainer, getNameVersionString, - getNumGpus, } from '~/utilities/imageUtils'; import { useAppContext } from '~/app/AppContext'; import { useWatchImages } from '~/utilities/useWatchImages'; import { NotebookControllerContext } from '~/pages/notebookController/NotebookControllerContext'; +import useNotebookAccelerator from '~/pages/projects/screens/detail/notebooks/useNotebookAccelerator'; import { getNotebookSizes } from './usePreferredNotebookSize'; const NotebookServerDetails: React.FC = () => { @@ -28,6 +28,7 @@ const NotebookServerDetails: React.FC = () => { const { images, loaded } = useWatchImages(); const [isExpanded, setExpanded] = 
React.useState(false); const { dashboardConfig } = useAppContext(); + const [accelerator] = useNotebookAccelerator(notebook); const container: NotebookContainer | undefined = notebook?.spec.template.spec.containers.find( (container) => container.name === notebook.metadata.name, @@ -45,7 +46,6 @@ const NotebookServerDetails: React.FC = () => { const tagSoftware = getDescriptionForTag(tag); const tagDependencies = tag?.content.dependencies ?? []; - const numGpus = getNumGpus(container); const sizes = getNotebookSizes(dashboardConfig); const size = sizes.find((size) => _.isEqual(size.resources.limits, container.resources?.limits)); @@ -106,9 +106,21 @@ const NotebookServerDetails: React.FC = () => { {`${container.resources?.requests?.cpu} CPU, ${container.resources?.requests?.memory} Memory`} - Number of GPUs - {numGpus} + Accelerator + + {accelerator.accelerator + ? accelerator.accelerator.spec.displayName + : accelerator.useExisting + ? 'Unknown' + : 'None'} + + {!accelerator.useExisting && ( + + Number of accelerators + {accelerator.count} + + )} ); diff --git a/frontend/src/pages/notebookController/screens/server/SpawnerPage.tsx b/frontend/src/pages/notebookController/screens/server/SpawnerPage.tsx index 71a8e6ade5..a439c42d49 100644 --- a/frontend/src/pages/notebookController/screens/server/SpawnerPage.tsx +++ b/frontend/src/pages/notebookController/screens/server/SpawnerPage.tsx @@ -39,7 +39,7 @@ import ImpersonateAlert from '~/pages/notebookController/screens/admin/Impersona import useNamespaces from '~/pages/notebookController/useNamespaces'; import { fireTrackingEvent } from '~/utilities/segmentIOUtils'; import { getEnvConfigMap, getEnvSecret } from '~/services/envService'; -import GPUSelectField from './GPUSelectField'; +import useNotebookAccelerator from '~/pages/projects/screens/detail/notebooks/useNotebookAccelerator'; import SizeSelectField from './SizeSelectField'; import useSpawnerNotebookModalState from './useSpawnerNotebookModalState'; import 
BrowserTabPreferenceCheckbox from './BrowserTabPreferenceCheckbox'; @@ -49,6 +49,7 @@ import { usePreferredNotebookSize } from './usePreferredNotebookSize'; import StartServerModal from './StartServerModal'; import '~/pages/notebookController/NotebookController.scss'; +import AcceleratorSelectField from './AcceleratorSelectField'; const SpawnerPage: React.FC = () => { const navigate = useNavigate(); @@ -68,7 +69,7 @@ const SpawnerPage: React.FC = () => { tag: undefined, }); const { selectedSize, setSelectedSize, sizes } = usePreferredNotebookSize(); - const [selectedGpu, setSelectedGpu] = React.useState('0'); + const [accelerator, setAccelerator] = useNotebookAccelerator(currentUserNotebook); const [variableRows, setVariableRows] = React.useState([]); const [submitError, setSubmitError] = React.useState(null); @@ -231,7 +232,12 @@ const SpawnerPage: React.FC = () => { const fireStartServerEvent = () => { fireTrackingEvent('Notebook Server Started', { - GPU: parseInt(selectedGpu), + accelerator: accelerator.accelerator + ? `${accelerator.accelerator.spec.displayName} (${accelerator.accelerator.metadata.name}): ${accelerator.accelerator.spec.identifier}` + : accelerator.useExisting + ? 'Unknown' + : 'None', + acceleratorCount: accelerator.useExisting ? 
undefined : accelerator.count, lastSelectedSize: selectedSize.name, lastSelectedImage: `${selectedImageTag.image?.name}:${selectedImageTag.tag?.name}`, }); @@ -246,7 +252,7 @@ const SpawnerPage: React.FC = () => { notebookSizeName: selectedSize.name, imageName: selectedImageTag.image?.name || '', imageTagName: selectedImageTag.tag?.name || '', - gpus: parseInt(selectedGpu), + accelerator: accelerator, envVars: envVars, state: NotebookState.Started, username: impersonatedUsername || undefined, @@ -307,7 +313,10 @@ const SpawnerPage: React.FC = () => { setValue={(size) => setSelectedSize(size)} sizes={sizes} /> - setSelectedGpu(size)} /> + {renderEnvironmentVariableRows()} diff --git a/frontend/src/pages/notebookController/screens/server/useAcceleratorCounts.ts b/frontend/src/pages/notebookController/screens/server/useAcceleratorCounts.ts new file mode 100644 index 0000000000..1b5c879327 --- /dev/null +++ b/frontend/src/pages/notebookController/screens/server/useAcceleratorCounts.ts @@ -0,0 +1,13 @@ +import useFetchState, { FetchState } from '~/utilities/useFetchState'; +import { getAcceleratorCounts } from '~/services/acceleratorService'; +import { AcceleratorInfo } from '~/types'; + +const useAcceleratorCounts = (): FetchState => + useFetchState(getAcceleratorCounts, { + available: {}, + total: {}, + allocated: {}, + configured: false, + }); + +export default useAcceleratorCounts; diff --git a/frontend/src/pages/notebookController/screens/server/useAccelerators.ts b/frontend/src/pages/notebookController/screens/server/useAccelerators.ts new file mode 100644 index 0000000000..d4f9545077 --- /dev/null +++ b/frontend/src/pages/notebookController/screens/server/useAccelerators.ts @@ -0,0 +1,11 @@ +import React from 'react'; +import useFetchState, { FetchState } from '~/utilities/useFetchState'; +import { AcceleratorKind } from '~/k8sTypes'; +import { listAccelerators } from '~/api'; + +const useAccelerators = (namespace: string): FetchState => { + const getAccelerators 
= React.useCallback(() => listAccelerators(namespace), [namespace]); + return useFetchState(getAccelerators, []); +}; + +export default useAccelerators; diff --git a/frontend/src/pages/pipelines/global/pipelines/GlobalPipelines.tsx b/frontend/src/pages/pipelines/global/pipelines/GlobalPipelines.tsx index d26e99d2a5..9dd02f7bb0 100644 --- a/frontend/src/pages/pipelines/global/pipelines/GlobalPipelines.tsx +++ b/frontend/src/pages/pipelines/global/pipelines/GlobalPipelines.tsx @@ -4,7 +4,7 @@ import { pipelinesPageDescription, pipelinesPageTitle, } from '~/pages/pipelines/global/pipelines/const'; -import PipelinesPageHeaderActions from '~/pages/pipelines/global/pipelines/PipelinesPageHeaderActions'; +import PipelineServerActions from '~/concepts/pipelines/content/pipelinesDetails/pipeline/PipelineServerActions'; import PipelineCoreApplicationPage from '~/pages/pipelines/global/PipelineCoreApplicationPage'; import PipelinesView from '~/pages/pipelines/global/pipelines/PipelinesView'; import EnsureAPIAvailability from '~/concepts/pipelines/EnsureAPIAvailability'; @@ -16,7 +16,7 @@ const GlobalPipelines: React.FC = () => { } + headerAction={} getRedirectPath={(namespace) => `/pipelines/${namespace}`} > diff --git a/frontend/src/pages/pipelines/global/pipelines/PipelinesPageHeaderActions.tsx b/frontend/src/pages/pipelines/global/pipelines/PipelinesPageHeaderActions.tsx deleted file mode 100644 index 7c88fbaf72..0000000000 --- a/frontend/src/pages/pipelines/global/pipelines/PipelinesPageHeaderActions.tsx +++ /dev/null @@ -1,48 +0,0 @@ -import * as React from 'react'; -import { Dropdown, DropdownItem, DropdownSeparator, DropdownToggle } from '@patternfly/react-core'; -import { DeleteServerModal, ViewServerModal } from '~/concepts/pipelines/context'; - -const PipelinesPageHeaderActions: React.FC = () => { - const [open, setOpen] = React.useState(false); - const [deleteOpen, setDeleteOpen] = React.useState(false); - const [viewOpen, setViewOpen] = React.useState(false); - - 
return ( - <> - setOpen(false)} - toggle={ - setOpen(!open)}> - Pipeline server actions - - } - isOpen={open} - position="right" - dropdownItems={[ - setViewOpen(true)}> - View pipeline server configuration - , - , - { - setDeleteOpen(true); - }} - key="delete-server" - > - Delete pipeline server - , - ]} - /> - { - setDeleteOpen(false); - }} - /> - setViewOpen(false)} /> - - ); -}; - -export default PipelinesPageHeaderActions; diff --git a/frontend/src/pages/projects/notebook/NotebookStatusToggle.tsx b/frontend/src/pages/projects/notebook/NotebookStatusToggle.tsx index cc63f9c3a6..22e98ecb5c 100644 --- a/frontend/src/pages/projects/notebook/NotebookStatusToggle.tsx +++ b/frontend/src/pages/projects/notebook/NotebookStatusToggle.tsx @@ -2,7 +2,7 @@ import * as React from 'react'; import { Flex, FlexItem, Switch } from '@patternfly/react-core'; import { startNotebook, stopNotebook } from '~/api'; import { fireTrackingEvent } from '~/utilities/segmentIOUtils'; -import useNotebookGPUNumber from '~/pages/projects/screens/detail/notebooks/useNotebookGPUNumber'; +import useNotebookAccelerators from '~/pages/projects/screens/detail/notebooks/useNotebookAccelerator'; import useNotebookDeploymentSize from '~/pages/projects/screens/detail/notebooks/useNotebookDeploymentSize'; import { computeNotebooksTolerations } from '~/utilities/tolerations'; import { useAppContext } from '~/app/AppContext'; @@ -25,7 +25,7 @@ const NotebookStatusToggle: React.FC = ({ enablePipelines, }) => { const { notebook, isStarting, isRunning, isStopping, refresh } = notebookState; - const gpuNumber = useNotebookGPUNumber(notebook); + const [acceleratorData] = useNotebookAccelerators(notebook); const { size } = useNotebookDeploymentSize(notebook); const [isOpenConfirm, setOpenConfirm] = React.useState(false); const [inProgress, setInProgress] = React.useState(false); @@ -51,7 +51,12 @@ const NotebookStatusToggle: React.FC = ({ const fireNotebookTrackingEvent = React.useCallback( (action: 'started' | 
'stopped') => { fireTrackingEvent(`Workbench ${action}`, { - GPU: gpuNumber, + acceleratorCount: acceleratorData.useExisting ? undefined : acceleratorData.count, + accelerator: acceleratorData.accelerator + ? `${acceleratorData.accelerator.spec.displayName} (${acceleratorData.accelerator.metadata.name}): ${acceleratorData.accelerator.spec.identifier}` + : acceleratorData.useExisting + ? 'Unknown' + : 'None', lastSelectedSize: size?.name || notebook.metadata.annotations?.['notebooks.opendatahub.io/last-size-selection'], @@ -64,7 +69,7 @@ const NotebookStatusToggle: React.FC = ({ }), }); }, - [gpuNumber, notebook, size], + [acceleratorData, notebook, size], ); const handleStop = React.useCallback(() => { @@ -99,8 +104,7 @@ const NotebookStatusToggle: React.FC = ({ notebookState.notebook, ); startNotebook( - notebookName, - notebookNamespace, + notebook, tolerationSettings, enablePipelines && !currentlyHasPipelines(notebook), ).then(() => { diff --git a/frontend/src/pages/projects/projectSharing/ProjectSharingTable.tsx b/frontend/src/pages/projects/projectSharing/ProjectSharingTable.tsx index a089dc5a75..fe25c78848 100644 --- a/frontend/src/pages/projects/projectSharing/ProjectSharingTable.tsx +++ b/frontend/src/pages/projects/projectSharing/ProjectSharingTable.tsx @@ -1,5 +1,5 @@ import * as React from 'react'; -import Table from '~/components/table/Table'; +import { Table } from '~/components/table'; import { RoleBindingKind } from '~/k8sTypes'; import { ProjectDetailsContext } from '~/pages/projects/ProjectDetailsContext'; import { deleteRoleBinding, generateRoleBindingProjectSharing, createRoleBinding } from '~/api'; diff --git a/frontend/src/pages/projects/projectSharing/__tests__/useGroups.spec.ts b/frontend/src/pages/projects/projectSharing/__tests__/useGroups.spec.ts index fcf404f1a4..21dd760c69 100644 --- a/frontend/src/pages/projects/projectSharing/__tests__/useGroups.spec.ts +++ b/frontend/src/pages/projects/projectSharing/__tests__/useGroups.spec.ts @@ 
-2,7 +2,7 @@ import { act } from '@testing-library/react'; import { k8sListResource } from '@openshift/dynamic-plugin-sdk-utils'; import useGroups from '~/pages/projects/projectSharing/useGroups'; import { GroupKind } from '~/k8sTypes'; -import { expectHook, standardUseFetchState, testHook } from '~/__tests__/unit/testUtils/hooks'; +import { standardUseFetchState, testHook } from '~/__tests__/unit/testUtils/hooks'; jest.mock('@openshift/dynamic-plugin-sdk-utils', () => ({ k8sListResource: jest.fn(), @@ -17,23 +17,24 @@ describe('useGroups', () => { }; k8sListResourceMock.mockReturnValue(Promise.resolve(mockList)); - const renderResult = testHook(useGroups); + const renderResult = testHook(useGroups)(); expect(k8sListResourceMock).toHaveBeenCalledTimes(1); - expectHook(renderResult).toStrictEqual(standardUseFetchState([])).toHaveUpdateCount(1); + expect(renderResult).hookToStrictEqual(standardUseFetchState([])); + expect(renderResult).hookToHaveUpdateCount(1); // wait for update await renderResult.waitForNextUpdate(); expect(k8sListResourceMock).toHaveBeenCalledTimes(1); - expectHook(renderResult) - .toStrictEqual(standardUseFetchState(mockList.items, true)) - .toHaveUpdateCount(2) - .toBeStable([false, false, true, true]); + expect(renderResult).hookToStrictEqual(standardUseFetchState(mockList.items, true)); + expect(renderResult).hookToHaveUpdateCount(2); + expect(renderResult).hookToBeStable([false, false, true, true]); // refresh k8sListResourceMock.mockReturnValue(Promise.resolve({ items: [...mockList.items] })); await act(() => renderResult.result.current[3]()); expect(k8sListResourceMock).toHaveBeenCalledTimes(2); - expectHook(renderResult).toHaveUpdateCount(3).toBeStable([false, true, true, true]); + expect(renderResult).hookToHaveUpdateCount(3); + expect(renderResult).hookToBeStable([false, true, true, true]); }); it('should handle 403 as an empty result', async () => { @@ -44,26 +45,25 @@ describe('useGroups', () => { }; 
k8sListResourceMock.mockReturnValue(Promise.reject(error)); - const renderResult = testHook(useGroups); + const renderResult = testHook(useGroups)(); expect(k8sListResourceMock).toHaveBeenCalledTimes(1); - expectHook(renderResult).toStrictEqual(standardUseFetchState([])).toHaveUpdateCount(1); + expect(renderResult).hookToStrictEqual(standardUseFetchState([])); + expect(renderResult).hookToHaveUpdateCount(1); // wait for update await renderResult.waitForNextUpdate(); expect(k8sListResourceMock).toHaveBeenCalledTimes(1); - expectHook(renderResult) - .toStrictEqual(standardUseFetchState([], true)) - .toHaveUpdateCount(2) - .toBeStable([false, false, true, true]); + expect(renderResult).hookToStrictEqual(standardUseFetchState([], true)); + expect(renderResult).hookToHaveUpdateCount(2); + expect(renderResult).hookToBeStable([false, false, true, true]); // refresh await act(() => renderResult.result.current[3]()); // error 403 should cache error and prevent subsequent attempts to fetch expect(k8sListResourceMock).toHaveBeenCalledTimes(1); - expectHook(renderResult) - .toStrictEqual(standardUseFetchState([], true)) - .toHaveUpdateCount(3) - .toBeStable([false, true, true, true]); + expect(renderResult).hookToStrictEqual(standardUseFetchState([], true)); + expect(renderResult).hookToHaveUpdateCount(3); + expect(renderResult).hookToBeStable([false, true, true, true]); }); it('should handle 404 as an error', async () => { @@ -74,17 +74,19 @@ describe('useGroups', () => { }; k8sListResourceMock.mockReturnValue(Promise.reject(error)); - const renderResult = testHook(useGroups); + const renderResult = testHook(useGroups)(); expect(k8sListResourceMock).toHaveBeenCalledTimes(1); - expectHook(renderResult).toStrictEqual(standardUseFetchState([])).toHaveUpdateCount(1); + expect(renderResult).hookToStrictEqual(standardUseFetchState([])); + expect(renderResult).hookToHaveUpdateCount(1); // wait for update await renderResult.waitForNextUpdate(); - expectHook(renderResult) - 
.toStrictEqual(standardUseFetchState([], false, new Error('No groups found.'))) - .toHaveUpdateCount(2) - .toBeStable([true, true, false, true]); + expect(renderResult).hookToStrictEqual( + standardUseFetchState([], false, new Error('No groups found.')), + ); + expect(renderResult).hookToHaveUpdateCount(2); + expect(renderResult).hookToBeStable([true, true, false, true]); expect(k8sListResourceMock).toHaveBeenCalledTimes(1); @@ -92,34 +94,34 @@ describe('useGroups', () => { await act(() => renderResult.result.current[3]()); expect(k8sListResourceMock).toHaveBeenCalledTimes(2); // we get a new error because the k8s API is called a 2nd time - expectHook(renderResult) - .toStrictEqual(standardUseFetchState([], false, new Error('No groups found.'))) - .toHaveUpdateCount(3) - .toBeStable([true, true, false, true]); + expect(renderResult).hookToStrictEqual( + standardUseFetchState([], false, new Error('No groups found.')), + ); + expect(renderResult).hookToHaveUpdateCount(3); + expect(renderResult).hookToBeStable([true, true, false, true]); }); it('should handle other errors and rethrow', async () => { k8sListResourceMock.mockReturnValue(Promise.reject(new Error('error1'))); - const renderResult = testHook(useGroups); + const renderResult = testHook(useGroups)(); expect(k8sListResourceMock).toHaveBeenCalledTimes(1); - expectHook(renderResult).toStrictEqual(standardUseFetchState([])).toHaveUpdateCount(1); + expect(renderResult).hookToStrictEqual(standardUseFetchState([])); + expect(renderResult).hookToHaveUpdateCount(1); // wait for update await renderResult.waitForNextUpdate(); expect(k8sListResourceMock).toHaveBeenCalledTimes(1); - expectHook(renderResult) - .toStrictEqual(standardUseFetchState([], false, new Error('error1'))) - .toHaveUpdateCount(2) - .toBeStable([true, true, false, true]); + expect(renderResult).hookToStrictEqual(standardUseFetchState([], false, new Error('error1'))); + expect(renderResult).hookToHaveUpdateCount(2); + 
expect(renderResult).hookToBeStable([true, true, false, true]); // refresh k8sListResourceMock.mockReturnValue(Promise.reject(new Error('error2'))); await act(() => renderResult.result.current[3]()); expect(k8sListResourceMock).toHaveBeenCalledTimes(2); - expectHook(renderResult) - .toStrictEqual(standardUseFetchState([], false, new Error('error2'))) - .toHaveUpdateCount(3) - .toBeStable([true, true, false, true]); + expect(renderResult).hookToStrictEqual(standardUseFetchState([], false, new Error('error2'))); + expect(renderResult).hookToHaveUpdateCount(3); + expect(renderResult).hookToBeStable([true, true, false, true]); }); }); diff --git a/frontend/src/pages/projects/projectSharing/data.ts b/frontend/src/pages/projects/projectSharing/data.ts index 9a4fdadcee..a09532f7c9 100644 --- a/frontend/src/pages/projects/projectSharing/data.ts +++ b/frontend/src/pages/projects/projectSharing/data.ts @@ -1,5 +1,5 @@ import { RoleBindingKind } from '~/k8sTypes'; -import { SortableData } from '~/components/table/useTableColumnSort'; +import { SortableData } from '~/components/table'; import { firstSubject } from './utils'; export const columnsProjectSharing: SortableData[] = [ diff --git a/frontend/src/pages/projects/screens/detail/data-connections/DataConnectionsTable.tsx b/frontend/src/pages/projects/screens/detail/data-connections/DataConnectionsTable.tsx index d99424ed76..7964968dc4 100644 --- a/frontend/src/pages/projects/screens/detail/data-connections/DataConnectionsTable.tsx +++ b/frontend/src/pages/projects/screens/detail/data-connections/DataConnectionsTable.tsx @@ -1,6 +1,5 @@ import * as React from 'react'; -import Table from '~/components/table/Table'; - +import { Table } from '~/components/table'; import { DataConnection } from '~/pages/projects/types'; import { columns } from './data'; import DataConnectionsTableRow from './DataConnectionsTableRow'; diff --git a/frontend/src/pages/projects/screens/detail/data-connections/DataConnectionsTableRow.tsx 
b/frontend/src/pages/projects/screens/detail/data-connections/DataConnectionsTableRow.tsx index dd68ea2c52..694b87dd94 100644 --- a/frontend/src/pages/projects/screens/detail/data-connections/DataConnectionsTableRow.tsx +++ b/frontend/src/pages/projects/screens/detail/data-connections/DataConnectionsTableRow.tsx @@ -5,7 +5,7 @@ import ConnectedNotebookNames from '~/pages/projects/notebook/ConnectedNotebookN import { ConnectedNotebookContext } from '~/pages/projects/notebook/useRelatedNotebooks'; import { DataConnection } from '~/pages/projects/types'; import EmptyTableCellForAlignment from '~/pages/projects/components/EmptyTableCellForAlignment'; -import TableRowTitleDescription from '~/components/table/TableRowTitleDescription'; +import { TableRowTitleDescription } from '~/components/table'; import { getDataConnectionDescription, getDataConnectionDisplayName, diff --git a/frontend/src/pages/projects/screens/detail/data-connections/data.ts b/frontend/src/pages/projects/screens/detail/data-connections/data.ts index e8069f768c..685a17fd59 100644 --- a/frontend/src/pages/projects/screens/detail/data-connections/data.ts +++ b/frontend/src/pages/projects/screens/detail/data-connections/data.ts @@ -1,4 +1,4 @@ -import { SortableData } from '~/components/table/useTableColumnSort'; +import { SortableData } from '~/components/table'; import { DataConnection } from '~/pages/projects/types'; import { getDataConnectionDisplayName } from './utils'; diff --git a/frontend/src/pages/projects/screens/detail/notebooks/NotebookTable.tsx b/frontend/src/pages/projects/screens/detail/notebooks/NotebookTable.tsx index a7cc9a3ecf..c2fbd11125 100644 --- a/frontend/src/pages/projects/screens/detail/notebooks/NotebookTable.tsx +++ b/frontend/src/pages/projects/screens/detail/notebooks/NotebookTable.tsx @@ -1,6 +1,5 @@ import * as React from 'react'; -import Table from '~/components/table/Table'; - +import { Table } from '~/components/table'; import { NotebookKind } from '~/k8sTypes'; import 
DeleteNotebookModal from '~/pages/projects/notebook/DeleteNotebookModal'; import AddNotebookStorage from '~/pages/projects/pvc/AddNotebookStorage'; diff --git a/frontend/src/pages/projects/screens/detail/notebooks/NotebookTableRow.tsx b/frontend/src/pages/projects/screens/detail/notebooks/NotebookTableRow.tsx index 979a5c1e23..82d1fbc6d8 100644 --- a/frontend/src/pages/projects/screens/detail/notebooks/NotebookTableRow.tsx +++ b/frontend/src/pages/projects/screens/detail/notebooks/NotebookTableRow.tsx @@ -10,7 +10,7 @@ import NotebookStatusToggle from '~/pages/projects/notebook/NotebookStatusToggle import { NotebookKind } from '~/k8sTypes'; import NotebookImagePackageDetails from '~/pages/projects/notebook/NotebookImagePackageDetails'; import { ProjectDetailsContext } from '~/pages/projects/ProjectDetailsContext'; -import TableRowTitleDescription from '~/components/table/TableRowTitleDescription'; +import { TableRowTitleDescription } from '~/components/table'; import useNotebookDeploymentSize from './useNotebookDeploymentSize'; import useNotebookImage from './useNotebookImage'; import NotebookSizeDetails from './NotebookSizeDetails'; diff --git a/frontend/src/pages/projects/screens/detail/notebooks/data.ts b/frontend/src/pages/projects/screens/detail/notebooks/data.ts index 1ed4104221..aff044584f 100644 --- a/frontend/src/pages/projects/screens/detail/notebooks/data.ts +++ b/frontend/src/pages/projects/screens/detail/notebooks/data.ts @@ -1,4 +1,4 @@ -import { SortableData } from '~/components/table/useTableColumnSort'; +import { SortableData } from '~/components/table'; import { getNotebookDisplayName, getNotebookStatusPriority } from '~/pages/projects/utils'; import { NotebookState } from '~/pages/projects/notebook/types'; diff --git a/frontend/src/pages/projects/screens/detail/notebooks/useNotebookAccelerator.ts b/frontend/src/pages/projects/screens/detail/notebooks/useNotebookAccelerator.ts new file mode 100644 index 0000000000..cd01955ad2 --- /dev/null +++ 
b/frontend/src/pages/projects/screens/detail/notebooks/useNotebookAccelerator.ts @@ -0,0 +1,18 @@ +import { NotebookKind } from '~/k8sTypes'; +import { Notebook } from '~/types'; +import useAcceleratorState, { AcceleratorState } from '~/utilities/useAcceleratorState'; +import { GenericObjectState } from '~/utilities/useGenericObjectState'; + +const useNotebookAccelerator = ( + notebook?: NotebookKind | Notebook | null, +): GenericObjectState => { + const acceleratorName = notebook?.metadata.annotations?.['opendatahub.io/accelerator-name']; + const resources = notebook?.spec.template.spec.containers.find( + (container) => container.name === notebook.metadata.name, + )?.resources; + const tolerations = notebook?.spec.template.spec.tolerations; + + return useAcceleratorState(resources, tolerations, acceleratorName); +}; + +export default useNotebookAccelerator; diff --git a/frontend/src/pages/projects/screens/detail/notebooks/useNotebookGPUNumber.ts b/frontend/src/pages/projects/screens/detail/notebooks/useNotebookGPUNumber.ts deleted file mode 100644 index d980f91009..0000000000 --- a/frontend/src/pages/projects/screens/detail/notebooks/useNotebookGPUNumber.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { NotebookKind } from '~/k8sTypes'; -import { ContainerResourceAttributes, GPUCount, NotebookContainer } from '~/types'; - -const useNotebookGPUNumber = (notebook?: NotebookKind): GPUCount => { - const container: NotebookContainer | undefined = notebook?.spec.template.spec.containers.find( - (container) => container.name === notebook.metadata.name, - ); - - const gpuNumbers = container?.resources?.limits?.[ContainerResourceAttributes.NVIDIA_GPU]; - - return gpuNumbers || 0; -}; - -export default useNotebookGPUNumber; diff --git a/frontend/src/pages/projects/screens/detail/pipelines/PipelinesSection.tsx b/frontend/src/pages/projects/screens/detail/pipelines/PipelinesSection.tsx index d8039dde92..b3e865c791 100644 --- 
a/frontend/src/pages/projects/screens/detail/pipelines/PipelinesSection.tsx +++ b/frontend/src/pages/projects/screens/detail/pipelines/PipelinesSection.tsx @@ -8,6 +8,7 @@ import NoPipelineServer from '~/concepts/pipelines/NoPipelineServer'; import ImportPipelineButton from '~/concepts/pipelines/content/import/ImportPipelineButton'; import PipelinesList from '~/pages/projects/screens/detail/pipelines/PipelinesList'; import EnsureAPIAvailability from '~/concepts/pipelines/EnsureAPIAvailability'; +import PipelineServerActions from '~/concepts/pipelines/content/pipelinesDetails/pipeline/PipelineServerActions'; const PipelinesSection: React.FC = () => { const { @@ -27,6 +28,11 @@ const PipelinesSection: React.FC = () => { key={`action-${ProjectSectionID.PIPELINES}`} variant="secondary" />, + , ]} isLoading={initializing} isEmpty={!installed} diff --git a/frontend/src/pages/projects/screens/detail/storage/StorageTable.tsx b/frontend/src/pages/projects/screens/detail/storage/StorageTable.tsx index b55ce38df6..1ffcef66ae 100644 --- a/frontend/src/pages/projects/screens/detail/storage/StorageTable.tsx +++ b/frontend/src/pages/projects/screens/detail/storage/StorageTable.tsx @@ -1,6 +1,5 @@ import * as React from 'react'; -import Table from '~/components/table/Table'; - +import { Table } from '~/components/table'; import { PersistentVolumeClaimKind } from '~/k8sTypes'; import DeletePVCModal from '~/pages/projects/pvc/DeletePVCModal'; import StorageTableRow from './StorageTableRow'; diff --git a/frontend/src/pages/projects/screens/detail/storage/StorageTableRow.tsx b/frontend/src/pages/projects/screens/detail/storage/StorageTableRow.tsx index ef7ca6169d..3fc570be09 100644 --- a/frontend/src/pages/projects/screens/detail/storage/StorageTableRow.tsx +++ b/frontend/src/pages/projects/screens/detail/storage/StorageTableRow.tsx @@ -14,7 +14,7 @@ import { PersistentVolumeClaimKind } from '~/k8sTypes'; import StorageSizeBar from '~/pages/projects/components/StorageSizeBars'; import 
ConnectedNotebookNames from '~/pages/projects/notebook/ConnectedNotebookNames'; import { ConnectedNotebookContext } from '~/pages/projects/notebook/useRelatedNotebooks'; -import TableRowTitleDescription from '~/components/table/TableRowTitleDescription'; +import { TableRowTitleDescription } from '~/components/table'; import useIsRootVolume from './useIsRootVolume'; import StorageWarningStatus from './StorageWarningStatus'; diff --git a/frontend/src/pages/projects/screens/detail/storage/data.ts b/frontend/src/pages/projects/screens/detail/storage/data.ts index f9f44ec10c..f05742f742 100644 --- a/frontend/src/pages/projects/screens/detail/storage/data.ts +++ b/frontend/src/pages/projects/screens/detail/storage/data.ts @@ -1,4 +1,4 @@ -import { SortableData } from '~/components/table/useTableColumnSort'; +import { SortableData } from '~/components/table'; import { getPvcDisplayName } from '~/pages/projects/utils'; import { PersistentVolumeClaimKind } from '~/k8sTypes'; diff --git a/frontend/src/pages/projects/screens/projects/ProjectListView.tsx b/frontend/src/pages/projects/screens/projects/ProjectListView.tsx index 757853dba4..fadd0b87ee 100644 --- a/frontend/src/pages/projects/screens/projects/ProjectListView.tsx +++ b/frontend/src/pages/projects/screens/projects/ProjectListView.tsx @@ -1,8 +1,7 @@ import * as React from 'react'; import { Button, ButtonVariant, ToolbarItem } from '@patternfly/react-core'; import { useNavigate } from 'react-router-dom'; -import Table from '~/components/table/Table'; -import useTableColumnSort from '~/components/table/useTableColumnSort'; +import { Table } from '~/components/table'; import SearchField, { SearchType } from '~/pages/projects/components/SearchField'; import { ProjectKind } from '~/k8sTypes'; import { getProjectDisplayName, getProjectOwner } from '~/pages/projects/utils'; @@ -25,8 +24,7 @@ const ProjectListView: React.FC = ({ allowCreate }) => { const navigate = useNavigate(); const [searchType, setSearchType] = 
React.useState(SearchType.NAME); const [search, setSearch] = React.useState(''); - const sort = useTableColumnSort(columns, 0); - const filteredProjects = sort.transformData(unfilteredProjects).filter((project) => { + const filteredProjects = unfilteredProjects.filter((project) => { if (!search) { return true; } diff --git a/frontend/src/pages/projects/screens/projects/tableData.tsx b/frontend/src/pages/projects/screens/projects/tableData.tsx index 8e78f64ae7..8cf792d090 100644 --- a/frontend/src/pages/projects/screens/projects/tableData.tsx +++ b/frontend/src/pages/projects/screens/projects/tableData.tsx @@ -1,4 +1,4 @@ -import { SortableData } from '~/components/table/useTableColumnSort'; +import { SortableData } from '~/components/table'; import { ProjectKind } from '~/k8sTypes'; import { getProjectCreationTime, getProjectDisplayName } from '~/pages/projects/utils'; diff --git a/frontend/src/pages/projects/screens/spawner/SpawnerFooter.tsx b/frontend/src/pages/projects/screens/spawner/SpawnerFooter.tsx index fbf05a466d..ab93e4d235 100644 --- a/frontend/src/pages/projects/screens/spawner/SpawnerFooter.tsx +++ b/frontend/src/pages/projects/screens/spawner/SpawnerFooter.tsx @@ -78,9 +78,14 @@ const SpawnerFooter: React.FC = ({ ); const afterStart = (name: string, type: 'created' | 'updated') => { - const { gpus, notebookSize, image } = startNotebookData; + const { accelerator, notebookSize, image } = startNotebookData; fireTrackingEvent(`Workbench ${type}`, { - GPU: gpus, + acceleratorCount: accelerator.useExisting ? undefined : accelerator.count, + accelerator: accelerator.accelerator + ? `${accelerator.accelerator.spec.displayName} (${accelerator.accelerator.metadata.name}): ${accelerator.accelerator.spec.identifier}` + : accelerator.useExisting + ? 'Unknown' + : 'None', lastSelectedSize: notebookSize.name, lastSelectedImage: image.imageVersion?.from ? 
`${image.imageVersion.from.name}` diff --git a/frontend/src/pages/projects/screens/spawner/SpawnerPage.tsx b/frontend/src/pages/projects/screens/spawner/SpawnerPage.tsx index cc12d854f3..d7ae0600f3 100644 --- a/frontend/src/pages/projects/screens/spawner/SpawnerPage.tsx +++ b/frontend/src/pages/projects/screens/spawner/SpawnerPage.tsx @@ -21,14 +21,14 @@ import { getNotebookDisplayName, getProjectDisplayName, } from '~/pages/projects/utils'; -import GPUSelectField from '~/pages/notebookController/screens/server/GPUSelectField'; import { NotebookKind } from '~/k8sTypes'; import useNotebookImageData from '~/pages/projects/screens/detail/notebooks/useNotebookImageData'; import useNotebookDeploymentSize from '~/pages/projects/screens/detail/notebooks/useNotebookDeploymentSize'; -import useNotebookGPUNumber from '~/pages/projects/screens/detail/notebooks/useNotebookGPUNumber'; import NotebookRestartAlert from '~/pages/projects/components/NotebookRestartAlert'; import useWillNotebooksRestart from '~/pages/projects/notebook/useWillNotebooksRestart'; import CanEnableElyraPipelinesCheck from '~/concepts/pipelines/elyra/CanEnableElyraPipelinesCheck'; +import AcceleratorSelectField from '~/pages/notebookController/screens/server/AcceleratorSelectField'; +import useNotebookAccelerator from '~/pages/projects/screens/detail/notebooks/useNotebookAccelerator'; import { SpawnerPageSectionID } from './types'; import { ScrollableSelectorID, SpawnerPageSectionTitles } from './const'; import SpawnerFooter from './SpawnerFooter'; @@ -38,7 +38,11 @@ import { useNotebookSize } from './useNotebookSize'; import StorageField from './storage/StorageField'; import EnvironmentVariables from './environmentVariables/EnvironmentVariables'; import { useStorageDataObject } from './storage/utils'; -import { getRootVolumeName, useMergeDefaultPVCName } from './spawnerUtils'; +import { + getCompatibleAcceleratorIdentifiers, + getRootVolumeName, + useMergeDefaultPVCName, +} from './spawnerUtils'; import 
{ useNotebookEnvVariables } from './environmentVariables/useNotebookEnvVariables'; import DataConnectionField from './dataConnection/DataConnectionField'; import { useNotebookDataConnection } from './dataConnection/useNotebookDataConnection'; @@ -61,13 +65,13 @@ const SpawnerPage: React.FC = ({ existingNotebook }) => { imageVersion: undefined, }); const { selectedSize, setSelectedSize, sizes } = useNotebookSize(); - const [selectedGpu, setSelectedGpu] = React.useState('0'); + const [supportedAccelerators, setSupportedAccelerators] = React.useState(); const [storageDataWithoutDefault, setStorageData] = useStorageDataObject(existingNotebook); const storageData = useMergeDefaultPVCName(storageDataWithoutDefault, nameDesc.name); const [envVariables, setEnvVariables] = useNotebookEnvVariables(existingNotebook); - const [dataConnection, setDataConnection] = useNotebookDataConnection( - existingNotebook, + const [dataConnectionData, setDataConnectionData] = useNotebookDataConnection( dataConnections.data, + existingNotebook, ); const restartNotebooks = useWillNotebooksRestart([existingNotebook?.metadata.name || '']); @@ -97,10 +101,16 @@ const SpawnerPage: React.FC = ({ existingNotebook }) => { } }, [notebookSize, setSelectedSize]); - const notebookGPU = useNotebookGPUNumber(existingNotebook); + const [notebookAcceleratorState, setNotebookAcceleratorState] = + useNotebookAccelerator(existingNotebook); + React.useEffect(() => { - setSelectedGpu(notebookGPU.toString()); - }, [notebookGPU, setSelectedGpu]); + if (selectedImage.imageStream) { + setSupportedAccelerators(getCompatibleAcceleratorIdentifiers(selectedImage.imageStream)); + } else { + setSupportedAccelerators(undefined); + } + }, [selectedImage.imageStream]); const editNotebookDisplayName = existingNotebook ? 
getNotebookDisplayName(existingNotebook) : ''; @@ -157,6 +167,7 @@ const SpawnerPage: React.FC = ({ existingNotebook }) => { = ({ existingNotebook }) => { setValue={setSelectedSize} value={selectedSize} /> - setSelectedGpu(value)} + = ({ existingNotebook }) => { aria-label={SpawnerPageSectionTitles[SpawnerPageSectionID.DATA_CONNECTIONS]} > setDataConnection(connection)} + dataConnectionData={dataConnectionData} + setDataConnectionData={setDataConnectionData} /> @@ -229,13 +241,15 @@ const SpawnerPage: React.FC = ({ existingNotebook }) => { projectName: currentProject.metadata.name, image: selectedImage, notebookSize: selectedSize, - gpus: parseInt(selectedGpu), + accelerator: notebookAcceleratorState, volumes: [], volumeMounts: [], + existingTolerations: existingNotebook?.spec.template.spec.tolerations || [], + existingResources: existingNotebook?.spec.template.spec.containers[0].resources, }} storageData={storageData} envVariables={envVariables} - dataConnection={dataConnection} + dataConnection={dataConnectionData} canEnablePipelines={canEnablePipelines} /> )} diff --git a/frontend/src/pages/projects/screens/spawner/dataConnection/DataConnectionField.tsx b/frontend/src/pages/projects/screens/spawner/dataConnection/DataConnectionField.tsx index 605559046a..478547912d 100644 --- a/frontend/src/pages/projects/screens/spawner/dataConnection/DataConnectionField.tsx +++ b/frontend/src/pages/projects/screens/spawner/dataConnection/DataConnectionField.tsx @@ -4,18 +4,19 @@ import { DataConnectionData, EnvironmentVariableType, SecretCategory, + UpdateObjectAtPropAndValue, } from '~/pages/projects/types'; import AWSField from '~/pages/projects/dataConnections/AWSField'; import { EMPTY_AWS_SECRET_DATA } from '~/pages/projects/dataConnections/const'; import ExistingDataConnectionField from './ExistingDataConnectionField'; type DataConnectionFieldProps = { - dataConnection: DataConnectionData; - setDataConnection: (dataConnection: DataConnectionData) => void; + 
dataConnectionData: DataConnectionData; + setDataConnectionData: UpdateObjectAtPropAndValue; }; const DataConnectionField: React.FC = ({ - dataConnection, - setDataConnection, + dataConnectionData, + setDataConnectionData, }) => ( = ({ name="enable-data-connection-checkbox" id="enable-data-connection-checkbox" label="Use a data connection" - isChecked={dataConnection.enabled} - onChange={() => setDataConnection({ ...dataConnection, enabled: !dataConnection.enabled })} + isChecked={dataConnectionData.enabled} + onChange={() => setDataConnectionData('enabled', !dataConnectionData.enabled)} body={ - dataConnection.enabled && ( + dataConnectionData.enabled && ( = ({ name="new-data-connection-radio" id="new-data-connection-radio" label="Create new data connection" - isChecked={dataConnection.type === 'creating'} - onChange={() => setDataConnection({ ...dataConnection, type: 'creating' })} + isChecked={dataConnectionData.type === 'creating'} + onChange={() => setDataConnectionData('type', 'creating')} body={ - dataConnection.type === 'creating' && ( + dataConnectionData.type === 'creating' && ( { - setDataConnection({ - ...dataConnection, - creating: { - type: EnvironmentVariableType.SECRET, - values: { category: SecretCategory.AWS, data: newEnvData }, - }, + setDataConnectionData('creating', { + type: EnvironmentVariableType.SECRET, + values: { category: SecretCategory.AWS, data: newEnvData }, }); }} /> @@ -60,18 +58,15 @@ const DataConnectionField: React.FC = ({ name="existing-data-connection-type-radio" id="existing-data-connection-type-radio" label="Use existing data connection" - isChecked={dataConnection.type === 'existing'} - onChange={() => setDataConnection({ ...dataConnection, type: 'existing' })} + isChecked={dataConnectionData.type === 'existing'} + onChange={() => setDataConnectionData('type', 'existing')} body={ - dataConnection.type === 'existing' && ( + dataConnectionData.type === 'existing' && ( - setDataConnection({ - ...dataConnection, - existing: { 
secretRef: { name: name ?? '' } }, - }) + setDataConnectionData('existing', { secretRef: { name: name ?? '' } }) } /> ) diff --git a/frontend/src/pages/projects/screens/spawner/dataConnection/useNotebookDataConnection.ts b/frontend/src/pages/projects/screens/spawner/dataConnection/useNotebookDataConnection.ts index 031fd2282b..84fa325645 100644 --- a/frontend/src/pages/projects/screens/spawner/dataConnection/useNotebookDataConnection.ts +++ b/frontend/src/pages/projects/screens/spawner/dataConnection/useNotebookDataConnection.ts @@ -1,6 +1,10 @@ -import * as React from 'react'; import { NotebookKind } from '~/k8sTypes'; -import { DataConnection, DataConnectionData } from '~/pages/projects/types'; +import { + DataConnection, + DataConnectionData, + UpdateObjectAtPropAndValue, +} from '~/pages/projects/types'; +import useGenericObjectState from '~/utilities/useGenericObjectState'; export const getNotebookDataConnection = ( notebook?: NotebookKind, @@ -15,25 +19,18 @@ export const getNotebookDataConnection = ( }; export const useNotebookDataConnection = ( + dataConnections: DataConnection[], notebook?: NotebookKind, - dataConnections: DataConnection[] = [], ): [ dataConnection: DataConnectionData, - setDataConnection: (connection: DataConnectionData) => void, + setDataConnection: UpdateObjectAtPropAndValue, + resetDefaults: () => void, ] => { - const [dataConnection, setDataConnection] = React.useState({ - type: 'creating', - enabled: false, - }); - - React.useEffect(() => { - if (notebook) { - // find data connection from env list - const notebookDataConnectionSecret = getNotebookDataConnection(notebook, dataConnections) - ?.data.metadata.name; - - if (notebookDataConnectionSecret) { - setDataConnection({ + const notebookDataConnectionSecret = getNotebookDataConnection(notebook, dataConnections)?.data + .metadata.name; + const createDataState = useGenericObjectState( + notebookDataConnectionSecret + ? 
{ type: 'existing', enabled: true, existing: { @@ -41,10 +38,12 @@ export const useNotebookDataConnection = ( name: notebookDataConnectionSecret, }, }, - }); - } - } - }, [notebook, dataConnections]); + } + : { + type: 'creating', + enabled: false, + }, + ); - return [dataConnection, setDataConnection]; + return createDataState; }; diff --git a/frontend/src/pages/projects/screens/spawner/imageSelector/ImageSelectorField.tsx b/frontend/src/pages/projects/screens/spawner/imageSelector/ImageSelectorField.tsx index e7f6d6bf35..883908472b 100644 --- a/frontend/src/pages/projects/screens/spawner/imageSelector/ImageSelectorField.tsx +++ b/frontend/src/pages/projects/screens/spawner/imageSelector/ImageSelectorField.tsx @@ -17,11 +17,13 @@ import ImageStreamSelector from './ImageStreamSelector'; type ImageSelectorFieldProps = { selectedImage: ImageStreamAndVersion; setSelectedImage: React.Dispatch>; + compatibleAccelerator?: string; }; const ImageSelectorField: React.FC = ({ selectedImage, setSelectedImage, + compatibleAccelerator, }) => { const { dashboardNamespace } = useDashboardNamespace(); const buildStatuses = useBuildStatuses(dashboardNamespace); @@ -69,6 +71,7 @@ const ImageSelectorField: React.FC = ({ buildStatuses={buildStatuses} onImageStreamSelect={onImageStreamSelect} selectedImageStream={selectedImage.imageStream} + compatibleAccelerator={compatibleAccelerator} /> void; + compatibleAccelerator?: string; }; const ImageStreamSelector: React.FC = ({ @@ -22,49 +24,47 @@ const ImageStreamSelector: React.FC = ({ selectedImageStream, onImageStreamSelect, buildStatuses, + compatibleAccelerator, }) => { - const [imageSelectionOpen, setImageSelectionOpen] = React.useState(false); - - const selectOptionObjects = [...imageStreams] - .sort(compareImageStreamOrder) - .map((imageStream) => getImageStreamSelectOptionObject(imageStream)); - - const options = selectOptionObjects.map((optionObject) => { - const imageStream = optionObject.imageStream; + const options = 
[...imageStreams].sort(compareImageStreamOrder).map((imageStream) => { const description = getRelatedVersionDescription(imageStream); - return ( - - ); + const displayName = getImageStreamDisplayName(imageStream); + + return { + key: imageStream.metadata.name, + selectedLabel: displayName, + description: description, + disabled: !checkImageStreamAvailability(imageStream, buildStatuses), + label: ( + + {displayName} + + + {isCompatibleWithAccelerator(compatibleAccelerator, imageStream) && ( + + )} + + + ), + }; }); return ( - + /> ); }; diff --git a/frontend/src/pages/projects/screens/spawner/spawnerUtils.ts b/frontend/src/pages/projects/screens/spawner/spawnerUtils.ts index a4eecb6126..453a74f9e7 100644 --- a/frontend/src/pages/projects/screens/spawner/spawnerUtils.ts +++ b/frontend/src/pages/projects/screens/spawner/spawnerUtils.ts @@ -1,7 +1,13 @@ import * as React from 'react'; import compareVersions from 'compare-versions'; -import { NotebookSize, Volume, VolumeMount } from '~/types'; -import { BuildKind, ImageStreamKind, ImageStreamSpecTagType, NotebookKind } from '~/k8sTypes'; +import { BYONImage, NotebookSize, Volume, VolumeMount } from '~/types'; +import { + BuildKind, + ImageStreamKind, + ImageStreamSpecTagType, + K8sDSGResource, + NotebookKind, +} from '~/k8sTypes'; import { ConfigMapCategory, DataConnectionData, @@ -16,7 +22,6 @@ import { ROOT_MOUNT_PATH } from '~/pages/projects/pvc/const'; import { AWS_FIELDS } from '~/pages/projects/dataConnections/const'; import { BuildStatus, - ImageStreamSelectOptionObjectType, ImageVersionDependencyType, ImageVersionSelectOptionObjectType, } from './types'; @@ -64,12 +69,6 @@ export const getNameVersionString = (software: ImageVersionDependencyType): stri * Create object for PF Select component to use * `toString` decides the text shown for the select option */ -export const getImageStreamSelectOptionObject = ( - imageStream: ImageStreamKind, -): ImageStreamSelectOptionObjectType => ({ - imageStream, - toString: () 
=> getImageStreamDisplayName(imageStream), -}); export const getImageVersionSelectOptionObject = ( imageStream: ImageStreamKind, imageVersion: ImageStreamSpecTagType, @@ -78,15 +77,10 @@ export const getImageVersionSelectOptionObject = ( toString: () => `${imageVersion.name}${checkVersionRecommended(imageVersion) ? ' (Recommended)' : ''}`, }); -export const isImageStreamSelectOptionObject = ( - object: unknown, -): object is ImageStreamSelectOptionObjectType => - (object as ImageStreamSelectOptionObjectType).imageStream !== undefined; export const isImageVersionSelectOptionObject = ( object: unknown, ): object is ImageVersionSelectOptionObjectType => (object as ImageVersionSelectOptionObjectType).imageVersion !== undefined; - /******************* Compare utils for sorting *******************/ const getBuildNumber = (build: BuildKind): number => { const buildNumber = build.metadata.annotations?.['openshift.io/build.number'] || '-1'; @@ -140,6 +134,37 @@ export const getImageStreamDescription = (imageStream: ImageStreamKind): string export const getImageSteamOrder = (imageStream: ImageStreamKind): number => parseInt(imageStream.metadata.annotations?.[IMAGE_ANNOTATIONS.IMAGE_ORDER] || '100'); +export const getCompatibleAcceleratorIdentifiers = ( + object: ImageStreamKind | K8sDSGResource, +): string[] => { + try { + const annotation = object.metadata.annotations?.['opendatahub.io/recommended-accelerators']; + // in the format of ["foo.com/gpu", "bar.com/gpu"] + if (annotation) { + const identifiers = JSON.parse(annotation); + if (Array.isArray(identifiers)) { + return identifiers; + } + } + } catch (error) { + // catch invalid json in metadata + } + return []; +}; + +export const isCompatibleWithAccelerator = ( + acceleratorIdentifier?: string, + obj?: ImageStreamKind | K8sDSGResource, +) => { + if (!obj || !acceleratorIdentifier) { + return false; + } + + return getCompatibleAcceleratorIdentifiers(obj).some( + (accelerator) => accelerator === acceleratorIdentifier, + 
); +}; + /** * Parse annotation software field or dependencies field from long string to array */ @@ -388,3 +413,14 @@ export const isInvalidBYONImageStream = (imageStream: ImageStreamKind) => { (activeTag === undefined || activeTag.items === null) ); }; + +export const convertBYONImageToK8sResource = (image: BYONImage) => ({ + kind: 'ImageStream', + apiVersion: 'image.openshift.io/v1', + metadata: { + name: image.id, + annotations: { + 'openshift.io/display-name': image.name, + }, + }, +}); diff --git a/frontend/src/pages/projects/screens/spawner/types.ts b/frontend/src/pages/projects/screens/spawner/types.ts index 9f4f8c6bc3..3aec64e612 100644 --- a/frontend/src/pages/projects/screens/spawner/types.ts +++ b/frontend/src/pages/projects/screens/spawner/types.ts @@ -1,4 +1,4 @@ -import { BUILD_PHASE, ImageStreamKind, ImageStreamSpecTagType } from '~/k8sTypes'; +import { AcceleratorKind, BUILD_PHASE, ImageStreamKind, ImageStreamSpecTagType } from '~/k8sTypes'; export enum SpawnerPageSectionID { NAME_DESCRIPTION = 'name-and-description', @@ -40,3 +40,8 @@ export type ImageVersionSelectDataType = { imageStream?: ImageStreamKind; imageVersions: ImageStreamSpecTagType[]; }; + +export type AcceleratorSelectOptionObjectType = { + accelerator: AcceleratorKind; + toString: () => string; +}; diff --git a/frontend/src/pages/projects/types.ts b/frontend/src/pages/projects/types.ts index 89de7934e3..870eee7658 100644 --- a/frontend/src/pages/projects/types.ts +++ b/frontend/src/pages/projects/types.ts @@ -1,12 +1,15 @@ import { + ContainerResources, ImageStreamAndVersion, NotebookSize, + PodToleration, TolerationSettings, Volume, VolumeMount, } from '~/types'; import { ValueOf } from '~/typeHelpers'; import { AWSSecretKind } from '~/k8sTypes'; +import { AcceleratorState } from '~/utilities/useAcceleratorState'; import { AWS_KEYS } from './dataConnections/const'; export type UpdateObjectAtPropAndValue = (propKey: keyof T, propValue: ValueOf) => void; @@ -60,11 +63,13 @@ export 
type StartNotebookData = { projectName: string; notebookName: string; notebookSize: NotebookSize; - gpus: number; + accelerator: AcceleratorState; image: ImageStreamAndVersion; volumes?: Volume[]; volumeMounts?: VolumeMount[]; tolerationSettings?: TolerationSettings; + existingTolerations?: PodToleration[]; + existingResources?: ContainerResources; envFrom?: EnvironmentFromVariable[]; description?: string; /** An override for the assembleNotebook so it doesn't regen an id */ diff --git a/frontend/src/services/acceleratorService.ts b/frontend/src/services/acceleratorService.ts new file mode 100644 index 0000000000..7e7908dbb9 --- /dev/null +++ b/frontend/src/services/acceleratorService.ts @@ -0,0 +1,12 @@ +import axios from 'axios'; +import { AcceleratorInfo } from '~/types'; + +export const getAcceleratorCounts = (): Promise => { + const url = '/api/accelerators'; + return axios + .get(url) + .then((response) => response.data) + .catch((e) => { + throw new Error(e.response.data.message); + }); +}; diff --git a/frontend/src/services/groupSettingsService.ts b/frontend/src/services/groupSettingsService.ts index 1704c8518f..2e957a342d 100644 --- a/frontend/src/services/groupSettingsService.ts +++ b/frontend/src/services/groupSettingsService.ts @@ -11,9 +11,7 @@ export const fetchGroupsSettings = (): Promise => { }); }; -export const updateGroupsSettings = ( - settings: GroupsConfig, -): Promise<{ success: GroupsConfig | null; error: string | null }> => { +export const updateGroupsSettings = (settings: GroupsConfig): Promise => { const url = '/api/groups-config'; return axios .put(url, settings) diff --git a/frontend/src/services/templateService.ts b/frontend/src/services/templateService.ts index 44e562331b..5713a7cf73 100644 --- a/frontend/src/services/templateService.ts +++ b/frontend/src/services/templateService.ts @@ -3,6 +3,7 @@ import axios from 'axios'; import YAML from 'yaml'; import { assembleServingRuntimeTemplate } from '~/api'; import { ServingRuntimeKind, 
TemplateKind } from '~/k8sTypes'; +import { addTypesToK8sListedResources } from '~/utilities/addTypesToK8sListedResources'; export const listTemplatesBackend = async ( namespace?: string, @@ -10,7 +11,7 @@ export const listTemplatesBackend = async ( ): Promise => axios .get(`/api/templates/${namespace}`, { params: { labelSelector } }) - .then((response) => response.data.items) + .then((response) => addTypesToK8sListedResources(response.data, 'Template').items) .catch((e) => Promise.reject(e)); const dryRunServingRuntimeForTemplateCreationBackend = ( diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 96956fabd0..d1e2971e6d 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -6,6 +6,7 @@ import { ServingRuntimeSize } from '~/pages/modelServing/screens/types'; import { EnvironmentFromVariable } from '~/pages/projects/types'; import { ImageStreamKind, ImageStreamSpecTagType } from './k8sTypes'; import { EitherNotBoth } from './typeHelpers'; +import { AcceleratorState } from './utilities/useAcceleratorState'; export type PrometheusQueryResponse = { data: { @@ -104,24 +105,22 @@ export type NotebookControllerUserState = { * OdhDashboardConfig contains gpuSetting as a string value override -- proper gpus return as numbers * TODO: Look to make it just number by properly parsing the value */ -export type GPUCount = string | number; export enum ContainerResourceAttributes { CPU = 'cpu', MEMORY = 'memory', - NVIDIA_GPU = 'nvidia.com/gpu', } export type ContainerResources = { requests?: { - cpu?: string; + [key: string]: number | string | undefined; + cpu?: string | number; memory?: string; - 'nvidia.com/gpu'?: GPUCount; }; limits?: { - cpu?: string; + [key: string]: number | string | undefined; + cpu?: string | number; memory?: string; - 'nvidia.com/gpu'?: GPUCount; }; }; @@ -290,7 +289,7 @@ type K8sMetadata = { /** * @deprecated -- use the SDK version -- see k8sTypes.ts * All references that use this are un-vetted data against existing types, 
should be converted over - * to the new K8sResourceCommon from the SDK to keep everythung unified on one front. + * to the new K8sResourceCommon from the SDK to keep everything unified on one front. */ export type K8sResourceCommon = { apiVersion?: string; @@ -329,7 +328,8 @@ export type TrackingEventProperties = { anonymousID?: string; type?: string; term?: string; - GPU?: GPUCount; + accelerator?: string; + acceleratorCount?: number; lastSelectedSize?: string; lastSelectedImage?: string; projectName?: string; @@ -344,9 +344,11 @@ export type NotebookPort = { }; export type PodToleration = { - effect: string; key: string; - operator: string; + operator?: string; + value?: string; + effect?: string; + tolerationSeconds?: number; }; export type NotebookContainer = { @@ -376,6 +378,7 @@ export type Notebook = K8sResourceCommon & { 'opendatahub.io/username': string; // the untranslated username behind the notebook 'notebooks.opendatahub.io/last-image-selection': string; // the last image they selected 'notebooks.opendatahub.io/last-size-selection': string; // the last notebook size they selected + 'opendatahub.io/accelerator-name': string | undefined; }>; labels: Partial<{ 'opendatahub.io/user': string; // translated username -- see translateUsername @@ -690,7 +693,7 @@ export type NotebookData = { notebookSizeName: string; imageName: string; imageTagName: string; - gpus: number; + accelerator: AcceleratorState; envVars: EnvVarReducedTypeKeyValues; state: NotebookState; // only used for admin calls, regular users cannot use this field @@ -725,3 +728,10 @@ export type ContextResourceData = { export type BreadcrumbItemType = { label: string; } & EitherNotBoth<{ link: string }, { isActive: boolean }>; + +export type AcceleratorInfo = { + configured: boolean; + available: { [key: string]: number }; + total: { [key: string]: number }; + allocated: { [key: string]: number }; +}; diff --git a/frontend/src/utilities/__tests__/addTypesToK8sListedResources.spec.ts 
b/frontend/src/utilities/__tests__/addTypesToK8sListedResources.spec.ts new file mode 100644 index 0000000000..9ad28eb769 --- /dev/null +++ b/frontend/src/utilities/__tests__/addTypesToK8sListedResources.spec.ts @@ -0,0 +1,36 @@ +import { K8sResourceCommon } from '@openshift/dynamic-plugin-sdk-utils'; +import { addTypesToK8sListedResources } from '~/utilities/addTypesToK8sListedResources'; + +const servingRuntimeTemplate = { + apiVersion: 'template.openshift.io/v1', + kind: 'TemplateList', + items: [ + { + metadata: { + name: 'test-model', + annotations: { + 'openshift.io/display-name': 'New OVMS Server', + }, + labels: { + 'opendatahub.io/dashboard': 'true', + }, + }, + }, + ], + metadata: { + resourceVersion: '24348645', + continue: '', + }, +}; + +describe('addTypesToK8sListedResources', () => { + it('should have apiVersion and kind as Template', () => { + const list = addTypesToK8sListedResources(servingRuntimeTemplate, 'Template'); + expect(list).not.toBe(servingRuntimeTemplate); + expect(list.items).toHaveLength(servingRuntimeTemplate.items.length); + list.items.forEach((i: Partial) => { + expect(i.apiVersion).toBe('template.openshift.io/v1'); + expect(i.kind).toBe('Template'); + }); + }); +}); diff --git a/frontend/src/utilities/__tests__/useFetchState.spec.ts b/frontend/src/utilities/__tests__/useFetchState.spec.ts index f29f380a21..b8081a64f2 100644 --- a/frontend/src/utilities/__tests__/useFetchState.spec.ts +++ b/frontend/src/utilities/__tests__/useFetchState.spec.ts @@ -1,97 +1,125 @@ -import { act } from '@testing-library/react'; -import useFetchState from '~/utilities/useFetchState'; -import { expectHook, standardUseFetchState, testHook } from '~/__tests__/unit/testUtils/hooks'; +import { act, waitFor } from '@testing-library/react'; +import useFetchState, { FetchState } from '~/utilities/useFetchState'; +import { standardUseFetchState, testHook } from '~/__tests__/unit/testUtils/hooks'; jest.useFakeTimers(); describe('useFetchState', () => { 
it('should be successful', async () => { - const renderResult = testHook( - useFetchState, + const renderResult = testHook(useFetchState)( () => Promise.resolve('success-test-state'), 'default-test-state', ); - expectHook(renderResult) - .toStrictEqual(standardUseFetchState('default-test-state')) - .toHaveUpdateCount(1); + expect(renderResult).hookToStrictEqual(standardUseFetchState('default-test-state')); + expect(renderResult).hookToHaveUpdateCount(1); await renderResult.waitForNextUpdate(); - expectHook(renderResult) - .toStrictEqual(standardUseFetchState('success-test-state', true)) - .toHaveUpdateCount(2) - .toBeStable([false, false, true, true]); + expect(renderResult).hookToStrictEqual(standardUseFetchState('success-test-state', true)); + expect(renderResult).hookToHaveUpdateCount(2); }); it('should fail', async () => { - const renderResult = testHook( - useFetchState, + const renderResult = testHook(useFetchState)( () => Promise.reject(new Error('error-test-state')), 'default-test-state', ); - - expectHook(renderResult) - .toStrictEqual(standardUseFetchState('default-test-state')) - .toHaveUpdateCount(1); + expect(renderResult).hookToStrictEqual(standardUseFetchState('default-test-state')); + expect(renderResult).hookToHaveUpdateCount(1); await renderResult.waitForNextUpdate(); - expectHook(renderResult) - .toStrictEqual( - standardUseFetchState('default-test-state', false, new Error('error-test-state')), - ) - .toHaveUpdateCount(2) - .toBeStable([true, true, false, true]); + expect(renderResult).hookToStrictEqual( + standardUseFetchState('default-test-state', false, new Error('error-test-state')), + ); + expect(renderResult).hookToHaveUpdateCount(2); + expect(renderResult).hookToBeStable([true, true, false, true]); }); it('should refresh', async () => { - const renderResult = testHook(useFetchState, () => Promise.resolve([1, 2, 3]), [], { + const renderResult = testHook(useFetchState)(() => Promise.resolve([1, 2, 3]), [], { refreshRate: 1000, }); - 
expectHook(renderResult).toStrictEqual(standardUseFetchState([])).toHaveUpdateCount(1); + expect(renderResult).hookToStrictEqual(standardUseFetchState([])); + expect(renderResult).hookToHaveUpdateCount(1); await renderResult.waitForNextUpdate(); - expectHook(renderResult) - .toStrictEqual(standardUseFetchState([1, 2, 3], true)) - .toHaveUpdateCount(2) - .toBeStable([false, false, true, true]); + expect(renderResult).hookToStrictEqual(standardUseFetchState([1, 2, 3], true)); + expect(renderResult).hookToHaveUpdateCount(2); + expect(renderResult).hookToBeStable([false, false, true, true]); await act(() => { jest.advanceTimersByTime(900); }); - expectHook(renderResult).toHaveUpdateCount(2); + expect(renderResult).hookToHaveUpdateCount(2); await act(async () => { jest.advanceTimersByTime(100); }); - expectHook(renderResult).toHaveUpdateCount(3); + expect(renderResult).hookToHaveUpdateCount(3); await renderResult.waitForNextUpdate(); - expectHook(renderResult) - .toStrictEqual(standardUseFetchState([1, 2, 3], true)) - .toHaveUpdateCount(4) - .toBeStable([false, true, true, true]); + expect(renderResult).hookToStrictEqual(standardUseFetchState([1, 2, 3], true)); + expect(renderResult).hookToHaveUpdateCount(4); + expect(renderResult).hookToBeStable([false, true, true, true]); }); it('should test stability', async () => { - const renderResult = testHook(useFetchState, () => Promise.resolve([1, 2, 3]), []); - expectHook(renderResult).toStrictEqual(standardUseFetchState([])).toHaveUpdateCount(1); + const renderResult = testHook(useFetchState)(() => Promise.resolve([1, 2, 3]), []); + expect(renderResult).hookToStrictEqual(standardUseFetchState([])); + expect(renderResult).hookToHaveUpdateCount(1); await renderResult.waitForNextUpdate(); - expectHook(renderResult) - .toStrictEqual(standardUseFetchState([1, 2, 3], true)) - .toHaveUpdateCount(2) - .toBeStable([false, false, true, true]); + + expect(renderResult).hookToStrictEqual(standardUseFetchState([1, 2, 3], true)); + 
expect(renderResult).hookToHaveUpdateCount(2); + expect(renderResult).hookToBeStable([false, false, true, true]); renderResult.rerender(() => Promise.resolve([1, 2, 4]), []); - expectHook(renderResult).toHaveUpdateCount(3).toBeStable([true, true, true, true]); + expect(renderResult).hookToHaveUpdateCount(3); + expect(renderResult).hookToBeStable([true, true, true, true]); await renderResult.waitForNextUpdate(); - expectHook(renderResult) - .toStrictEqual(standardUseFetchState([1, 2, 4], true)) - .toHaveUpdateCount(4) - .toBeStable([false, true, true, true]); + expect(renderResult).hookToStrictEqual(standardUseFetchState([1, 2, 4], true)); + expect(renderResult).hookToHaveUpdateCount(4); + expect(renderResult).hookToBeStable([false, true, true, true]); + }); + + it('should have a stable default values when initialPromisePurity=true', async () => { + const oriDefaultValue = [10]; + const result: FetchState[] = []; + + const renderResult = testHook((...args: Parameters>) => { + // wrap useFetchState to capture all executions inbetween useEffects + const state = useFetchState(...args); + result.push(state); + return state; + })(() => Promise.resolve([1, 2, 3]), oriDefaultValue, { + initialPromisePurity: true, + }); + + expect(result[0][0]).toBe(oriDefaultValue); + expect(result[0][1]).toBe(false); + + await waitFor(() => expect(result).toHaveLength(2)); + expect(result[1][0]).toStrictEqual([1, 2, 3]); + expect(result[1][1]).toBe(true); + + // rerender but with a promise that doens't resolve + renderResult.rerender(() => new Promise(() => null), [11], { + initialPromisePurity: true, + }); + + expect(result).toHaveLength(4); + + // update immediately after hook completes but before useEffects are run + expect(result[2][0]).toBe(oriDefaultValue); + expect(result[2][1]).toBe(false); + + // final update after all useEffects are run + expect(result[3][0]).toBe(oriDefaultValue); + expect(result[3][1]).toBe(false); }); }); diff --git 
a/frontend/src/utilities/__tests__/useWatchGroups.spec.ts b/frontend/src/utilities/__tests__/useWatchGroups.spec.ts new file mode 100644 index 0000000000..1474cfb4d8 --- /dev/null +++ b/frontend/src/utilities/__tests__/useWatchGroups.spec.ts @@ -0,0 +1,34 @@ +import { testHook } from '~/__tests__/unit/testUtils/hooks'; +import { fetchGroupsSettings } from '~/services/groupSettingsService'; +import { useWatchGroups } from '~/utilities/useWatchGroups'; + +jest.mock('~/services/groupSettingsService', () => ({ + fetchGroupsSettings: jest.fn(), +})); + +jest.mock('react-redux', () => ({ + useDispatch: jest.fn(), +})); + +const fetchGroupSettingsMock = fetchGroupsSettings as jest.Mock; + +describe('useWatchGroups', () => { + it('should fetch groups successfully', async () => { + const mockEmptyGroupSettings = { + adminGroups: [], + allowedGroups: [], + }; + const mockGroupSettings = { + adminGroups: ['odh-admins'], + allowedGroups: [], + }; + fetchGroupSettingsMock.mockReturnValue(Promise.resolve(mockGroupSettings)); + + const renderResult = testHook(useWatchGroups)(); + expect(fetchGroupSettingsMock).toHaveBeenCalledTimes(1); + expect(renderResult.result.current.groupSettings).toStrictEqual(mockEmptyGroupSettings); + + await renderResult.waitForNextUpdate(); + expect(renderResult.result.current.groupSettings).toStrictEqual(mockGroupSettings); + }); +}); diff --git a/frontend/src/utilities/__tests__/valueUnits.spec.ts b/frontend/src/utilities/__tests__/valueUnits.spec.ts index d16500bdd4..e163b53339 100644 --- a/frontend/src/utilities/__tests__/valueUnits.spec.ts +++ b/frontend/src/utilities/__tests__/valueUnits.spec.ts @@ -1,14 +1,22 @@ -import { isCpuLimitEqual, isMemoryLimitEqual } from '~/utilities/valueUnits'; +import { + isCpuLimitEqual, + isMemoryLimitEqual, + isCpuLimitLarger, + isMemoryLimitLarger, +} from '~/utilities/valueUnits'; describe('isCpuLimitEqual', () => { test('correctly compares non-undefined values', () => { expect(isCpuLimitEqual('1', 
'1')).toBe(true); + expect(isCpuLimitEqual(1, '1')).toBe(true); expect(isCpuLimitEqual('1000m', '1')).toBe(true); + expect(isCpuLimitEqual('1000m', 1)).toBe(true); expect(isCpuLimitEqual('1001m', '1')).toBe(false); }); test('correctly compares undefined values', () => { expect(isCpuLimitEqual('1000m', undefined)).toBe(false); expect(isCpuLimitEqual('1', undefined)).toBe(false); + expect(isCpuLimitEqual(1, undefined)).toBe(false); expect(isCpuLimitEqual(undefined, undefined)).toBe(true); }); }); @@ -25,3 +33,36 @@ describe('isMemoryLimitEqual', () => { expect(isMemoryLimitEqual(undefined, undefined)).toBe(true); }); }); + +describe('isCpuLimitLarger', () => { + test('correctly compares non-undefined values', () => { + expect(isCpuLimitLarger('1', '1')).toBe(false); + expect(isCpuLimitLarger('1', '1', true)).toBe(true); + expect(isCpuLimitLarger(1, '1')).toBe(false); + expect(isCpuLimitLarger(1, '1', true)).toBe(true); + expect(isCpuLimitLarger('1000m', '1')).toBe(false); + expect(isCpuLimitLarger('1000m', '1', true)).toBe(true); + expect(isCpuLimitLarger('1', '1001m')).toBe(true); + }); + test('correctly compares undefined values', () => { + expect(isCpuLimitLarger(undefined, '1000m')).toBe(false); + expect(isCpuLimitLarger('1000m', undefined)).toBe(false); + expect(isCpuLimitLarger(1, undefined)).toBe(false); + expect(isCpuLimitLarger(undefined, undefined)).toBe(false); + }); +}); + +describe('isMemoryLimitLarger', () => { + test('correctly compares non-undefined values', () => { + expect(isMemoryLimitLarger('1Gi', '1Gi')).toBe(false); + expect(isMemoryLimitLarger('1Gi', '1Gi', true)).toBe(true); + expect(isMemoryLimitLarger('1Gi', '1024Mi')).toBe(false); + expect(isMemoryLimitLarger('1Gi', '1024Mi', true)).toBe(true); + expect(isMemoryLimitLarger('1Gi', '1025Mi')).toBe(true); + }); + test('correctly compares undefined values', () => { + expect(isMemoryLimitLarger(undefined, '1Gi')).toBe(false); + expect(isMemoryLimitLarger('1Gi', undefined)).toBe(false); + 
expect(isMemoryLimitLarger(undefined, undefined)).toBe(false); + }); +}); diff --git a/frontend/src/utilities/addTypesToK8sListedResources.ts b/frontend/src/utilities/addTypesToK8sListedResources.ts new file mode 100644 index 0000000000..d11ee3ef8d --- /dev/null +++ b/frontend/src/utilities/addTypesToK8sListedResources.ts @@ -0,0 +1,14 @@ +import { K8sResourceCommon } from '@openshift/dynamic-plugin-sdk-utils'; +import { K8sResourceListResult } from '~/k8sTypes'; + +export const addTypesToK8sListedResources = >( + response: K8sResourceListResult, + kind: string, +): K8sResourceListResult => ({ + ...response, + items: response.items.map((i) => ({ + ...i, + apiVersion: response.apiVersion, + kind, + })), +}); diff --git a/frontend/src/utilities/const.ts b/frontend/src/utilities/const.ts index 8379731c07..1589247229 100644 --- a/frontend/src/utilities/const.ts +++ b/frontend/src/utilities/const.ts @@ -46,4 +46,8 @@ export const DEFAULT_CONTEXT_DATA: ContextResourceData = { export const REPOSITORY_URL_REGEX = /^([\w.\-_]+((?::\d+|)(?=\/[a-z0-9._-]+\/[a-z0-9._-]+))|)(?:\/|)([a-z0-9.\-_]+(?:\/[a-z0-9.\-_]+|))(?::([\w.\-_]{1,127})|)/; -export const DASHBOARD_MAIN_CONTAINER_SELECTOR = 'dashboard-page-main'; +export const DASHBOARD_MAIN_CONTAINER_ID = 'dashboard-page-main'; + +// Quick starts drawer creates a new scroll container within its DrawerContentBody. +// Not an ideal selector but components such as JumpLinks require the use of a selector instead of a direct node reference. 
+export const DASHBOARD_SCROLL_CONTAINER_SELECTOR = `#${DASHBOARD_MAIN_CONTAINER_ID} > .pf-c-drawer > .pf-c-drawer__main > .pf-c-drawer__content`; diff --git a/frontend/src/utilities/imageUtils.ts b/frontend/src/utilities/imageUtils.ts index d2bda5123b..6b1a04d17e 100644 --- a/frontend/src/utilities/imageUtils.ts +++ b/frontend/src/utilities/imageUtils.ts @@ -2,13 +2,11 @@ import compareVersions from 'compare-versions'; import { BuildStatus, BUILD_PHASE, - GPUCount, ImageInfo, ImageSoftwareType, ImageTag, ImageTagInfo, NotebookContainer, - ContainerResourceAttributes, } from '~/types'; const PENDING_PHASES = [ @@ -72,9 +70,6 @@ export const getVersion = (version?: string, prefix?: string): string => { export const getNameVersionString = (software: ImageSoftwareType): string => `${software.name}${getVersion(software.version, ' v')}`; -export const getNumGpus = (container?: NotebookContainer): GPUCount => - container?.resources?.limits?.[ContainerResourceAttributes.NVIDIA_GPU] || 0; - export const getDefaultTag = ( buildStatuses: BuildStatus[], image: ImageInfo, diff --git a/frontend/src/utilities/tolerations.ts b/frontend/src/utilities/tolerations.ts index aa3e17480d..5878656918 100644 --- a/frontend/src/utilities/tolerations.ts +++ b/frontend/src/utilities/tolerations.ts @@ -1,6 +1,8 @@ import { Patch } from '@openshift/dynamic-plugin-sdk-utils'; +import _ from 'lodash'; import { DashboardConfig, PodToleration, TolerationSettings } from '~/types'; import { NotebookKind } from '~/k8sTypes'; +import { AcceleratorState } from './useAcceleratorState'; export type TolerationChanges = { type: 'add' | 'remove' | 'replace' | 'nothing'; @@ -8,19 +10,35 @@ export type TolerationChanges = { }; export const determineTolerations = ( - hasGpu: boolean, tolerationSettings?: TolerationSettings, + acceleratorState?: AcceleratorState, + existingTolerations?: PodToleration[], ): PodToleration[] => { - const tolerations: PodToleration[] = []; + let tolerations = existingTolerations || 
[]; - if (hasGpu) { - tolerations.push({ - effect: 'NoSchedule', - key: 'nvidia.com/gpu', - operator: 'Exists', - }); + // remove old accelerator tolerations if they exist + if (acceleratorState?.initialAccelerator) { + tolerations = tolerations.filter( + (t) => !acceleratorState.initialAccelerator?.spec.tolerations?.some((t2) => _.isEqual(t2, t)), + ); } - if (tolerationSettings?.enabled) { + + // add new accelerator tolerations if they exist + if (acceleratorState?.accelerator?.spec.tolerations) { + tolerations.push(...acceleratorState.accelerator.spec.tolerations); + } + + // remove duplicated tolerations + tolerations = _.uniqWith(tolerations, _.isEqual); + + // add toleration from settings if they exist + if ( + tolerationSettings?.enabled && + !tolerations.some( + (t) => + t.key === tolerationSettings.key && t.operator === 'Exists' && t.effect === 'NoSchedule', + ) + ) { tolerations.push({ effect: 'NoSchedule', key: tolerationSettings.key, @@ -35,15 +53,9 @@ export const computeNotebooksTolerations = ( dashboardConfig: DashboardConfig, notebook: NotebookKind, ): TolerationChanges => { - const hasGPU = !!notebook.spec.template.spec.containers.find( - (container) => - !!container.resources?.limits?.['nvidia.com/gpu'] || - !!container.resources?.requests?.['nvidia.com/gpu'], - ); const tolerations = notebook.spec.template.spec.tolerations || []; const settings = determineTolerations( - hasGPU, dashboardConfig.spec.notebookController?.notebookTolerationSettings, ); diff --git a/frontend/src/utilities/useAcceleratorState.ts b/frontend/src/utilities/useAcceleratorState.ts new file mode 100644 index 0000000000..3d3b3d2aea --- /dev/null +++ b/frontend/src/utilities/useAcceleratorState.ts @@ -0,0 +1,135 @@ +import React from 'react'; +import { AcceleratorKind } from '~/k8sTypes'; +import useAccelerators from '~/pages/notebookController/screens/server/useAccelerators'; +import { useDashboardNamespace } from '~/redux/selectors'; +import { ContainerResourceAttributes, 
ContainerResources, PodToleration } from '~/types'; +import useGenericObjectState, { GenericObjectState } from '~/utilities/useGenericObjectState'; + +export type AcceleratorState = { + accelerator?: AcceleratorKind; + accelerators: AcceleratorKind[]; + initialAccelerator?: AcceleratorKind; + useExisting: boolean; + count: number; + additionalOptions?: { + useExisting?: boolean; + useDisabled?: AcceleratorKind; + }; +}; + +const useAcceleratorState = ( + resources?: ContainerResources, + tolerations?: PodToleration[], + existingAcceleratorName?: string, +): GenericObjectState => { + const [acceleratorState, setData, resetData] = useGenericObjectState({ + accelerator: undefined, + accelerators: [], + initialAccelerator: undefined, + count: 0, + useExisting: false, + }); + + const { dashboardNamespace } = useDashboardNamespace(); + const [accelerators, loaded, loadError, refresh] = useAccelerators(dashboardNamespace); + + React.useEffect(() => { + if (loaded && !loadError) { + setData('accelerators', accelerators); + + // Exit early if no resources = not in edit mode + if (!resources) { + return; + } + + const accelerator = accelerators.find( + (accelerator) => accelerator.metadata.name === existingAcceleratorName, + ); + + if (accelerator) { + setData('accelerator', accelerator); + setData('initialAccelerator', accelerator); + setData('count', Number(resources.requests?.[accelerator.spec.identifier] ?? 0)); + if (!accelerator.spec.enabled) { + setData('additionalOptions', { useDisabled: accelerator }); + } + } else { + // check if there is accelerator usage in the container + // this is to handle the case where the accelerator is disabled, deleted, or empty + const containerResourceAttributes = Object.values(ContainerResourceAttributes) as string[]; + const possibleAcceleratorRequests = Object.entries(resources.requests ?? 
{}) + .filter(([key]) => !containerResourceAttributes.includes(key)) + .map(([key, value]) => ({ identifier: key, count: value })); + if (possibleAcceleratorRequests.length > 0) { + // check if they are just using the nvidia.com/gpu + // if so, lets migrate them over to using the migrated-gpu accelerator profile if it exists + const nvidiaAcceleratorRequests = possibleAcceleratorRequests.find( + (request) => request.identifier === 'nvidia.com/gpu', + ); + + if ( + nvidiaAcceleratorRequests && + tolerations?.some( + (toleration) => + toleration.key === 'nvidia.com/gpu' && + toleration.operator === 'Exists' && + toleration.effect === 'NoSchedule', + ) + ) { + const migratedAccelerator = accelerators.find( + (accelerator) => accelerator.metadata.name === 'migrated-gpu', + ); + + if (migratedAccelerator) { + setData('accelerator', migratedAccelerator); + setData('initialAccelerator', migratedAccelerator); + setData('count', Number(nvidiaAcceleratorRequests.count ?? 0)); + if (!migratedAccelerator.spec.enabled) { + setData('additionalOptions', { useDisabled: accelerator }); + } + } else { + // create a fake accelerator to use + const fakeAccelerator: AcceleratorKind = { + apiVersion: 'dashboard.opendatahub.io/v1', + kind: 'AcceleratorProfile', + metadata: { + name: 'migrated-gpu', + }, + spec: { + identifier: 'nvidia.com/gpu', + displayName: 'NVIDIA GPU', + enabled: true, + tolerations: [ + { + key: 'nvidia.com/gpu', + operator: 'Exists', + effect: 'NoSchedule', + }, + ], + }, + }; + + setData('accelerator', fakeAccelerator); + setData('accelerators', [fakeAccelerator, ...accelerators]); + setData('initialAccelerator', fakeAccelerator); + setData('count', Number(nvidiaAcceleratorRequests.count ?? 
0)); + } + } else { + // fallback to using the existing accelerator + setData('useExisting', true); + setData('additionalOptions', { useExisting: true }); + } + } + } + } + }, [accelerators, loaded, loadError, resources, tolerations, existingAcceleratorName, setData]); + + const resetDataAndRefresh = React.useCallback(() => { + resetData(); + refresh(); + }, [refresh, resetData]); + + return [acceleratorState, setData, resetDataAndRefresh]; +}; + +export default useAcceleratorState; diff --git a/frontend/src/utilities/useFetchState.ts b/frontend/src/utilities/useFetchState.ts index 03c0c3da2f..14eb4eb323 100644 --- a/frontend/src/utilities/useFetchState.ts +++ b/frontend/src/utilities/useFetchState.ts @@ -21,7 +21,7 @@ export const isCommonStateError = (e: Error) => { // Re-compute your callback to re-trigger again return true; } - if (e.name == 'AbortError') { + if (e.name === 'AbortError') { // Abort errors are silent return true; } @@ -111,7 +111,7 @@ type FetchOptions = { */ const useFetchState = ( /** React.useCallback result. */ - fetchCallbackPromise: FetchStateCallbackPromise | FetchStateCallbackPromiseAdHoc, + fetchCallbackPromise: FetchStateCallbackPromise>, /** * A preferred default states - this is ignored after the first render * Note: This is only read as initial value; changes do nothing. @@ -120,10 +120,13 @@ const useFetchState = ( /** Configurable features */ { refreshRate = 0, initialPromisePurity = false }: Partial = {}, ): FetchState => { + const initialDefaultStateRef = React.useRef(initialDefaultState); const [result, setResult] = React.useState(initialDefaultState); const [loaded, setLoaded] = React.useState(false); const [loadError, setLoadError] = React.useState(undefined); const abortCallbackRef = React.useRef<() => void>(() => undefined); + const changePendingRef = React.useRef(true); + /** Setup on initial hook a singular reset function. DefaultState & resetDataOnNewPromise are initial render states. 
*/ const cleanupRef = React.useRef(() => { if (initialPromisePurity) { @@ -145,6 +148,7 @@ const useFetchState = ( const doRequest = () => fetchCallbackPromise({ signal: abortController.signal }) .then((r) => { + changePendingRef.current = false; if (alreadyAborted) { return undefined; } @@ -178,6 +182,7 @@ const useFetchState = ( return r; }) .catch((e) => { + changePendingRef.current = false; if (alreadyAborted) { return undefined; } @@ -190,6 +195,7 @@ const useFetchState = ( }); const unload = () => { + changePendingRef.current = false; if (alreadyAborted) { return; } @@ -201,6 +207,13 @@ const useFetchState = ( return [doRequest(), unload]; }, [fetchCallbackPromise]); + // Use a memmo to update the `changePendingRef` immediately on change. + React.useMemo(() => { + changePendingRef.current = true; + // React to changes to the `call` reference. + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [call]); + React.useEffect(() => { let interval: ReturnType; @@ -234,6 +247,11 @@ const useFetchState = ( return callPromise; }, []); + // Return the default reset state if a change is pending and initialPromisePurity is true + if (initialPromisePurity && changePendingRef.current) { + return [initialDefaultStateRef.current, false, undefined, refresh]; + } + return [result, loaded, loadError, refresh]; }; diff --git a/frontend/src/utilities/useGenericObjectState.ts b/frontend/src/utilities/useGenericObjectState.ts index 0f535c5d15..ae2808256d 100644 --- a/frontend/src/utilities/useGenericObjectState.ts +++ b/frontend/src/utilities/useGenericObjectState.ts @@ -1,7 +1,7 @@ import * as React from 'react'; import { UpdateObjectAtPropAndValue } from '~/pages/projects/types'; -type GenericObjectState = [ +export type GenericObjectState = [ data: T, setData: UpdateObjectAtPropAndValue, resetDefault: () => void, diff --git a/frontend/src/utilities/useWatchGroups.tsx b/frontend/src/utilities/useWatchGroups.tsx index 66311036e4..46bb7aadfc 100644 --- 
a/frontend/src/utilities/useWatchGroups.tsx +++ b/frontend/src/utilities/useWatchGroups.tsx @@ -48,10 +48,10 @@ export const useWatchGroups = (): { React.useEffect(() => { if (errorAdmin) { - notification.error(`Group no longer exists`, errorAdmin); + notification.error(`Group error`, errorAdmin); } if (errorUser) { - notification.error(`Group no longer exists`, errorUser); + notification.error(`Group error`, errorUser); } }, [errorAdmin, errorUser, notification]); @@ -59,13 +59,11 @@ export const useWatchGroups = (): { setIsLoading(true); updateGroupsSettings(group) .then((response) => { - if (response.success) { - setGroupSettings(response.success); - notification.success( - 'Group settings changes saved', - 'It may take up to 2 minutes for configuration changes to be applied.', - ); - } + setGroupSettings(response); + notification.success( + 'Group settings changes saved', + 'It may take up to 2 minutes for configuration changes to be applied.', + ); }) .catch((error) => { setLoadError(error); diff --git a/frontend/src/utilities/utils.ts b/frontend/src/utilities/utils.ts index 5a81adceee..eff87fc7bd 100644 --- a/frontend/src/utilities/utils.ts +++ b/frontend/src/utilities/utils.ts @@ -1,5 +1,5 @@ import { OdhApplication, OdhDocument, OdhDocumentType } from '~/types'; -import { CATEGORY_ANNOTATION, DASHBOARD_MAIN_CONTAINER_SELECTOR, ODH_PRODUCT_NAME } from './const'; +import { CATEGORY_ANNOTATION, DASHBOARD_MAIN_CONTAINER_ID, ODH_PRODUCT_NAME } from './const'; /** * Feature flags are required in the config -- but upgrades can be mixed and omission of the property @@ -144,7 +144,7 @@ export const isGroupEmpty = (groupList: Array groupList.filter((element) => element.enabled).length === 0; export const getDashboardMainContainer = (): HTMLElement => - document.getElementById(DASHBOARD_MAIN_CONTAINER_SELECTOR) || document.body; + document.getElementById(DASHBOARD_MAIN_CONTAINER_ID) || document.body; export const isHTMLInputElement = (object: unknown): object is 
HTMLInputElement => (object as HTMLInputElement).value !== undefined; diff --git a/frontend/src/utilities/valueUnits.ts b/frontend/src/utilities/valueUnits.ts index 55fd54c6ef..3d20a272ef 100644 --- a/frontend/src/utilities/valueUnits.ts +++ b/frontend/src/utilities/valueUnits.ts @@ -4,6 +4,12 @@ */ export type ValueUnitString = string; +/** + * Format: '{value: number}' + * eg. 1; "1" + */ +export type ValueUnitCPU = string | number; + export type UnitOption = { name: string; unit: string; @@ -51,16 +57,19 @@ export const isEqual = ( units: UnitOption[], ): boolean => calculateDelta(value1, value2, units) === 0; -export const isCpuLimitEqual = (cpu1?: ValueUnitString, cpu2?: ValueUnitString): boolean => { - if (!cpu1 && !cpu2) { +export const isCpuLimitEqual = (cpu1?: ValueUnitCPU, cpu2?: ValueUnitCPU): boolean => { + const cpu1String = typeof cpu1 === 'number' ? `${cpu1}` : cpu1; + const cpu2String = typeof cpu2 === 'number' ? `${cpu2}` : cpu2; + + if (!cpu1String && !cpu2String) { return true; } - if (!cpu1 || !cpu2) { + if (!cpu1String || !cpu2String) { return false; } - return isEqual(cpu1, cpu2, CPU_UNITS); + return isEqual(cpu1String, cpu2String, CPU_UNITS); }; export const isMemoryLimitEqual = ( @@ -90,15 +99,18 @@ export const isLarger = ( }; export const isCpuLimitLarger = ( - requestCpu?: ValueUnitString, - limitCpu?: ValueUnitString, + requestCpu?: ValueUnitCPU, + limitCpu?: ValueUnitCPU, isEqualOkay = false, ): boolean => { - if (!requestCpu || !limitCpu) { - return true; + const requestCpuString = typeof requestCpu === 'number' ? `${requestCpu}` : requestCpu; + const limitCpuString = typeof limitCpu === 'number' ? 
`${limitCpu}` : limitCpu; + + if (!limitCpuString || !requestCpuString) { + return false; } - return isLarger(limitCpu, requestCpu, CPU_UNITS, isEqualOkay); + return isLarger(limitCpuString, requestCpuString, CPU_UNITS, isEqualOkay); }; export const isMemoryLimitLarger = ( @@ -106,8 +118,8 @@ export const isMemoryLimitLarger = ( limitMemory?: ValueUnitString, isEqualOkay = false, ): boolean => { - if (requestMemory == null || limitMemory == null) { - return true; + if (!limitMemory || !requestMemory) { + return false; } return isLarger(limitMemory, requestMemory, MEMORY_UNITS, isEqualOkay); diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json index 32c295a529..e584ca32d3 100644 --- a/frontend/tsconfig.json +++ b/frontend/tsconfig.json @@ -16,8 +16,9 @@ "esModuleInterop": true, "allowSyntheticDefaultImports": true, "strict": true, + "baseUrl": "./src", "paths": { - "~/*": ["./src/*"] + "~/*": ["./*"] }, "importHelpers": true, "skipLibCheck": true diff --git a/manifests/base/cluster-role.yaml b/manifests/base/cluster-role.yaml index 51b5798502..228eaeed7c 100644 --- a/manifests/base/cluster-role.yaml +++ b/manifests/base/cluster-role.yaml @@ -3,6 +3,13 @@ apiVersion: rbac.authorization.k8s.io/v1 metadata: name: odh-dashboard rules: + - verbs: + - get + - list + apiGroups: + - '' + resources: + - nodes - verbs: - get - list @@ -157,3 +164,11 @@ rules: - delete resources: - notebooks + - apiGroups: + - datasciencecluster.opendatahub.io + verbs: + - list + - watch + - get + resources: + - datascienceclusters diff --git a/manifests/base/fetch-accelerators.rbac.yaml b/manifests/base/fetch-accelerators.rbac.yaml new file mode 100644 index 0000000000..d11b7dc3ad --- /dev/null +++ b/manifests/base/fetch-accelerators.rbac.yaml @@ -0,0 +1,26 @@ +kind: Role +apiVersion: rbac.authorization.k8s.io/v1 +metadata: + name: fetch-accelerators-role +rules: + - apiGroups: + - dashboard.opendatahub.io + verbs: + - get + - list + - watch + resources: + - acceleratorprofiles +--- 
+apiVersion: rbac.authorization.k8s.io/v1 +kind: RoleBinding +metadata: + name: accelerators +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: Role + name: fetch-accelerators-role +subjects: + - apiGroup: rbac.authorization.k8s.io + kind: Group + name: system:authenticated \ No newline at end of file diff --git a/manifests/base/kustomization.yaml b/manifests/base/kustomization.yaml index 4fe93e8917..3595eb216d 100644 --- a/manifests/base/kustomization.yaml +++ b/manifests/base/kustomization.yaml @@ -21,6 +21,7 @@ resources: - image-puller.clusterrolebinding.yaml - model-serving-role.yaml - model-serving-role-binding.yaml + - fetch-accelerators.rbac.yaml images: - name: odh-dashboard newName: quay.io/opendatahub/odh-dashboard diff --git a/manifests/base/role.yaml b/manifests/base/role.yaml index bac744a29e..5a885ab041 100644 --- a/manifests/base/role.yaml +++ b/manifests/base/role.yaml @@ -3,6 +3,14 @@ apiVersion: rbac.authorization.k8s.io/v1 metadata: name: odh-dashboard rules: + - verbs: + - create + - get + - list + apiGroups: + - dashboard.opendatahub.io + resources: + - acceleratorprofiles - apiGroups: - route.openshift.io resources: diff --git a/manifests/crd/acceleratorprofiles.opendatahub.io.crd.yaml b/manifests/crd/acceleratorprofiles.opendatahub.io.crd.yaml new file mode 100644 index 0000000000..56b11b73f3 --- /dev/null +++ b/manifests/crd/acceleratorprofiles.opendatahub.io.crd.yaml @@ -0,0 +1,63 @@ +apiVersion: apiextensions.k8s.io/v1 +kind: CustomResourceDefinition +metadata: + name: acceleratorprofiles.dashboard.opendatahub.io +spec: + group: dashboard.opendatahub.io + scope: Namespaced + names: + plural: acceleratorprofiles + singular: acceleratorprofile + kind: AcceleratorProfile + versions: + - name: v1 + served: true + storage: true + schema: + openAPIV3Schema: + type: object + required: + - spec + properties: + spec: + type: object + required: + - displayName + - enabled + - identifier + properties: + displayName: + type: string + 
description: "The display name of the accelerator profile." + enabled: + type: boolean + description: "Indicates whether the accelerator profile is available for new resources." + identifier: + type: string + description: "The resource identifier of the accelerator device. Example: nvidia.com/gpu" + description: + type: string + description: "A short description of the accelerator profile." + tolerations: + type: array + description: "Any number of Kubernetes toleration values that are added to resources when created or updated to this accelerator profile." + items: + type: object + required: + - key + properties: + key: + type: string + description: "Taint key. Empty matches all keys." + operator: + type: string + description: "Relationship with the value. Valid: 'Exists', 'Equal'. Defaults to 'Equal'." + value: + type: string + description: "Tolerance value. If key is empty, use 'Exists' to match all values and keys." + effect: + type: string + description: "Taint effect. Empty matches all effects. Allowed: 'NoSchedule', 'PreferNoSchedule', 'NoExecute'." + tolerationSeconds: + type: integer + description: "Duration in seconds. If effect is 'NoExecute', specifies eviction time. Default is forever." \ No newline at end of file diff --git a/manifests/crd/kustomization.yaml b/manifests/crd/kustomization.yaml index 7709378df1..3d8497d804 100644 --- a/manifests/crd/kustomization.yaml +++ b/manifests/crd/kustomization.yaml @@ -8,3 +8,4 @@ resources: - odhquickstarts.console.openshift.io.crd.yaml - odhdocuments.dashboard.opendatahub.io.crd.yaml - odhapplications.dashboard.opendatahub.io.crd.yaml +- acceleratorprofiles.opendatahub.io.crd.yaml \ No newline at end of file