diff --git a/.github/workflows/dev-build.yaml b/.github/workflows/dev-build.yaml
index 87a4813a9..128b2785b 100644
--- a/.github/workflows/dev-build.yaml
+++ b/.github/workflows/dev-build.yaml
@@ -1,11 +1,11 @@
## For each release, please update the value of workflow name, branches and PR_NUMBER
## Also update frontend/package.json version
-name: Dev Build 1.58.0
+name: Dev Build 1.59.0
on:
push:
- branches: [ release-1.58.0 ]
+ branches: [ release-1.59.0 ]
paths:
- frontend/**
- backend/**
@@ -14,8 +14,8 @@ on:
env:
## The pull request number of the Tracking pull request to merge the release branch to main
- PR_NUMBER: 2118
- VERSION: 1.58.0
+ PR_NUMBER: 2153
+ VERSION: 1.59.0
GIT_URL: https://github.com/bcgov/zeva.git
TOOLS_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools
DEV_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev
@@ -32,7 +32,7 @@ jobs:
call-unit-test:
uses: ./.github/workflows/unit-test-template.yaml
with:
- pr-number: 2118
+ pr-number: 2153
build:
diff --git a/.github/workflows/release-build.yaml b/.github/workflows/release-build.yaml
index 557c47b64..2a6714ed9 100644
--- a/.github/workflows/release-build.yaml
+++ b/.github/workflows/release-build.yaml
@@ -1,7 +1,7 @@
## For each release, please update the value of workflow name, branches and PR_NUMBER
## Also update frontend/package.json version
-name: Release Build 1.58.0
+name: Release Build 1.59.0
on:
workflow_dispatch:
@@ -9,8 +9,8 @@ on:
env:
## The pull request number of the Tracking pull request to merge the release branch to main
- PR_NUMBER: 2118
- VERSION: 1.58.0
+ PR_NUMBER: 2153
+ VERSION: 1.59.0
GIT_URL: https://github.com/bcgov/zeva.git
TOOLS_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-tools
DEV_NAMESPACE: ${{ secrets.OPENSHIFT_NAMESPACE_PLATE }}-dev
@@ -27,7 +27,7 @@ jobs:
call-unit-test:
uses: ./.github/workflows/unit-test-template.yaml
with:
- pr-number: 2118
+ pr-number: 2153
build:
diff --git a/.github/workflows/unit-test-template.yaml b/.github/workflows/unit-test-template.yaml
index e5d757fdd..4a8ebb3e6 100644
--- a/.github/workflows/unit-test-template.yaml
+++ b/.github/workflows/unit-test-template.yaml
@@ -18,13 +18,13 @@ jobs:
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: refs/pull/${{ inputs.pr-number }}/head
- - uses: actions/setup-node@v3
+ - uses: actions/setup-node@v4
with:
- node-version: 16.20.0
+ node-version: 20
- name: Frontend Jest coverage report
uses: ArtiomTr/jest-coverage-report-action@v2.2.1
@@ -44,7 +44,7 @@ jobs:
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
ref: refs/pull/${{ inputs.pr-number }}/head
diff --git a/backend/.s2i/bin/assemble b/backend/.s2i/bin/assemble
index 1b547cbc4..ae8d905f8 100755
--- a/backend/.s2i/bin/assemble
+++ b/backend/.s2i/bin/assemble
@@ -86,7 +86,8 @@ elif [[ -f requirements.txt ]]; then
pip install -r requirements.txt
else
echo "---> Installing dependencies from artifactory ..."
- pip install -i https://$ARTIFACTORY_USER:$ARTIFACTORY_PASSWORD@artifacts.developer.gov.bc.ca/artifactory/api/pypi/pypi-remote/simple -r requirements.txt
+ # pip install -i https://$ARTIFACTORY_USER:$ARTIFACTORY_PASSWORD@artifacts.developer.gov.bc.ca/artifactory/api/pypi/pypi-remote/simple -r requirements.txt
+ pip install -r requirements.txt
fi
fi
diff --git a/backend/api/migrations/0005_auto_20240425_0941.py b/backend/api/migrations/0005_auto_20240425_0941.py
new file mode 100644
index 000000000..f091f82bd
--- /dev/null
+++ b/backend/api/migrations/0005_auto_20240425_0941.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.18 on 2024-04-25 16:41
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('api', '0004_auto_20231024_0908'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='modelyearreportcomplianceobligation',
+ name='reduction_value',
+ field=models.DecimalField(blank=True, decimal_places=2, max_digits=20, null=True),
+ )
+ ]
diff --git a/backend/api/migrations/0006_auto_20240508_1553.py b/backend/api/migrations/0006_auto_20240508_1553.py
new file mode 100644
index 000000000..3edf87459
--- /dev/null
+++ b/backend/api/migrations/0006_auto_20240508_1553.py
@@ -0,0 +1,22 @@
+# Generated by Django 3.2.20 on 2024-05-08 22:53
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('api', '0005_auto_20240425_0941'),
+ ]
+
+ operations = [
+ migrations.AlterUniqueTogether(
+ name='vehicle',
+ unique_together=set(),
+ ),
+ migrations.AddConstraint(
+ model_name='vehicle',
+ constraint=models.UniqueConstraint(condition=models.Q(('validation_status', 'DELETED'), _negated=True), fields=('make', 'model_name', 'vehicle_zev_type', 'model_year'), name='unique_non_deleted_vehicles'),
+ ),
+ ]
diff --git a/backend/api/models/model_year_report_compliance_obligation.py b/backend/api/models/model_year_report_compliance_obligation.py
index c53e0cbe9..c4e5dff03 100644
--- a/backend/api/models/model_year_report_compliance_obligation.py
+++ b/backend/api/models/model_year_report_compliance_obligation.py
@@ -31,6 +31,13 @@ class ModelYearReportComplianceObligation(Auditable):
decimal_places=2,
db_comment='Value of credit B'
)
+ reduction_value = models.DecimalField(
+ max_digits=20,
+ decimal_places=2,
+ db_comment='Reduction value',
+ blank=True,
+ null=True,
+ )
category = models.CharField(
blank=True,
max_length=100,
diff --git a/backend/api/models/vehicle.py b/backend/api/models/vehicle.py
index 60b73238f..d8b3abb7a 100644
--- a/backend/api/models/vehicle.py
+++ b/backend/api/models/vehicle.py
@@ -1,6 +1,6 @@
from django.db import models
from enumfields import EnumField
-
+from django.db.models import Q
from auditable.models import Auditable
from .vehicle_statuses import VehicleDefinitionStatuses
@@ -133,11 +133,13 @@ def get_credit_value(self):
return round(credit, 2)
- class Meta:
- db_table = 'vehicle'
- unique_together = [[
- 'make', 'model_name', 'vehicle_zev_type',
- 'model_year'
- ]]
+ class Meta:
+ db_table = "vehicle"
+ constraints = [
+ models.UniqueConstraint(
+ fields=["make", "model_name", "vehicle_zev_type", "model_year"],
+ condition=~Q(validation_status="DELETED"),
+ name="unique_non_deleted_vehicles",
+ )]
db_table_comment = "List of credit-generating vehicle definitions"
diff --git a/backend/api/serializers/model_year_report_compliance_obligation.py b/backend/api/serializers/model_year_report_compliance_obligation.py
index a59bd0d6c..c10110015 100644
--- a/backend/api/serializers/model_year_report_compliance_obligation.py
+++ b/backend/api/serializers/model_year_report_compliance_obligation.py
@@ -40,7 +40,7 @@ class ModelYearReportComplianceObligationSnapshotSerializer(serializers.ModelSer
class Meta:
model = ModelYearReportComplianceObligation
fields = (
- 'credit_a_value', 'credit_b_value',
+ 'credit_a_value', 'credit_b_value', 'reduction_value',
'category', 'model_year', 'update_timestamp',
)
diff --git a/backend/api/services/icbc_upload.py b/backend/api/services/icbc_upload.py
index 7d3660be8..d8373ab3b 100644
--- a/backend/api/services/icbc_upload.py
+++ b/backend/api/services/icbc_upload.py
@@ -3,7 +3,8 @@
import math
import time
from django.db import transaction
-
+from datetime import datetime
+from dateutil.relativedelta import relativedelta
from api.models.icbc_registration_data import IcbcRegistrationData
from api.models.icbc_vehicle import IcbcVehicle
from api.models.model_year import ModelYear
@@ -131,12 +132,17 @@ def ingest_icbc_spreadsheet(excelfile, requesting_user, dateCurrentTo, previous_
# print("unique_vins", unique_vins.shape[0])
model_years = []
+
for unique_model_year in unique_model_years:
+ eff_date = datetime.strptime(str(unique_model_year), '%Y')
+ exp_date = eff_date + relativedelta(years=1) - relativedelta(days=1)
(model_year, _) = ModelYear.objects.get_or_create(
name=unique_model_year,
defaults={
'create_user': requesting_user.username,
- 'update_user': requesting_user.username
+ 'update_user': requesting_user.username,
+ 'effective_date': eff_date,
+ 'expiration_date': exp_date
})
model_years.append(model_year)
diff --git a/backend/api/services/model_year_report_ldv_sales.py b/backend/api/services/model_year_report_ldv_sales.py
index 3faf10a56..fa63fdf52 100644
--- a/backend/api/services/model_year_report_ldv_sales.py
+++ b/backend/api/services/model_year_report_ldv_sales.py
@@ -6,7 +6,7 @@ def get_most_recent_ldv_sales(model_year_report):
ModelYearReportLDVSales.objects.filter(
model_year=model_year_report.model_year, model_year_report=model_year_report
)
- .order_by("-create_timestamp")
+ .order_by("-update_timestamp")
.only("ldv_sales")
.first()
)
diff --git a/backend/api/viewsets/model_year_report_compliance_obligation.py b/backend/api/viewsets/model_year_report_compliance_obligation.py
index e509bdb91..ccf2a0b5e 100644
--- a/backend/api/viewsets/model_year_report_compliance_obligation.py
+++ b/backend/api/viewsets/model_year_report_compliance_obligation.py
@@ -144,12 +144,14 @@ def update_obligation(self, request):
model_year = ModelYear.objects.get(name=each['year'])
credit_a_value = each['a']
credit_b_value = each['b']
+ reduction_value = each.get('reduction_value')
compliance_obj = ModelYearReportComplianceObligation.objects.create(
model_year_report_id=id,
model_year=model_year,
category=category,
credit_a_value=credit_a_value,
credit_b_value=credit_b_value,
+ reduction_value=reduction_value,
from_gov=True
)
compliance_obj.save()
@@ -196,8 +198,9 @@ def details(self, request, *args, **kwargs):
report.validation_status == ModelYearReportStatuses.ASSESSED) or
request.user.is_government
)
+ use_from_gov_snapshot = request.GET.get('use_from_gov_snapshot') == 'True'
- if is_assessment:
+ if is_assessment or use_from_gov_snapshot:
organization = report.organization
snapshot_from_gov = ModelYearReportComplianceObligation.objects.filter(
from_gov=True,
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 253d43ea0..32f52cf96 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -8,8 +8,8 @@ cffi
chardet==3.0.4
configparser==5.0.0
coverage==5.0.3
-cryptography==3.4.7
-Django==3.2.20
+cryptography==42.0.4
+Django==3.2.25
django-celery-beat==1.5.0
django-cors-headers==3.2.1
django-enumfields==2.1.1
@@ -17,8 +17,8 @@ django-filter==2.4.0
django-timezone-field==4.0
djangorestframework==3.12.4
djangorestframework-camel-case==1.1.2
-gunicorn==20.0.4
-idna==2.8
+gunicorn==22.0.0
+idna==3.7
importlib-metadata==1.5.0
isort==4.3.21
kombu==4.6.7
@@ -41,9 +41,9 @@ python-dateutil==2.8.1
python-dotenv==0.10.5
python-magic==0.4.18
pytz==2022.2.1
-requests==2.31.0
+requests==2.32.2
six==1.13.0
-sqlparse==0.4.4
+sqlparse==0.5.0
typed-ast
urllib3==1.25.11
vine==1.3.0
diff --git a/frontend/Dockerfile-Openshift b/frontend/Dockerfile-Openshift
index 3406ce670..c76675d51 100644
--- a/frontend/Dockerfile-Openshift
+++ b/frontend/Dockerfile-Openshift
@@ -1,5 +1,6 @@
# Stage 1: Use yarn to build the app
-FROM artifacts.developer.gov.bc.ca/docker-remote/node:16.20.0 as builder
+# FROM artifacts.developer.gov.bc.ca/docker-remote/node:16.20.0 as builder
+FROM node:16.20.0 as builder
WORKDIR /usr/src/app
COPY ./ ./
RUN npm install -g npm@9.1.1 \
@@ -8,7 +9,8 @@ RUN npm install -g npm@9.1.1 \
RUN yes | npm run dist
# Stage 2: Copy the JS React SPA into the Nginx HTML directory
-FROM artifacts.developer.gov.bc.ca/docker-remote/bitnami/nginx:1.21.6
+# FROM artifacts.developer.gov.bc.ca/docker-remote/bitnami/nginx:1.21.6
+FROM bitnami/nginx:1.21.6
COPY ./nginx.conf /opt/bitnami/nginx/conf/
COPY --from=builder /usr/src/app/public/build /app
EXPOSE 8080
diff --git a/frontend/package.json b/frontend/package.json
index edb692fb9..9c975f353 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -1,6 +1,6 @@
{
"name": "zeva-frontend",
- "version": "1.58.0",
+ "version": "1.59.0",
"private": true,
"dependencies": {
"@babel/eslint-parser": "^7.19.1",
diff --git a/frontend/src/app/utilities/constructReassessmentReductions.js b/frontend/src/app/utilities/constructReassessmentReductions.js
index 3154703f9..f3db62a71 100644
--- a/frontend/src/app/utilities/constructReassessmentReductions.js
+++ b/frontend/src/app/utilities/constructReassessmentReductions.js
@@ -1,46 +1,49 @@
+import Big from "big.js"
+
const constructReassessmentReductions = (reductions, prevReductions) => {
const result = {
reductionsToUpdate: [],
reductionsToAdd: []
}
+ const bigZero = new Big(0)
const potentialReductionsToAdd = []
if (reductions && prevReductions) {
for (const reduction of reductions) {
let foundCorrespondingPrevReduction = false
for (const prevReduction of prevReductions) {
if (reduction.type === prevReduction.type && reduction.modelYear === prevReduction.modelYear) {
- if (reduction.creditA !== prevReduction.creditA) {
+ if (!(reduction.creditA.eq(prevReduction.creditA))) {
potentialReductionsToAdd.push({
creditClass: 'A',
modelYear: reduction.modelYear,
- oldValue: prevReduction.creditA,
- newValue: reduction.creditA
+ oldValue: prevReduction.creditA.toNumber(),
+ newValue: reduction.creditA.toNumber()
})
}
- if (reduction.creditB !== prevReduction.creditB) {
+ if (!(reduction.creditB.eq(prevReduction.creditB))) {
potentialReductionsToAdd.push({
creditClass: 'B',
modelYear: reduction.modelYear,
- oldValue: prevReduction.creditB,
- newValue: reduction.creditB
+ oldValue: prevReduction.creditB.toNumber(),
+ newValue: reduction.creditB.toNumber()
})
}
foundCorrespondingPrevReduction = true
}
}
if (!foundCorrespondingPrevReduction) {
- if (reduction.creditA) {
+ if (!(reduction.creditA.eq(bigZero))) {
result.reductionsToAdd.push({
creditClass: 'A',
modelYear: reduction.modelYear,
- value: reduction.creditA
+ value: reduction.creditA.toNumber()
})
}
- if (reduction.creditB) {
+ if (!(reduction.creditB.eq(bigZero))) {
result.reductionsToAdd.push({
creditClass: 'B',
modelYear: reduction.modelYear,
- value: reduction.creditB
+ value: reduction.creditB.toNumber()
})
}
}
diff --git a/frontend/src/app/utilities/getSnapshottedReductions.js b/frontend/src/app/utilities/getSnapshottedReductions.js
new file mode 100644
index 000000000..f77e75662
--- /dev/null
+++ b/frontend/src/app/utilities/getSnapshottedReductions.js
@@ -0,0 +1,16 @@
+import Big from 'big.js'
+
+const getSnapshottedComplianceRatioReductions = (complianceResponseDetails) => {
+ const result = {}
+ const reductionValueCategories = ['complianceRatioTotalReduction', 'complianceRatioClassAReduction', 'complianceRatioUnspecifiedReduction']
+ complianceResponseDetails.forEach((item) => {
+ const category = item.category
+ const reductionValue = item.reductionValue
+ if (reductionValue !== undefined && reductionValue !== null && reductionValueCategories.indexOf(category) > -1) {
+ result[category] = new Big(reductionValue)
+ }
+ })
+ return result
+}
+
+export default getSnapshottedComplianceRatioReductions
\ No newline at end of file
diff --git a/frontend/src/compliance/AssessmentContainer.js b/frontend/src/compliance/AssessmentContainer.js
index 70f2c71ab..12c5fa48c 100644
--- a/frontend/src/compliance/AssessmentContainer.js
+++ b/frontend/src/compliance/AssessmentContainer.js
@@ -17,6 +17,7 @@ import getTotalReductionBig from '../app/utilities/getTotalReductionBig'
import getClassAReductionBig from '../app/utilities/getClassAReductionBig'
import getUnspecifiedClassReductionBig from '../app/utilities/getUnspecifiedClassReductionBig'
import { convertBalances, convertCarryOverDeficits } from '../app/utilities/convertToBig'
+import getSnapshottedComplianceRatioReductions from '../app/utilities/getSnapshottedReductions'
const AssessmentContainer = (props) => {
const { keycloak, user } = props
@@ -276,6 +277,8 @@ const AssessmentContainer = (props) => {
const complianceResponseDetails =
creditActivityResponse.data.complianceObligation
+ const snapshottedComplianceRatioReductions = getSnapshottedComplianceRatioReductions(complianceResponseDetails)
+
const {
creditBalanceEnd,
creditBalanceStart,
@@ -332,7 +335,15 @@ const AssessmentContainer = (props) => {
filteredRatios.zevClassA,
tempSupplierClass
)
- setTotalReduction(new Big(tempTotalReduction.toFixed(2)))
+
+ const calculatedComplianceRatioReductions = {
+ complianceRatioTotalReduction: tempTotalReduction,
+ complianceRatioClassAReduction: classAReduction,
+ complianceRatioUnspecifiedReduction: leftoverReduction
+ }
+ const complianceRatioReductions = { ...calculatedComplianceRatioReductions, ...snapshottedComplianceRatioReductions }
+
+ setTotalReduction(new Big(complianceRatioReductions.complianceRatioTotalReduction.toFixed(2)))
const tempBalances = getNewBalancesStructure(provisionalBalance)
@@ -342,14 +353,14 @@ const AssessmentContainer = (props) => {
const tempClassAReductions = [
{
modelYear: Number(modelYear.name),
- value: new Big (classAReduction.toFixed(2))
+ value: new Big(complianceRatioReductions.complianceRatioClassAReduction.toFixed(2))
}
]
const tempUnspecifiedReductions = [
{
modelYear: Number(modelYear.name),
- value: new Big(leftoverReduction.toFixed(2))
+ value: new Big(complianceRatioReductions.complianceRatioUnspecifiedReduction.toFixed(2))
}
]
@@ -524,6 +535,36 @@ const AssessmentContainer = (props) => {
}
}
+ for (const reduction of classAReductions) {
+ reportDetailsArray.push({
+ category: 'complianceRatioClassAReduction',
+ year: reduction.modelYear,
+ a: 0,
+ b: 0,
+ reduction_value: reduction.value.toString()
+ })
+ }
+
+ for (const reduction of unspecifiedReductions) {
+ reportDetailsArray.push({
+ category: 'complianceRatioUnspecifiedReduction',
+ year: reduction.modelYear,
+ a: 0,
+ b: 0,
+ reduction_value: reduction.value.toString()
+ })
+ }
+
+ if (totalReduction instanceof Big) {
+ reportDetailsArray.push({
+ category: 'complianceRatioTotalReduction',
+ year: reportYear,
+ a: 0,
+ b: 0,
+ reduction_value: totalReduction.toString()
+ })
+ }
+
const ObligationData = {
reportId: id,
creditActivity: reportDetailsArray
diff --git a/frontend/src/compliance/components/ComplianceCalculatorDetailsTotals.js b/frontend/src/compliance/components/ComplianceCalculatorDetailsTotals.js
index 56c3673ee..21b112067 100644
--- a/frontend/src/compliance/components/ComplianceCalculatorDetailsTotals.js
+++ b/frontend/src/compliance/components/ComplianceCalculatorDetailsTotals.js
@@ -43,11 +43,11 @@ const ComplianceCalculatorDetailsTotals = (props) => {
Class A Credit Total: |
- {user.organization.balance.A} |
+ {formatNumeric(user.organization.balance.A)} |
Class B Credit Total: |
- {user.organization.balance.B} |
+ {formatNumeric(user.organization.balance.B)} |
diff --git a/frontend/src/dashboard/components/__tests__/DashboardContainer.test.js b/frontend/src/dashboard/components/__tests__/DashboardContainer.test.js
new file mode 100644
index 000000000..01eccad8f
--- /dev/null
+++ b/frontend/src/dashboard/components/__tests__/DashboardContainer.test.js
@@ -0,0 +1,154 @@
+import React from 'react'
+import { describe, expect, test, beforeEach, jest } from '@jest/globals'
+import { render, screen, act } from '@testing-library/react'
+import { BrowserRouter as Router } from 'react-router-dom'
+import axios from 'axios'
+import DashboardContainer from '../../DashboardContainer'
+import '@testing-library/jest-dom/extend-expect'
+
+jest.mock('axios')
+
+const mockUser = {
+ hasPermission: jest.fn(),
+ isGovernment: false,
+ organization: {
+ name: 'Test Organization',
+ organizationAddress: [
+ {
+ id: 1,
+ addressType: {
+ addressType: 'Records'
+ },
+ addressLine1: '123 Main St',
+ addressLine2: 'Suite 100',
+ city: 'Test City',
+ state: 'TS',
+ zip: '12345'
+ }
+ ]
+ }
+}
+
+describe('DashboardContainer', () => {
+ beforeEach(() => {
+ axios.get.mockClear()
+ })
+
+ test('renders without crashing', () => {
+ render(
+
+
+
+ )
+ })
+
+ test('fetches and sets dashboard data', async () => {
+ const mockData = {
+ data: [
+ {
+ activity: {
+ vehicle: [
+ { status: 'VALIDATED', total: 10 },
+ { status: 'REJECTED', total: 5 },
+ { status: 'DRAFT', total: 2 },
+ { status: 'SUBMITTED', total: 8 },
+ { status: 'CHANGES_REQUESTED', total: 1 }
+ ],
+ creditTransfer: [
+ { status: 'SUBMITTED', total: 4 },
+ { status: 'APPROVED', total: 3 },
+ { status: 'VALIDATED', total: 6 },
+ { status: 'REJECTED', total: 1 },
+ { status: 'DISAPPROVED', total: 1 }
+ ],
+ modelYearReport: [
+ { status: 'DRAFT', total: 2 },
+ { status: 'SUBMITTED', total: 3 },
+ { status: 'ASSESSED', total: 1 }
+ ],
+ creditRequest: [
+ { status: 'DRAFT', total: 2 },
+ { status: 'SUBMITTED', total: 4 },
+ { status: 'VALIDATED', total: 3 }
+ ],
+ creditAgreement: [
+ { status: 'ISSUED', total: 2 },
+ { status: 'DRAFT', total: 1 },
+ { status: 'RETURNED', total: 1 },
+ { status: 'RECOMMENDED', total: 1 }
+ ]
+ }
+ }
+ ]
+ }
+
+ axios.get.mockResolvedValueOnce(mockData)
+
+ mockUser.hasPermission.mockImplementation((permission) => {
+ if (permission === 'VIEW_ZEV') {
+ return true
+ }
+ return false
+ })
+
+ await act(async () => {
+ render(
+
+
+
+ )
+ })
+
+ await screen.findByText('Test Organization')
+
+ const validatedModelsElement = await screen.findByText((content, element) => {
+ return content.includes('10 validated by Government of B.C.')
+ })
+ expect(validatedModelsElement).toBeTruthy()
+
+ const rejectedModelsElement = await screen.findByText((content, element) => {
+ return content.includes('5 rejected by Government of B.C.')
+ })
+ expect(rejectedModelsElement).toBeTruthy()
+
+ const draftModelsElement = await screen.findByText((content, element) => {
+ return content.includes('2 saved in draft')
+ })
+
+ expect(draftModelsElement).toBeTruthy()
+
+ const submittedModelsElement = await screen.findByText((content, element) => {
+ return content.includes('8 awaiting validation')
+ })
+ expect(submittedModelsElement).toBeTruthy()
+
+ })
+
+ test('does not display loading indicator when finished fetching data', async () => {
+ const mockData = {
+ data: [
+ {
+ activity: {
+ vehicle: [],
+ creditTransfer: [],
+ modelYearReport: [],
+ creditRequest: [],
+ creditAgreement: []
+ }
+ }
+ ]
+ }
+
+ axios.get.mockResolvedValueOnce(mockData)
+
+ await act(async () => {
+ render(
+
+
+
+ )
+ })
+
+ expect(screen.queryByText('Loading...')).not.toBeInTheDocument()
+ })
+})
diff --git a/frontend/src/supplementary/SupplementaryContainer.js b/frontend/src/supplementary/SupplementaryContainer.js
index ade75be64..20d07de6b 100644
--- a/frontend/src/supplementary/SupplementaryContainer.js
+++ b/frontend/src/supplementary/SupplementaryContainer.js
@@ -587,7 +587,7 @@ const SupplementaryContainer = (props) => {
`${ROUTES_SUPPLEMENTARY.ASSESSED_SUPPLEMENTALS.replace(':id', id)}`
),
axios.get(
- `${ROUTES_COMPLIANCE.REPORT_COMPLIANCE_DETAILS_BY_ID.replace(':id', id)}?most_recent_ldv_sales=true`
+ `${ROUTES_COMPLIANCE.REPORT_COMPLIANCE_DETAILS_BY_ID.replace(':id', id)}?most_recent_ldv_sales=true&use_from_gov_snapshot=True`
),
axios.get(ROUTES_COMPLIANCE.RATIOS),
axios.get(
diff --git a/frontend/src/supplementary/components/CreditActivity.js b/frontend/src/supplementary/components/CreditActivity.js
index a2bda766e..d476f99b2 100644
--- a/frontend/src/supplementary/components/CreditActivity.js
+++ b/frontend/src/supplementary/components/CreditActivity.js
@@ -1,15 +1,17 @@
import PropTypes from 'prop-types'
import React, { useEffect } from 'react'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
+import Big from 'big.js'
import ComplianceObligationTableCreditsIssued from '../../compliance/components/ComplianceObligationTableCreditsIssued'
import Loading from '../../app/components/Loading'
import formatNumeric from '../../app/utilities/formatNumeric'
import getComplianceObligationDetails from '../../app/utilities/getComplianceObligationDetails'
-import calculateCreditReduction from '../../app/utilities/calculateCreditReduction'
-import getClassAReduction from '../../app/utilities/getClassAReduction'
-import getTotalReduction from '../../app/utilities/getTotalReduction'
-import getUnspecifiedClassReduction from '../../app/utilities/getUnspecifiedClassReduction'
+import calculateCreditReductionBig from '../../app/utilities/calculateCreditReductionBig'
+import getClassAReductionBig from '../../app/utilities/getClassAReductionBig'
+import getTotalReductionBig from '../../app/utilities/getTotalReductionBig'
+import getUnspecifiedClassReductionBig from '../../app/utilities/getUnspecifiedClassReductionBig'
+import { convertBalances, convertCarryOverDeficits } from '../../app/utilities/convertToBig'
const CreditActivity = (props) => {
const {
@@ -26,6 +28,8 @@ const CreditActivity = (props) => {
isEditable
} = props
+ const bigZero = new Big(0)
+
let newLdvSales =
newData && newData.supplierInfo && newData.supplierInfo.ldvSales
@@ -104,34 +108,39 @@ const CreditActivity = (props) => {
}
}
- const totalReduction = getTotalReduction(ldvSales, ratios.complianceRatio)
- const classAReduction = getClassAReduction(
+ const totalReduction = getTotalReductionBig(ldvSales, ratios.complianceRatio, supplierClass)
+ const classAReduction = getClassAReductionBig(
ldvSales,
ratios.zevClassA,
supplierClass
)
- const leftoverReduction = getUnspecifiedClassReduction(
- totalReduction,
- classAReduction
+ const leftoverReduction = getUnspecifiedClassReductionBig(
+ ldvSales,
+ ratios.complianceRatio,
+ ratios.zevClassA,
+ supplierClass
)
- const newTotalReduction = getTotalReduction(
+ const newTotalReduction = getTotalReductionBig(
newLdvSales,
- ratios.complianceRatio
+ ratios.complianceRatio,
+ supplierClass
)
- const newClassAReduction = getClassAReduction(
+ const newClassAReduction = getClassAReductionBig(
newLdvSales,
ratios.zevClassA,
supplierClass
)
- const newLeftoverReduction = getUnspecifiedClassReduction(
- newTotalReduction,
- newClassAReduction
+ const newLeftoverReduction = getUnspecifiedClassReductionBig(
+ newLdvSales,
+ ratios.complianceRatio,
+ ratios.zevClassA,
+ supplierClass
)
const classAReductions = [
{
modelYear: Number(reportYear),
- value: Number(classAReduction)
+ value: new Big(classAReduction.toFixed(2))
}
]
@@ -139,16 +148,16 @@ const CreditActivity = (props) => {
{
modelYear: Number(reportYear),
value:
- Number(newClassAReduction) > 0
- ? Number(newClassAReduction)
- : Number(classAReduction)
+ (new Big(newClassAReduction.toFixed(2))).gt(bigZero)
+ ? new Big(newClassAReduction.toFixed(2))
+ : new Big(classAReduction.toFixed(2))
}
]
const unspecifiedReductions = [
{
modelYear: Number(reportYear),
- value: Number(leftoverReduction)
+ value: new Big(leftoverReduction.toFixed(2))
}
]
@@ -156,9 +165,9 @@ const CreditActivity = (props) => {
{
modelYear: Number(reportYear),
value:
- Number(newLeftoverReduction) > 0
- ? Number(newLeftoverReduction)
- : Number(leftoverReduction)
+ (new Big(newLeftoverReduction.toFixed(2))).gt(bigZero)
+ ? new Big(newLeftoverReduction.toFixed(2))
+ : new Big(leftoverReduction.toFixed(2))
}
]
@@ -183,7 +192,9 @@ const CreditActivity = (props) => {
})
})
- const creditReduction = calculateCreditReduction(
+ convertBalances(tempBalances)
+ convertCarryOverDeficits(carryOverDeficits)
+ const creditReduction = calculateCreditReductionBig(
tempBalances,
classAReductions,
unspecifiedReductions,
@@ -191,7 +202,8 @@ const CreditActivity = (props) => {
carryOverDeficits
)
- const newCreditReduction = calculateCreditReduction(
+ convertBalances(newTempBalances)
+ const newCreditReduction = calculateCreditReductionBig(
newTempBalances,
newClassAReductions,
newUnspecifiedReductions,
@@ -204,8 +216,8 @@ const CreditActivity = (props) => {
const getAssociatedDeduction = (deduction, arr) => {
const values = {
- creditA: 0,
- creditB: 0
+ creditA: bigZero,
+ creditB: bigZero
}
const found = arr.find(
@@ -338,7 +350,7 @@ const CreditActivity = (props) => {
|
{newLdvSales >= 0 && (
@@ -361,7 +373,7 @@ const CreditActivity = (props) => {
|
{
|
{
|
{
|
{getAssociatedDeduction(deduction, deductions)
- .creditA > 0 && (
+ .creditA.gt(bigZero) && (
-
{formatNumeric(
@@ -487,12 +499,12 @@ const CreditActivity = (props) => {
)}
)}
- {!getAssociatedDeduction(deduction, deductions)
- .creditA && 0.00}
+ {getAssociatedDeduction(deduction, deductions)
+ .creditA.eq(bigZero) && 0.00}
|
{getAssociatedDeduction(deduction, deductions)
- .creditB > 0 && (
+ .creditB.gt(bigZero) && (
-
{formatNumeric(
@@ -502,40 +514,32 @@ const CreditActivity = (props) => {
)}
)}
- {!getAssociatedDeduction(deduction, deductions)
- .creditB && 0.00}
+ {getAssociatedDeduction(deduction, deductions)
+ .creditB.eq(bigZero) && 0.00}
|
- {deduction.creditA > 0 && (
+ {deduction.creditA.gt(bigZero) && (
-{formatNumeric(deduction.creditA, 2)}
)}
- {!deduction.creditA && 0.00}
+ {deduction.creditA.eq(bigZero) && 0.00}
|
- {deduction.creditB > 0 && (
+ {deduction.creditB.gt(bigZero) && (
-{formatNumeric(deduction.creditB, 2)}
)}
- {!deduction.creditB && 0.00}
+ {deduction.creditB.eq(bigZero) && 0.00}
|
))}
@@ -596,8 +600,8 @@ const CreditActivity = (props) => {
• {deduction.modelYear} Credits
- {getAssociatedDeduction(deduction, deductions).creditA >
- 0 && (
+ {getAssociatedDeduction(deduction, deductions).creditA.gt(bigZero)
+ && (
-
{formatNumeric(
@@ -607,12 +611,12 @@ const CreditActivity = (props) => {
)}
)}
- {!getAssociatedDeduction(deduction, deductions)
- .creditA && 0.00}
+ {getAssociatedDeduction(deduction, deductions)
+ .creditA.eq(bigZero) && 0.00}
|
- {getAssociatedDeduction(deduction, deductions).creditB >
- 0 && (
+ {getAssociatedDeduction(deduction, deductions).creditB.gt(bigZero)
+ && (
-
{formatNumeric(
@@ -622,36 +626,32 @@ const CreditActivity = (props) => {
)}
)}
- {!getAssociatedDeduction(deduction, deductions)
- .creditB && 0.00}
+ {getAssociatedDeduction(deduction, deductions)
+ .creditB.eq(bigZero) && 0.00}
|
- {deduction.creditA > 0 && (
+ {deduction.creditA.gt(bigZero) && (
-{formatNumeric(deduction.creditA, 2)}
)}
- {!deduction.creditA && 0.00}
+ {deduction.creditA.eq(bigZero) && 0.00}
|
- {deduction.creditB > 0 && (
+ {deduction.creditB.gt(bigZero) && (
-{formatNumeric(deduction.creditB, 2)}
)}
- {!deduction.creditB && 0.00}
+ {deduction.creditB.eq(bigZero) && 0.00}
|
))}
@@ -716,7 +716,7 @@ const CreditActivity = (props) => {
{
|
{
• Credit Deficit
|
- {Number(deficit.creditA) > 0 && (
+ {deficit.creditA.gt(bigZero) && (
({formatNumeric(deficit.creditA)})
)}
- {!deficit.creditA && 0.00}
+ {deficit.creditA.eq(bigZero) && 0.00}
|
- {Number(deficit.creditB) > 0 && (
+ {deficit.creditB.gt(bigZero) && (
({formatNumeric(deficit.creditB)})
)}
- {!deficit.creditB && 0.00}
+ {deficit.creditB.eq(bigZero) && 0.00}
|
- {Number(deficit.newCreditA) > 0 && (
+ {deficit.newCreditA.gt(bigZero) && (
({formatNumeric(deficit.newCreditA)})
)}
|
- {Number(deficit.newCreditB) > 0 && (
+ {deficit.newCreditB.gt(bigZero) && (
({formatNumeric(deficit.newCreditB)})
)}
|
diff --git a/frontend/src/supplementary/components/ReassessmentDetailsPage.js b/frontend/src/supplementary/components/ReassessmentDetailsPage.js
index 132b4792d..00adeeb00 100644
--- a/frontend/src/supplementary/components/ReassessmentDetailsPage.js
+++ b/frontend/src/supplementary/components/ReassessmentDetailsPage.js
@@ -1,17 +1,19 @@
// intended only for directors to view an assessed reassessment
import React, { useEffect } from 'react'
-import calculateCreditReduction from '../../app/utilities/calculateCreditReduction'
-import getClassAReduction from '../../app/utilities/getClassAReduction'
+import Big from 'big.js'
+import calculateCreditReductionBig from '../../app/utilities/calculateCreditReductionBig'
+import getClassAReductionBig from '../../app/utilities/getClassAReductionBig'
import getComplianceObligationDetails from '../../app/utilities/getComplianceObligationDetails'
-import getTotalReduction from '../../app/utilities/getTotalReduction'
-import getUnspecifiedClassReduction from '../../app/utilities/getUnspecifiedClassReduction'
+import getTotalReductionBig from '../../app/utilities/getTotalReductionBig'
+import getUnspecifiedClassReductionBig from '../../app/utilities/getUnspecifiedClassReductionBig'
import ComplianceObligationAmountsTable from '../../compliance/components/ComplianceObligationAmountsTable'
import ComplianceObligationReductionOffsetTable from '../../compliance/components/ComplianceObligationReductionOffsetTable'
import ComplianceObligationTableCreditsIssued from '../../compliance/components/ComplianceObligationTableCreditsIssued'
import NoticeOfAssessmentSection from '../../compliance/components/NoticeOfAssessmentSection'
import constructReassessmentReductions from '../../app/utilities/constructReassessmentReductions'
import { getNewBalancesStructure } from '../../app/utilities/getNewStructures'
+import { convertBalances, convertCarryOverDeficits } from '../../app/utilities/convertToBig'
const ReassessmentDetailsPage = (props) => {
// from props, reconcile existing data with new data, then pass to downstream components
@@ -64,14 +66,14 @@ const ReassessmentDetailsPage = (props) => {
const reportYear = Number(details.assessmentData.modelYear)
- let classAReductionValue = getClassAReduction(
+ let classAReductionValue = getClassAReductionBig(
ldvSales,
ratios.zevClassA,
supplierClass
)
const prevClassAReductionValue = classAReductionValue
if (newData && newData.supplierInfo && newData.supplierInfo.ldvSales) {
- classAReductionValue = getClassAReduction(
+ classAReductionValue = getClassAReductionBig(
newData.supplierInfo.ldvSales,
ratios.zevClassA,
supplierClass
@@ -81,14 +83,14 @@ const ReassessmentDetailsPage = (props) => {
const prevClassAReductions = [
{
modelYear: reportYear,
- value: prevClassAReductionValue
+ value: new Big(prevClassAReductionValue.toFixed(2))
}
]
const classAReductions = [
{
modelYear: reportYear,
- value: classAReductionValue
+ value: new Big(classAReductionValue.toFixed(2))
}
]
@@ -97,36 +99,43 @@ const ReassessmentDetailsPage = (props) => {
sales = newData.supplierInfo.ldvSales
}
- let totalReduction = getTotalReduction(ldvSales, ratios.complianceRatio)
- const prevTotalReduction = totalReduction
+ let totalReduction = getTotalReductionBig(ldvSales, ratios.complianceRatio, supplierClass)
if (newData && newData.supplierInfo && newData.supplierInfo.ldvSales) {
- totalReduction = getTotalReduction(
+ totalReduction = getTotalReductionBig(
newData.supplierInfo.ldvSales,
- ratios.complianceRatio
+ ratios.complianceRatio,
+ supplierClass
)
}
+ totalReduction = new Big(totalReduction.toFixed(2))
- const prevUnspecifiedReductionValue = getUnspecifiedClassReduction(
- prevTotalReduction,
- prevClassAReductionValue
- )
-
- const unspecifiedReductionValue = getUnspecifiedClassReduction(
- totalReduction,
- classAReductionValue
+ let unspecifiedReductionValue = getUnspecifiedClassReductionBig(
+ ldvSales,
+ ratios.complianceRatio,
+ ratios.zevClassA,
+ supplierClass
)
+ const prevUnspecifiedReductionValue = unspecifiedReductionValue
+ if (newData && newData.supplierInfo && newData.supplierInfo.ldvSales) {
+ unspecifiedReductionValue = getUnspecifiedClassReductionBig(
+ newData.supplierInfo.ldvSales,
+ ratios.complianceRatio,
+ ratios.zevClassA,
+ supplierClass
+ )
+ }
const prevUnspecifiedReductions = [
{
modelYear: reportYear,
- value: prevUnspecifiedReductionValue
+ value: new Big(prevUnspecifiedReductionValue.toFixed(2))
}
]
const unspecifiedReductions = [
{
modelYear: reportYear,
- value: unspecifiedReductionValue
+ value: new Big(unspecifiedReductionValue.toFixed(2))
}
]
@@ -229,17 +238,22 @@ const ReassessmentDetailsPage = (props) => {
details.assessmentData && details.assessmentData.creditReductionSelection
const transformedBalances = getNewBalancesStructure(prevProvisionalBalance)
+ convertBalances(transformedBalances)
const transformedNewBalances = getNewBalancesStructure(newBalances)
+ convertBalances(transformedNewBalances)
+
+ convertCarryOverDeficits(prevCarryOverDeficits)
+ convertCarryOverDeficits(carryOverDeficits)
- const prevCreditReduction = calculateCreditReduction(
+ const prevCreditReduction = calculateCreditReductionBig(
transformedBalances,
prevClassAReductions,
prevUnspecifiedReductions,
creditReductionSelection,
prevCarryOverDeficits
)
- const creditReduction = calculateCreditReduction(
+ const creditReduction = calculateCreditReductionBig(
transformedNewBalances,
classAReductions,
unspecifiedReductions,
diff --git a/openshift/release-management/emergency-release-1.22.0.1.drawio b/openshift/release-management/emergency-release-1.22.0.1.drawio
deleted file mode 100644
index 10da0ae66..000000000
--- a/openshift/release-management/emergency-release-1.22.0.1.drawio
+++ /dev/null
@@ -1 +0,0 @@
-7V1tk5u2Fv41O/2UHZB4/ZjdZHOnbaZpcpu290uGBXlNgo2L2V1vf/3l3aAjY2KEJOxkJoktwMY6z3N0XsUVvl3t3iXeZvk+Dkh0hbRgd4XfXCGk67aV/ZePvJQjFnLKgYckDKqT9gOfwn9JNahVo49hQLadE9M4jtJw0x304/Wa+GlnzEuS+Ll72iKOut+68R4IGPjkexEc/TMM0mU56pjafvw/JHxY1t+sa9WRlVefXA1sl14QP7eG8NsrfJvEcVq+Wu1uSZRPXj0v5XV3B442N5aQdTrkAu3mU/DP7/b63f2XP97cfraXf/zv+RWu7i19qX8wCbLfX72Nk3QZP8RrL3q7H71J4sd1QPJP1bJ3+3N+jeNNNqhng19Jmr5UwvQe0zgbWqarqDqa3XDy8ld1ffHm7/zNtVm/fbNrH3zzUr2Dv7iahG38mPik52fWyPGSB5L2nIfK8/I5aH1BNZ/vSLwi2f1kJyQk8tLwqYsRr4LaQ3PeXhrZi0og3yGc6nOfvOix+ibnfoEWzgIKLYoyQuTCeV6GKfm08YrpeM442Z16b7spWbIId7kID8/pE0lSsuudhYbkFcIrijeIf24RphpatrhSj3GfN2dOoCa7MP2r/ozs9d+t1/tL8jfjaYAG0sDgTYPq0g9xmN1yAxxsdoGj04gob7S6igJFcxun46SZN7FAYQhc6xX4qdBaxOv0No7ipPhtOP9zd8cbQMHXr97yw0fLTb693uxe2+Yvdze15lJEjyKoRw0XuY7JUY9Sc61pOPubjW/TJP5GWEc4aF6ETLU0r6sMoUpyTKBDjYE61FSKAgagALK9BSFILVPC0LorApYN6Nq0UR/R4o1qcyATLKWYYAIm2Jl3qmkMo1EmE0ysGhPUMZaUY4I1kAm2UkywABOw7jrYVmxNsEzVmGBLYYJwVNsDUa2rpeBtAGviEsNCDNtUJqxtRzVYQzcpExjxtuSVfo3wNbSEsl+adieq6+2s43U+uYswiqghLwof1tlbP5s/ko3f5PMW+l70ujqwCoMgOiSZLpc4yALRKsaEsnAZskCTyQIDWay8bT5VZysDiwoiMkRgCBUBdJladEBnTQe9G1bQXdl0gEZ7Sxb6OctCpzxi+bKAZiPS74PgnmEVnby+gqhZE7v87pjm8Kl2ddVWZIcx1UE119qHj1f5DWIr9xS126W3zsyvPDua30pmPq68ArZR/gsfN4GXFkeX+b+hf+8XU7MM/SgfqI6Gi/yf9KdtPhQuFqSYTTW5BQKumsYLCJRlZjNWItuchnLMaDojTv3D864oUln1RzMSlXGriI9S33Y7CmX7jr/gqUQPMYSRkuDHHdOg1itdshKtWXnQjruGgS01NBwHaWDaesDirAcmDQ0pqmxKtdSX/zweOnFlqaW+224nSm3ka/c8I4KTJkQNXZ726Vt2WjNq2tq9ixk8UHNGLUexGYXuRzZT37wHcv11G6/PWJW7VK5fk6zKYZz1MgQBGCHQI2cKAnqJW2+Vu3N+7RN6uUN3yEtXQzATunO0ScqqkmEJDE8lMGj87315uNRegHyOazZLpL+tQ5ehE2y5NAG5qEsgg2EDWCI1ng4N1UZASFUNx0EQtquaIKB9uxcETLiejSAsSzVBwIThXhDW+QqCLpOSLwiYNrwMs5gu3WRJQqhZzEgaNjFWrWNwVcmTIDeZ46BJopRJky9x8sVPSJkg2ebjYdoczO3qMqWyyjvf8rfPS7IuR5PC9E72lnh+eJHEq+Yif/ukKCDEZVcYef7GyOKe0Px993P6+b/RL9gg4WZzg5/w+zrodUYhSebPrFbqTkiSdZ68uvWeu55bC5wpMuvBmrdZ9XVO1QLXA++jNOBeqltdeqQFzrApRHBsgWPi5EdueTRUMPd2yVHUh2YP/243oV1t0pUp9J8U5cjpWGfVerPOk5av7Ltr1bva6NSNLRvQchrkZ6H0GfVEbK9BXotzz20r39WGFGPCj/7O4xA/zgWsFBcY2QE
1G9sM1cjAmCB1vcYRwMZDge2oBWwY5Feztc1WDdi9vTzn3dpGKxlDXFieLQvon15Ya5vJEMFkrW1MEcAaukttbTMc2XSAPuyltrbJlwWr3+o8Wts01Vbk2ewUxd08nTqlYFJFQiZdx1jax5OlFBB0vy6o4cZVbH1B5+HTTU0ag7bRRJOmryZYVRtgypJTyk5zWDXBInlUC+HiSk4d1QTRV/t7ziWnpmqCQD2COOeSU6SaIGBY8DJKTmljiyUJocYWhnHGBTZ8Oy8t5eZASmnrNzTFajIwjJt8+ufR2y6zsRVJirY36HaUhb11vFHzFuX/RUVvWOx6QjZR/FKUAufM0T4kMZTddultSMGKdIAA7zMuPhTY/+0xjcKcSMV44CXffsuuCtPC+r3WzBbDIrJIOUmO7hlheCQNI9qiMyYTHbR1IT3Wwev86R173RNksi2Uh96d3nz8g5dmglwXI0jDDRnqJ3agHtqIrbMZnFFqScdkCKceG+n26BplVdjmML+H8UnWkU/iV8lIvr397L36iAPn+debf5+TJxT8ydi7wNTM7BRGJezI3BjgHgMKh4OemJ5xgZqUOXGz2pLjdLZSjDtKX+ZcVYqqnThmnofZGBjLVt3u6vKhbOXFMWjxY80xfK7xcR4cA7pINsfOf/PsbcaJlFqyi7G7MJ8q3kRkVHAcXhUE1Gv03WSLLpprOoHNAINUujTmnzJ0mdWaxBHWOqPngH2iIxPYjBqOqdqyRi4EXWC70oEtp5VAcF8WHyo4A6lQKi/uxhamdo1xBnZmZWug99I6bZOfsD38Paj2UOrthLTKqDvkadE39v0X6N0LshflTfP1x+Q8JUF4hl8s2LFcvQ8LZMT3lo1aEej6L/krwmwKYTjCvbY3j+t2JBPujK1jp+ovGwVqTOVodZGbNLJn7uz9XZ5sGBpVQoZUNsDoz1Q9ZqPYYNJPr5DPBjm7McyUDYwHbLJPtKSyAaZ6p2oyG8UGCyvHBjlbTiiAbMZjAtlGvtSoJYLdIVN1mY1Ctm0ph+zeXg7V28zG+VVAzQjsM2MLA/qqM+gzG2f5UNFOgX1mbBHAQo8Z9ZmN4wNVL+UI7ANgrynQl51Rn9koWVAFUPJFAUtV1GkzGzXTjqvakswoUp5VMxLXuJB85PfsUjzTLpeRmoniC3Pre6ESgm7tXNpcxjkTtnKSgDn2ufS5jAtYGMpJAjrEc2l04RtIlS+JGT9ziG+GR+TTn5iiMOFyLSCKJ7rKcGha3nDZ4hNUZwhDHeq0HY3DvatYur6On8y9d4UPPST3rtCWgn5684p57KMm7l4xZtO+QtUKy3fxDQYDLyKfZAwtgi95Imt1MqDTr2jXCFAC0qEtpw7+fNtGjKH19YbUFKwBfX81O0d05SrPDIYtHO6u8ic7rRdR6Oel2Fq4Ln7mJXiOmYio9AfDc9Snerxmj7PULSQjWDMMGNWSW2BAJY6kOx8mNBLBjNV7Eywisqv06k3LIfEjb7sNfbbC19oKf6C632v4pjtEkPWjD1XlB3xzMV4KAJF2oo/i0P0ogj0UE00LPv0E8HVsjUmg17vNy6lt4HKQ59Abegz2jpFmdz6p6Q8VBT3oQUwHvdMMXXHgG2rBng32qN5k2izh1IZnU1srWIaAHjkTZlv5APvE5VlyPHFwF5zcJd2koOLQBt9QcFtULM2hNx3lhG2T2txU12wR4IZOLFetPdxa5QhRJBN4Fq0LT453I0xrVTQJ8nSd+qJ6c+ppkQcT9tLsBY7YO9T7KwZ8DqVE3FPB51CQcF1RWk8TgT1YooBzGRW3Xm1MeK6xJodaGFkF6K7ISJMJg4EXIwwdUQEDy5UsjZqPrDoqRhLvfCSBqU4lkTsGsyUhpz1bfsrUqmB21MOwpLZa17ep+v5KdPYHM4AtNEJu/ei0/g4yDA1jlsu6NDLAIMncNp2h/DYFeDLLh/CNKyWkdRWGMhDaHWnN+il84ywiKjysI5thmwoVxqwfwzcuzU2Xlku
XBfTaLqhnjy5Cky8OKTuUCS4Mqyf5uC2EZNpCNnShz6XSn8a9dBvJUWZnvv49V2dSellvjHSUYY7ULS7r21S9OK15AI86fLnc3W9M0Mcte9F2oA3l+paj6zwXCg4zZ9F7ExsMt0wojF24xLZgbJwzjA16+47mqaiyYOzCitfLKNsGvDCRbFHAhbHvgWuFvp/n49a45kAdRtaNmaY/4XFr2dskjtN2AjubjuX7OCD5Gf8H
\ No newline at end of file
diff --git a/openshift/templates/certificate/Readme.md b/openshift/templates/certificate/Readme.md
deleted file mode 100644
index 42ac9216b..000000000
--- a/openshift/templates/certificate/Readme.md
+++ /dev/null
@@ -1,44 +0,0 @@
-
-### Decrypt an encrypted private key
-The encrypted private key file is encrypted-private-key.txt
-The passphase used to encrypt the original private key is: examplevalueexamplevalueexamplevalueexamplevalueexamplevalue
-Command to decrypt the encrypted-private-key.txt is:
- $ openssl rsa -in enkey -out dekey
- Enter pass phrase for enkey: //enter the above passphase
- writing RSA key
-
-### Renew a certificate
-1. copy the existing private key to lowcarbonfuels.gov.bc.ca.key
-2. run teh following command to create a new csr based on the existing private key
-openssl req -new -newkey rsa:2048 -nodes -key ./lowcarbonfuels.gov.bc.ca.key -out ./lowcarbonfuels.gov.bc.ca.csr
-You are about to be asked to enter information that will be incorporated
-into your certificate request.
-What you are about to enter is what is called a Distinguished Name or a DN.
-There are quite a few fields but you can leave some blank
-For some fields there will be a default value,
-If you enter '.', the field will be left blank.
------
-Country Name (2 letter code) []:CA
-State or Province Name (full name) []:British Columbia
-Locality Name (eg, city) []:Victoria
-Organization Name (eg, company) []:Government of the Province of British Columbia
-Organizational Unit Name (eg, section) []:
-Common Name (eg, fully qualified host name) []:lowcarbonfuels.gov.bc.ca
-Email Address []:
-
-Please enter the following 'extra' attributes
-to be sent with your certificate request
-A challenge password []:
-3. list the csr
-openssl req -text -noout -in ./lowcarbonfuels.gov.bc.ca.csr
-
-### Commands for creating csr for itvr
-* create password protecting privatekey
- openssl rand -base64 48 > passphrase.txt
-* create encrypted privatekey protected by the above password
- openssl genrsa -aes256 -passout file:passphrase.txt -out server.key 2048
-* create the csr
- openssl req -new -newkey rsa:2048 -nodes -key ./server.key -out ./electric-vehicle-rebates.gov.bc.ca.csr
-* decrypt the encrypted privatekey, it requires to enter the password in passphrase.txt
- openssl rsa -in ./server.key -out ./privatekey.txt
-
\ No newline at end of file
diff --git a/openshift/templates/cleanup/Dockerfile b/openshift/templates/cleanup/Dockerfile
new file mode 100644
index 000000000..876880aa9
--- /dev/null
+++ b/openshift/templates/cleanup/Dockerfile
@@ -0,0 +1,4 @@
+FROM registry.redhat.io/openshift4/ose-cli
+RUN mkdir /.kube && \
+ chgrp -R root /.kube && \
+ chmod -R g+w /.kube
diff --git a/openshift/templates/cleanup/cleanup-bc-docker.yaml b/openshift/templates/cleanup/cleanup-bc-docker.yaml
new file mode 100644
index 000000000..e06caf635
--- /dev/null
+++ b/openshift/templates/cleanup/cleanup-bc-docker.yaml
@@ -0,0 +1,52 @@
+apiVersion: template.openshift.io/v1
+kind: Template
+metadata:
+ creationTimestamp: null
+ name: frontend
+parameters:
+ - name: GIT_URL
+ displayName:
+ description: zeva repo
+ required: true
+ - name: GIT_REF
+ displayName:
+ description: zeva branch name of the pr
+ required: true
+objects:
+ - apiVersion: image.openshift.io/v1
+ kind: ImageStream
+ metadata:
+ annotations:
+ description: cleanup
+ creationTimestamp: null
+ name: zeva-cleanup
+ spec:
+ lookupPolicy:
+ local: false
+ - apiVersion: build.openshift.io/v1
+ kind: BuildConfig
+ metadata:
+ name: zeva-cleanup
+ creationTimestamp:
+ spec:
+ output:
+ to:
+ kind: ImageStreamTag
+ name: zeva-cleanup:prod
+ resources:
+ limits:
+ cpu: 1500m
+ memory: 1300Mi
+ requests:
+ cpu: 750m
+ memory: 650Mi
+ source:
+ contextDir: openshift/templates/cleanup
+ git:
+ uri: ${GIT_URL}
+ ref: ${GIT_REF}
+ type: Git
+ strategy:
+ type: Docker
+ dockerStrategy:
+ dockerfilePath: Dockerfile
diff --git a/openshift/templates/cleanup/cleanup-cron.yaml b/openshift/templates/cleanup/cleanup-cron.yaml
new file mode 100644
index 000000000..dcaff806c
--- /dev/null
+++ b/openshift/templates/cleanup/cleanup-cron.yaml
@@ -0,0 +1,114 @@
+apiVersion: template.openshift.io/v1
+kind: Template
+metadata:
+ creationTimestamp: null
+ name: zeva-cleanup
+parameters:
+ - name: LICENSE_PLATE
+ description: license plate for the project
+ required: true
+ - name: LOGIN_TOKEN_SECRET
+ description: The secret having the login token
+ required: true
+objects:
+ - kind: CronJob
+ apiVersion: batch/v1
+ metadata:
+ name: zeva-cleanup
+ spec:
+ schedule: 0 7 * * *
+ concurrencyPolicy: Forbid
+ suspend: false
+ jobTemplate:
+ metadata:
+ creationTimestamp: null
+ spec:
+ backoffLimit: 0
+ template:
+ metadata:
+ creationTimestamp: null
+ spec:
+ containers:
+ - resources:
+ limits:
+ cpu: 100m
+ memory: 100Mi
+ requests:
+ cpu: 50m
+ memory: 50Mi
+ terminationMessagePath: /dev/termination-log
+ name: oc
+ command:
+ - /bin/sh
+ - "-c"
+ env:
+ - name: LOGIN_TOKEN
+ valueFrom:
+ secretKeyRef:
+ name: ${LOGIN_TOKEN_SECRET}
+ key: token
+ imagePullPolicy: Always
+ terminationMessagePolicy: File
+ image: >-
+ image-registry.openshift-image-registry.svc:5000/${LICENSE_PLATE}-tools/zeva-cleanup:prod
+ args:
+ - >
+ date
+
+ oc login --token=$(LOGIN_TOKEN) --server=https://api.silver.devops.gov.bc.ca:6443
+
+ oc version
+
+ echo ""
+
+ echo "====> Cleaning up ${LICENSE_PLATE}-tools"
+
+ echo "==========> Removing expired builds"
+
+ oc -n ${LICENSE_PLATE}-tools get builds | grep -E "Complete|Failed|Cancelled" | awk '{print $1}' | xargs oc -n ${LICENSE_PLATE}-tools delete build || true
+
+ echo "==========> Removing expired frontend and backend image tags"
+
+ oc -n ${LICENSE_PLATE}-tools get imagetags | grep -E "zeva-frontend|zeva-backend" | awk '{print $1}' | xargs oc -n ${LICENSE_PLATE}-tools delete imagetag || true
+
+ echo "==========> Removing expired pods"
+
+ oc -n ${LICENSE_PLATE}-tools get pods | grep -E "Completed|Error|ContainerStatusUnknown" | grep -v crunchy | grep -v spilo | awk '{print $1}' | xargs oc -n ${LICENSE_PLATE}-tools delete pod || true
+
+ namespaces=("${LICENSE_PLATE}-dev" "${LICENSE_PLATE}-test")
+
+ for namespace in "${namespaces[@]}"; do
+
+ echo ""
+
+ echo "====> Cleaning up $namespace"
+
+ echo "==========> Removing expired pods"
+
+ oc -n $namespace get pods | grep Completed | grep -v backup | awk '{print $1}' | xargs oc -n $namespace delete pod || true
+
+ oc -n $namespace get pods | grep -E "Error|ContainerStatusUnknown" | grep -v crunchy | grep -v spilo | grep -v backup | awk '{print $1}' | xargs oc -n $namespace delete pod || true
+
+ env=$(echo $namespace | awk -F '-' '{print $NF}')
+
+ runningBackendImageTag=$(oc -n $namespace describe dc/zeva-backend-$env | grep Triggers | grep zeva-backend | awk -F '@' '{print $2}' | awk -F ',' '{print $1}')
+
+ echo "==========> Removing expired backend image tags except zeva-backend:$runningBackendImageTag"
+
+ oc -n $namespace get imagetags | grep zeva-backend | grep -v $runningBackendImageTag | awk '{print $1}' | xargs oc -n $namespace delete imagetag || true
+
+ runningFrontendImageTag=$(oc -n $namespace describe dc/zeva-frontend-$env | grep Triggers | grep zeva-frontend | awk -F '@' '{print $2}' | awk -F ',' '{print $1}')
+
+ echo "==========> Removing expired frontend image tags except zeva-frontend:$runningFrontendImageTag"
+
+ oc -n $namespace get imagetags | grep zeva-frontend | grep -v $runningFrontendImageTag | awk '{print $1}' | xargs oc -n $namespace delete imagetag || true
+
+ done
+
+ restartPolicy: Never
+ terminationGracePeriodSeconds: 30
+ dnsPolicy: ClusterFirst
+ securityContext: {}
+ schedulerName: default-scheduler
+ successfulJobsHistoryLimit: 3
+ failedJobsHistoryLimit: 1
diff --git a/openshift/templates/cleanup/readme.md b/openshift/templates/cleanup/readme.md
new file mode 100644
index 000000000..0885415ec
--- /dev/null
+++ b/openshift/templates/cleanup/readme.md
@@ -0,0 +1,13 @@
+# Cleanup Cron Job
+
+## cleanup-bc-docker.yaml
+
+The build config to build a cleanup image based on the OpenShift 4 oc client image
+
+## cleanup-cron.yaml
+
+The OpenShift CronJob that runs periodically to clean up unused resources in the ZEVA namespaces
+
+## Dockerfile
+
+The Dockerfile to build a new image on top of registry.redhat.io/openshift4/ose-cli
diff --git a/openshift/templates/minio/11.yaml b/openshift/templates/minio/11.yaml
deleted file mode 100644
index 0721994e2..000000000
--- a/openshift/templates/minio/11.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-apiVersion: template.openshift.io/v1
-kind: Template
-metadata:
- creationTimestamp: null
- name: jenkins
-objects:
- - apiVersion: v1
- kind: Secret
- metadata:
- name: wwww
- stringData:
- MINIO_ACCESS_KEY: wwww
- MINIO_SECRET_KEY: www
diff --git a/openshift/templates/minio/README.md b/openshift/templates/minio/README.md
deleted file mode 100644
index 1c08449e5..000000000
--- a/openshift/templates/minio/README.md
+++ /dev/null
@@ -1,28 +0,0 @@
-### Files included
-
-* minio-bc.yaml minio build config
-* minio-dc.yaml minio deployment config
-* minio-secret.yaml create template.minio-secret, it is NOT being used as minio creation is not part of pipeline anymore
-
-### build minio
-
-oc tag registry.access.redhat.com/rhel:7.7-481 e52f12-tools/rhel7:7.7-481
-oc process -f ./minio-bc.yaml | oc create -f - -n e52f12-tools
-oc tag minio:latest minio:20200309
-
-### One minio instance serve all PRs on Dev
-
-oc process -f ./minio-dc.yaml \
-NAME=zeva ENV_NAME=dev SUFFIX=-dev OCP_NAME=apps.silver.devops \
-| oc create -f - -n e52f12-dev
-
-#### Test and Prod Minio setup
-
-oc process -f ./minio-dc.yaml \
-NAME=zeva ENV_NAME=test SUFFIX=-test OCP_NAME=apps.silver.devops \
-| oc create -f - -n e52f12-test
-
-
-oc process -f ./minio-dc.yaml \
-NAME=zeva ENV_NAME=prod SUFFIX=-prod OCP_NAME=apps.silver.devops \
-| oc create -f - -n e52f12-prod
\ No newline at end of file
diff --git a/openshift/templates/minio/docker/Dockerfile b/openshift/templates/minio/docker/Dockerfile
deleted file mode 100644
index 94bad7d14..000000000
--- a/openshift/templates/minio/docker/Dockerfile
+++ /dev/null
@@ -1,33 +0,0 @@
-FROM to-be-replaced-by-buildconfig
-
-RUN useradd -d /opt/minio -g root minio
-
-WORKDIR /opt/minio
-
-ADD entrypoint.sh .
-
-RUN curl -o minio https://dl.minio.io/server/minio/release/linux-amd64/minio && \
- curl -o mc https://dl.minio.io/client/mc/release/linux-amd64/mc && \
- chmod +x minio && \
- chmod +x mc && \
- mkdir config && \
- mkdir data && \
- mkdir s3 && \
- mkdir s3/config && \
- mkdir s3/data && \
- chown minio:root -R . && chmod 777 -R .
-
-USER minio
-
-ENV MINIO_ACCESS_KEY="demoaccesskey"
-ENV MINIO_SECRET_KEY="mysecret"
-ENV MINIO_BIN=/opt/minio/minio
-ENV MINIO_DATA_DIR=/opt/minio/s3/data
-ENV MINIO_CONFIG_DIR=/opt/minio/s3/config
-
-VOLUME $MINIO_CONFIG_DIR
-VOLUME $MINIO_DATA_DIR
-
-EXPOSE 9000
-
-ENTRYPOINT [ "./entrypoint.sh" ]
diff --git a/openshift/templates/minio/docker/entrypoint.sh b/openshift/templates/minio/docker/entrypoint.sh
deleted file mode 100755
index 86717f150..000000000
--- a/openshift/templates/minio/docker/entrypoint.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-${MINIO_BIN} server --config-dir=${MINIO_CONFIG_DIR} $@ ${MINIO_DATA_DIR}
diff --git a/openshift/templates/minio/minio-bc.yaml b/openshift/templates/minio/minio-bc.yaml
deleted file mode 100644
index 553b70f6d..000000000
--- a/openshift/templates/minio/minio-bc.yaml
+++ /dev/null
@@ -1,64 +0,0 @@
----
-kind: Template
-apiVersion: v1
-metadata:
- creationTimestamp: null
- name: minio
-parameters:
- - name: GIT_URL
- displayName: Zeva Git Repo URL
- description: The URL to your GIT repo
- required: true
- value: https://github.com/bcgov/zeva.git
- - name: GIT_REF
- displayName: Git Reference
- description: The git reference or branch.
- required: true
- value: master
-objects:
- - kind: ImageStream
- apiVersion: v1
- metadata:
- name: minio
- creationTimestamp:
- labels:
- shared: "true"
- spec:
- lookupPolicy:
- local: false
- status:
- dockerImageRepository: ""
- - apiVersion: build.openshift.io/v1
- kind: BuildConfig
- metadata:
- creationTimestamp: null
- name: minio
- spec:
- failedBuildsHistoryLimit: 5
- nodeSelector: null
- output:
- to:
- kind: ImageStreamTag
- name: minio:latest
- postCommit: {}
- resources: {}
- runPolicy: Serial
- source:
- contextDir: openshift/templates/minio/docker
- git:
- ref: ${GIT_REF}
- uri: ${GIT_URL}
- type: Git
- strategy:
- dockerStrategy:
- from:
- kind: ImageStreamTag
- name: rhel7:7.7-481
- type: Docker
- successfulBuildsHistoryLimit: 5
- triggers:
- - imageChange: {}
- type: ImageChange
- - type: ConfigChange
- status:
- lastVersion: 0
diff --git a/openshift/templates/minio/minio-dc.yaml b/openshift/templates/minio/minio-dc.yaml
deleted file mode 100644
index e3b439bbc..000000000
--- a/openshift/templates/minio/minio-dc.yaml
+++ /dev/null
@@ -1,223 +0,0 @@
-apiVersion: template.openshift.io/v1
-kind: Template
-metadata:
- name: minio
- annotations:
- description: Minio with persistent storage. By BC Gov.
- openshift.io/display-name: BC Gov Minio
-parameters:
- - name: NAME
- value: zeva
- required: true
- - name: SUFFIX
- displayName:
- description: sample is -pr-0
- required: true
- - name: ENV_NAME
- value: dev
- required: true
- - name: PVC_SIZE
- displayName: Volume Capacity
- description: Volume space available to Minio server for files, e.g. 512Mi, 2Gi.
- value: 5G
- required: true
- - name: CPU_REQUEST
- displayName: Requested CPU
- description: Requested CPU
- required: true
- value: '100m'
- - name: CPU_LIMIT
- displayName: CPU upper limit
- description: CPU upper limit
- required: true
- value: '200m'
- - name: MEMORY_REQUEST
- displayName: Requested memory
- description: Requested memory
- required: true
- value: '200M'
- - name: MEMORY_LIMIT
- displayName: Memory upper limit
- description: Memory upper limit
- required: true
- value: '500M'
- - name: MINIO_ACCESS_KEY
- description: Minio access key
- from: "[a-zA-Z0-9]{8}"
- generate: expression
- required: true
- - name: MINIO_SECRET_KEY
- description: Minio secret key
- from: "[a-zA-Z0-9]{16}"
- generate: expression
- required: true
- - name: OCP_NAME
- displayName: Openshift Name
- description: Openshift Name
- required: true
-objects:
- - apiVersion: v1
- kind: Secret
- metadata:
- name: ${NAME}-minio${SUFFIX}
- stringData:
- MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
- MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
- - kind: PersistentVolumeClaim
- apiVersion: v1
- metadata:
- name: ${NAME}-minio${SUFFIX}
- annotations:
- volume.beta.kubernetes.io/storage-class: netapp-file-standard
- template.openshift.io.bcgov/create: 'true'
- spec:
- accessModes:
- - ReadWriteMany
- resources:
- requests:
- storage: ${PVC_SIZE}
- status: {}
- - kind: Service
- apiVersion: v1
- metadata:
- name: ${NAME}-minio${SUFFIX}
- creationTimestamp:
- labels:
- name: minio
- app: zeva
- role: minio
- env: ${ENV_NAME}
- spec:
- ports:
- - name: 9000-tcp
- protocol: TCP
- port: 9000
- targetPort: 9000
- selector:
- deploymentconfig: ${NAME}-minio${SUFFIX}
- type: ClusterIP
- sessionAffinity: None
- status:
- loadBalancer: {}
- - apiVersion: route.openshift.io/v1
- kind: Route
- metadata:
- creationTimestamp: null
- labels:
- app: zeva
- role: minio
- env: ${ENV_NAME}
- name: ${NAME}-minio${SUFFIX}
- spec:
- host: ${NAME}-minio${SUFFIX}.${OCP_NAME}.gov.bc.ca
- port:
- targetPort: 9000-tcp
- tls:
- insecureEdgeTerminationPolicy: Redirect
- termination: edge
- to:
- kind: Service
- name: ${NAME}-minio${SUFFIX}
- weight: 100
- wildcardPolicy: None
- - kind: DeploymentConfig
- apiVersion: v1
- metadata:
- name: ${NAME}-minio${SUFFIX}
- labels:
- name: minio
- spec:
- strategy:
- type: Recreate
- resources: {}
- activeDeadlineSeconds: 21600
- triggers:
- - type: ConfigChange
- - type: ImageChange
- imageChangeParams:
- automatic: true
- containerNames:
- - minio
- from:
- kind: ImageStreamTag
- namespace: e52f12-tools
- name: minio:20200309
- replicas: 1
- test: false
- selector:
- app: ${NAME}-minio${SUFFIX}
- template:
- metadata:
- creationTimestamp:
- labels:
- app: ${NAME}-minio${SUFFIX}
- spec:
- volumes:
- - name: minio-data
- persistentVolumeClaim:
- claimName: ${NAME}-minio${SUFFIX}
- containers:
- - name: minio
- image:
- ports:
- - containerPort: 9000
- protocol: TCP
- env:
- - name: MINIO_ACCESS_KEY
- valueFrom:
- secretKeyRef:
- name: ${NAME}-minio${SUFFIX}
- key: MINIO_ACCESS_KEY
- - name: MINIO_SECRET_KEY
- valueFrom:
- secretKeyRef:
- name: ${NAME}-minio${SUFFIX}
- key: MINIO_SECRET_KEY
- - name: MINIO_CONFIG_DIR
- value: "/tmp"
- - name: MINIO_DATA_DIR
- value: "/data"
- resources:
- limits:
- cpu: '${CPU_LIMIT}'
- memory: '${MEMORY_LIMIT}'
- requests:
- cpu: '${CPU_REQUEST}'
- memory: '${MEMORY_REQUEST}'
- volumeMounts:
- - name: minio-data
- mountPath: /data
- livenessProbe:
- failureThreshold: 3
- initialDelaySeconds: 35
- periodSeconds: 10
- successThreshold: 1
- tcpSocket:
- port: 9000
- timeoutSeconds: 3
- readinessProbe:
- exec:
- command:
- - /bin/sh
- - '-c'
- - mkdir -p /data/zeva
- failureThreshold: 3
- initialDelaySeconds: 25
- periodSeconds: 10
- successThreshold: 1
- timeoutSeconds: 5
- terminationMessagePath: "/dev/termination-log"
- terminationMessagePolicy: File
- imagePullPolicy: Always
- restartPolicy: Always
- terminationGracePeriodSeconds: 30
- dnsPolicy: ClusterFirst
- securityContext: {}
- schedulerName: default-scheduler
- status:
- latestVersion: 0
- observedGeneration: 0
- replicas: 0
- updatedReplicas: 0
- availableReplicas: 0
- unavailableReplicas: 0
diff --git a/openshift/templates/minio/minio-secret.yaml b/openshift/templates/minio/minio-secret.yaml
deleted file mode 100644
index bae505b28..000000000
--- a/openshift/templates/minio/minio-secret.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-apiVersion: template.openshift.io/v1
-kind: Template
-metadata:
- creationTimestamp: null
- name: template.minio-secret
-parameters:
- - name: MINIO_ACCESS_KEY
- description: Minio access key
- from: "[a-zA-Z0-9]{8}"
- generate: expression
- required: true
- - name: MINIO_SECRET_KEY
- description: Minio secret key
- from: "[a-zA-Z0-9]{16}"
- generate: expression
- required: true
-objects:
- - apiVersion: v1
- kind: Secret
- metadata:
- name: template.minio-secret
- labels: {}
- stringData:
- MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
- MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
diff --git a/openshift/templates/patroni/.pipeline/build.js b/openshift/templates/patroni/.pipeline/build.js
deleted file mode 100755
index 4df964f0b..000000000
--- a/openshift/templates/patroni/.pipeline/build.js
+++ /dev/null
@@ -1,5 +0,0 @@
-'use strict';
-const build = require('./lib/build.js')
-const phases = require('./lib/config.js')
-
-build({phases:phases})
\ No newline at end of file
diff --git a/openshift/templates/patroni/.pipeline/clean.js b/openshift/templates/patroni/.pipeline/clean.js
deleted file mode 100755
index f88e35923..000000000
--- a/openshift/templates/patroni/.pipeline/clean.js
+++ /dev/null
@@ -1,4 +0,0 @@
-'use strict';
-const build = require('./lib/clean.js')
-
-build()
\ No newline at end of file
diff --git a/openshift/templates/patroni/.pipeline/deploy.js b/openshift/templates/patroni/.pipeline/deploy.js
deleted file mode 100755
index a2d1f0da1..000000000
--- a/openshift/templates/patroni/.pipeline/deploy.js
+++ /dev/null
@@ -1,6 +0,0 @@
-'use strict';
-const phases = require('./lib/config.js')
-const deploy = require('./lib/deploy.js')
-const options= require('pipeline-cli').Util.parseArguments()
-
-deploy({phases:phases, options:options})
diff --git a/openshift/templates/patroni/.pipeline/lib/build.js b/openshift/templates/patroni/.pipeline/lib/build.js
deleted file mode 100755
index 59ddc3436..000000000
--- a/openshift/templates/patroni/.pipeline/lib/build.js
+++ /dev/null
@@ -1,25 +0,0 @@
-'use strict';
-const {OpenShiftClientX} = require('pipeline-cli')
-const path = require('path');
-
-module.exports = (settings)=>{
- const phases=settings.phases
- const oc=new OpenShiftClientX({'namespace':phases.build.namespace});
- const phase='build'
- var objects = []
- var git_http_url = oc.git.http_url
- //git_http_url = 'https://github.com/BCDevOps/platform-services.git'
-
- objects = objects.concat(oc.processDeploymentTemplate(oc.toFileUrl(path.resolve(__dirname, '../../openshift/build.yaml')), {
- 'param':{
- 'NAME': phases[phase].name,
- 'SUFFIX': phases[phase].suffix,
- 'VERSION': phases[phase].tag,
- 'GIT_URI': git_http_url,
- 'GIT_REF': oc.git.ref
- }
- }))
-
- oc.applyRecommendedLabels(objects, phases[phase].name, phase, phases[phase].changeId, phases[phase].instance)
- oc.applyAndBuild(objects)
-}
\ No newline at end of file
diff --git a/openshift/templates/patroni/.pipeline/lib/clean.js b/openshift/templates/patroni/.pipeline/lib/clean.js
deleted file mode 100755
index 0e0f74f12..000000000
--- a/openshift/templates/patroni/.pipeline/lib/clean.js
+++ /dev/null
@@ -1,22 +0,0 @@
-'use strict';
-const {OpenShiftClientX} = require('pipeline-cli')
-const phases = require('./config')
-const options= require('pipeline-cli').Util.parseArguments()
-
-module.exports = (settings)=>{
- const oc=new OpenShiftClientX({'namespace':phases.build.namespace});
- const target_phase=options.env
-
- //console.log(`target_phase=${target_phase}`)
-
- for (var k in phases){
- if (phases.hasOwnProperty(k) && k != 'prod') {
- const phase=phases[k]
- if (k == target_phase){
- //console.log(`phase=${phase}`)
- oc.raw('delete', ['all'], {selector:`app-name=${phase.name},env-id=${phase.changeId},env-name!=prod,!shared,github-repo=${oc.git.repository},github-owner=${oc.git.owner}`, namespace:phase.namespace})
- oc.raw('delete', ['pvc,Secret,configmap,endpoints,RoleBinding,role,ServiceAccount,Endpoints'], {selector:`app-name=${phase.name},env-id=${phase.changeId},env-name!=prod,!shared,github-repo=${oc.git.repository},github-owner=${oc.git.owner}`, namespace:phase.namespace})
- }
- }
- }
-}
\ No newline at end of file
diff --git a/openshift/templates/patroni/.pipeline/lib/config.js b/openshift/templates/patroni/.pipeline/lib/config.js
deleted file mode 100644
index 020029fdf..000000000
--- a/openshift/templates/patroni/.pipeline/lib/config.js
+++ /dev/null
@@ -1,13 +0,0 @@
-'use strict';
-const options= require('pipeline-cli').Util.parseArguments()
-const changeId = options.pr //aka pull-request
-const version = '10'
-const name = 'patroni'
-
-const phases = {
- build: {namespace:'bcgov-tools' , name: `${name}`, phase: 'build', changeId:changeId, suffix: `-build-${changeId}`, instance: `${name}-build-${changeId}`, tag:`v${version}-${changeId}`},
- test: {namespace:`bcgov`, name: `${name}`, phase: 'test' , changeId:changeId, suffix: '-test' , instance: `${name}-test` , tag:`v${version}-latest`},
- prod: {namespace:`bcgov`, name: `${name}`, phase: 'prod' , changeId:changeId, suffix: '' , instance: `${name}-prod` , tag:`v${version}-stable`}
-}
-
-module.exports = exports = phases
diff --git a/openshift/templates/patroni/.pipeline/lib/deploy.js b/openshift/templates/patroni/.pipeline/lib/deploy.js
deleted file mode 100755
index 6e0131e7f..000000000
--- a/openshift/templates/patroni/.pipeline/lib/deploy.js
+++ /dev/null
@@ -1,12 +0,0 @@
-'use strict';
-const {OpenShiftClientX} = require('pipeline-cli')
-
-
-module.exports = (settings)=>{
- const phases=settings.phases
- const phase=settings.options.env
- const oc=new OpenShiftClientX({'namespace':phases[phase].namespace});
-
- oc.tag([`${phases.build.namespace}/${phases.build.name}:${phases.build.tag}`, `${phases[phase].namespace}/${phases[phase].name}:${phases[phase].tag}`])
-
-}
\ No newline at end of file
diff --git a/openshift/templates/patroni/.pipeline/npmw b/openshift/templates/patroni/.pipeline/npmw
deleted file mode 100755
index e7a80b4c2..000000000
--- a/openshift/templates/patroni/.pipeline/npmw
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env sh
-set -e
-
-curl -sSL 'https://raw.githubusercontent.com/BCDevOps/pipeline-cli/v1.0/cli.sh' | bash -s "$@"
\ No newline at end of file
diff --git a/openshift/templates/patroni/.pipeline/package.json b/openshift/templates/patroni/.pipeline/package.json
deleted file mode 100644
index 9d62bc018..000000000
--- a/openshift/templates/patroni/.pipeline/package.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
- "name": "pipeline",
- "version": "1.0.0",
- "description": "This a pipeliene script",
- "engines": {
- "node": ">=8"
- },
- "scripts": {
- "build": "node build.js",
- "clean": "node clean.js",
- "deploy": "node deploy.js",
- "test": "mocha",
- "version": "echo \"node@$(node --version) ($(which node))\" && echo \"npm@$(npm --version) ($(which npm))\" && npm ls"
- },
- "repository": {
- "type": "git",
- "url": "git+https://github.com/BCDevOps/platform-services.git"
- },
- "author": "",
- "license": "Apache-2.0",
- "dependencies": {
- "pipeline-cli": "git+https://github.com/BCDevOps/pipeline-cli.git#v1.0"
- },
- "devDependencies": {
- "mocha": "^5.2.0"
- }
-}
diff --git a/openshift/templates/patroni/.pipeline/test/e2e.js b/openshift/templates/patroni/.pipeline/test/e2e.js
deleted file mode 100644
index 662954204..000000000
--- a/openshift/templates/patroni/.pipeline/test/e2e.js
+++ /dev/null
@@ -1,173 +0,0 @@
-var assert = require('assert');
-const {OpenShiftClientX} = require('pipeline-cli')
-const {spawnSync} = require('child_process');
-const path = require('path');
-
-function randomstring(L) {
- var s = '';
- var randomchar = function() {
- var n = Math.floor(Math.random() * 62);
- if (n < 10) return n; //1-10
- if (n < 36) return String.fromCharCode(n + 55); //A-Z
- return String.fromCharCode(n + 61); //a-z
- }
- while (s.length < L) s += randomchar();
- return s;
-}
-
-describe('e2e2', function() {
- const namespace='a1b2c3d';//randomstring(6).toLowerCase()
- const buildNamespace = `${namespace}-tools`;
- const deployNamespace = `${namespace}-tools`;
- const oc=new OpenShiftClientX();
- let currentNamespace=""
-
- before(function() {
- currentNamespace=oc.raw('project', ['-q']).stdout
- console.log(`currentNamespace=${currentNamespace}`)
- });
-
- it(`delete project`, function(done) {
- this.timeout(20000)
- spawnSync('oc', ['delete', `project/${buildNamespace}`], {encoding:'utf8'})
- //oc.raw('delete', [`namespace/${buildNamespace}`])
- //assert.equal([1,2,3].indexOf(4), -1);
- setTimeout(function(){done()}, 5000)
- });
-
- it('create project', function() {
- currentNamespace=oc.raw('project', ['-q']).stdout
- console.log(`currentNamespace=${currentNamespace}`)
- oc.raw('create', ['namespace',buildNamespace])
- oc.raw('label', [`namespace/${buildNamespace}`, 'mocha=e2e', 'name=patroni'])
- });
-
- it('build', function() {
- this.timeout(60000)
- const build = require('../lib/build.js');
- const changeId=0;
- const _phase={name:'patroni', changeId:0}
- const settings={
- phases:{
- build:{
- namespace: buildNamespace,
- name:`${_phase.name}`,
- suffix:'-build',
- tag:`v10-${_phase.changeId}`,
- instance: `${_phase.name}-build-${_phase.changeId}`
- }
- }
- }
- build(settings)
- assert.equal([1,2,3].indexOf(4), -1);
- });
-
- it('deploy', function() {
- this.timeout(60000)
- const _phase={name:'patroni', changeId:0}
- const settings={
- phases:{
- build:{
- namespace: buildNamespace,
- name:`${_phase.name}`,
- suffix:'-build',
- tag:`v10-${_phase.changeId}`,
- instance: `${_phase.name}-build-${_phase.changeId}`,
- changeId: _phase.changeId
- },
- e2e:{
- namespace: deployNamespace,
- name:`${_phase.name}`,
- suffix:'-e2e',
- tag:`v10-${_phase.changeId}`,
- instance: `${_phase.name}-e2e-${_phase.changeId}`,
- changeId: _phase.changeId
- }
- }
- }
- const phases = settings.phases
- const phase = 'e2e'
- let objects =[]
-
- //Switch to Build Namespace
- oc.namespace(deployNamespace);
-
- objects = objects.concat(oc.processDeploymentTemplate(oc.toFileUrl(path.resolve(__dirname, '../../openshift/deployment-prereq.yaml')), {
- 'param':{
- 'NAME': `${phases[phase].name}-pgsql`,
- 'SUFFIX': phases[phase].suffix,
- 'APP_DB_USERNAME': 'rhsso',
- 'APP_DB_NAME': 'rhsso'
- }
- }))
-
- objects = objects.concat(oc.processDeploymentTemplate(oc.toFileUrl(path.resolve(__dirname, '../../openshift/deployment.yaml')), {
- 'param':{
- 'NAME': `${phases[phase].name}-pgsql`,
- 'SUFFIX': phases[phase].suffix,
- 'INSTANCE': `${phases[phase].name}-pgsql${phases[phase].suffix}`,
- 'IMAGE_STREAM_NAMESPACE': phases[phase].namespace,
- 'OPENSHIFT_IMAGE_REGISTRY': '172.30.1.1:5000',
- 'IMAGE_STREAM_TAG': `patroni:v10-${phases[phase].changeId}`
- }
- }))
-
- oc.applyRecommendedLabels(objects, phases[phase].name, phase, `${phases[phase].changeId}`, phases[phase].instance)
-
- objects.forEach((item)=>{
- if (item.kind == 'StatefulSet' && item.metadata.labels["app.kubernetes.io/name"] === "patroni"){
- oc.copyRecommendedLabels(item.metadata.labels, item.spec.selector.matchLabels)
- oc.copyRecommendedLabels(item.metadata.labels, item.spec.template.metadata.labels)
-
- item.spec.template.spec.containers.forEach((container)=>{
- container.env.forEach((env)=>{
- if (env.name === "PATRONI_KUBERNETES_LABELS"){
- var labels = JSON.parse(env.value)
- oc.copyRecommendedLabels(item.metadata.labels, labels)
- env.value = JSON.stringify(labels)
- }
- })
- })
- }
- })
-
- oc.importImageStreams(objects, phases[phase].tag, phases.build.namespace, phases.build.tag)
- oc.applyAndDeploy(objects, phases[phase].instance)
-
- });
-
-
- after(function() {
- //this.timeout(10000)
- //let p1=spawnSync('bash', ['-c', `oc delete "project/${buildNamespace}"`], {encoding:'utf8'})
- //console.dir(p1.output)
- /*
- return new Promise( (resolve) => {
- resolve(true)
- }).then((result)=>{
- return new Promise((resolve)=>{
- setTimeout(function(){
- resolve(true)
- }, 5000)
- })
- }).then((result)=>{
- let p2=spawnSync('oc', ['delete', `namespace/${buildNamespace}`], {encoding:'utf8'})
- console.dir(p2.output)
- done()
- });
- */
- //.finally(done)
- //setTimeout(function(){
- /*
- let p1=spawnSync('oc', ['delete', `namespace/${buildNamespace}`], {encoding:'utf8'})
- console.dir(p1.output)
-
- console.log(`previousNamespace=${currentNamespace}`)
- let p2=spawnSync('oc', ['project', currentNamespace], {encoding:'utf8'})
- console.dir(p2.output)
- oc.raw('delete', [`namespace/${buildNamespace}`])
- done()
- */
- //}, 5)
- })
-});
\ No newline at end of file
diff --git a/openshift/templates/patroni/README.md b/openshift/templates/patroni/README.md
deleted file mode 100644
index 949bc0f02..000000000
--- a/openshift/templates/patroni/README.md
+++ /dev/null
@@ -1,85 +0,0 @@
-### Files included
-
-* build.yaml build patroni image
-* deployment.yaml deploy patroni
-* deployment-prereq.yaml create pre-required objects for patroni
-* secret-template.yaml create template.patroni-patroni secret, it is used by pipeline
-
-### Defore triggering pipeline
-
-1. Create template.patroni-patroni secret
-oc process -f ./secret-template.yaml | oc create -f - -n [environment namespace]
-
-2. Build patroni image
-oc process -f ./build.yaml | oc create -f - -n [tools namespace]
-
-3. tag the patroni image to environment
-oc tag e52f12-tools/patroni:v10-latest e52f12-[env]]/patroni:v10-stable
-
-### Database Migration from Openshift v3 to Openshift 4
-
-1. Openshift v4 - Update zeva database user same as the one on Openshift v3
- For example, Openshift v3 zeva db user name is userABC and opassword is pwpwpwpwpw
- create user "userABC" with password 'pwpwpwpwpw'; //password is same with secret
- ALTER DATABASE zeva OWNER TO "userABC";
- DROP USER usershh; //usershh is the old user on Openshift v4
-
-2. Openshift v4 - Update secrets patroni-prod and template.patroni-patroni
-
-Update app-db-username and app-db-password same as the one on Openshift v3
-
-3. Openshift v3 - Create backup
-login to patroni-backup pod and run backup.sh -1
- created backup: /backups/2020-08-28/postgresql-zeva_2020-08-28_19-06-28.sql.gz
-
-4. Move the above backup file from backup container on Openshift v3 to v4
- for example: moved to /backups/fromv3/postgresql-zeva_2020-08-28_19-06-28.sql.gz
-
-5. Recover the backup to paroni database on Openshift v4
-login patroini-backup pod on Openshift v4, run the following command
-./backup.sh -r patroni-master-prod/zeva -f /backups/fromv3 //yes, folder name only, it will pickup the file and ask confirmation
-
-6. Verify the database on Openshift v3 and v4 to make sure they are same
-
-### Create read only user for metabase conection
-CREATE USER metabaseuser WITH PASSWORD '[password]';
-GRANT CONNECT ON DATABASE zeva TO metabaseuser;
-GRANT USAGE ON SCHEMA public TO metabaseuser;
-GRANT SELECT ON ALL TABLES IN SCHEMA public TO metabaseuser;
-ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO metabaseuser;
-
-### Create staging patroni in order to test the operational scripts
-
-1. backup prod database and rsync to test env ex. /backups/2020-10-30-prod/patroni-master-prod-tfrs_2020-10-30_12-29-48.sql.gz
-
-2. create patroni-staging statefulset
- oc process -f ./deployment-prereq.yaml SUFFIX=-staging ... //make sure the user passwors are same as prod
- oc process -f ./deployment.yaml \
- NAME=patroni \
- ENV_NAME=test \
- SUFFIX=-staging \
- CPU_REQUEST=200m \
- CPU_LIMIT=400m \
- MEMORY_REQUEST=250M \
- MEMORY_LIMIT=500M \
- IMAGE_REGISTRY=docker-registry.default.svc:5000 \
- IMAGE_STREAM_NAMESPACE=mem-tfrs-test \
- IMAGE_STREAM_TAG=patroni:v10-stable \
- REPLICA=1 \
- PVC_SIZE=1G \
- STORAGE_CLASS=netapp-block-standard \
- | oc create -f - -n mem-tfrs-test
-
-3. restore
-
-login to patroni-master-staging pod:
- create user "userSRU" with password ''; //password to find in patroni-staging secret
- ALTER DATABASE tfrs OWNER TO "userSRU";
- DROP USER usersru;
-
-on backup pod in test env:
-./backup.sh -r patroni-master-staging:5430/tfrs -f /backups/2020-10-30-prod/patroni-master-prod-tfrs_2020-10-30_12-29-48.sql.gz
-the admin command can get from the patroni-staging secret
-
-4. update bacckend dc to connect to staging database
-
diff --git a/openshift/templates/patroni/build.yaml b/openshift/templates/patroni/build.yaml
deleted file mode 100644
index b8d583f40..000000000
--- a/openshift/templates/patroni/build.yaml
+++ /dev/null
@@ -1,110 +0,0 @@
-apiVersion: v1
-kind: Template
-metadata:
- creationTimestamp: null
- name: patroni
-labels:
- app: ${NAME}${SUFFIX}
- phase: build
- app.kubernetes.io/component: database
- app.kubernetes.io/name: patroni
- app.kubernetes.io/managed-by: template
- app.kubernetes.io/version: "10"
-parameters:
- - name: NAME
- value: patroni
- - name: SUFFIX
- value: ""
- description: for zeva, use empty value as pipeline doesn't build patroni for single PR
- - name: VERSION
- description: Ouput version
- value: "v10-latest"
- - name: GIT_URI
- value: https://github.com/bcgov/zeva.git
- - name: GIT_REF
- value: master
- - name: POSTGRES_VERSION
- value: "10"
-objects:
-#ImageStream is create if it doesn't already exist
-- apiVersion: image.openshift.io/v1
- kind: ImageStream
- metadata:
- annotations: {}
- creationTimestamp: null
- generation: 1
- name: postgres
- spec:
- lookupPolicy:
- local: false
- tags:
- - annotations: null
- from:
- kind: DockerImage
- name: registry.hub.docker.com/library/postgres:${POSTGRES_VERSION}
- generation: 1
- importPolicy: {}
- name: "${POSTGRES_VERSION}"
- referencePolicy:
- type: Source
- status:
- dockerImageRepository: ""
-#- apiVersion: v1
-# generation: 0
-# kind: ImageStreamTag
-# lookupPolicy:
-# local: false
-# metadata:
-# creationTimestamp: null
-# name: postgres:${POSTGRES_VERSION}
-# tag:
-# annotations: null
-# from:
-# kind: DockerImage
-# name: registry.hub.docker.com/library/postgres:${POSTGRES_VERSION}
-# generation: 0
-# importPolicy: {}
-# name: "${POSTGRES_VERSION}"
-# referencePolicy:
-# type: Source
-- apiVersion: v1
- kind: ImageStream
- metadata:
- creationTimestamp: null
- name: ${NAME}
- spec:
- lookupPolicy:
- local: false
- status:
- dockerImageRepository: ""
-- apiVersion: v1
- kind: BuildConfig
- metadata:
- creationTimestamp: null
- name: ${NAME}${SUFFIX}
- spec:
- nodeSelector: null
- output:
- to:
- kind: ImageStreamTag
- name: "${NAME}:${VERSION}"
- postCommit: {}
- resources: {}
- source:
- contextDir: openshift/templates/patroni/docker
- git:
- ref: ${GIT_REF}
- uri: ${GIT_URI}
- type: Git
- strategy:
- dockerStrategy:
- from:
- kind: ImageStreamTag
- name: postgres:${POSTGRES_VERSION}
- type: Docker
- triggers:
- - type: ConfigChange
- - imageChange: {}
- type: ImageChange
- status:
- lastVersion: 0
\ No newline at end of file
diff --git a/openshift/templates/patroni/deployment-prereq.yaml b/openshift/templates/patroni/deployment-prereq.yaml
deleted file mode 100644
index 784b1667e..000000000
--- a/openshift/templates/patroni/deployment-prereq.yaml
+++ /dev/null
@@ -1,95 +0,0 @@
-apiVersion: template.openshift.io/v1
-kind: Template
-metadata:
- annotations:
- description: |-
- Patroni Postgresql database cluster, with persistent storage.
- iconClass: icon-postgresql
- openshift.io/display-name: Patroni Postgresql (Persistent)
- openshift.io/long-description: This template deploys a patroni postgresql HA
- cluster with persistent storage.
- tags: postgresql
- name: patroni-pgsql-persistent
-labels:
- app.kubernetes.io/component: database
- app.kubernetes.io/name: patroni
- app.kubernetes.io/managed-by: template
- app.kubernetes.io/version: "10"
-objects:
-- apiVersion: v1
- kind: Secret
- metadata:
- labels:
- app: ${NAME}${SUFFIX}
- cluster-name: ${NAME}${SUFFIX}
- annotations:
- as-copy-of: "template.${NAME}-patroni"
- name: ${NAME}${SUFFIX}
-# move ServiceAccount and Role to here, otherwise image pulled may fail because ServiceAccount hasn't been created
-- apiVersion: v1
- kind: ServiceAccount
- metadata:
- labels:
- cluster-name: ${NAME}${SUFFIX}
- name: ${NAME}${SUFFIX}
-- apiVersion: rbac.authorization.k8s.io/v1
- kind: Role
- metadata:
- labels:
- cluster-name: ${NAME}${SUFFIX}
- name: ${NAME}${SUFFIX}
- rules:
- - apiGroups:
- - ""
- resources:
- - services
- verbs:
- - create
- - get
- - list
- - patch
- - update
- - watch
- - delete
- - apiGroups:
- - ""
- resources:
- - configmaps
- verbs:
- - create
- - get
- - list
- - patch
- - update
- - watch
- - delete
- - apiGroups:
- - ""
- resources:
- - endpoints
- verbs:
- - get
- - patch
- - update
- - create
- - list
- - watch
- - delete
- - apiGroups:
- - ""
- resources:
- - pods
- verbs:
- - get
- - list
- - patch
- - update
- - watch
-parameters:
-- description: The name of the application for labelling all artifacts.
- displayName: Application Name
- name: NAME
- value: patroni
-- name: SUFFIX
- required: true
-
diff --git a/openshift/templates/patroni/deployment.yaml b/openshift/templates/patroni/deployment.yaml
deleted file mode 100644
index ee6562d28..000000000
--- a/openshift/templates/patroni/deployment.yaml
+++ /dev/null
@@ -1,361 +0,0 @@
-apiVersion: template.openshift.io/v1
-kind: Template
-metadata:
- annotations:
- description: |-
- Patroni Postgresql database cluster, with persistent storage.
- iconClass: icon-postgresql
- openshift.io/display-name: Patroni Postgresql (Persistent)
- openshift.io/long-description: This template deploys a patroni postgresql HA
- cluster with persistent storage.
- tags: postgresql
- name: patroni-pgsql-persistent
-labels:
- app: ${NAME}${SUFFIX}
- phase: deploy
- app.kubernetes.io/instance: ${NAME}${SUFFIX}
- app.kubernetes.io/component: database
- app.kubernetes.io/name: patroni
- app.kubernetes.io/managed-by: template
- app.kubernetes.io/version: "10"
-objects:
-# It doesn't seem to be used/needed - remote it?
-#- apiVersion: v1
-# kind: Service
-# metadata:
-# creationTimestamp: null
-# labels:
-# cluster-name: ${NAME}${SUFFIX}
-# name: ${NAME}${SUFFIX}
-# spec:
-# ports:
-# - name: 'postgresql'
-# port: 5432
-# protocol: TCP
-# targetPort: 5432
-# sessionAffinity: None
-# type: ClusterIP
-# status:
-# loadBalancer: {}
-- apiVersion: v1
- kind: Service
- metadata:
- creationTimestamp: null
- labels:
- cluster-name: ${NAME}${SUFFIX}
- name: ${NAME}-master${SUFFIX}
- spec:
- ports:
- - port: 5432
- name: postgresql
- protocol: TCP
- targetPort: 5432
- - port: 8008
- name: health
- protocol: TCP
- targetPort: 8008
- selector:
- cluster-name: ${NAME}${SUFFIX}
- role: master
- app.kubernetes.io/name: patroni
- sessionAffinity: None
- type: ClusterIP
- status:
- loadBalancer: {}
-- apiVersion: v1
- kind: Service
- metadata:
- creationTimestamp: null
- labels:
- cluster-name: ${NAME}${SUFFIX}
- name: ${NAME}-replica${SUFFIX}
- spec:
- ports:
- - port: 5432
- name: postgresql
- protocol: TCP
- targetPort: 5432
- - port: 8008
- name: health
- protocol: TCP
- targetPort: 8008
- selector:
- cluster-name: ${NAME}${SUFFIX}
- role: replica
- app.kubernetes.io/name: patroni
- sessionAffinity: None
- type: ClusterIP
- status:
- loadBalancer: {}
-#- apiVersion: v1
-# kind: Service
-# metadata:
-# creationTimestamp: null
-# labels:
-# cluster-name: ${NAME}${SUFFIX}
-# name: ${NAME}-replica${SUFFIX}
-# spec:
-# ports:
-# - port: 5432
-## name: 'postgresql'
-# protocol: TCP
-# targetPort: 5432
-# selector:
-# cluster-name: ${NAME}${SUFFIX}
-# app.kubernetes.io/name: patroni
-# role: replica
-# sessionAffinity: None
-# type: ClusterIP
-# status:
-# loadBalancer: {}
-# - apiVersion: v1
-# kind: ConfigMap
-# metadata:
-# name: ${NAME}${SUFFIX}-config
-# - apiVersion: v1
-# kind: ConfigMap
-# metadata:
-# name: ${NAME}${SUFFIX}-leader
-- apiVersion: apps/v1
- kind: StatefulSet
- metadata:
- creationTimestamp: null
- generation: 3
- labels:
- cluster-name: ${NAME}${SUFFIX}
- app: ${NAME}${SUFFIX}
- role: patroni
- env: ${ENV_NAME}
- name: ${NAME}${SUFFIX}
- spec:
- podManagementPolicy: OrderedReady
- replicas: ${{REPLICA}}
- revisionHistoryLimit: 10
- selector:
- matchLabels:
- statefulset: ${NAME}${SUFFIX}
- serviceName: ${NAME}${SUFFIX}
- template:
- metadata:
- creationTimestamp: null
- labels:
- statefulset: ${NAME}${SUFFIX}
- cluster-name: ${NAME}${SUFFIX}
- app.kubernetes.io/name: patroni
- spec:
- affinity:
- podAntiAffinity:
- requiredDuringSchedulingIgnoredDuringExecution:
- - labelSelector:
- matchExpressions:
- - key: statefulset
- operator: In
- values:
- - ${NAME}${SUFFIX}
- topologyKey: "kubernetes.io/hostname"
- containers:
- - env:
- #TODO: Remove POD_IP in favor of PATRONI_KUBERNETES_POD_IP
- - name: POD_IP
- valueFrom:
- fieldRef:
- apiVersion: v1
- fieldPath: status.podIP
-# - name: PATRONI_KUBERNETES_USE_ENDPOINTS
-# value: 'true'
-# - name: PATRONI_KUBERNETES_POD_IP
-# valueFrom:
-# fieldRef:
-# apiVersion: v1
-# fieldPath: status.podIP
-# - name: PATRONI_KUBERNETES_PORTS
-# value: '{[{"name": "postgresql", "port": 5432}]}'
- - name: PATRONI_KUBERNETES_NAMESPACE
- valueFrom:
- fieldRef:
- apiVersion: v1
- fieldPath: metadata.namespace
- - name: PATRONI_KUBERNETES_LABELS
- value: '{"cluster-name": "${NAME}${SUFFIX}", "app.kubernetes.io/name": "patroni"}'
- - name: PATRONI_SUPERUSER_USERNAME
- valueFrom:
- secretKeyRef:
- key: superuser-username
- name: ${NAME}${SUFFIX}
- - name: PATRONI_SUPERUSER_PASSWORD
- valueFrom:
- secretKeyRef:
- key: superuser-password
- name: ${NAME}${SUFFIX}
- - name: PATRONI_REPLICATION_USERNAME
- valueFrom:
- secretKeyRef:
- key: replication-username
- name: ${NAME}${SUFFIX}
- - name: PATRONI_REPLICATION_PASSWORD
- valueFrom:
- secretKeyRef:
- key: replication-password
- name: ${NAME}${SUFFIX}
- - name: APP_USER
- valueFrom:
- secretKeyRef:
- key: app-db-username
- name: ${NAME}${SUFFIX}
- - name: APP_PASSWORD
- valueFrom:
- secretKeyRef:
- key: app-db-password
- name: ${NAME}${SUFFIX}
- - name: APP_DATABASE
- valueFrom:
- secretKeyRef:
- key: app-db-name
- name: ${NAME}${SUFFIX}
- - name: PATRONI_SCOPE
- value: ${NAME}${SUFFIX}
- - name: PATRONI_NAME
- valueFrom:
- fieldRef:
- apiVersion: v1
- fieldPath: metadata.name
- - name: PATRONI_LOG_LEVEL
- value: WARNING
- - name: PATRONI_POSTGRESQL_DATA_DIR
- value: /home/postgres/pgdata/pgroot/data
- - name: PATRONI_POSTGRESQL_PGPASS
- value: /tmp/pgpass
- - name: PATRONI_POSTGRESQL_LISTEN
- value: 0.0.0.0:5432
- - name: PATRONI_RESTAPI_LISTEN
- value: 0.0.0.0:8008
-# - name: PATRONI_LOG_LEVEL
-# value: DEBUG
-# - name: PATRONI_LOG_DIR
-# value: /tmp
-# - name: PATRONI_LOG_FILE_SIZE
-# value: '50000000'
-# - name: PATRONI_LOG_FILE_NUM
-# value: '4'
- image: ${IMAGE_REGISTRY}/${IMAGE_STREAM_NAMESPACE}/${IMAGE_STREAM_TAG}
- # Because we are using image reference to a tag, we need to always pull the image otherwise
- # we end up with outdated/out-of-sync image depending on the node where it is running
- imagePullPolicy: Always
- name: postgresql
- ports:
- - containerPort: 8008
- protocol: TCP
- - containerPort: 5432
- protocol: TCP
- resources:
- requests:
- cpu: ${CPU_REQUEST}
- memory: ${MEMORY_REQUEST}
- limits:
- cpu: ${CPU_LIMIT}
- memory: ${MEMORY_LIMIT}
- terminationMessagePath: /dev/termination-log
- terminationMessagePolicy: File
-# used /usr/share/scripts/patroni/health_check.sh, but it doesn't output Lag in MB any more
- readinessProbe:
- failureThreshold: 20
- httpGet:
- path: /health
- port: 8008
- scheme: HTTP
- initialDelaySeconds: 30
- periodSeconds: 10
- successThreshold: 1
- timeoutSeconds: 3
- volumeMounts:
- - mountPath: /home/postgres/pgdata
- name: postgresql
- dnsPolicy: ClusterFirst
- restartPolicy: Always
- schedulerName: default-scheduler
- securityContext: {}
- serviceAccountName: ${NAME}${SUFFIX}
- terminationGracePeriodSeconds: 0
- updateStrategy:
- type: RollingUpdate
- volumeClaimTemplates:
- - metadata:
- annotations:
- volume.beta.kubernetes.io/storage-class: ${STORAGE_CLASS}
- labels:
- app: ${NAME}${SUFFIX}
- name: postgresql
- spec:
- storageClassName: ${STORAGE_CLASS}
- accessModes:
- - ReadWriteOnce
- resources:
- requests:
- storage: ${PVC_SIZE}
-# It doesn't seem to be used/needed - remote it?
-#- apiVersion: v1
-# kind: Endpoints
-# metadata:
-# labels:
-# app: ${NAME}${SUFFIX}
-# cluster-name: ${NAME}${SUFFIX}
-# name: ${NAME}${SUFFIX}
-# subsets: []
-- apiVersion: rbac.authorization.k8s.io/v1
- kind: RoleBinding
- metadata:
- labels:
- cluster-name: ${NAME}${SUFFIX}
- name: ${NAME}${SUFFIX}
- roleRef:
- apiGroup: rbac.authorization.k8s.io
- kind: Role
- name: ${NAME}${SUFFIX}
- subjects:
- - kind: ServiceAccount
- name: ${NAME}${SUFFIX}
-parameters:
-- description: The name of the application for labelling all artifacts.
- displayName: Application Name
- name: NAME
- value: patroni
-- description: The environment name
- displayName: environment name
- name: ENV_NAME
- required: true
-- name: SUFFIX
- description: A suffix appended to all artifact's name (NAME)
-- description: Starting amount of CPU the container can use.
- displayName: CPU REQUEST
- name: CPU_REQUEST
- value: '250m'
-- description: Maximum amount of CPU the container can use.
- displayName: CPU Limit
- name: CPU_LIMIT
- value: '1'
-- description: Starting amount of memory the container can use.
- displayName: Memory Request
- name: MEMORY_REQUEST
- value: 512Mi
-- description: Maximum amount of memory the container can use.
- displayName: Memory Limit
- name: MEMORY_LIMIT
- value: 512Mi
-- description: The OpenShift Namespace where the patroni and postgresql ImageStream
- resides.
- displayName: ImageStream Namespace
- name: IMAGE_STREAM_NAMESPACE
- value: "bcgov"
-- name: IMAGE_STREAM_TAG
- description: Patroni ImageTag
- value: patroni:v10-stable
-- description: The size of the persistent volume to create.
- displayName: Persistent Volume Size
- name: PVC_SIZE
- value: 1G
-- name: STORAGE_CLASS
- value: netapp-block-standard
-- name: IMAGE_REGISTRY
- value: image-registry.openshift-image-registry.svc:5000
-- name: REPLICA
- value: '2'
diff --git a/openshift/templates/patroni/docker/Dockerfile b/openshift/templates/patroni/docker/Dockerfile
deleted file mode 100644
index 0588f64aa..000000000
--- a/openshift/templates/patroni/docker/Dockerfile
+++ /dev/null
@@ -1,43 +0,0 @@
-FROM postgres:10
-MAINTAINER Alexander Kukushkin
-
-ARG PGHOME=/home/postgres
-
-RUN export DEBIAN_FRONTEND=noninteractive \
- && echo 'APT::Install-Recommends "0";\nAPT::Install-Suggests "0";' > /etc/apt/apt.conf.d/01norecommend \
- && apt-get update -y \
- && apt-get upgrade -y \
- && apt-cache depends patroni | sed -n -e 's/.* Depends: \(python3-.\+\)$/\1/p' \
- | grep -Ev '^python3-(sphinx|etcd|consul|kazoo|kubernetes)' \
- | xargs apt-get install -y gettext curl jq locales git python3-pip python3-wheel \
-
- ## Make sure we have a en_US.UTF-8 locale available
- && localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 \
-
- && pip3 install setuptools \
- && pip3 install 'git+https://github.com/zalando/patroni.git#egg=patroni[kubernetes]' \
-
- && mkdir -p $PGHOME \
- && sed -i "s|/var/lib/postgresql.*|$PGHOME:/bin/bash|" /etc/passwd \
-
- # Set permissions for OpenShift
- && chmod 775 $PGHOME \
- && chmod 664 /etc/passwd \
- && mkdir -p $PGHOME/pgdata/pgroot \
- && chgrp -R 0 $PGHOME \
- && chown -R postgres $PGHOME \
- && chmod -R 775 $PGHOME \
- # Clean up
- && apt-get remove -y git python3-pip python3-wheel \
- && apt-get autoremove -y \
- && apt-get clean -y \
- && rm -rf /var/lib/apt/lists/* /root/.cache
-
-COPY contrib/root /
-
-VOLUME /home/postgres/pgdata
-EXPOSE 5432 8008
-ENV LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8
-USER postgres
-WORKDIR /home/postgres
-CMD ["/bin/bash", "/usr/bin/entrypoint.sh"]
\ No newline at end of file
diff --git a/openshift/templates/patroni/docker/contrib/root/usr/bin/entrypoint.sh b/openshift/templates/patroni/docker/contrib/root/usr/bin/entrypoint.sh
deleted file mode 100755
index 141d3f3f8..000000000
--- a/openshift/templates/patroni/docker/contrib/root/usr/bin/entrypoint.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/bash
-
-if [[ $UID -ge 10000 ]]; then
- GID=$(id -g)
- sed -e "s/^postgres:x:[^:]*:[^:]*:/postgres:x:$UID:$GID:/" /etc/passwd > /tmp/passwd
- cat /tmp/passwd > /etc/passwd
- rm /tmp/passwd
-fi
-
-# FIX -> FATAL: data directory "..." has group or world access
-mkdir -p "$PATRONI_POSTGRESQL_DATA_DIR"
-chmod 700 "$PATRONI_POSTGRESQL_DATA_DIR"
-
-cat > /home/postgres/patroni.yml <<__EOF__
-bootstrap:
- post_bootstrap: /usr/share/scripts/patroni/post_init.sh
- dcs:
- postgresql:
- use_pg_rewind: true
- parameters:
- max_connections: ${POSTGRESQL_MAX_CONNECTIONS:-100}
- max_prepared_transactions: ${POSTGRESQL_MAX_PREPARED_TRANSACTIONS:-0}
- max_locks_per_transaction: ${POSTGRESQL_MAX_LOCKS_PER_TRANSACTION:-64}
- log_autovacuum_min_duration: 250
- log_checkpoints: on
- log_lock_waits: on
- log_min_duration_statement: 1000
- log_temp_files: 1000
- min_wal_size: 300MB
- max_wal_size: 1GB
- track_io_timing: on
- initdb:
- - auth-host: md5
- - auth-local: trust
- - encoding: UTF8
- - locale: en_US.UTF-8
- - data-checksums
- pg_hba:
- - host all all 0.0.0.0/0 md5
- - host replication ${PATRONI_REPLICATION_USERNAME} ${POD_IP}/16 md5
-restapi:
- connect_address: '${POD_IP}:8008'
-postgresql:
- connect_address: '${POD_IP}:5432'
- authentication:
- superuser:
- password: '${PATRONI_SUPERUSER_PASSWORD}'
- replication:
- password: '${PATRONI_REPLICATION_PASSWORD}'
-__EOF__
-
-unset PATRONI_SUPERUSER_PASSWORD PATRONI_REPLICATION_PASSWORD
-export KUBERNETES_NAMESPACE=$PATRONI_KUBERNETES_NAMESPACE
-export POD_NAME=$PATRONI_NAME
-
-exec /usr/bin/python3 /usr/local/bin/patroni /home/postgres/patroni.yml
\ No newline at end of file
diff --git a/openshift/templates/patroni/docker/contrib/root/usr/share/scripts/patroni/health_check.sh b/openshift/templates/patroni/docker/contrib/root/usr/share/scripts/patroni/health_check.sh
deleted file mode 100755
index acc485baa..000000000
--- a/openshift/templates/patroni/docker/contrib/root/usr/share/scripts/patroni/health_check.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-set -Eeu
-set -o pipefail
-
-pg_isready -q && patronictl list --format=json | jq -e ".[] | select(.Member == \"$(hostname)\" and .State == \"running\" and .\"Lag in MB\" == 0)"
diff --git a/openshift/templates/patroni/docker/contrib/root/usr/share/scripts/patroni/post_init.sh b/openshift/templates/patroni/docker/contrib/root/usr/share/scripts/patroni/post_init.sh
deleted file mode 100755
index 52fd5b51e..000000000
--- a/openshift/templates/patroni/docker/contrib/root/usr/share/scripts/patroni/post_init.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env bash
-set -Eeu
-
-if [[ (! -z "$APP_USER") && (! -z "$APP_PASSWORD") && (! -z "$APP_DATABASE")]]; then
- echo "Creating user ${APP_USER}"
- psql "$1" -w -c "create user ${APP_USER} WITH LOGIN ENCRYPTED PASSWORD '${APP_PASSWORD}'"
-
- echo "Creating database ${APP_DATABASE}"
- psql "$1" -w -c "CREATE DATABASE ${APP_DATABASE} OWNER ${APP_USER} ENCODING '${APP_DB_ENCODING:-UTF8}' LC_COLLATE = '${APP_DB_LC_COLLATE:-en_US.UTF-8}' LC_CTYPE = '${APP_DB_LC_CTYPE:-en_US.UTF-8}'"
-
- echo "Creating extensions"
- psql -U postgres -q -d "${APP_DATABASE}" -c 'create extension if not exists hstore'
-
-else
- echo "Skipping user creation"
- echo "Skipping database creation"
-fi
\ No newline at end of file
diff --git a/openshift/templates/patroni/secret-template.yaml b/openshift/templates/patroni/secret-template.yaml
deleted file mode 100644
index 504194557..000000000
--- a/openshift/templates/patroni/secret-template.yaml
+++ /dev/null
@@ -1,63 +0,0 @@
-apiVersion: template.openshift.io/v1
-kind: Template
-metadata:
- annotations:
- description: Patroni Postgresql database cluster template secret
- name: patroni-pgsql-secret-template
-objects:
-- apiVersion: v1
- kind: Secret
- metadata:
- labels:
- app: ${NAME}
- name: template.${NAME}-patroni
- stringData:
- replication-username: ${PATRONI_REPLICATION_USERNAME}
- replication-password: ${PATRONI_REPLICATION_PASSWORD}
- superuser-username: ${PATRONI_SUPERUSER_USERNAME}
- superuser-password: ${PATRONI_SUPERUSER_PASSWORD}
- app-db-name: ${APP_DB_NAME}
- app-db-username: ${APP_DB_USERNAME}
- app-db-password: ${APP_DB_PASSWORD}
- metabaseuser-name: ${METABASEUSER_NAME}
- metabaseuser-password: ${METABASEUSER_PASSWORD}
-parameters:
-- description: The name of the application for labelling all artifacts.
- displayName: Application Name
- name: NAME
- value: patroni
-- description: Username of the superuser account for initialization.
- displayName: Superuser Username
- name: PATRONI_SUPERUSER_USERNAME
- value: postgres
-# generate: expression
-# from: super-[a-zA-Z0-9]{6}
-- description: Password of the superuser account for initialization.
- displayName: Superuser Passsword
- name: PATRONI_SUPERUSER_PASSWORD
- generate: expression
- from: '[a-zA-Z0-9]{32}'
-- description: Username of the replication account for initialization.
- displayName: Replication Username
- name: PATRONI_REPLICATION_USERNAME
- value: replication
-# generate: expression
-# from: rep-[a-zA-Z0-9]{6}
-- description: Password of the replication account for initialization.
- displayName: Repication Passsword
- name: PATRONI_REPLICATION_PASSWORD
- generate: expression
- from: '[a-zA-Z0-9]{32}'
-- name: APP_DB_USERNAME
- from: zeva[a-z]{3}
- generate: expression
-- name: APP_DB_NAME
- value: zeva
-- name: APP_DB_PASSWORD
- generate: expression
- from: '[a-zA-Z0-9]{32}'
-- name: METABASEUSER_NAME
- value: metabaseuser
-- name: METABASEUSER_PASSWORD
- generate: expression
- from: '[a-zA-Z0-9]{8}'
diff --git a/openshift/templates/schemaspy/README.md b/openshift/templates/schemaspy/README.md
deleted file mode 100644
index 2b3bc08cf..000000000
--- a/openshift/templates/schemaspy/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-### Files included
-
-* schemaspy-bc.yaml schemaspy build config
-* schemaspy-dc.yaml schemaspy deployment config
-
diff --git a/openshift/templates/schemaspy/schemaspy-bc.yaml b/openshift/templates/schemaspy/schemaspy-bc.yaml
deleted file mode 100644
index 361d4c5a4..000000000
--- a/openshift/templates/schemaspy/schemaspy-bc.yaml
+++ /dev/null
@@ -1,44 +0,0 @@
-apiVersion: template.openshift.io/v1
-kind: Template
-metadata:
- creationTimestamp: null
- name: schemaspy
-objects:
- - apiVersion: image.openshift.io/v1
- kind: ImageStream
- metadata:
- annotations:
- description: Keeps track of changes in the schemaspy image
- creationTimestamp: null
- name: schemaspy
- spec:
- lookupPolicy:
- local: false
- status:
- dockerImageRepository: ""
- - apiVersion: build.openshift.io/v1
- kind: BuildConfig
- metadata:
- creationTimestamp: null
- name: schemaspy
- spec:
- nodeSelector: null
- output:
- to:
- kind: ImageStreamTag
- name: schemaspy:latest
- namspace: e52f12-tools
- postCommit: {}
- resources: {}
- runPolicy: Serial
- source:
- git:
- uri: https://github.com/bcgov/SchemaSpy.git
- type: Git
- strategy:
- dockerStrategy: {}
- type: Docker
- triggers:
- - type: ConfigChange
- status:
- lastVersion: 0
diff --git a/openshift/templates/schemaspy/schemaspy-dc.yaml b/openshift/templates/schemaspy/schemaspy-dc.yaml
deleted file mode 100644
index b44656778..000000000
--- a/openshift/templates/schemaspy/schemaspy-dc.yaml
+++ /dev/null
@@ -1,192 +0,0 @@
----
-kind: Template
-apiVersion: template.openshift.io/v1
-metadata:
- name: schemaspy-dc
- creationTimestamp:
-parameters:
- - name: SUFFIX
- displayName: suffic
- description: env name plus pr number, sample is -dev-120
- required: true
- - name: ENV_NAME
- value: dev
- displayName: Environment name
- description: 'Environment name, dev, test and prod'
- required: true
- - name: CPU_REQUEST
- displayName: Requested CPU
- description: Requested CPU
- required: true
- value: '20m'
- - name: CPU_LIMIT
- displayName: CPU upper limit
- description: CPU upper limit
- required: true
- value: '1000m'
- - name: MEMORY_REQUEST
- displayName: Requested memory
- description: Requested memory
- required: true
- value: '250M'
- - name: MEMORY_LIMIT
- displayName: Memory upper limit
- description: Memory upper limit
- required: true
- value: '1500M'
- - name: HEALTH_CHECK_DELAY
- value: '150'
- displayName:
- description: readyness and liveness initial delay seconds
- required: true
- - name: OCP_NAME
- displayName: Openshift name
- description: Openshift name
- required: true
-objects:
- - kind: DeploymentConfig
- apiVersion: apps.openshift.io/v1
- metadata:
- name: schemaspy-public${SUFFIX}
- creationTimestamp:
- annotations:
- description: Defines how to deploy the container.
- spec:
- strategy:
- type: Recreate
- recreateParams:
- timeoutSeconds: 1100
- resources: {}
- activeDeadlineSeconds: 1200
- triggers:
- - type: ImageChange
- imageChangeParams:
- automatic: true
- containerNames:
- - schemaspy-public
- from:
- kind: ImageStreamTag
- namespace: e52f12-tools
- name: schemaspy:prod
- lastTriggeredImage:
- - type: ConfigChange
- replicas: 1
- test: false
- selector:
- name: schemaspy-public${SUFFIX}
- template:
- metadata:
- name: schemaspy-public${SUFFIX}
- creationTimestamp:
- labels:
- name: schemaspy-public${SUFFIX}
- spec:
- initContainers:
- - name: wait-for-database
- image: toschneck/wait-for-it:latest
- command: ['sh', '-c', './wait-for-it.sh -t 0 zeva-backend${SUFFIX}.e52f12-${ENV_NAME}.svc.cluster.local:8080']
- containers:
- - name: schemaspy-public
- image:
- ports:
- - containerPort: 8080
- protocol: TCP
- env:
- - name: DATABASE_SERVICE_NAME
- value: patroni-master${SUFFIX}
- - name: POSTGRESQL_DATABASE
- valueFrom:
- secretKeyRef:
- key: app-db-name
- name: patroni${SUFFIX}
- - name: POSTGRESQL_USER
- valueFrom:
- secretKeyRef:
- key: app-db-username
- name: patroni${SUFFIX}
- - name: POSTGRESQL_PASSWORD
- valueFrom:
- secretKeyRef:
- key: app-db-password
- name: patroni${SUFFIX}
- - name: SCHEMASPY_ARGUMENTS
- value: -desc zeva-${SUFFIX}
- resources:
- limits:
- cpu: ${CPU_LIMIT}
- memory: ${MEMORY_LIMIT}
- requests:
- cpu: ${CPU_REQUEST}
- memory: ${MEMORY_REQUEST}
- livenessProbe:
- httpGet:
- path: "/"
- port: 8080
- scheme: HTTP
- initialDelaySeconds: ${{HEALTH_CHECK_DELAY}}
- timeoutSeconds: 3
- periodSeconds: 15
- successThreshold: 1
- failureThreshold: 70
- readinessProbe:
- httpGet:
- path: "/"
- port: 8080
- scheme: HTTP
- initialDelaySeconds: ${{HEALTH_CHECK_DELAY}}
- timeoutSeconds: 3
- periodSeconds: 15
- successThreshold: 1
- failureThreshold: 70
- terminationMessagePath: "/dev/termination-log"
- terminationMessagePolicy: File
- imagePullPolicy: IfNotPresent
- restartPolicy: Always
- terminationGracePeriodSeconds: 30
- dnsPolicy: ClusterFirst
- securityContext: {}
- schedulerName: default-scheduler
- status:
- latestVersion: 0
- observedGeneration: 0
- replicas: 0
- updatedReplicas: 0
- availableReplicas: 0
- unavailableReplicas: 0
- - kind: Service
- apiVersion: v1
- metadata:
- name: schemaspy-public${SUFFIX}
- creationTimestamp:
- annotations:
- description: Exposes and load balances the application pods
- spec:
- ports:
- - name: 8080-tcp
- protocol: TCP
- port: 8080
- targetPort: 8080
- selector:
- name: schemaspy-public${SUFFIX}
- type: ClusterIP
- sessionAffinity: None
- status:
- loadBalancer: {}
- - kind: Route
- apiVersion: route.openshift.io/v1
- metadata:
- name: schemaspy-public${SUFFIX}
- creationTimestamp:
- spec:
- host: schemaspy-public${SUFFIX}.${OCP_NAME}.gov.bc.ca
- to:
- kind: Service
- name: schemaspy-public${SUFFIX}
- weight: 100
- port:
- targetPort: 8080-tcp
- tls:
- termination: edge
- insecureEdgeTerminationPolicy: Allow
- wildcardPolicy: None
- status: