
Commit 723fe2b

Enable SQL integration test and see failures with actual script execution
1 parent dc98496

5 files changed: +99 -54 lines changed

.github/workflows/build-test.yaml (+15 -22)

@@ -73,40 +73,32 @@ jobs:
         run: python3 -m pytest -v --cov=camp test
 
   test-integration-sql:
-    if: false # FIXME re-enable after SQL updates
     needs: [build]
     runs-on: ubuntu-22.04
-    # Job executes within a container
-    container: ubuntu:22.04
-
-    services:
-      postgres:
-        image: postgres:14
-        env:
-          POSTGRES_DB: runner
-          POSTGRES_USER: postgres
-          POSTGRES_PASSWORD: postgres
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
+    strategy:
+      max-parallel: 4
+      matrix:
+        python-version: ["3.10"]
 
     env:
       PGHOST: postgres
-      PGDATABASE: runner
       PGUSER: postgres
       PGPASSWORD: postgres
+      PGDATABASE: runner
     steps:
-      - name: Prep Container OS
-        run: |
-          apt-get update
-          apt-get install -y git postgresql-client-14 python3-pip
-          pip3 install --upgrade pip
       - name: Checkout repository
         uses: actions/checkout@v3
         with:
          submodules: recursive
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y \
+            postgresql-client-14
       - name: Test
         run: ./integration-test/run.sh sql
 

@@ -117,6 +109,7 @@ jobs:
       max-parallel: 4
       matrix:
         python-version: ["3.10"]
+
     steps:
       - name: Checkout repository
         uses: actions/checkout@v3

integration-test/run.sh (+31 -9)

@@ -22,21 +22,23 @@ if [[ ! -d ${SELFDIR}/deps/adms ]]
 then
   git clone --branch apl-fy24 https://github.com/JHUAPL-DTNMA/dtnma-adms.git ${SELFDIR}/deps/adms
 fi
+
 if [[ ! -d ${SELFDIR}/deps/dtnma-ace ]]
 then
   git clone --branch apl-fy24 https://github.com/JHUAPL-DTNMA/dtnma-ace.git ${SELFDIR}/deps/dtnma-ace
 fi
 pip3 install ${SELFDIR}/deps/dtnma-ace
 
+if [[ ! -d ${SELFDIR}/deps/dtnma-tools ]]
+then
+  git clone --branch apl-fy24 https://github.com/JHUAPL-DTNMA/dtnma-tools.git ${SELFDIR}/deps/dtnma-tools
+  pushd ${SELFDIR}/deps/dtnma-tools
+  git submodule update --init --recursive
+  popd
+fi
+
 if [[ "$1" = "c" ]]
 then
-  if [[ ! -d ${SELFDIR}/deps/dtnma-tools ]]
-  then
-    git clone --branch apl-fy24 https://github.com/JHUAPL-DTNMA/dtnma-tools.git ${SELFDIR}/deps/dtnma-tools
-    pushd ${SELFDIR}/deps/dtnma-tools
-    git submodule update --init --recursive
-    popd
-  fi
   pushd ${SELFDIR}/deps/dtnma-tools
   git pull
   ./deps.sh

@@ -45,18 +47,38 @@ then
   then
     pushd ${SELFDIR}/deps/dtnma-tools
     ./prep.sh -DTEST_MEMCHECK=OFF -DTEST_COVERAGE=OFF \
-        -DBUILD_DOCS_API=OFF -DBUILD_DOCS_MAN=OFF
+      -DBUILD_DOCS_API=OFF -DBUILD_DOCS_MAN=OFF
     ./build.sh
     # verification that the initial build is good
     ./build.sh check
     popd
   fi
   PYTEST_ARGS="${SELFDIR}/test_c_integration.py"
+elif [[ "$1" = "sql" ]]
+then
+  DOCKER=${DOCKER:-docker}
+  COMPOSE_ARGS="-f ${SELFDIR}/sql-compose.yml"
+  ${DOCKER} compose ${COMPOSE_ARGS} build
+  ${DOCKER} compose ${COMPOSE_ARGS} up --detach --force-recreate --remove-orphans
+
+  export PGHOST="localhost"
+  # all other PG* test environment is passed through this script
+
+  PYTEST_ARGS="${SELFDIR}/test_sql_integration.py"
 else
   echo "Unknown test type \"$1\"" >/dev/stderr
   exit 1
 fi
 
 echo "Running tests..."
 pip3 install '.[test]'
-python3 -m pytest -v --cov=camp ${PYTEST_ARGS}
+TESTEXIT=0
+python3 -m pytest -v --cov=camp --log-level=info ${PYTEST_ARGS} || TESTEXIT=$?
+
+if [[ "$1" = "sql" ]]
+then
+  ${DOCKER} compose ${COMPOSE_ARGS} down --rmi local --volumes
+  ${DOCKER} compose ${COMPOSE_ARGS} rm --force --volumes
+fi
+
+exit ${TESTEXIT}
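
To exercise the new sql mode locally rather than in CI, a minimal sketch (not part of this commit) is to export the same PG* values the workflow provides and call the script; run.sh itself forces PGHOST=localhost, picks the container engine from the DOCKER variable (defaulting to docker), and installs the test extras with pip3 install '.[test]':

    # hypothetical local invocation; the credential values simply mirror the
    # CI job and are consumed by the compose file's postgres container
    export PGUSER=postgres PGPASSWORD=postgres PGDATABASE=runner
    ./integration-test/run.sh sql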

integration-test/sql-compose.yml (+19)

@@ -0,0 +1,19 @@
+volumes:
+  postgres-data:
+    # temporary storage
+
+services:
+  postgres:
+    build:
+      context: deps/dtnma-tools/refdb-sql
+      dockerfile: Containerfile
+    hostname: postgres
+    restart: no
+    environment:
+      POSTGRES_USER: ${PGUSER}
+      POSTGRES_PASSWORD: ${PGPASSWORD}
+      POSTGRES_DB: ${PGDATABASE}
+    volumes:
+      - "postgres-data:/var/lib/postgresql/data"
+    ports:
+      - "5432:5432/tcp"

integration-test/test_c_integration.py (+5 -5)

@@ -1,9 +1,9 @@
-import pytest
 import logging
 import os
-import ace
+import pytest
 import shutil
 import subprocess
+import ace
 from camp.generators.lib.campch import yang_to_c
 from .util import ADMS_DIR, DTNMA_TOOLS_DIR, adm_files, run_camp
 

@@ -42,9 +42,9 @@ def test_adms(adm):
     adm_set = ace.AdmSet()
     # generates a new file
     norm_name = adm_set.load_from_file(filepath).norm_name
-    impl = f"{yang_to_c(norm_name)}.c"
-    LOGGER.info('Looking for implementation source %s', impl)
-    outdir = _find_dir(impl, OUT_DIR)
+    filename = f"{yang_to_c(norm_name)}.c"
+    LOGGER.info('Looking for implementation source %s', filename)
+    outdir = _find_dir(filename, OUT_DIR)
     if outdir is None:
         pytest.skip('No existing source')
     LOGGER.info('Found at %s', outdir)
integration-test/test_sql_integration.py (+29 -18)

@@ -1,10 +1,13 @@
-import psycopg2
+import logging
 import os
-import ace
+import psycopg2
 import pytest
-
+import tempfile
+import ace
+from camp.generators.lib.campch import yang_to_sql
 from .util import ADMS_DIR, adm_files, run_camp
-from camp.generators.create_sql import yang_to_sql
+
+LOGGER = logging.getLogger(__name__)
 
 
 @pytest.fixture(scope="session", autouse=True)

@@ -27,10 +30,9 @@ def setup():
     cursor = conn.cursor()
 
     # reusable objects that the tests will need
-    yield cursor,
+    yield conn,
 
     # teardown: close connections
-    cursor.close()
     conn.close()
 
 

@@ -41,22 +43,31 @@ def test_adms(setup, adm):
     Resulting sql files will be placed in ADMS_DIR/amp-sql/Agent_Scripts and executed in the anms library.
     """
 
-    if adm == 'ietf-amm.yang': # doesn't have unique enum
-        pytest.xfail("ADM with known issue")
-
-    cursor = setup[0]
+    conn = setup[0]
 
     # input file full filepath
     filepath = os.path.join(ADMS_DIR, adm)
 
+    # output to temporary
+    adm_set = ace.AdmSet()
+    norm_name = adm_set.load_from_file(filepath).norm_name
+    filename = f"{yang_to_sql(norm_name)}.sql"
+    LOGGER.info('Expecting SQL source %s', filename)
+
+    outdir = tempfile.TemporaryDirectory()
+
     # run camp
-    exitcode = run_camp(filepath, ADMS_DIR, only_sql=True, only_ch=False)
+    exitcode = run_camp(filepath, outdir.name, only_sql=True, only_ch=False)
     assert 0 == exitcode
 
-    # execute sql
-    adm_set = ace.AdmSet()
-    norm_name = yang_to_sql(adm_set.load_from_file(filepath).norm_name)
-    sql_file = os.path.join(ADMS_DIR, "amp-sql", "Agent_Scripts", 'adm_{name}.sql'.format(name=norm_name))
-    with open(sql_file, "r") as f:
-        cursor.execute(f.read())
-    cursor.execute("rollback")
+    # verify the generated source executes
+    file_path = os.path.join(outdir.name, filename)
+    with open(file_path, "r") as srcfile:
+        script = srcfile.read()
+    LOGGER.info('Generated script:\n%s', script)
+
+    try:
+        with conn.cursor() as curs:
+            curs.execute(script)
+    finally:
+        conn.rollback()
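
With the compose stack already running, this module can also be run directly through pytest instead of via run.sh; a minimal sketch, assuming the package's test extras are installed and the PG* environment matches what run.sh would export:

    # hypothetical direct run of only the SQL integration tests
    export PGHOST=localhost PGUSER=postgres PGPASSWORD=postgres PGDATABASE=runner
    python3 -m pytest -v --log-level=info integration-test/test_sql_integration.py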
