diff --git a/.github/workflows/continuous-integration.yaml b/.github/workflows/continuous-integration.yaml
index 0c7554e55..7ad20ec62 100644
--- a/.github/workflows/continuous-integration.yaml
+++ b/.github/workflows/continuous-integration.yaml
@@ -201,9 +201,9 @@ jobs:
           # Manage instance type
           INSTANCE_TYPE="c5.4xlarge"
           if [[ "${BUILD_TYPE}" == "weekly" ]]; then
-            INSTANCE_TYPE="c6i.16xlarge"
+            INSTANCE_TYPE="c6i.32xlarge"
           elif [[ "${BUILD_TYPE}" == "release" ]]; then
-            INSTANCE_TYPE="c6i.16xlarge"
+            INSTANCE_TYPE="c6i.32xlarge"
           fi

           # Manage python versions
diff --git a/Makefile b/Makefile
index 50e38c9fb..1b471327a 100644
--- a/Makefile
+++ b/Makefile
@@ -23,6 +23,17 @@ POETRY_VERSION:=1.8.4
 APIDOCS_OUTPUT?="./docs/references/api"
 OPEN_PR="true"

+# Check the total number of CPU cores and use min(4, TOTAL_CPUS/4) for pytest
+UNAME_S := $(shell uname -s)
+ifeq ($(UNAME_S),Darwin)  # macOS
+    TOTAL_CPUS := $(shell sysctl -n hw.ncpu)
+else  # Assume Linux
+    TOTAL_CPUS := $(shell nproc)
+endif
+PYTEST_CORES := $(shell if [ `expr $(TOTAL_CPUS) / 4` -lt 4 ]; then expr $(TOTAL_CPUS) / 4; else echo 4; fi)
+# Calculate cores per pytest worker: total_cores / pytest_cores
+FHE_NUMPY_CORES := $(shell expr $(TOTAL_CPUS) / $(PYTEST_CORES))
+
 # At the end of the command, we currently need to force an 'import skorch' in Python in order to
 # avoid an obscure bug that led to all pytest commands to fail when installing dependencies with
 # Poetry >= 1.3. It is however not very clear how this import fixes the issue, as the bug was
@@ -214,7 +225,12 @@ spcc_internal: $(SPCC_DEPS)
 .PHONY: pytest_internal # Run pytest
 pytest_internal:
 	poetry run pytest --version
-	MKL_NUM_THREADS=4 OMP_NUM_THREADS=4 poetry run pytest $(TEST) \
+	MKL_NUM_THREADS=$(FHE_NUMPY_CORES) \
+	OMP_NUM_THREADS=$(FHE_NUMPY_CORES) \
+	OPENBLAS_NUM_THREADS=$(FHE_NUMPY_CORES) \
+	VECLIB_MAXIMUM_THREADS=$(FHE_NUMPY_CORES) \
+	NUMEXPR_NUM_THREADS=$(FHE_NUMPY_CORES) \
+	poetry run pytest $(TEST) \
 		-svv \
 		--count=$(COUNT) \
 		--randomly-dont-reorganize \
@@ -229,7 +245,15 @@ pytest_internal:
 # --durations=10 is to show the 10 slowest tests
 .PHONY: pytest_internal_parallel # Run pytest with multiple CPUs
 pytest_internal_parallel:
-	"$(MAKE)" pytest_internal PYTEST_OPTIONS="-n $(N_CPU) --durations=10 ${PYTEST_OPTIONS}"
+	@echo "Total CPUs: $(TOTAL_CPUS)"
+	@echo "Assigning $(PYTEST_CORES) cores to pytest"
+	@echo "Leaving $(FHE_NUMPY_CORES) cores per pytest worker for FHE/Numpy/sklearn"
+	MKL_NUM_THREADS=$(FHE_NUMPY_CORES) \
+	OMP_NUM_THREADS=$(FHE_NUMPY_CORES) \
+	OPENBLAS_NUM_THREADS=$(FHE_NUMPY_CORES) \
+	VECLIB_MAXIMUM_THREADS=$(FHE_NUMPY_CORES) \
+	NUMEXPR_NUM_THREADS=$(FHE_NUMPY_CORES) \
+	"$(MAKE)" pytest_internal PYTEST_OPTIONS="-n $(PYTEST_CORES) --durations=10 ${PYTEST_OPTIONS}"

 # --global-coverage-infos-json=global-coverage-infos.json is to dump the coverage report in the file
 # --cov PATH is the directory PATH to consider for coverage. Default to SRC_DIR=src
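
Not part of the diff: a minimal standalone shell sketch of the core split the Makefile change computes, i.e. PYTEST_CORES = min(4, TOTAL_CPUS / 4) pytest workers and FHE_NUMPY_CORES = TOTAL_CPUS / PYTEST_CORES BLAS/OpenMP threads per worker. The machine sizes in the loop are illustrative only and are not taken from the patch.

#!/bin/sh
# Sketch of the core split used above (illustrative CPU counts).
for TOTAL_CPUS in 16 64 128; do
    QUARTER=`expr $TOTAL_CPUS / 4`
    if [ "$QUARTER" -lt 4 ]; then
        PYTEST_CORES=$QUARTER
    else
        PYTEST_CORES=4
    fi
    FHE_NUMPY_CORES=`expr $TOTAL_CPUS / $PYTEST_CORES`
    echo "TOTAL_CPUS=$TOTAL_CPUS -> pytest workers: $PYTEST_CORES, threads per worker: $FHE_NUMPY_CORES"
done
# Expected output:
#   TOTAL_CPUS=16 -> pytest workers: 4, threads per worker: 4
#   TOTAL_CPUS=64 -> pytest workers: 4, threads per worker: 16
#   TOTAL_CPUS=128 -> pytest workers: 4, threads per worker: 32

On a c6i.32xlarge (128 vCPUs), this caps pytest at 4 parallel workers and leaves 32 threads to each worker for the numeric libraries, instead of the previous fixed MKL_NUM_THREADS=4 / OMP_NUM_THREADS=4 setting.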