diff --git a/MANIFEST.in b/MANIFEST.in
index a4a46ee726..35b3a05c3a 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1 +1,10 @@
 include requirements*.txt
+include src/toil/server/api_spec/workflow_execution_service.swagger.yaml
+include src/toil/test/cwl/colon_test_output_job.yaml
+include src/toil/test/cwl/conditional_wf.yaml
+include src/toil/test/cwl/mock_mpi/fake_mpi.yml
+include src/toil/test/docs/scripts/*
+include src/toil/test/utils/ABCWorkflowDebug/sleep.yaml
+include src/toil/test/utils/ABCWorkflowDebug/*
+recursive-include src/toil/test/ *.cwl
+recursive-include src/toil/test/ *.txt
diff --git a/Makefile b/Makefile
index 185cb86f0a..714ad47609 100644
--- a/Makefile
+++ b/Makefile
@@ -118,6 +118,7 @@ endif
 endif
 
 develop: check_venv
+	python3 setup.py check
 	pip install -e .$(extras) $(packages)
 
 clean_develop: check_venv
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000..dbdf92a73e
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["setuptools>=64", "setuptools_scm>=8"]
+build-backend = "setuptools.build_meta"
diff --git a/setup.py b/setup.py
index b5e02886a0..ad0f716e69 100755
--- a/setup.py
+++ b/setup.py
@@ -111,9 +111,7 @@ def run_setup():
         extras_require=extras_require,
         package_dir={"": "src"},
         packages=find_packages(where="src"),
-        package_data={
-            "": ["*.yml", "*.yaml", "cloud-config", "*.cwl"],
-        },
+        include_package_data=True,
         # Unfortunately, the names of the entry points are hard-coded elsewhere in the code base so
         # you can't just change them here. Luckily, most of them are pretty unique strings, and thus
         # easy to search for.
diff --git a/src/toil/test/__init__.py b/src/toil/test/__init__.py
index 1ebaf74af3..0084af9898 100644
--- a/src/toil/test/__init__.py
+++ b/src/toil/test/__init__.py
@@ -13,6 +13,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import atexit
 import datetime
 import logging
 import os
@@ -28,8 +29,10 @@
 import zoneinfo
 from abc import ABCMeta, abstractmethod
 from collections.abc import Generator
-from contextlib import contextmanager
+from contextlib import ExitStack, contextmanager
+from importlib.resources import as_file, files
 from inspect import getsource
+from pathlib import Path
 from shutil import which
 from tempfile import mkstemp
 from textwrap import dedent
@@ -52,6 +55,24 @@
 
 logger = logging.getLogger(__name__)
 
+def get_data(filename: str) -> str:
+    """Returns an absolute path for a file from this package."""
+    # Normalize the path for the current OS so that joining path components below works reliably.
+    filename = os.path.normpath(filename)
+    filepath = None
+    try:
+        file_manager = ExitStack()
+        atexit.register(file_manager.close)
+        traversable = files("toil") / filename
+        filepath = file_manager.enter_context(as_file(traversable))
+    except ModuleNotFoundError:
+        pass
+    if not filepath or not os.path.isfile(filepath):
+        # Fall back to resolving the name against the toil package directory in a source checkout.
+        filepath = Path(os.path.dirname(__file__)) / ".." / filename
+    return str(filepath.resolve())
+
+
 class ToilTest(unittest.TestCase):
     """
     A common base class for Toil tests.
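
Note on the new helper: get_data() maps a package-relative name such as "test/cwl/whale.txt" to a real filesystem path via importlib.resources, so test data resolves the same way from a source checkout, an installed wheel, or a zipped distribution. Because as_file() may have to extract a resource to a temporary location, the ExitStack is registered with atexit so that extracted copies stay alive for the whole test run and are removed when the interpreter exits. A minimal sketch of the same pattern outside Toil, assuming a hypothetical installed package "mypkg" that ships "data/config.yaml":

    import atexit
    from contextlib import ExitStack
    from importlib.resources import as_file, files

    # Keep any extracted resources alive for the whole process; clean up at exit.
    _file_manager = ExitStack()
    atexit.register(_file_manager.close)


    def package_path(relative_name: str) -> str:
        """Return a concrete filesystem path for a resource shipped inside mypkg."""
        traversable = files("mypkg") / relative_name  # abstract handle; may live inside a zip
        return str(_file_manager.enter_context(as_file(traversable)))  # real path, extracted if needed


    print(package_path("data/config.yaml"))
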
diff --git a/src/toil/test/cwl/cwlTest.py b/src/toil/test/cwl/cwlTest.py index 4098976352..4993dd67d9 100644 --- a/src/toil/test/cwl/cwlTest.py +++ b/src/toil/test/cwl/cwlTest.py @@ -51,6 +51,7 @@ from toil.lib.threading import cpu_count from toil.test import ( ToilTest, + get_data, needs_aws_s3, needs_cwl, needs_docker, @@ -237,7 +238,6 @@ def setUp(self) -> None: """Runs anew before each test to create farm fresh temp dirs.""" self.outDir = f"/tmp/toil-cwl-test-{str(uuid.uuid4())}" os.makedirs(self.outDir) - self.rootDir = self._projectRootPath() self.jobStoreDir = f"./jobstore-{str(uuid.uuid4())}" def tearDown(self) -> None: @@ -254,7 +254,7 @@ def test_cwl_cmdline_input(self) -> None: """ from toil.cwl import cwltoil - cwlfile = "src/toil/test/cwl/conditional_wf.cwl" + cwlfile = get_data("test/cwl/conditional_wf.cwl") args = [cwlfile, "--message", "str", "--sleep", "2"] st = StringIO() # If the workflow runs, it must have had options @@ -278,8 +278,8 @@ def _tester( main_args.extend(["--logDebug", "--outdir", self.outDir]) main_args.extend( [ - os.path.join(self.rootDir, cwlfile), - os.path.join(self.rootDir, jobfile), + cwlfile, + jobfile, ] ) cwltoil.main(main_args, stdout=st) @@ -312,8 +312,8 @@ def _debug_worker_tester( "--debugWorker", "--outdir", self.outDir, - os.path.join(self.rootDir, cwlfile), - os.path.join(self.rootDir, jobfile), + cwlfile, + jobfile, ], stdout=st, ) @@ -325,46 +325,46 @@ def _debug_worker_tester( def revsort(self, cwl_filename: str, tester_fn: TesterFuncType) -> None: tester_fn( - "src/toil/test/cwl/" + cwl_filename, - "src/toil/test/cwl/revsort-job.json", + get_data(f"test/cwl/{cwl_filename}"), + get_data("test/cwl/revsort-job.json"), self._expected_revsort_output(self.outDir), ) def revsort_no_checksum(self, cwl_filename: str, tester_fn: TesterFuncType) -> None: tester_fn( - "src/toil/test/cwl/" + cwl_filename, - "src/toil/test/cwl/revsort-job.json", + get_data(f"test/cwl/{cwl_filename}"), + get_data("test/cwl/revsort-job.json"), self._expected_revsort_nochecksum_output(self.outDir), ) def download(self, inputs: str, tester_fn: TesterFuncType) -> None: - input_location = os.path.join("src/toil/test/cwl", inputs) + input_location = get_data(f"test/cwl/{inputs}") tester_fn( - "src/toil/test/cwl/download.cwl", + get_data("test/cwl/download.cwl"), input_location, self._expected_download_output(self.outDir), ) def load_contents(self, inputs: str, tester_fn: TesterFuncType) -> None: - input_location = os.path.join("src/toil/test/cwl", inputs) + input_location = get_data(f"test/cwl/{inputs}") tester_fn( - "src/toil/test/cwl/load_contents.cwl", + get_data("test/cwl/load_contents.cwl"), input_location, self._expected_load_contents_output(self.outDir), ) def download_directory(self, inputs: str, tester_fn: TesterFuncType) -> None: - input_location = os.path.join("src/toil/test/cwl", inputs) + input_location = get_data(f"test/cwl/{inputs}") tester_fn( - "src/toil/test/cwl/download_directory.cwl", + get_data("test/cwl/download_directory.cwl"), input_location, self._expected_download_output(self.outDir), ) def download_subdirectory(self, inputs: str, tester_fn: TesterFuncType) -> None: - input_location = os.path.join("src/toil/test/cwl", inputs) + input_location = get_data(f"test/cwl/{inputs}") tester_fn( - "src/toil/test/cwl/download_subdirectory.cwl", + get_data("test/cwl/download_subdirectory.cwl"), input_location, self._expected_download_output(self.outDir), ) @@ -374,24 +374,29 @@ def test_mpi(self) -> None: stdout = StringIO() main_args = [ + "--logDebug", 
"--outdir", self.outDir, "--enable-dev", "--enable-ext", "--mpi-config-file", - os.path.join(self.rootDir, "src/toil/test/cwl/mock_mpi/fake_mpi.yml"), - os.path.join(self.rootDir, "src/toil/test/cwl/mpi_simple.cwl"), + get_data("test/cwl/mock_mpi/fake_mpi.yml"), + get_data("test/cwl/mpi_simple.cwl"), ] path = os.environ["PATH"] - os.environ["PATH"] = f"{path}:{self.rootDir}/src/toil/test/cwl/mock_mpi/" + os.environ["PATH"] = ( + f"{path}:{os.path.dirname(get_data('test/cwl/mock_mpi/fake_mpi_run.py'))}" + ) cwltoil.main(main_args, stdout=stdout) os.environ["PATH"] = path - out = json.loads(stdout.getvalue()) + stdout_text = stdout.getvalue() + assert "pids" in stdout_text + out = json.loads(stdout_text) with open(out.get("pids", {}).get("location")[len("file://") :]) as f: two_pids = [int(i) for i in f.read().split()] - self.assertEqual(len(two_pids), 2) - self.assertTrue(isinstance(two_pids[0], int)) - self.assertTrue(isinstance(two_pids[1], int)) + assert len(two_pids) == 2 + assert isinstance(two_pids[0], int) + assert isinstance(two_pids[1], int) @needs_aws_s3 def test_s3_as_secondary_file(self) -> None: @@ -401,8 +406,8 @@ def test_s3_as_secondary_file(self) -> None: main_args = [ "--outdir", self.outDir, - os.path.join(self.rootDir, "src/toil/test/cwl/s3_secondary_file.cwl"), - os.path.join(self.rootDir, "src/toil/test/cwl/s3_secondary_file.json"), + get_data("test/cwl/s3_secondary_file.cwl"), + get_data("test/cwl/s3_secondary_file.json"), ] cwltoil.main(main_args, stdout=stdout) out = json.loads(stdout.getvalue()) @@ -434,8 +439,8 @@ def test_run_revsort_debug_worker(self) -> None: def test_run_colon_output(self) -> None: self._tester( - "src/toil/test/cwl/colon_test_output.cwl", - "src/toil/test/cwl/colon_test_output_job.yaml", + get_data("test/cwl/colon_test_output.cwl"), + get_data("test/cwl/colon_test_output_job.yaml"), self._expected_colon_output(self.outDir), out_name="result", ) @@ -464,8 +469,8 @@ def test_glob_dir_bypass_file_store(self) -> None: # We need to output to the current directory to make sure that # works. 
self._tester( - "src/toil/test/cwl/glob_dir.cwl", - "src/toil/test/cwl/empty.json", + get_data("test/cwl/glob_dir.cwl"), + get_data("test/cwl/empty.json"), self._expected_glob_dir_output(os.getcwd()), main_args=["--bypass-file-store"], output_here=True, @@ -480,8 +485,8 @@ def test_glob_dir_bypass_file_store(self) -> None: def test_required_input_condition_protection(self) -> None: # This doesn't run containerized self._tester( - "src/toil/test/cwl/not_run_required_input.cwl", - "src/toil/test/cwl/empty.json", + get_data("test/cwl/not_run_required_input.cwl"), + get_data("test/cwl/empty.json"), {}, ) @@ -506,7 +511,7 @@ def test_slurm_node_memory(self) -> None: "--slurmDefaultAllMem=True", "--outdir", self.outDir, - os.path.join(self.rootDir, "src/toil/test/cwl/measure_default_memory.cwl"), + get_data("test/cwl/measure_default_memory.cwl"), ] try: log.debug("Start test workflow") @@ -600,8 +605,8 @@ def test_load_contents_file(self) -> None: @unittest.skip("Fails too often due to remote service") def test_bioconda(self) -> None: self._tester( - "src/toil/test/cwl/seqtk_seq.cwl", - "src/toil/test/cwl/seqtk_seq_job.json", + get_data("test/cwl/seqtk_seq.cwl"), + get_data("test/cwl/seqtk_seq_job.json"), self._expected_seqtk_output(self.outDir), main_args=["--beta-conda-dependencies"], out_name="output1", @@ -610,8 +615,8 @@ def test_bioconda(self) -> None: @needs_docker def test_default_args(self) -> None: self._tester( - "src/toil/test/cwl/seqtk_seq.cwl", - "src/toil/test/cwl/seqtk_seq_job.json", + get_data("test/cwl/seqtk_seq.cwl"), + get_data("test/cwl/seqtk_seq_job.json"), self._expected_seqtk_output(self.outDir), main_args=[ "--default-container", @@ -625,8 +630,8 @@ def test_default_args(self) -> None: @unittest.skip("Fails too often due to remote service") def test_biocontainers(self) -> None: self._tester( - "src/toil/test/cwl/seqtk_seq.cwl", - "src/toil/test/cwl/seqtk_seq_job.json", + get_data("test/cwl/seqtk_seq.cwl"), + get_data("test/cwl/seqtk_seq_job.json"), self._expected_seqtk_output(self.outDir), main_args=["--beta-use-biocontainers"], out_name="output1", @@ -637,8 +642,8 @@ def test_biocontainers(self) -> None: @needs_local_cuda def test_cuda(self) -> None: self._tester( - "src/toil/test/cwl/nvidia_smi.cwl", - "src/toil/test/cwl/empty.json", + get_data("test/cwl/nvidia_smi.cwl"), + get_data("test/cwl/empty.json"), {}, out_name="result", ) @@ -709,8 +714,8 @@ def test_streamable(self, extra_args: Optional[list[str]] = None) -> None: Test that a file with 'streamable'=True is a named pipe. This is a CWL1.2 feature. """ - cwlfile = "src/toil/test/cwl/stream.cwl" - jobfile = "src/toil/test/cwl/stream.json" + cwlfile = get_data("test/cwl/stream.cwl") + jobfile = get_data("test/cwl/stream.json") out_name = "output" jobstore = f"--jobStore=aws:us-west-1:toil-stream-{uuid.uuid4()}" from toil.cwl import cwltoil @@ -721,8 +726,8 @@ def test_streamable(self, extra_args: Optional[list[str]] = None) -> None: "--outdir", self.outDir, jobstore, - os.path.join(self.rootDir, cwlfile), - os.path.join(self.rootDir, jobfile), + cwlfile, + jobfile, ] if extra_args: args = extra_args + args @@ -747,8 +752,8 @@ def test_preemptible(self) -> None: """ Tests that the http://arvados.org/cwl#UsePreemptible extension is supported. 
""" - cwlfile = "src/toil/test/cwl/preemptible.cwl" - jobfile = "src/toil/test/cwl/empty.json" + cwlfile = get_data("test/cwl/preemptible.cwl") + jobfile = get_data("test/cwl/empty.json") out_name = "output" from toil.cwl import cwltoil @@ -756,8 +761,8 @@ def test_preemptible(self) -> None: args = [ "--outdir", self.outDir, - os.path.join(self.rootDir, cwlfile), - os.path.join(self.rootDir, jobfile), + cwlfile, + jobfile, ] cwltoil.main(args, stdout=st) out = json.loads(st.getvalue()) @@ -771,16 +776,16 @@ def test_preemptible_expression(self) -> None: """ Tests that the http://arvados.org/cwl#UsePreemptible extension is validated. """ - cwlfile = "src/toil/test/cwl/preemptible_expression.cwl" - jobfile = "src/toil/test/cwl/preemptible_expression.json" + cwlfile = get_data("test/cwl/preemptible_expression.cwl") + jobfile = get_data("test/cwl/preemptible_expression.json") from toil.cwl import cwltoil st = StringIO() args = [ "--outdir", self.outDir, - os.path.join(self.rootDir, cwlfile), - os.path.join(self.rootDir, jobfile), + cwlfile, + jobfile, ] try: cwltoil.main(args, stdout=st) @@ -938,8 +943,7 @@ def setUp(self) -> None: """Runs anew before each test to create farm fresh temp dirs.""" self.outDir = f"/tmp/toil-cwl-test-{str(uuid.uuid4())}" os.makedirs(self.outDir) - self.rootDir = self._projectRootPath() - self.cwlSpec = os.path.join(self.rootDir, "src/toil/test/cwl/spec") + self.cwlSpec = get_data("test/cwl/spec") self.workDir = os.path.join(self.cwlSpec, "v1.0") # The latest cwl git commit hash from https://github.com/common-workflow-language/common-workflow-language. # Update it to get the latest tests. @@ -1072,15 +1076,13 @@ class CWLv11Test(ToilTest): Run the CWL 1.1 conformance tests in various environments. """ - rootDir: str cwlSpec: str test_yaml: str @classmethod def setUpClass(cls) -> None: """Runs anew before each test.""" - cls.rootDir = cls._projectRootPath() - cls.cwlSpec = os.path.join(cls.rootDir, "src/toil/test/cwl/spec_v11") + cls.cwlSpec = get_data("test/cwl/spec_v11") cls.test_yaml = os.path.join(cls.cwlSpec, "conformance_tests.yaml") # TODO: Use a commit zip in case someone decides to rewrite master's history? url = "https://github.com/common-workflow-language/cwl-v1.1.git" @@ -1152,7 +1154,7 @@ class CWLv12Test(ToilTest): def setUpClass(cls) -> None: """Runs anew before each test.""" cls.rootDir = cls._projectRootPath() - cls.cwlSpec = os.path.join(cls.rootDir, "src/toil/test/cwl/spec_v12") + cls.cwlSpec = get_data("test/cwl/spec_v12") cls.test_yaml = os.path.join(cls.cwlSpec, "conformance_tests.yaml") # TODO: Use a commit zip in case someone decides to rewrite master's history? 
url = "https://github.com/common-workflow-language/cwl-v1.2.git" @@ -1770,8 +1772,8 @@ def test_download_structure(tmp_path: Path) -> None: @pytest.mark.timeout(300) def test_import_on_workers() -> None: args = [ - "src/toil/test/cwl/download.cwl", - "src/toil/test/cwl/download_file.json", + get_data("test/cwl/download.cwl"), + get_data("test/cwl/download_file.json"), "--runImportsOnWorkers", "--importWorkersDisk=10MiB", "--realTimeLogging=True", diff --git a/src/toil/test/provisioners/aws/awsProvisionerTest.py b/src/toil/test/provisioners/aws/awsProvisionerTest.py index cae25e4dd6..c4e9349010 100644 --- a/src/toil/test/provisioners/aws/awsProvisionerTest.py +++ b/src/toil/test/provisioners/aws/awsProvisionerTest.py @@ -28,6 +28,7 @@ from toil.provisioners.aws.awsProvisioner import AWSProvisioner from toil.test import ( ToilTest, + get_data, integrative, needs_aws_ec2, needs_fetchable_appliance, @@ -286,7 +287,7 @@ def _getScript(self): # Fixme: making this file larger causes the test to hang f.write("01234567890123456789012345678901") self.rsyncUtil( - os.path.join(self._projectRootPath(), "src/toil/test/sort/sort.py"), + get_data("test/sort/sort.py"), ":" + self.script(), ) self.rsyncUtil(fileToSort, ":" + self.data("sortFile")) @@ -502,7 +503,7 @@ def _getScript(self): with open(sseKeyFile, "w") as f: f.write("01234567890123456789012345678901") self.rsyncUtil( - os.path.join(self._projectRootPath(), "src/toil/test/sort/sort.py"), + get_data("test/sort/sort.py"), ":" + self.script(), ) self.rsyncUtil(sseKeyFile, ":" + self.data("keyFile")) diff --git a/src/toil/test/provisioners/clusterTest.py b/src/toil/test/provisioners/clusterTest.py index d6428312fb..273c27a3ea 100644 --- a/src/toil/test/provisioners/clusterTest.py +++ b/src/toil/test/provisioners/clusterTest.py @@ -39,7 +39,7 @@ class AbstractClusterTest(ToilTest): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) - self.keyName = os.getenv("TOIL_AWS_KEYNAME").strip() or "id_rsa" + self.keyName = os.getenv("TOIL_AWS_KEYNAME", "id_rsa").strip() self.clusterName = f"aws-provisioner-test-{uuid4()}" self.leaderNodeType = "t2.medium" self.clusterType = "mesos" diff --git a/src/toil/test/provisioners/gceProvisionerTest.py b/src/toil/test/provisioners/gceProvisionerTest.py index a2852e0561..f92d79bdad 100644 --- a/src/toil/test/provisioners/gceProvisionerTest.py +++ b/src/toil/test/provisioners/gceProvisionerTest.py @@ -21,6 +21,7 @@ from toil.test import ( ToilTest, + get_data, integrative, needs_fetchable_appliance, needs_google_project, @@ -215,7 +216,7 @@ def _getScript(self): # Fixme: making this file larger causes the test to hang f.write("01234567890123456789012345678901") self.rsyncUtil( - os.path.join(self._projectRootPath(), "src/toil/test/sort/sort.py"), + get_data("test/sort/sort.py"), ":/home/sort.py", ) self.rsyncUtil(fileToSort, ":/home/sortFile") @@ -325,7 +326,7 @@ def _getScript(self): with open(sseKeyFile, "w") as f: f.write("01234567890123456789012345678901") self.rsyncUtil( - os.path.join(self._projectRootPath(), "src/toil/test/sort/sort.py"), + get_data("test/sort/sort.py"), ":/home/sort.py", ) self.rsyncUtil(sseKeyFile, ":/home/keyFile") @@ -377,9 +378,7 @@ def setUp(self): def _getScript(self): self.rsyncUtil( - os.path.join( - self._projectRootPath(), "src/toil/test/provisioners/restartScript.py" - ), + get_data("test/provisioners/restartScript.py"), ":" + self.scriptName, ) diff --git a/src/toil/test/utils/ABCWorkflowDebug/debugWorkflow.py 
b/src/toil/test/utils/ABCWorkflowDebug/debugWorkflow.py index e32f59aeab..dfdf8da2a6 100644 --- a/src/toil/test/utils/ABCWorkflowDebug/debugWorkflow.py +++ b/src/toil/test/utils/ABCWorkflowDebug/debugWorkflow.py @@ -6,6 +6,7 @@ from toil.common import Toil from toil.job import Job from toil.lib.io import mkdtemp +from toil.test import get_data from toil.version import python logger = logging.getLogger(__name__) @@ -164,20 +165,18 @@ def broken_job(job, num): with Toil(options) as toil: B_file0 = toil.importFile( - "file://" - + os.path.abspath("src/toil/test/utils/ABCWorkflowDebug/B_file.txt") + f"file://{get_data('test/utils/ABCWorkflowDebug/B_file.txt')}" ) B_file0_preserveThisFilename = "B_file.txt" B_file = (B_file0, B_file0_preserveThisFilename) file_maker0 = toil.importFile( - "file://" - + os.path.abspath("src/toil/test/utils/ABCWorkflowDebug/mkFile.py") + f"file://{get_data('test/utils/ABCWorkflowDebug/mkFile.py')}" ) file_maker0_preserveThisFilename = "mkFile.py" file_maker = (file_maker0, file_maker0_preserveThisFilename) - filepath = os.path.abspath("src/toil/test/utils/ABCWorkflowDebug/ABC.txt") + filepath = get_data("test/utils/ABCWorkflowDebug/ABC.txt") job0 = Job.wrapJobFn(initialize_jobs) job1 = Job.wrapJobFn(writeA, file_maker) diff --git a/src/toil/test/utils/toilDebugTest.py b/src/toil/test/utils/toilDebugTest.py index 010cff6d79..7fc389bbd5 100644 --- a/src/toil/test/utils/toilDebugTest.py +++ b/src/toil/test/utils/toilDebugTest.py @@ -17,7 +17,7 @@ import tempfile from toil.lib.resources import glob -from toil.test import ToilTest, needs_wdl, slow +from toil.test import ToilTest, get_data, needs_wdl, slow from toil.version import python logger = logging.getLogger(__name__) @@ -28,7 +28,7 @@ def workflow_debug_jobstore() -> str: subprocess.check_call( [ python, - os.path.abspath("src/toil/test/utils/ABCWorkflowDebug/debugWorkflow.py"), + get_data("test/utils/ABCWorkflowDebug/debugWorkflow.py"), job_store_path, ] ) @@ -49,7 +49,7 @@ def testJobStoreContents(): subprocess.check_call( [ python, - os.path.abspath("src/toil/utils/toilDebugFile.py"), + get_data("utils/toilDebugFile.py"), workflow_debug_jobstore(), "--logDebug", "--listFilesInJobStore=True", @@ -86,7 +86,7 @@ def fetchFiles(symLink: bool, jobStoreDir: str, outputDir: str): contents = ["A.txt", "B.txt", "C.txt", "ABC.txt", "mkFile.py"] cmd = [ python, - os.path.abspath("src/toil/utils/toilDebugFile.py"), + get_data("utils/toilDebugFile.py"), jobStoreDir, "--fetch", "*A.txt", @@ -137,7 +137,7 @@ def _get_job_store_and_job_id(self): subprocess.check_call( [ python, - os.path.abspath("src/toil/test/docs/scripts/example_alwaysfail.py"), + get_data("test/docs/scripts/example_alwaysfail.py"), "--retryCount=0", "--logCritical", "--disableProgress", @@ -172,9 +172,7 @@ def _get_wdl_job_store_and_job_name(self): wf_result = subprocess.run( [ "toil-wdl-runner", - os.path.abspath( - "src/toil/test/docs/scripts/example_alwaysfail_with_files.wdl" - ), + get_data("test/docs/scripts/example_alwaysfail_with_files.wdl"), "--retryCount=0", "--logDebug", "--disableProgress", diff --git a/src/toil/test/utils/toilKillTest.py b/src/toil/test/utils/toilKillTest.py index 1f8ef4b36a..945e2bf08b 100644 --- a/src/toil/test/utils/toilKillTest.py +++ b/src/toil/test/utils/toilKillTest.py @@ -23,7 +23,7 @@ from toil.common import Toil from toil.jobStores.abstractJobStore import NoSuchFileException, NoSuchJobStoreException from toil.jobStores.utils import generate_locator -from toil.test import ToilTest, needs_aws_s3, needs_cwl +from 
toil.test import ToilTest, get_data, needs_aws_s3, needs_cwl logger = logging.getLogger(__name__) @@ -40,8 +40,8 @@ def __init__(self, *args, **kwargs): def setUp(self): """Shared test variables.""" - self.cwl = os.path.abspath("src/toil/test/utils/ABCWorkflowDebug/sleep.cwl") - self.yaml = os.path.abspath("src/toil/test/utils/ABCWorkflowDebug/sleep.yaml") + self.cwl = get_data("test/utils/ABCWorkflowDebug/sleep.cwl") + self.yaml = get_data("test/utils/ABCWorkflowDebug/sleep.yaml") def tearDown(self): """Default tearDown for unittest.""" @@ -90,8 +90,8 @@ def test_cwl_toil_kill(self): class ToilKillTestWithAWSJobStore(ToilKillTest): """A set of test cases for "toil kill" using the AWS job store.""" - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + def setUp(self): + super().setUp() self.job_store = generate_locator("aws", decoration="testkill") diff --git a/src/toil/test/utils/utilsTest.py b/src/toil/test/utils/utilsTest.py index 1525f44d17..f2ee7c0f13 100644 --- a/src/toil/test/utils/utilsTest.py +++ b/src/toil/test/utils/utilsTest.py @@ -33,6 +33,7 @@ from toil.lib.bioio import system from toil.test import ( ToilTest, + get_data, get_temp_file, integrative, needs_aws_ec2, @@ -444,10 +445,10 @@ def testGetStatusFailedCWLWF(self): self.toilDir, "--clean=never", "--badWorker=1", - "src/toil/test/cwl/sorttool.cwl", + get_data("test/cwl/sorttool.cwl"), "--reverse", "--input", - "src/toil/test/cwl/whale.txt", + get_data("test/cwl/whale.txt"), f"--outdir={self.tempDir}", ] logger.info("Run command: %s", " ".join(cmd)) @@ -465,10 +466,10 @@ def testGetStatusSuccessfulCWLWF(self): "--jobStore", self.toilDir, "--clean=never", - "src/toil/test/cwl/sorttool.cwl", + get_data("test/cwl/sorttool.cwl"), "--reverse", "--input", - "src/toil/test/cwl/whale.txt", + get_data("test/cwl/whale.txt"), f"--outdir={self.tempDir}", ] wf = subprocess.Popen(cmd) @@ -487,7 +488,7 @@ def testPrintJobLog(self, mock_print): "--jobStore", self.toilDir, "--clean=never", - "src/toil/test/cwl/alwaysfails.cwl", + get_data("test/cwl/alwaysfails.cwl"), "--message", "Testing", ] diff --git a/src/toil/test/wdl/wdltoil_test.py b/src/toil/test/wdl/wdltoil_test.py index 68beb2bf79..f26d7a82a9 100644 --- a/src/toil/test/wdl/wdltoil_test.py +++ b/src/toil/test/wdl/wdltoil_test.py @@ -1,7 +1,6 @@ import json import logging import os -import pytest import re import shutil import string @@ -11,12 +10,14 @@ from unittest.mock import patch from uuid import uuid4 +import pytest import WDL.Error import WDL.Expr from toil.fileStores import FileID from toil.test import ( ToilTest, + get_data, needs_docker, needs_docker_cuda, needs_google_storage, @@ -203,8 +204,8 @@ def setUpClass(cls) -> None: def test_MD5sum(self): """Test if Toil produces the same outputs as known good outputs for WDL's GATK tutorial #1.""" - wdl = os.path.abspath("src/toil/test/wdl/md5sum/md5sum.1.0.wdl") - json_file = os.path.abspath("src/toil/test/wdl/md5sum/md5sum.json") + wdl = get_data("test/wdl/md5sum/md5sum.1.0.wdl") + json_file = get_data("test/wdl/md5sum/md5sum.json") result_json = subprocess.check_output( self.base_command @@ -221,7 +222,7 @@ def test_url_to_file(self): """ Test if web URL strings can be coerced to usable Files. """ - wdl = os.path.abspath("src/toil/test/wdl/testfiles/url_to_file.wdl") + wdl = get_data("test/wdl/testfiles/url_to_file.wdl") result_json = subprocess.check_output( self.base_command @@ -238,7 +239,7 @@ def test_wait(self): """ Test if Bash "wait" works in WDL scripts. 
""" - wdl = os.path.abspath("src/toil/test/wdl/testfiles/wait.wdl") + wdl = get_data("test/wdl/testfiles/wait.wdl") result_json = subprocess.check_output( self.base_command @@ -262,7 +263,7 @@ def test_all_call_outputs(self): """ Test if Toil can collect all call outputs from a workflow that doesn't expose them. """ - wdl = os.path.abspath("src/toil/test/wdl/testfiles/not_enough_outputs.wdl") + wdl = get_data("test/wdl/testfiles/not_enough_outputs.wdl") # With no flag we don't include the call outputs result_json = subprocess.check_output( @@ -319,7 +320,7 @@ def test_croo_detection(self): """ Test if Toil can detect and do something sensible with Cromwell Output Organizer workflows. """ - wdl = os.path.abspath("src/toil/test/wdl/testfiles/croo.wdl") + wdl = get_data("test/wdl/testfiles/croo.wdl") # With no flag we should include all task outputs result_json = subprocess.check_output( @@ -357,7 +358,7 @@ def test_caching(self): """ Test if Toil can cache task runs. """ - wdl = os.path.abspath('src/toil/test/wdl/testfiles/random.wdl') + wdl = get_data("test/wdl/testfiles/random.wdl") caching_env = dict(os.environ) caching_env["MINIWDL__CALL_CACHE__GET"] = "true" @@ -412,7 +413,7 @@ def test_url_to_optional_file(self): """ Test if missing and error-producing URLs are handled correctly for optional File? values. """ - wdl = os.path.abspath("src/toil/test/wdl/testfiles/url_to_optional_file.wdl") + wdl = get_data("test/wdl/testfiles/url_to_optional_file.wdl") def run_for_code(code: int) -> dict: """ @@ -457,8 +458,8 @@ def test_missing_output_directory(self): """ Test if Toil can run a WDL workflow into a new directory. """ - wdl = os.path.abspath("src/toil/test/wdl/md5sum/md5sum.1.0.wdl") - json_file = os.path.abspath("src/toil/test/wdl/md5sum/md5sum.json") + wdl = get_data("test/wdl/md5sum/md5sum.1.0.wdl") + json_file = get_data("test/wdl/md5sum/md5sum.json") subprocess.check_call( self.base_command + [ @@ -474,8 +475,8 @@ def test_missing_output_directory(self): @needs_singularity_or_docker def test_miniwdl_self_test(self, extra_args: Optional[list[str]] = None) -> None: """Test if the MiniWDL self test runs and produces the expected output.""" - wdl_file = os.path.abspath("src/toil/test/wdl/miniwdl_self_test/self_test.wdl") - json_file = os.path.abspath("src/toil/test/wdl/miniwdl_self_test/inputs.json") + wdl_file = get_data("test/wdl/miniwdl_self_test/self_test.wdl") + json_file = get_data("test/wdl/miniwdl_self_test/inputs.json") result_json = subprocess.check_output( self.base_command @@ -656,8 +657,8 @@ def test_giraffe(self): @needs_google_storage def test_gs_uri(self): """Test if Toil can access Google Storage URIs.""" - wdl = os.path.abspath("src/toil/test/wdl/md5sum/md5sum.1.0.wdl") - json_file = os.path.abspath("src/toil/test/wdl/md5sum/md5sum-gs.json") + wdl = get_data("test/wdl/md5sum/md5sum.1.0.wdl") + json_file = get_data("test/wdl/md5sum/md5sum-gs.json") result_json = subprocess.check_output( self.base_command + [wdl, json_file, "-o", self.output_dir, "--logDebug"]