diff --git a/new-docs/source/tutorial/1-getting-started.ipynb b/new-docs/source/tutorial/1-getting-started.ipynb
index c6b7d881a..0cb4402c8 100644
--- a/new-docs/source/tutorial/1-getting-started.ipynb
+++ b/new-docs/source/tutorial/1-getting-started.ipynb
@@ -54,13 +54,13 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from fileformats.medimage import Nifti\n",
+    "from fileformats.medimage import Nifti1\n",
     "\n",
     "nifti_dir = test_dir / \"nifti\"\n",
     "nifti_dir.mkdir()\n",
     "\n",
     "for i in range(10):\n",
-    "    Nifti.sample(nifti_dir, seed=i) # Create a dummy NIfTI file in the dest. directory"
+    "    Nifti1.sample(nifti_dir, seed=i) # Create a dummy NIfTI file in the dest. directory"
    ]
  },
  {
@@ -116,7 +116,7 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
-      "A newer version (0.25) of nipype/pydra is available. You are using 0.25.dev128+g1e817743.d20250104\n"
+      "A newer version (0.25) of nipype/pydra is available. You are using 0.25.dev141+g03c7438b.d20250123\n"
      ]
     }
    ],
@@ -150,7 +150,7 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Result(output=LoadJsonOutputs(out={'a': True, 'b': 'two', 'c': 3, 'd': [7, 0.55, 6]}), runtime=None, errored=False)\n"
+      "Result(task=, outputs=LoadJsonOutputs(out={'a': True, 'b': 'two', 'c': 3, 'd': [7, 0.55, 6]}), runtime=None, errored=False)\n"
      ]
     }
    ],
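
The notebook hunks above move the tutorial onto the NIfTI-1 specific Nifti1 class from fileformats.medimage and record the updated Result repr, which now carries separate task and outputs fields. The sample-data setup those cells rely on is sketched below; it assumes, as the tutorial's own comment states, that Nifti1.sample() writes a dummy NIfTI file into the destination directory and that different seeds give distinct test images.

# Sketch of the dummy-data setup used by the tutorial cells above
# (assumes fileformats-medimage's Nifti1.sample() classmethod, as used in the diff).
from pathlib import Path
from tempfile import mkdtemp

from fileformats.medimage import Nifti1

test_dir = Path(mkdtemp())
nifti_dir = test_dir / "nifti"
nifti_dir.mkdir()

for i in range(10):
    Nifti1.sample(nifti_dir, seed=i)  # a different seed per dummy NIfTI file

print(sorted(p.name for p in nifti_dir.iterdir()))
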
@@ -201,16 +201,19 @@
    "metadata": {},
    "outputs": [
     {
-     "ename": "TypeError",
-     "evalue": "Task.__init__() missing 1 required positional argument: 'definition'",
-     "output_type": "error",
-     "traceback": [
-      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
-      "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)",
-      "Cell \u001b[0;32mIn[6], line 8\u001b[0m\n\u001b[1;32m 5\u001b[0m mrgrid \u001b[38;5;241m=\u001b[39m MrGrid(voxel\u001b[38;5;241m=\u001b[39m(\u001b[38;5;241m0.5\u001b[39m,\u001b[38;5;241m0.5\u001b[39m,\u001b[38;5;241m0.5\u001b[39m))\u001b[38;5;241m.\u001b[39msplit(\u001b[38;5;28minput\u001b[39m\u001b[38;5;241m=\u001b[39mnifti_dir\u001b[38;5;241m.\u001b[39miterdir())\n\u001b[1;32m 7\u001b[0m \u001b[38;5;66;03m# Run the task to resample all NIfTI files\u001b[39;00m\n\u001b[0;32m----> 8\u001b[0m outputs \u001b[38;5;241m=\u001b[39m \u001b[43mmrgrid\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 10\u001b[0m \u001b[38;5;66;03m# Print the locations of the output files\u001b[39;00m\n\u001b[1;32m 11\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mjoin(\u001b[38;5;28mstr\u001b[39m(p) \u001b[38;5;28;01mfor\u001b[39;00m p \u001b[38;5;129;01min\u001b[39;00m outputs\u001b[38;5;241m.\u001b[39moutput))\n",
-      "File \u001b[0;32m~/git/workflows/pydra/pydra/engine/specs.py:299\u001b[0m, in \u001b[0;36mTaskDef.__call__\u001b[0;34m(self, name, audit_flags, cache_dir, cache_locations, messengers, messenger_args, rerun)\u001b[0m\n\u001b[1;32m 296\u001b[0m task_type \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mTask\n\u001b[1;32m 297\u001b[0m definition \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\n\u001b[0;32m--> 299\u001b[0m task \u001b[38;5;241m=\u001b[39m \u001b[43mtask_type\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 300\u001b[0m \u001b[43m \u001b[49m\u001b[43mdefinition\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 301\u001b[0m \u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 302\u001b[0m \u001b[43m \u001b[49m\u001b[43maudit_flags\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43maudit_flags\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 303\u001b[0m \u001b[43m \u001b[49m\u001b[43mcache_dir\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcache_dir\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 304\u001b[0m \u001b[43m \u001b[49m\u001b[43mcache_locations\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcache_locations\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 305\u001b[0m \u001b[43m \u001b[49m\u001b[43mmessenger_args\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmessenger_args\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 306\u001b[0m \u001b[43m \u001b[49m\u001b[43mmessengers\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmessengers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 307\u001b[0m \u001b[43m \u001b[49m\u001b[43mrerun\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrerun\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 308\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 309\u001b[0m result \u001b[38;5;241m=\u001b[39m task()\n\u001b[1;32m 310\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m result\u001b[38;5;241m.\u001b[39merrored:\n",
-      "File \u001b[0;32m~/git/workflows/pydra/pydra/engine/core.py:708\u001b[0m, in \u001b[0;36mWorkflowTask.__init__\u001b[0;34m(self, definition, name, audit_flags, cache_dir, cache_locations, input_spec, cont_dim, messenger_args, messengers, output_spec, rerun, propagate_rerun, **kwargs)\u001b[0m\n\u001b[1;32m 662\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__init__\u001b[39m(\n\u001b[1;32m 663\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 664\u001b[0m definition,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 678\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs,\n\u001b[1;32m 679\u001b[0m ):\n\u001b[1;32m 680\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 681\u001b[0m \u001b[38;5;124;03m Initialize a workflow.\u001b[39;00m\n\u001b[1;32m 682\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 705\u001b[0m \n\u001b[1;32m 706\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[0;32m--> 708\u001b[0m \u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;21;43m__init__\u001b[39;49m\u001b[43m(\u001b[49m\n\u001b[1;32m 709\u001b[0m \u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 710\u001b[0m \u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 711\u001b[0m \u001b[43m \u001b[49m\u001b[43mcont_dim\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcont_dim\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 712\u001b[0m \u001b[43m \u001b[49m\u001b[43mcache_dir\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcache_dir\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 713\u001b[0m \u001b[43m \u001b[49m\u001b[43mcache_locations\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcache_locations\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 714\u001b[0m \u001b[43m \u001b[49m\u001b[43maudit_flags\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43maudit_flags\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 715\u001b[0m \u001b[43m \u001b[49m\u001b[43mmessengers\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmessengers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 716\u001b[0m \u001b[43m \u001b[49m\u001b[43mmessenger_args\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmessenger_args\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 717\u001b[0m \u001b[43m \u001b[49m\u001b[43mrerun\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrerun\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 718\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 720\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgraph \u001b[38;5;241m=\u001b[39m DiGraph(name\u001b[38;5;241m=\u001b[39mname)\n\u001b[1;32m 721\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mname2obj \u001b[38;5;241m=\u001b[39m {}\n",
-      "\u001b[0;31mTypeError\u001b[0m: Task.__init__() missing 1 required positional argument: 'definition'"
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n",
+      "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n",
+      "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n",
+      "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n",
+      "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n",
+      "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n",
+      "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n",
+      "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n",
+      "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n",
+      "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n"
      ]
     }
    ],
@@ -219,13 +222,13 @@
     "\n",
     "# Instantiate the task definition, \"splitting\" over all NIfTI files in the test directory\n",
     "# by splitting the \"input\" input field over all files in the directory\n",
-    "mrgrid = MrGrid(voxel=(0.5,0.5,0.5)).split(input=nifti_dir.iterdir())\n",
+    "mrgrid = MrGrid(operation=\"regrid\", voxel=(0.5,0.5,0.5)).split(in_file=nifti_dir.iterdir())\n",
     "\n",
     "# Run the task to resample all NIfTI files\n",
     "outputs = mrgrid()\n",
     "\n",
     "# Print the locations of the output files\n",
-    "print(\"\\n\".join(str(p) for p in outputs.output))"
+    "print(\"\\n\".join(str(p) for p in outputs.out_file))"
    ]
  },
  {
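
The hunk above switches the basic example to the renamed MrGrid fields: the regrid operation is given explicitly, the files are mapped over "in_file" rather than "input", and the resampled images are read from outputs.out_file. A minimal sketch of that pattern, assuming the pydra.tasks.mrtrix3.v3_0 interface used in the diff and a nifti_dir populated as in the earlier cells:

from pydra.tasks.mrtrix3.v3_0 import MrGrid

# .split() maps the task over every value of "in_file", creating one
# regrid job per NIfTI file in the directory
mrgrid = MrGrid(operation="regrid", voxel=(0.5, 0.5, 0.5)).split(
    in_file=nifti_dir.iterdir()
)

outputs = mrgrid()  # runs all of the resampling jobs

# Because the task was split over "in_file", "out_file" is a list with
# one entry per input image
print("\n".join(str(p) for p in outputs.out_file))
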
"/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n", + "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n", + "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n", + "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n", + "/Users/tclose/Library/Caches/pydra/0.24.dev36+g0deadc43/run-cache/Task_c989fc46725c0d124dc287f463674e63/out_file.mif\n" + ] + } + ], "source": [ "\n", "\n", - "mrgrid_varying_vox_sizes = MrGrid().split(\n", - " (\"input\", \"voxel\"),\n", - " input=nifti_dir.iterdir(),\n", + "mrgrid_varying_vox_sizes = MrGrid(operation=\"regrid\").split(\n", + " (\"in_file\", \"voxel\"),\n", + " in_file=nifti_dir.iterdir(),\n", " # Define a list of voxel sizes to resample the NIfTI files to,\n", " # the list must be the same length as the list of NIfTI files\n", " voxel=[\n", @@ -270,7 +290,7 @@ " ],\n", ")\n", "\n", - "print(mrgrid_varying_vox_sizes().output)" + "print(\"\\n\".join(str(p) for p in outputs.out_file))" ] }, { diff --git a/new-docs/source/tutorial/tst.py b/new-docs/source/tutorial/tst.py index 2da864d27..cbcf155a6 100644 --- a/new-docs/source/tutorial/tst.py +++ b/new-docs/source/tutorial/tst.py @@ -1,15 +1,12 @@ from pathlib import Path from tempfile import mkdtemp -import shutil from fileformats.medimage import Nifti1 from pydra.tasks.mrtrix3.v3_0 import MrGrid -from pydra.utils import user_cache_dir if __name__ == "__main__": - test_dir = Path(mkdtemp()) - shutil.rmtree(user_cache_dir / "run-cache", ignore_errors=True) + test_dir = Path(mkdtemp()) nifti_dir = test_dir / "nifti" nifti_dir.mkdir() @@ -19,14 +16,23 @@ nifti_dir, seed=i ) # Create a dummy NIfTI file in the dest. 
diff --git a/new-docs/source/tutorial/tst.py b/new-docs/source/tutorial/tst.py
index 2da864d27..cbcf155a6 100644
--- a/new-docs/source/tutorial/tst.py
+++ b/new-docs/source/tutorial/tst.py
@@ -1,15 +1,12 @@
 from pathlib import Path
 from tempfile import mkdtemp
-import shutil
 
 from fileformats.medimage import Nifti1
 from pydra.tasks.mrtrix3.v3_0 import MrGrid
-from pydra.utils import user_cache_dir
 
 if __name__ == "__main__":
 
-    test_dir = Path(mkdtemp())
-    shutil.rmtree(user_cache_dir / "run-cache", ignore_errors=True)
+    test_dir = Path(mkdtemp())
 
     nifti_dir = test_dir / "nifti"
     nifti_dir.mkdir()
@@ -19,14 +16,23 @@
             nifti_dir, seed=i
         )  # Create a dummy NIfTI file in the dest. directory
 
-    # Instantiate the task definition, "splitting" over all NIfTI files in the test directory
-    # by splitting the "input" input field over all files in the directory
-    mrgrid = MrGrid(operation="regrid", voxel=(0.5, 0.5, 0.5)).split(
-        in_file=nifti_dir.iterdir()
+    mrgrid_varying_vox_sizes = MrGrid(operation="regrid").split(
+        ("in_file", "voxel"),
+        in_file=nifti_dir.iterdir(),
+        # Define a list of voxel sizes to resample the NIfTI files to,
+        # the list must be the same length as the list of NIfTI files
+        voxel=[
+            (1.0, 1.0, 1.0),
+            (1.0, 1.0, 1.0),
+            (1.0, 1.0, 1.0),
+            (0.5, 0.5, 0.5),
+            (0.75, 0.75, 0.75),
+            (0.5, 0.5, 0.5),
+            (0.5, 0.5, 0.5),
+            (1.0, 1.0, 1.0),
+            (1.25, 1.25, 1.25),
+            (1.25, 1.25, 1.25),
+        ],
     )
-    # Run the task to resample all NIfTI files
-    outputs = mrgrid(worker="cf")
-
-    # Print the locations of the output files
-    print("\n".join(str(p) for p in outputs.out_file))
+print(mrgrid_varying_vox_sizes().out_file)
 
diff --git a/pydra/engine/core.py b/pydra/engine/core.py
index 949c4373a..2593f0205 100644
--- a/pydra/engine/core.py
+++ b/pydra/engine/core.py
@@ -128,7 +128,7 @@ def __init__(
         self.definition = copy(definition)
         # We save the submitter is the definition is a workflow otherwise we don't
         # so the task can be pickled
-        self.submitter = submitter if is_workflow(definition) else None
+        self.submitter = submitter
         self.environment = environment if environment is not None else Native()
         self.name = name
         self.state_index = state_index
diff --git a/pydra/engine/specs.py b/pydra/engine/specs.py
index 62e7b6c6d..f088fbbda 100644
--- a/pydra/engine/specs.py
+++ b/pydra/engine/specs.py
@@ -237,7 +237,7 @@ def split(
                 "set 'overwrite=True' to do so"
             )
         if splitter:
-            unwraped_split = hlpst.unwrap_splitter(splitter)
+            unwraped_split = list(hlpst.unwrap_splitter(splitter))
             if duplicated := [f for f, c in Counter(unwraped_split).items() if c > 1]:
                 raise ValueError(f"Splitter fields {duplicated} are duplicated")
             split_names = set(
@@ -584,17 +584,17 @@ def _run(self, task: "Task[PythonDef]") -> None:
         # Run the actual function
         returned = self.function(**inputs)
         # Collect the outputs and save them into the task.return_values dictionary
-        self.return_values = {f.name: f.default for f in attrs.fields(self.Outputs)}
-        return_names = list(self.return_values)
+        task.return_values = {f.name: f.default for f in attrs.fields(self.Outputs)}
+        return_names = list(task.return_values)
         if returned is None:
-            self.return_values = {nm: None for nm in return_names}
-        elif len(self.return_values) == 1:
+            task.return_values = {nm: None for nm in return_names}
+        elif len(task.return_values) == 1:
             # if only one element in the fields, everything should be returned together
-            self.return_values = {list(self.return_values)[0]: returned}
+            task.return_values = {list(task.return_values)[0]: returned}
         elif isinstance(returned, tuple) and len(return_names) == len(returned):
-            self.return_values = dict(zip(return_names, returned))
+            task.return_values = dict(zip(return_names, returned))
         elif isinstance(returned, dict):
-            self.return_values = {key: returned.get(key, None) for key in return_names}
+            task.return_values = {key: returned.get(key, None) for key in return_names}
         else:
             raise RuntimeError(
                 f"expected {len(return_names)} elements, but {returned} were returned"
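
In specs.py, split() now materialises the unwrapped splitter fields into a list before checking for duplicates. The snippet below illustrates the pitfall this sidesteps: a generator can only be consumed once, so feeding it straight to Counter would leave nothing behind for any later use of the same variable. The unwrap helper here is a simplified stand-in for hlpst.unwrap_splitter, not the real implementation.

from collections import Counter


def unwrap(splitter):
    """Yield each field name in a (possibly nested) splitter definition."""
    if isinstance(splitter, str):
        yield splitter
    else:
        for part in splitter:
            yield from unwrap(part)


fields_gen = unwrap(("in_file", ["voxel", "in_file"]))
Counter(fields_gen)            # consumes the generator ...
assert list(fields_gen) == []  # ... leaving nothing to reuse afterwards

# Materialising the fields first keeps them available after counting
fields = list(unwrap(("in_file", ["voxel", "in_file"])))
duplicated = [f for f, c in Counter(fields).items() if c > 1]
assert duplicated == ["in_file"]
assert fields == ["in_file", "voxel", "in_file"]

The _run() hunks in the same file store the collected values on task.return_values rather than on the definition, which matches the adjacent comment and keeps per-run results off the reusable task definition.
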