Skip to content

Commit

Permalink
Merge pull request #13 from kikkomep/feature/dataset-type-support
Browse files Browse the repository at this point in the history
Feature/dataset type support
  • Loading branch information
ilveroluca authored Feb 27, 2017
2 parents 1410964 + dc70afd commit 40b8dcc
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 5 deletions.
10 changes: 10 additions & 0 deletions docs/config-file.rst
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,16 @@ should be used for that input.

The relative path is interpreted relative to the `Base path`_.

By default, Galaxy automatically detects the type of an input dataset from its file extension.
If you need to specify the input dataset's type explicitly, you can use the following extended syntax:

.. code-block:: yaml

    inputs:
      DataMatrix:
        file: "sacurine/input/dataMatrix.tsv"
        type: "tsv"

Specifying workflow outputs
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Expand Down
14 changes: 9 additions & 5 deletions wft4galaxy/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -283,9 +283,9 @@ def set_inputs(self, inputs):
:return: a dictionary of mappings (see :class:`WorkflowTestConfiguration`)
"""
for name, config in inputs.items():
self.add_input(name, config["file"])
self.add_input(name, config["file"], config["type"] if "type" in config else None)

def add_input(self, name, file):
def add_input(self, name, file, type=None):
"""
Add a new input mapping.
Expand All @@ -297,7 +297,7 @@ def add_input(self, name, file):
"""
if not name:
raise ValueError("Input name not defined")
self._inputs[name] = {"name": name, "file": file if isinstance(file, list) else [file]}
self._inputs[name] = {"name": name, "file": file if isinstance(file, list) else [file], "type": type}

def remove_input(self, name):
"""
Expand Down Expand Up @@ -1151,8 +1151,12 @@ def run_test(self, base_path=None, inputs=None, params=None, expected_outputs=No
for label, config in inputs.items():
datamap[label] = []
for filename in config["file"]:
datamap[label].append(history.upload_dataset(filename if _os.path.isabs(filename)
else _os.path.join(base_path, filename)))
dataset_filename = filename if _os.path.isabs(filename) else _os.path.join(base_path, filename)
if config["type"]:
datamap[label].append(
history.upload_dataset(dataset_filename, file_type=config["type"]))
else:
datamap[label].append(history.upload_dataset(dataset_filename))

# run the workflow
_logger.info("Workflow '%s' (id: %s) running ...", workflow.name, workflow.id)
Expand Down

0 comments on commit 40b8dcc

Please sign in to comment.