diff --git a/.travis.yml b/.travis.yml
index cd295baa79d0..cca0293dbaaa 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,6 +10,7 @@ env:
- TOX_ENV=qunit
- TOX_ENV=py27-first_startup
- TOX_ENV=py27-lint_docstring_include_list
+ - TOX_ENV=cwl_green
matrix:
include:
diff --git a/client/galaxy/scripts/mvc/form/form-parameters.js b/client/galaxy/scripts/mvc/form/form-parameters.js
index fe2f3c9dd5b1..848e83d5e985 100644
--- a/client/galaxy/scripts/mvc/form/form-parameters.js
+++ b/client/galaxy/scripts/mvc/form/form-parameters.js
@@ -4,6 +4,7 @@
import { getGalaxyInstance } from "app";
import Utils from "utils/utils";
import Ui from "mvc/ui/ui-misc";
+import UiField from "mvc/ui/ui-field";
import SelectContent from "mvc/ui/ui-select-content";
import SelectLibrary from "mvc/ui/ui-select-library";
import SelectFtp from "mvc/ui/ui-select-ftp";
@@ -34,6 +35,7 @@ export default Backbone.Model.extend({
ftpfile: "_fieldFtp",
upload: "_fieldUpload",
rules: "_fieldRulesEdit",
+ field: "_fieldField",
genomespacefile: "_fieldGenomeSpace"
},
@@ -233,5 +235,13 @@ export default Backbone.Model.extend({
id: `field-${input_def.id}`,
onchange: input_def.onchange
});
+ },
+
+ _fieldField: function(input_def) {
+ return new UiField({
+ id: `field-${input_def.id}`,
+ onchange: input_def.onchange
+ });
}
+
});
diff --git a/client/galaxy/scripts/mvc/ui/ui-field.js b/client/galaxy/scripts/mvc/ui/ui-field.js
new file mode 100644
index 000000000000..e1152e9c63c7
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-field.js
@@ -0,0 +1,96 @@
+/** Renders a typed CWL-style field input (e.g. integer or null) used in the tool form **/
+import Utils from "utils/utils";
+import Ui from "mvc/ui/ui-misc";
+
+/** Renders an input element used e.g. in the tool form */
+export default Backbone.View.extend({
+ initialize: function(options) {
+ this.model =
+ (options && options.model) ||
+ new Backbone.Model({
+ value: {"src": "json", "value": null, "representation": "null"},
+ }).set(options);
+ this.$el = $("
").addClass("ui-field");
+ console.log(this.model.get("value"));
+ var menuButton = new Ui.ButtonMenu({
+ id: "options",
+ icon: "fa-caret-down",
+ title: "Input Type",
+ tooltip: "View available input type options"
+ });
+ menuButton.addMenu({
+ title: "Integer",
+ onclick: () => {
+ this._changeType("integer")
+ }
+ });
+ menuButton.addMenu({
+ title: "Leave Unselected",
+ onclick: () => {
+ this._changeType("null")
+ }
+ });
+ this.$menuButton = menuButton;
+ this.$inputDiv = $("").addClass("select-input");
+
+ this.$el.append(menuButton.$el);
+ this.$el.append(this.$inputDiv);
+ this.setElement(this.$el);
+ this.listenTo(this.model, "change", this.render, this);
+ this.render();
+ },
+ value: function(new_val) {
+ var options = this.model.attributes;
+ if (new_val) {
+ this.model.set("value", new_val);
+ this.model.trigger("change");
+ options.onchange(new_val);
+ }
+ return this.model.get("value");
+ },
+ render: function() {
+ const value = this.model.get("value");
+ const rep = value.representation;
+ if ( rep == "null" ) {
+ this.$inputDiv.html($("No value selected (null)
"));
+ } else if ( rep == "integer" ) {
+ const tagName = this.model.get("area") ? "textarea" : "input";
+ this.$inputDiv.html($(`<${tagName} value="${value.value}"/>`));
+ console.log(this.$inputDiv.find("input"));
+ this.$inputDiv.find("input").on("change", () => { this._onchange() });
+ }
+ return this;
+ },
+ _changeType: function(representation) {
+ const previousValue = this.model.get("value");
+ const previousRawValue = previousValue.value;
+ if ( representation == "null" ) {
+ this.model.set("value", {"src": "json", "value": null, "representation": "null"});
+ } else if ( representation == "integer" ) {
+ var value = parseInt(previousRawValue);
+ if ( isNaN( value ) ) {
+ value = 0;
+ }
+ this.model.set("value", {"src": "json", "value": 0, "representation": "integer"});
+ }
+ },
+ _rawValue: function(previousValue) {
+ const rep = previousValue.representation;
+ let rawVal;
+ if ( rep == "null" ) {
+ rawVal = null;
+ } else if ( rep == "integer" ) {
+ rawVal = parseInt(this.$inputDiv.find("input").val());
+ }
+ console.log("_rawValue returning " + rawVal);
+ return rawVal;
+
+ },
+ _onchange: function() {
+ const previousValue = this.model.get("value");
+ const newValue = this._rawValue(previousValue);
+ previousValue["value"] = newValue;
+ this.value(previousValue);
+ this.model.get("onchange") && this.model.get("onchange")(this.model.get("value"));
+ }
+});
diff --git a/config/datatypes_conf.xml.sample b/config/datatypes_conf.xml.sample
index 29daee09b6f3..880bfb114f81 100644
--- a/config/datatypes_conf.xml.sample
+++ b/config/datatypes_conf.xml.sample
@@ -202,7 +202,7 @@
-
+
@@ -395,6 +395,7 @@
+
diff --git a/config/tool_conf.xml.sample b/config/tool_conf.xml.sample
index 575d19786ff9..c4373ce462df 100644
--- a/config/tool_conf.xml.sample
+++ b/config/tool_conf.xml.sample
@@ -32,9 +32,11 @@
+
+
diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py
index 8871407a942b..f9154e17213e 100644
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -17,6 +17,7 @@
from galaxy.managers.histories import HistoryManager
from galaxy.managers.libraries import LibraryManager
from galaxy.managers.tags import GalaxyTagManager
+from galaxy.managers.tools import DynamicToolManager
from galaxy.openid.providers import OpenIDProviders
from galaxy.queue_worker import GalaxyQueueWorker
from galaxy.tools.cache import (
@@ -101,6 +102,7 @@ def __init__(self, **kwargs):
self.test_data_resolver = test_data.TestDataResolver(file_dirs=self.config.tool_test_data_directories)
self.library_folder_manager = FolderManager()
self.library_manager = LibraryManager()
+ self.dynamic_tool_manager = DynamicToolManager(self)
# Tool Data Tables
self._configure_tool_data_tables(from_shed_config=False)
diff --git a/lib/galaxy/config.py b/lib/galaxy/config.py
index cf23dca18546..874dc0d2f6a0 100644
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -313,6 +313,7 @@ def __init__(self, **kwargs):
log.warning("preserve_python_environment set to unknown value [%s], defaulting to legacy_only")
preserve_python_environment = "legacy_only"
self.preserve_python_environment = preserve_python_environment
+ self.nodejs_path = kwargs.get("nodejs_path", None)
# Older default container cache path, I don't think anyone is using it anymore and it wasn't documented - we
# should probably drop the backward compatiblity to save the path check.
self.container_image_cache_path = self.resolve_path(kwargs.get("container_image_cache_path", "database/container_images"))
@@ -387,9 +388,10 @@ def __init__(self, **kwargs):
# These are not even beta - just experiments - don't use them unless
# you want yours tools to be broken in the future.
self.enable_beta_tool_formats = string_as_bool(kwargs.get('enable_beta_tool_formats', 'False'))
+ # Should CWL artifacts be loaded with strict validation enabled.
+ self.strict_cwl_validation = string_as_bool(kwargs.get('strict_cwl_validation', 'True'))
# Beta containers interface used by GIEs
self.enable_beta_containers_interface = string_as_bool(kwargs.get('enable_beta_containers_interface', 'False'))
-
# Certain modules such as the pause module will automatically cause
# workflows to be scheduled in job handlers the way all workflows will
# be someday - the following two properties can also be used to force this
@@ -1033,6 +1035,8 @@ def _configure_toolbox(self):
self.citations_manager = CitationsManager(self)
+ from galaxy.managers.tools import DynamicToolManager
+ self.dynamic_tools_manager = DynamicToolManager(self)
self._toolbox_lock = threading.RLock()
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
tool_configs = self.config.tool_configs
diff --git a/lib/galaxy/dataset_collections/builder.py b/lib/galaxy/dataset_collections/builder.py
index bdb21e75cbff..dd36cd3c1849 100644
--- a/lib/galaxy/dataset_collections/builder.py
+++ b/lib/galaxy/dataset_collections/builder.py
@@ -2,21 +2,23 @@
from galaxy.util.odict import odict
-def build_collection(type, dataset_instances):
+def build_collection(type, dataset_instances, fields=None):
"""
Build DatasetCollection with populated DatasetcollectionElement objects
corresponding to the supplied dataset instances or throw exception if
this is not a valid collection of the specified type.
"""
- dataset_collection = model.DatasetCollection()
- set_collection_elements(dataset_collection, type, dataset_instances)
+ dataset_collection = model.DatasetCollection(fields=fields)
+ set_collection_elements(dataset_collection, type, dataset_instances, fields=fields)
return dataset_collection
-def set_collection_elements(dataset_collection, type, dataset_instances):
+def set_collection_elements(dataset_collection, type, dataset_instances, fields=None):
element_index = 0
elements = []
- for element in type.generate_elements(dataset_instances):
+ if fields == "auto":
+ fields = guess_fields(dataset_instances)
+ for element in type.generate_elements(dataset_instances, fields=fields):
element.element_index = element_index
element.collection = dataset_collection
elements.append(element)
@@ -28,6 +30,16 @@ def set_collection_elements(dataset_collection, type, dataset_instances):
return dataset_collection
+def guess_fields(dataset_instances):
+ fields = []
+ for identifier, element in dataset_instances.items():
+ # TODO: Make generic enough to handle nested record types.
+ assert element.history_content_type == "dataset"
+ fields.append({"class": "File", "name": identifier})
+
+ return fields
+
+
class CollectionBuilder(object):
""" Purely functional builder pattern for building a dataset collection. """
diff --git a/lib/galaxy/dataset_collections/matching.py b/lib/galaxy/dataset_collections/matching.py
index c696a4d04baf..2af8c465724c 100644
--- a/lib/galaxy/dataset_collections/matching.py
+++ b/lib/galaxy/dataset_collections/matching.py
@@ -15,8 +15,10 @@ class CollectionsToMatch(object):
def __init__(self):
self.collections = {}
+ self.uses_ephemeral_collections = False
def add(self, input_name, hdca, subcollection_type=None, linked=True):
+ self.uses_ephemeral_collections = self.uses_ephemeral_collections or not hasattr(hdca, "hid")
self.collections[input_name] = bunch.Bunch(
hdca=hdca,
subcollection_type=subcollection_type,
@@ -46,6 +48,7 @@ def __init__(self):
self.collections = {}
self.subcollection_types = {}
self.action_tuples = {}
+ self.uses_ephemeral_collections = False
def __attempt_add_to_linked_match(self, input_name, hdca, collection_type_description, subcollection_type):
structure = get_structure(hdca, collection_type_description, leaf_subcollection_type=subcollection_type)
@@ -86,12 +89,21 @@ def map_over_action_tuples(self, input_name):
def is_mapped_over(self, input_name):
return input_name in self.collections
+ @property
+ def implicit_inputs(self):
+ if not self.uses_ephemeral_collections:
+ # Consider doing something smarter here.
+ return list(self.collections.items())
+ else:
+ return []
+
@staticmethod
def for_collections(collections_to_match, collection_type_descriptions):
if not collections_to_match.has_collections():
return None
matching_collections = MatchingCollections()
+ matching_collections.uses_ephemeral_collections = collections_to_match.uses_ephemeral_collections
for input_key, to_match in sorted(collections_to_match.items()):
hdca = to_match.hdca
collection_type_description = collection_type_descriptions.for_collection_type(hdca.collection.collection_type)
diff --git a/lib/galaxy/dataset_collections/registry.py b/lib/galaxy/dataset_collections/registry.py
index a95e47309eed..8f8d482916ca 100644
--- a/lib/galaxy/dataset_collections/registry.py
+++ b/lib/galaxy/dataset_collections/registry.py
@@ -1,10 +1,15 @@
from galaxy import model
from .types import (
list,
- paired
+ paired,
+ record,
)
-PLUGIN_CLASSES = [list.ListDatasetCollectionType, paired.PairedDatasetCollectionType]
+PLUGIN_CLASSES = [
+ list.ListDatasetCollectionType,
+ paired.PairedDatasetCollectionType,
+ record.RecordDatasetCollectionType,
+]
class DatasetCollectionTypesRegistry(object):
@@ -15,12 +20,12 @@ def __init__(self, app):
def get(self, plugin_type):
return self.__plugins[plugin_type]
- def prototype(self, plugin_type):
+ def prototype(self, plugin_type, fields=None):
plugin_type_object = self.get(plugin_type)
if not hasattr(plugin_type_object, 'prototype_elements'):
raise Exception("Cannot pre-determine structure for collection of type %s" % plugin_type)
dataset_collection = model.DatasetCollection()
- elements = [e for e in plugin_type_object.prototype_elements()]
+ elements = [e for e in plugin_type_object.prototype_elements(fields=fields)]
dataset_collection.elements = elements
return dataset_collection
diff --git a/lib/galaxy/dataset_collections/type_description.py b/lib/galaxy/dataset_collections/type_description.py
index e002247fab6f..6969385ae53f 100644
--- a/lib/galaxy/dataset_collections/type_description.py
+++ b/lib/galaxy/dataset_collections/type_description.py
@@ -7,9 +7,9 @@ def __init__(self, type_registry):
# I think.
self.type_registry = type_registry
- def for_collection_type(self, collection_type):
+ def for_collection_type(self, collection_type, fields=None):
assert collection_type is not None
- return CollectionTypeDescription(collection_type, self)
+ return CollectionTypeDescription(collection_type, self, fields=fields)
class CollectionTypeDescription(object):
@@ -43,9 +43,10 @@ class CollectionTypeDescription(object):
'paired'
"""
- def __init__(self, collection_type, collection_type_description_factory):
+ def __init__(self, collection_type, collection_type_description_factory, fields=None):
self.collection_type = collection_type
self.collection_type_description_factory = collection_type_description_factory
+ self.fields = fields
self.__has_subcollections = self.collection_type.find(":") > 0
def child_collection_type(self):
@@ -83,9 +84,13 @@ def has_subcollections_of_type(self, other_collection_type):
collection_type = self.collection_type
return collection_type.endswith(other_collection_type) and collection_type != other_collection_type
- def is_subcollection_of_type(self, other_collection_type):
+ def is_subcollection_of_type(self, other_collection_type, proper=True):
+        """If proper is False, then a type is considered a subcollection of itself."""
if not hasattr(other_collection_type, 'collection_type'):
other_collection_type = self.collection_type_description_factory.for_collection_type(other_collection_type)
+ if not proper and self.can_match_type(other_collection_type):
+ return True
+
return other_collection_type.has_subcollections_of_type(self)
def can_match_type(self, other_collection_type):
diff --git a/lib/galaxy/dataset_collections/types/__init__.py b/lib/galaxy/dataset_collections/types/__init__.py
index 3f89cfa3011b..04d5112a384d 100644
--- a/lib/galaxy/dataset_collections/types/__init__.py
+++ b/lib/galaxy/dataset_collections/types/__init__.py
@@ -15,7 +15,7 @@
class DatasetCollectionType(object):
@abstractmethod
- def generate_elements(self, dataset_instances):
+ def generate_elements(self, dataset_instances, **kwds):
""" Generate DatasetCollectionElements with corresponding
to the supplied dataset instances or throw exception if
this is not a valid collection of the specified type.
@@ -24,5 +24,8 @@ def generate_elements(self, dataset_instances):
class BaseDatasetCollectionType(DatasetCollectionType):
+ def __init__(self, **kwds):
+ pass
+
def _validation_failed(self, message):
raise exceptions.ObjectAttributeInvalidException(message)
diff --git a/lib/galaxy/dataset_collections/types/list.py b/lib/galaxy/dataset_collections/types/list.py
index 7297f0514697..ca77be909aaf 100644
--- a/lib/galaxy/dataset_collections/types/list.py
+++ b/lib/galaxy/dataset_collections/types/list.py
@@ -7,10 +7,7 @@ class ListDatasetCollectionType(BaseDatasetCollectionType):
"""
collection_type = "list"
- def __init__(self):
- pass
-
- def generate_elements(self, elements):
+ def generate_elements(self, elements, **kwds):
for identifier, element in elements.items():
association = DatasetCollectionElement(
element=element,
diff --git a/lib/galaxy/dataset_collections/types/paired.py b/lib/galaxy/dataset_collections/types/paired.py
index d16234910504..ca4f4d793a49 100644
--- a/lib/galaxy/dataset_collections/types/paired.py
+++ b/lib/galaxy/dataset_collections/types/paired.py
@@ -13,10 +13,7 @@ class PairedDatasetCollectionType(BaseDatasetCollectionType):
"""
collection_type = "paired"
- def __init__(self):
- pass
-
- def generate_elements(self, elements):
+ def generate_elements(self, elements, **kwds):
forward_dataset = elements.get(FORWARD_IDENTIFIER, None)
reverse_dataset = elements.get(REVERSE_IDENTIFIER, None)
if not forward_dataset or not reverse_dataset:
@@ -32,7 +29,7 @@ def generate_elements(self, elements):
yield left_association
yield right_association
- def prototype_elements(self):
+ def prototype_elements(self, **kwds):
left_association = DatasetCollectionElement(
element=HistoryDatasetAssociation(),
element_identifier=FORWARD_IDENTIFIER,
diff --git a/lib/galaxy/dataset_collections/types/record.py b/lib/galaxy/dataset_collections/types/record.py
new file mode 100644
index 000000000000..26a165e2fe28
--- /dev/null
+++ b/lib/galaxy/dataset_collections/types/record.py
@@ -0,0 +1,43 @@
+from galaxy.exceptions import RequestParameterMissingException
+from galaxy.model import DatasetCollectionElement, HistoryDatasetAssociation
+
+from ..types import BaseDatasetCollectionType
+
+
+class RecordDatasetCollectionType(BaseDatasetCollectionType):
+ """Arbitrary CWL-style record type."""
+
+ collection_type = "record"
+
+ def generate_elements(self, elements, **kwds):
+ fields = kwds.get("fields", None)
+ if fields is None:
+ raise RequestParameterMissingException("Missing or null parameter fields required for record types.")
+ if len(elements) != len(fields):
+ self._validation_failed("Supplied element do not match fields.")
+ index = 0
+ for identifier, element in elements.items():
+ field = fields[index]
+ if field["name"] != identifier:
+ self._validation_failed("Supplied element do not match fields.")
+
+ # TODO: validate type and such.
+ association = DatasetCollectionElement(
+ element=element,
+ element_identifier=identifier,
+ )
+ yield association
+ index += 1
+
+ def prototype_elements(self, fields=None, **kwds):
+ if fields is None:
+ raise RequestParameterMissingException("Missing or null parameter fields required for record types.")
+ for field in fields:
+ name = field.get("name", None)
+ assert name
+ assert field.get("type", "File")
+ field_dataset = DatasetCollectionElement(
+ element=HistoryDatasetAssociation(),
+ element_identifier=name,
+ )
+ yield field_dataset
diff --git a/lib/galaxy/datatypes/converters/tar_to_directory.xml b/lib/galaxy/datatypes/converters/tar_to_directory.xml
index 6865b86ee1d6..daafc23e0ce0 100644
--- a/lib/galaxy/datatypes/converters/tar_to_directory.xml
+++ b/lib/galaxy/datatypes/converters/tar_to_directory.xml
@@ -1,15 +1,21 @@
+ cp '$provided_metadata' 'galaxy.json';
mkdir '$output1.files_path';
cd '$output1.files_path';
python -c "from galaxy.util.compression_utils import CompressedFile; CompressedFile('$input1').extract('.');"
+ ls '$output1.files_path'
+
+ {"output1": {"cwl_filename": "${input1.cwl_filename}"}}
+
+
-
+
diff --git a/lib/galaxy/datatypes/data.py b/lib/galaxy/datatypes/data.py
index e17b6f68be5a..eacf102f54fb 100644
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -370,6 +370,7 @@ def display_data(self, trans, data, preview=False, filename=None, to_ext=None, *
trans.response.headers['X-Content-Type-Options'] = 'nosniff'
if isinstance(data, six.string_types):
return smart_str(data)
+ log.info("FILENAME IS %s" % filename)
if filename and filename != "index":
# For files in extra_files_path
file_path = trans.app.object_store.get_filename(data.dataset, extra_dir='dataset_%s_files' % data.dataset.id, alt_name=filename)
diff --git a/lib/galaxy/datatypes/text.py b/lib/galaxy/datatypes/text.py
index c2e71444cf1a..c563c2d318c8 100644
--- a/lib/galaxy/datatypes/text.py
+++ b/lib/galaxy/datatypes/text.py
@@ -106,6 +106,30 @@ def display_peek(self, dataset):
return "JSON file (%s)" % (nice_size(dataset.get_size()))
+class ExpressionJson(Json):
+ """ Represents the non-data input or output to a tool or workflow.
+ """
+ file_ext = "json"
+ MetadataElement(name="json_type", default=None, desc="JavaScript or JSON type of expression", readonly=True, visible=True, no_value=None)
+
+ def set_meta(self, dataset, **kwd):
+        """Set the json_type metadata element from the Python type of the loaded JSON value."""
+ json_type = "null"
+ with open(dataset.file_name) as f:
+ obj = json.load(f)
+ if isinstance(obj, int):
+ json_type = "int"
+ elif isinstance(obj, float):
+ json_type = "float"
+ elif isinstance(obj, list):
+ json_type = "list"
+ elif isinstance(obj, dict):
+ json_type = "object"
+
+ dataset.metadata.json_type = json_type
+
+
@build_sniff_from_prefix
class Ipynb(Json):
file_ext = "ipynb"
diff --git a/lib/galaxy/dependencies/pipfiles/default/pinned-requirements.txt b/lib/galaxy/dependencies/pipfiles/default/pinned-requirements.txt
index d396c0c0e519..7b995461bd9d 100644
--- a/lib/galaxy/dependencies/pipfiles/default/pinned-requirements.txt
+++ b/lib/galaxy/dependencies/pipfiles/default/pinned-requirements.txt
@@ -56,7 +56,7 @@ functools32==3.2.3.post2 ; python_version == '2.7'
future==0.16.0
futures==3.2.0 ; python_version == '2.6' or python_version == '2.7'
galaxy-sequence-utils==1.1.3
-gxformat2==0.7.1
+gxformat2==0.8.0
h5py==2.8.0
idna==2.7
ipaddress==1.0.22 ; python_version < '3.3'
@@ -161,3 +161,6 @@ webencodings==0.5.1
webob==1.8.3
whoosh==2.7.4
wrapt==1.10.11
+# For CWL support.
+cwltool==1.0.20180721142728
+cwltest==1.0.20170809112706 # TODO: only required for testing...
diff --git a/lib/galaxy/exceptions/__init__.py b/lib/galaxy/exceptions/__init__.py
index db59cd80d747..e7ce2f8e8f50 100644
--- a/lib/galaxy/exceptions/__init__.py
+++ b/lib/galaxy/exceptions/__init__.py
@@ -59,6 +59,15 @@ class DuplicatedSlugException(MessageException):
err_code = error_codes.USER_SLUG_DUPLICATE
+class ObjectHashExistsException(MessageException):
+ status_code = 303
+ err_code = error_codes.OBJECT_HASH_EXISTS
+
+ def __init__(self, other_id, **kwds):
+ kwds["other_id"] = other_id
+ super(ObjectHashExistsException, self).__init__(**kwds)
+
+
class DuplicatedIdentifierException(MessageException):
status_code = 400
err_code = error_codes.USER_IDENTIFIER_DUPLICATE
diff --git a/lib/galaxy/exceptions/error_codes.json b/lib/galaxy/exceptions/error_codes.json
index aa790ec16b87..dc07419a0243 100644
--- a/lib/galaxy/exceptions/error_codes.json
+++ b/lib/galaxy/exceptions/error_codes.json
@@ -4,6 +4,11 @@
"code": 0,
"message": "Unknown error occurred while processing request."
},
+ {
+ "name": "OBJECT_HASH_EXISTS",
+ "code": 30301,
+ "message": "An object with this hash already exists."
+ },
{
"name": "USER_CANNOT_RUN_AS",
"code": 400001,
diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py
index 0da8c26ad05e..dd6f698e61f3 100644
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -44,7 +44,7 @@
# that import Galaxy internals - but it shouldn't be used in Galaxy's code
# itself.
TOOL_PROVIDED_JOB_METADATA_FILE = 'galaxy.json'
-TOOL_PROVIDED_JOB_METADATA_KEYS = ['name', 'info', 'dbkey']
+TOOL_PROVIDED_JOB_METADATA_KEYS = ['name', 'info', 'dbkey', 'cwl_filename']
# Override with config.default_job_shell.
DEFAULT_JOB_SHELL = '/bin/bash'
@@ -761,6 +761,10 @@ def can_split(self):
# Should the job handler split this job up?
return self.app.config.use_tasked_jobs and self.tool.parallelism
+ @property
+ def is_cwl_job(self):
+ return self.tool.tool_type in ["galactic_cwl", "cwl"]
+
def get_job_runner_url(self):
log.warning('(%s) Job runner URLs are deprecated, use destinations instead.' % self.job_id)
return self.job_destination.url
@@ -884,10 +888,13 @@ def get_special():
# if the server was stopped and restarted before the job finished
job.command_line = unicodify(self.command_line)
job.dependencies = self.tool.dependencies
+ param_dict = tool_evaluator.param_dict
+ job.cwl_command_state = param_dict.get('__cwl_command_state', None)
+ job.cwl_command_state_version = param_dict.get('__cwl_command_state_version', None)
self.sa_session.add(job)
self.sa_session.flush()
# Return list of all extra files
- self.param_dict = tool_evaluator.param_dict
+ self.param_dict = param_dict
version_string_cmd_raw = self.tool.version_string_cmd
if version_string_cmd_raw:
version_command_template = string.Template(version_string_cmd_raw)
@@ -1301,6 +1308,8 @@ def finish(
return self.fail("Job %s's output dataset(s) could not be read" % job.id)
job_context = ExpressionContext(dict(stdout=job.stdout, stderr=job.stderr))
+ log.info("Finishing job with stdout [%s]" % job_context["stdout"])
+ log.info("Finishing job with stderr [%s]" % job_context["stderr"])
for dataset_assoc in job.output_datasets + job.output_library_datasets:
context = self.get_dataset_finish_context(job_context, dataset_assoc)
# should this also be checking library associations? - can a library item be added from a history before the job has ended? -
@@ -1356,8 +1365,9 @@ def finish(
dataset.mark_unhidden()
elif not purged:
# If the tool was expected to set the extension, attempt to retrieve it
- if dataset.ext == 'auto':
- dataset.extension = context.get('ext', 'data')
+ context_ext = context.get('ext', 'data')
+ if dataset.ext == 'auto' or dataset.ext == 'data' and context_ext != 'data':
+ dataset.extension = context_ext
dataset.init_meta(copy_from=dataset)
# if a dataset was copied, it won't appear in our dictionary:
# either use the metadata from originating output dataset, or call set_meta on the copies
@@ -2233,7 +2243,10 @@ def version_path(self):
return self.job_wrapper.get_version_string_path()
def tool_directory(self):
- return os.path.abspath(self.job_wrapper.tool.tool_dir)
+ tool_dir = self.job_wrapper.tool.tool_dir
+ if tool_dir is not None:
+ tool_dir = os.path.abspath(tool_dir)
+ return tool_dir
def home_directory(self):
return self.job_wrapper.home_directory()
diff --git a/lib/galaxy/jobs/command_factory.py b/lib/galaxy/jobs/command_factory.py
index 7922531ee8b1..52d5d1c9e05b 100644
--- a/lib/galaxy/jobs/command_factory.py
+++ b/lib/galaxy/jobs/command_factory.py
@@ -6,6 +6,7 @@
)
from galaxy import util
+from galaxy.util import bunch
from galaxy.jobs.runners.util.job_script import (
check_script_integrity,
INTEGRITY_INJECTION,
@@ -17,6 +18,15 @@
CAPTURE_RETURN_CODE = "return_code=$?"
YIELD_CAPTURED_CODE = 'sh -c "exit $return_code"'
SETUP_GALAXY_FOR_METADATA = """
+if [ "$GALAXY_LIB" != "None" ]; then
+ if [ -n "$PYTHONPATH" ]; then
+ PYTHONPATH="$GALAXY_LIB:$PYTHONPATH"
+ else
+ PYTHONPATH="$GALAXY_LIB"
+ fi
+ export PYTHONPATH
+fi
+PATH="$_GALAXY_PATH"
[ "$GALAXY_VIRTUAL_ENV" = "None" ] && GALAXY_VIRTUAL_ENV="$_GALAXY_VIRTUAL_ENV"; _galaxy_setup_environment True
"""
@@ -76,19 +86,26 @@ def build_command(
external_command_shell = container.shell
else:
external_command_shell = shell
- externalized_commands = __externalize_commands(job_wrapper, external_command_shell, commands_builder, remote_command_params)
if container and modify_command_for_container:
- # Stop now and build command before handling metadata and copying
- # working directory files back. These should always happen outside
- # of docker container - no security implications when generating
- # metadata and means no need for Galaxy to be available to container
- # and not copying workdir outputs back means on can be more restrictive
- # of where container can write to in some circumstances.
- run_in_container_command = container.containerize_command(
- externalized_commands
- )
+ if not job_wrapper.tool.may_use_container_entry_point:
+ externalized_commands = __externalize_commands(job_wrapper, external_command_shell, commands_builder, remote_command_params)
+ # Stop now and build command before handling metadata and copying
+ # working directory files back. These should always happen outside
+ # of docker container - no security implications when generating
+ # metadata and means no need for Galaxy to be available to container
+ # and not copying workdir outputs back means on can be more restrictive
+ # of where container can write to in some circumstances.
+ run_in_container_command = container.containerize_command(
+ externalized_commands
+ )
+ else:
+ tool_commands = commands_builder.build()
+ run_in_container_command = container.containerize_command(
+ tool_commands
+ )
commands_builder = CommandsBuilder(run_in_container_command)
else:
+ externalized_commands = __externalize_commands(job_wrapper, external_command_shell, commands_builder, remote_command_params)
commands_builder = CommandsBuilder(externalized_commands)
# Don't need to create a separate tool working directory for Pulsar
@@ -99,7 +116,13 @@ def build_command(
# Remove the working directory incase this is for instance a SLURM re-submission.
# xref https://github.com/galaxyproject/galaxy/issues/3289
- commands_builder.prepend_command("rm -rf working; mkdir -p working; cd working")
+ if not job_wrapper.is_cwl_job:
+ commands_builder.prepend_command("rm -rf working; mkdir -p working; cd working")
+ else:
+ # Can't do the rm -rf working for CWL jobs since we may have staged outputs
+ # into that directory. This does mean CWL is incompatible with job manager triggered
+ # retries - what can we do with that information?
+ commands_builder.prepend_command("cd working")
if include_work_dir_outputs:
__handle_work_dir_outputs(commands_builder, job_wrapper, runner, remote_command_params)
@@ -172,6 +195,11 @@ def __handle_work_dir_outputs(commands_builder, job_wrapper, runner, remote_comm
work_dir_outputs = runner.get_work_dir_outputs(job_wrapper, **work_dir_outputs_kwds)
if work_dir_outputs:
commands_builder.capture_return_code()
+ if job_wrapper.is_cwl_job:
+ metadata_script_file = join(job_wrapper.working_directory, "relocate_dynamic_outputs.py")
+ relocate_contents = 'from galaxy_ext.cwl.handle_outputs import relocate_dynamic_outputs; relocate_dynamic_outputs()'
+ write_script(metadata_script_file, relocate_contents, bunch.Bunch(check_job_script_integrity=False))
+ commands_builder.append_command("python %s" % metadata_script_file)
copy_commands = map(__copy_if_exists_command, work_dir_outputs)
commands_builder.append_commands(copy_commands)
@@ -203,11 +231,17 @@ def __handle_metadata(commands_builder, job_wrapper, runner, remote_command_para
kwds={'overwrite': False}
) or ''
metadata_command = metadata_command.strip()
- if metadata_command:
- # Place Galaxy and its dependencies in environment for metadata regardless of tool.
- metadata_command = "%s%s" % (SETUP_GALAXY_FOR_METADATA, metadata_command)
+ if metadata_command or job_wrapper.is_cwl_job:
+ command = SETUP_GALAXY_FOR_METADATA
+ if job_wrapper.is_cwl_job:
+ relocate_script_file = join(job_wrapper.working_directory, "relocate_dynamic_outputs.py")
+ relocate_contents = 'from galaxy_ext.cwl.handle_outputs import relocate_dynamic_outputs; relocate_dynamic_outputs()'
+ write_script(relocate_script_file, relocate_contents, bunch.Bunch(check_job_script_integrity=False))
+ command += "\ncd working; python %s; cd .." % relocate_script_file
+ if metadata_command:
+ command += "\n%s" % metadata_command
commands_builder.capture_return_code()
- commands_builder.append_command(metadata_command)
+ commands_builder.append_command(command)
def __copy_if_exists_command(work_dir_output):
diff --git a/lib/galaxy/jobs/output_checker.py b/lib/galaxy/jobs/output_checker.py
index 6fa2e3deab3f..5db5f45e8833 100644
--- a/lib/galaxy/jobs/output_checker.py
+++ b/lib/galaxy/jobs/output_checker.py
@@ -133,6 +133,8 @@ def check_output(tool, stdout, stderr, tool_exit_code, job):
# log.debug( "Tool did not define exit code or stdio handling; "
# + "checking stderr for success" )
if stderr:
+ peak = stderr[0:10000]
+ log.debug("Tool produced standard error failing job - [%s]" % peak)
state = DETECTED_JOB_STATE.GENERIC_ERROR
else:
state = DETECTED_JOB_STATE.OK
diff --git a/lib/galaxy/jobs/runners/local.py b/lib/galaxy/jobs/runners/local.py
index e42f13490408..c28b65a8833f 100644
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -63,7 +63,7 @@ def __command_line(self, job_wrapper):
if slots:
slots_statement = 'GALAXY_SLOTS="%d"; export GALAXY_SLOTS; GALAXY_SLOTS_CONFIGURED="1"; export GALAXY_SLOTS_CONFIGURED;' % (int(slots))
else:
- slots_statement = 'GALAXY_SLOTS="1"; export GALAXY_SLOTS;'
+ slots_statement = 'GALAXY_SLOTS="%d"; export GALAXY_SLOTS;' % (job_wrapper.tool.cores_min)
job_id = job_wrapper.get_id_tag()
job_file = JobState.default_job_file(job_wrapper.working_directory, job_id)
diff --git a/lib/galaxy/jobs/runners/util/job_script/DEFAULT_JOB_FILE_TEMPLATE.sh b/lib/galaxy/jobs/runners/util/job_script/DEFAULT_JOB_FILE_TEMPLATE.sh
index 81107c568b58..74c047970a48 100644
--- a/lib/galaxy/jobs/runners/util/job_script/DEFAULT_JOB_FILE_TEMPLATE.sh
+++ b/lib/galaxy/jobs/runners/util/job_script/DEFAULT_JOB_FILE_TEMPLATE.sh
@@ -32,6 +32,7 @@ _GALAXY_VIRTUAL_ENV="$galaxy_virtual_env"
PRESERVE_GALAXY_ENVIRONMENT="$preserve_python_environment"
GALAXY_LIB="$galaxy_lib"
_galaxy_setup_environment "$PRESERVE_GALAXY_ENVIRONMENT"
+_GALAXY_PATH="$PATH"
GALAXY_PYTHON=`command -v python`
cd $working_directory
$memory_statement
diff --git a/lib/galaxy/managers/collections.py b/lib/galaxy/managers/collections.py
index 4aa717461f62..b6ee14f08b2b 100644
--- a/lib/galaxy/managers/collections.py
+++ b/lib/galaxy/managers/collections.py
@@ -87,7 +87,7 @@ def precreate_dataset_collection(self, structure, allow_unitialized_element=True
def create(self, trans, parent, name, collection_type, element_identifiers=None,
elements=None, implicit_collection_info=None, trusted_identifiers=None,
- hide_source_items=False, tags=None, copy_elements=False):
+ hide_source_items=False, tags=None, copy_elements=False, fields=None):
"""
PRECONDITION: security checks on ability to add to parent
occurred during load.
@@ -106,6 +106,7 @@ def create(self, trans, parent, name, collection_type, element_identifiers=None,
elements=elements,
hide_source_items=hide_source_items,
copy_elements=copy_elements,
+ fields=fields,
)
implicit_inputs = []
@@ -126,8 +127,11 @@ def _create_instance_for_collection(self, trans, parent, name, dataset_collectio
collection=dataset_collection,
name=name,
)
+
if implicit_inputs:
for input_name, input_collection in implicit_inputs:
+ if getattr(input_collection, "ephemeral", False):
+ input_collection = input_collection.persistent_object
dataset_collection_instance.add_implicit_input_collection(input_name, input_collection)
if implicit_output_name:
@@ -160,19 +164,20 @@ def _create_instance_for_collection(self, trans, parent, name, dataset_collectio
return self.__persist(dataset_collection_instance, flush=flush)
def create_dataset_collection(self, trans, collection_type, element_identifiers=None, elements=None,
- hide_source_items=None, copy_elements=False):
+ hide_source_items=None, copy_elements=False, fields=None):
# Make sure at least one of these is None.
assert element_identifiers is None or elements is None
-
if element_identifiers is None and elements is None:
raise RequestParameterInvalidException(ERROR_INVALID_ELEMENTS_SPECIFICATION)
if not collection_type:
raise RequestParameterInvalidException(ERROR_NO_COLLECTION_TYPE)
- collection_type_description = self.collection_type_descriptions.for_collection_type(collection_type)
+ collection_type_description = self.collection_type_descriptions.for_collection_type(collection_type, fields=fields)
has_subcollections = collection_type_description.has_subcollections()
+
# If we have elements, this is an internal request, don't need to load
# objects from identifiers.
+ has_subcollections = collection_type_description.has_subcollections()
if elements is None:
elements = self._element_identifiers_to_elements(trans,
collection_type_description=collection_type_description,
@@ -183,12 +188,12 @@ def create_dataset_collection(self, trans, collection_type, element_identifiers=
if has_subcollections:
# Nested collection - recursively create collections as needed.
self.__recursively_create_collections_for_elements(trans, elements, hide_source_items, copy_elements=copy_elements)
- # else if elements is set, it better be an ordered dict!
if elements is not self.ELEMENTS_UNINITIALIZED:
type_plugin = collection_type_description.rank_type_plugin()
- dataset_collection = builder.build_collection(type_plugin, elements)
+ dataset_collection = builder.build_collection(type_plugin, elements, fields=fields)
else:
+ # TODO: Pass fields here - need test case first.
dataset_collection = model.DatasetCollection(populated=False)
dataset_collection.collection_type = collection_type
return dataset_collection
@@ -221,6 +226,8 @@ def _append_tags(self, dataset_collection_instance, implicit_inputs=None, tags=N
tags = tags or {}
implicit_inputs = implicit_inputs or []
for _, v in implicit_inputs:
+ if getattr(v, "ephemeral", False):
+ v = v.persistent_object
for tag in v.auto_propagated_tags:
tags[tag.value] = tag
for _, tag in tags.items():
diff --git a/lib/galaxy/managers/collections_util.py b/lib/galaxy/managers/collections_util.py
index 456b2f986c38..3fbfad33b2e2 100644
--- a/lib/galaxy/managers/collections_util.py
+++ b/lib/galaxy/managers/collections_util.py
@@ -29,7 +29,8 @@ def api_payload_to_create_params(payload):
element_identifiers=payload.get("element_identifiers"),
name=payload.get("name", None),
hide_source_items=string_as_bool(payload.get("hide_source_items", False)),
- copy_elements=string_as_bool(payload.get("copy_elements", False))
+ copy_elements=string_as_bool(payload.get("copy_elements", False)),
+ fields=payload.get("fields", None),
)
return params
diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py
index 38c03c867844..f6500c6283dd 100644
--- a/lib/galaxy/managers/hdas.py
+++ b/lib/galaxy/managers/hdas.py
@@ -294,7 +294,8 @@ def __init__(self, app):
'annotation',
- 'api_type'
+ 'api_type',
+ 'cwl_file_name',
], include_keys_from='summary')
self.add_view('extended', [
@@ -348,7 +349,8 @@ def add_serializers(self):
# TODO: to DatasetAssociationSerializer
'accessible' : lambda i, k, user=None, **c: self.manager.is_accessible(i, user, **c),
'api_type' : lambda *a, **c: 'file',
- 'type' : lambda *a, **c: 'file'
+ 'type' : lambda *a, **c: 'file',
+ 'cwl_file_name' : lambda i, k, **c: i.cwl_filename,
})
def serialize(self, hda, keys, user=None, **context):
diff --git a/lib/galaxy/managers/tools.py b/lib/galaxy/managers/tools.py
new file mode 100644
index 000000000000..1ee29256e5cb
--- /dev/null
+++ b/lib/galaxy/managers/tools.py
@@ -0,0 +1,129 @@
+import logging
+
+from galaxy.exceptions import ObjectHashExistsException
+
+from uuid import uuid4
+
+from .base import ModelManager
+
+from galaxy import exceptions
+from galaxy import model
+from galaxy.tools.cwl import tool_proxy, tool_proxy_from_persistent_representation
+from galaxy.tools.hash import build_tool_hash
+
+log = logging.getLogger(__name__)
+
+
+class DynamicToolManager(ModelManager):
+ """ Manages dynamic tools stored in Galaxy's database.
+ """
+ model_class = model.DynamicTool
+
+ def __init__(self, app):
+ super(DynamicToolManager, self).__init__(app)
+
+ def get_tool_by_uuid(self, uuid):
+ dynamic_tool = self._one_or_none(
+ self.query().filter(self.model_class.uuid == uuid)
+ )
+ return dynamic_tool
+
+ def get_tool_by_id_or_hash(self, id_or_hash):
+ dynamic_tool = self._one_or_none(
+ self.query().filter(self.model_class.tool_id == id_or_hash)
+ )
+ if dynamic_tool is None:
+ dynamic_tool = self.get_tool_by_hash(id_or_hash)
+ return dynamic_tool
+
+ def get_tool_by_hash(self, tool_hash):
+ return self._one_or_none(
+ self.query().filter(self.model_class.tool_hash == tool_hash)
+ )
+
+ def create_tool(self, trans, tool_payload, allow_load=True):
+ src = tool_payload.get("src", "representation")
+ is_path = src == "from_path"
+
+ if is_path:
+ from galaxy.managers.workflows import artifact_class
+ tool_format, representation, object_id = artifact_class(None, tool_payload)
+ else:
+ assert src == "representation"
+ if "representation" not in tool_payload:
+ raise exceptions.ObjectAttributeMissingException(
+ "A tool 'representation' is required."
+ )
+
+ representation = tool_payload["representation"]
+ if "class" not in representation:
+ raise exceptions.ObjectAttributeMissingException(
+ "Current tool representations require 'class'."
+ )
+
+ tool_format = representation["class"]
+ if tool_format == "GalaxyTool":
+ uuid = tool_payload.get("uuid", None)
+ if uuid is None:
+ uuid = uuid4()
+
+ tool_id = representation.get("id", None)
+ if tool_id is None:
+ tool_id = str(uuid)
+
+ tool_version = representation.get("version", None)
+ tool_hash = build_tool_hash(representation)
+ value = representation
+ elif tool_format in ["CommandLineTool", "ExpressionTool"]:
+ # CWL tools
+ uuid = None
+ tool_id = representation.get("id", None)
+ tool_version = representation.get("version", None)
+ tool_directory = tool_payload.get("tool_directory", None)
+ tool_path = tool_payload.get("path", None)
+
+ if is_path:
+ proxy = tool_proxy(tool_path=tool_path)
+ id_proxy = tool_proxy_from_persistent_representation(proxy.to_persistent_representation())
+ tool_hash = build_tool_hash(id_proxy.to_persistent_representation())
+ tool_id = proxy.galaxy_id()
+ elif "pickle" in representation:
+ # It has already been proxied and pickled - just take the tool
+ # hash.
+ tool_hash = build_tool_hash(representation)
+ else:
+ # Else - build a tool proxy so that we can convert to the persistable
+ # hash.
+ proxy = tool_proxy(tool_object=representation, tool_directory=tool_directory)
+ id_proxy = tool_proxy_from_persistent_representation(proxy.to_persistent_representation())
+ tool_hash = build_tool_hash(id_proxy.to_persistent_representation())
+ tool_id = id_proxy.galaxy_id()
+ value = representation
+ else:
+ raise Exception("Unknown tool type encountered.")
+ # TODO: enforce via DB constraint and catch appropriate
+ # exception.
+ existing_tool = self.get_tool_by_hash(tool_hash)
+ if existing_tool is not None and not allow_load:
+ raise ObjectHashExistsException(existing_tool.id)
+ elif existing_tool:
+ dynamic_tool = existing_tool
+ else:
+ dynamic_tool = self.create(
+ tool_format=tool_format,
+ tool_id=tool_id,
+ tool_version=tool_version,
+ tool_hash=tool_hash,
+ tool_path=tool_path,
+ tool_directory=tool_directory,
+ uuid=uuid,
+ value=value,
+ )
+ tool = self.app.toolbox.load_dynamic_tool(dynamic_tool)
+
+ # assert tool.id == dynamic_tool.tool_id, "%s != %s" % (tool.id, dynamic_tool.tool_id)
+ assert tool.tool_hash == dynamic_tool.tool_hash
+ return dynamic_tool
+
+ def list_tools(self, active=True):
+ return self.query().filter(active=active)
diff --git a/lib/galaxy/managers/workflows.py b/lib/galaxy/managers/workflows.py
index c447a0b4eb65..5a0ba7e08301 100644
--- a/lib/galaxy/managers/workflows.py
+++ b/lib/galaxy/managers/workflows.py
@@ -2,6 +2,7 @@
import json
import logging
+import os
import uuid
from collections import namedtuple
@@ -11,6 +12,7 @@
ImportOptions,
python_to_workflow,
)
+from gxformat2.converter import ordered_load
from six import string_types
from sqlalchemy import and_
from sqlalchemy.orm import joinedload, subqueryload
@@ -22,6 +24,7 @@
)
from galaxy.jobs.actions.post import ActionBox
from galaxy.model.item_attrs import UsesAnnotations
+from galaxy.tools.cwl import workflow_proxy
from galaxy.tools.parameters import (
params_to_incoming,
visit_input_values
@@ -35,7 +38,6 @@
from galaxy.util.json import safe_loads
from galaxy.util.sanitize_html import sanitize_html
from galaxy.web import url_for
-from galaxy.workflow import modules
from galaxy.workflow.modules import (
is_tool_module_type,
module_factory,
@@ -235,13 +237,47 @@ def serialize_workflow_invocations(self, invocations, **kwd):
CreatedWorkflow = namedtuple("CreatedWorkflow", ["stored_workflow", "workflow", "missing_tools"])
+def artifact_class(trans, as_dict):
+ object_id = as_dict.get("object_id", None)
+ if as_dict.get("src", None) == "from_path":
+ if trans and not trans.user_is_admin:
+ raise exceptions.AdminRequiredException()
+
+ workflow_path = as_dict.get("path")
+ with open(workflow_path, "r") as f:
+ as_dict = ordered_load(f)
+
+ artifact_class = as_dict.get("class", None)
+ if artifact_class is None and "$graph" in as_dict:
+ object_id = object_id or "main"
+ graph = as_dict["$graph"]
+ target_object = None
+ if isinstance(graph, dict):
+ target_object = graph.get(object_id)
+ else:
+ for item in graph:
+ found_id = item.get("id")
+ if found_id == object_id or found_id == "#" + object_id:
+ target_object = item
+
+ if target_object and target_object.get("class"):
+ artifact_class = target_object["class"]
+
+ return artifact_class, as_dict, object_id
+
+
class WorkflowContentsManager(UsesAnnotations):
def __init__(self, app):
self.app = app
self._resource_mapper_function = get_resource_mapper_function(app)
- def normalize_workflow_format(self, as_dict):
+ def ensure_raw_description(self, dict_or_raw_description):
+ if not isinstance(dict_or_raw_description, RawWorkflowDescription):
+ dict_or_raw_description = RawWorkflowDescription(dict_or_raw_description)
+ return dict_or_raw_description
+
+ def normalize_workflow_format(self, trans, as_dict):
"""Process incoming workflow descriptions for consumption by other methods.
Currently this mostly means converting format 2 workflows into standard Galaxy
@@ -250,8 +286,18 @@ def normalize_workflow_format(self, as_dict):
side the data model and apply updates in a way that largely preserves YAML structure
so workflows can be extracted.
"""
- workflow_class = as_dict.get("class", None)
- if workflow_class == "GalaxyWorkflow" or "$graph" in as_dict or "yaml_content" in as_dict:
+ workflow_directory = None
+ workflow_path = None
+
+ if as_dict.get("src", None) == "from_path":
+ if not trans.user_is_admin:
+ raise exceptions.AdminRequiredException()
+
+ workflow_path = as_dict.get("path")
+ workflow_directory = os.path.normpath(os.path.dirname(workflow_path))
+
+ workflow_class, as_dict, object_id = artifact_class(trans, as_dict)
+ if workflow_class == "GalaxyWorkflow" or "yaml_content" in as_dict:
if not self.app.config.enable_beta_workflow_format:
raise exceptions.ConfigDoesNotAllowException("Format2 workflows not enabled.")
@@ -259,13 +305,28 @@ def normalize_workflow_format(self, as_dict):
galaxy_interface = Format2ConverterGalaxyInterface()
import_options = ImportOptions()
import_options.deduplicate_subworkflows = True
- as_dict = python_to_workflow(as_dict, galaxy_interface, workflow_directory=None, import_options=import_options)
- return as_dict
-
- def build_workflow_from_dict(
+ as_dict = python_to_workflow(as_dict, galaxy_interface, workflow_directory=workflow_directory, import_options=import_options)
+ elif workflow_class == "Workflow":
+ from galaxy.tools.cwl import workflow_proxy
+ # TODO: consume and use object_id...
+ if object_id:
+ workflow_path += "#" + object_id
+ wf_proxy = workflow_proxy(workflow_path)
+ tool_reference_proxies = wf_proxy.tool_reference_proxies()
+ for tool_reference_proxy in tool_reference_proxies:
+ # TODO: Namespace IDS in workflows.
+ representation = tool_reference_proxy.to_persistent_representation()
+ self.app.dynamic_tool_manager.create_tool(trans, {
+ "representation": representation,
+ }, allow_load=True)
+ as_dict = wf_proxy.to_dict()
+
+ return RawWorkflowDescription(as_dict, workflow_path)
+
+ def build_workflow_from_raw_description(
self,
trans,
- data,
+ raw_workflow_description,
source=None,
add_to_menu=False,
publish=False,
@@ -273,16 +334,22 @@ def build_workflow_from_dict(
exact_tools=True,
fill_defaults=False,
):
+ data = raw_workflow_description.as_dict
# Put parameters in workflow mode
trans.workflow_building_mode = workflow_building_modes.ENABLED
+
# If there's a source, put it in the workflow name.
+ if 'name' not in data:
+ raise Exception("Invalid workflow format detected [%s]" % data)
+
+ workflow_input_name = data['name']
if source:
- name = "%s (imported from %s)" % (data['name'], source)
+ name = "%s (imported from %s)" % (workflow_input_name, source)
else:
- name = data['name']
- workflow, missing_tool_tups = self._workflow_from_dict(
+ name = workflow_input_name
+ workflow, missing_tool_tups = self._workflow_from_raw_description(
trans,
- data,
+ raw_workflow_description,
name=name,
exact_tools=exact_tools,
fill_defaults=fill_defaults,
@@ -293,6 +360,7 @@ def build_workflow_from_dict(
if create_stored_workflow:
# Connect up
stored = model.StoredWorkflow()
+ stored.from_path = raw_workflow_description.workflow_path
stored.name = workflow.name
workflow.stored_workflow = stored
stored.latest_workflow = workflow
@@ -327,13 +395,15 @@ def build_workflow_from_dict(
missing_tools=missing_tool_tups
)
- def update_workflow_from_dict(self, trans, stored_workflow, workflow_data, **kwds):
+ def update_workflow_from_raw_description(self, trans, stored_workflow, raw_workflow_description, **kwds):
+ raw_workflow_description = self.ensure_raw_description(raw_workflow_description)
+
# Put parameters in workflow mode
trans.workflow_building_mode = workflow_building_modes.ENABLED
- workflow, missing_tool_tups = self._workflow_from_dict(
+ workflow, missing_tool_tups = self._workflow_from_raw_description(
trans,
- workflow_data,
+ raw_workflow_description,
name=stored_workflow.name,
**kwds
)
@@ -349,6 +419,8 @@ def update_workflow_from_dict(self, trans, stored_workflow, workflow_data, **kwd
stored_workflow.latest_workflow = workflow
# Persist
trans.sa_session.flush()
+ if stored_workflow.from_path:
+ self._sync_stored_workflow(trans, stored_workflow)
# Return something informative
errors = []
if workflow.has_errors:
@@ -357,9 +429,14 @@ def update_workflow_from_dict(self, trans, stored_workflow, workflow_data, **kwd
errors.append("This workflow contains cycles")
return workflow, errors
- def _workflow_from_dict(self, trans, data, name, **kwds):
+ def _workflow_from_raw_description(self, trans, raw_workflow_description, name, **kwds):
+ data = raw_workflow_description.as_dict
if isinstance(data, string_types):
data = json.loads(data)
+ if "src" in data:
+ assert data["src"] == "path"
+ wf_proxy = workflow_proxy(data["path"])
+ data = wf_proxy.to_dict()
# Create new workflow from source data
workflow = model.Workflow()
@@ -449,6 +526,18 @@ def to_format_2(wf_dict, **kwds):
wf_dict['version'] = len(stored.workflows) - 1
return wf_dict
+ def _sync_stored_workflow(self, trans, stored_workflow):
+ workflow_path = stored_workflow.from_path
+ workflow = stored_workflow.latest_workflow
+ with open(workflow_path, "w") as f:
+ if workflow_path.endswith(".ga"):
+ wf_dict = self._workflow_to_dict_export(trans, stored_workflow, workflow=workflow)
+ json.dump(wf_dict, f, indent=4)
+ else:
+ wf_dict = self._workflow_to_dict_export(trans, stored_workflow, workflow=workflow)
+ wf_dict = from_galaxy_native(wf_dict, None, json_wrapper=True)
+ f.write(wf_dict["yaml_content"])
+
def _workflow_to_dict_run(self, trans, stored, workflow):
"""
Builds workflow dictionary used by run workflow form
@@ -490,7 +579,7 @@ def _workflow_to_dict_run(self, trans, stored, workflow):
step_model = None
if step.type == 'tool':
incoming = {}
- tool = trans.app.toolbox.get_tool(step.tool_id, tool_version=step.tool_version)
+ tool = trans.app.toolbox.get_tool(step.tool_id, tool_version=step.tool_version, tool_hash=step.tool_hash)
params_to_incoming(incoming, tool.inputs, step.state.inputs, trans.app)
step_model = tool.to_json(trans, incoming, workflow_building_mode=workflow_building_modes.USE_HISTORY)
step_model['post_job_actions'] = [{
@@ -700,6 +789,7 @@ def _workflow_to_dict_export(self, trans, stored=None, workflow=None):
'tool_id': content_id, # For workflows exported to older Galaxies,
# eliminate after a few years...
'tool_version': step.tool_version,
+ 'tool_hash': step.tool_hash,
'name': module.get_name(),
'tool_state': json.dumps(tool_state),
'errors': module.get_errors(),
@@ -716,6 +806,17 @@ def _workflow_to_dict_export(self, trans, stored=None, workflow=None):
'changeset_revision': module.tool.changeset_revision,
'tool_shed': module.tool.tool_shed
}
+
+ tool_representation = None
+ tool_hash = step.tool_hash
+ if tool_hash is not None:
+ dynamic_tool = self.app.dynamic_tool_manager.get_tool_by_hash(
+ tool_hash
+ )
+ tool_representation = json.dumps(dynamic_tool.value)
+ step.tool_representation = tool_representation
+ step_dict['tool_representation'] = tool_representation
+
pja_dict = {}
for pja in step.post_job_actions:
pja_dict[pja.action_type + pja.output_name] = dict(
@@ -771,6 +872,14 @@ def _workflow_to_dict_export(self, trans, stored=None, workflow=None):
for output in module.get_data_outputs():
step_dict['outputs'].append({'name': output['name'], 'type': output['extensions'][0]})
+ step_in = {}
+ for step_input in step.inputs:
+ if step_input.default_value_set:
+ step_in[step_input.name] = {"default": step_input.default_value}
+
+ if step_in:
+ step_dict["in"] = step_in
+
# Connections
input_connections = step.input_connections
if step.type is None or step.type == 'tool':
@@ -813,7 +922,7 @@ def callback(input, prefixed_name, **kwargs):
# newer Galaxy instances can be used with older Galaxy
# instances if they do no include multiple input
# tools. This should be removed at some point. Mirrored
- # hack in _workflow_from_dict should never be removed so
+ # hack in _workflow_from_raw_description should never be removed so
# existing workflow exports continue to function.
for input_name, input_conn in dict(input_conn_dict).items():
if len(input_conn) == 1:
@@ -955,7 +1064,7 @@ def __module_from_dict(self, trans, steps, steps_by_external_id, step_dict, **kw
"""
step = model.WorkflowStep()
# TODO: Consider handling position inside module.
- step.position = step_dict['position']
+ step.position = step_dict.get('position', {"left": 0, "top": 0})
if step_dict.get("uuid", None) and step_dict['uuid'] != "None":
step.uuid = step_dict["uuid"]
if "label" in step_dict:
@@ -973,6 +1082,19 @@ def __module_from_dict(self, trans, steps, steps_by_external_id, step_dict, **kw
# Stick this in the step temporarily
step.temp_input_connections = step_dict['input_connections']
+ if "inputs" in step_dict:
+ for input_dict in step_dict["inputs"]:
+ step_input = model.WorkflowStepInput(step)
+ step_input.name = input_dict["name"]
+ step_input.merge_type = input_dict.get("merge_type", step_input.default_merge_type)
+ step_input.scatter_type = input_dict.get("scatter_type", step_input.default_scatter_type)
+ value_from = input_dict.get("value_from", None)
+ if value_from is None:
+ # Super hacky - we probably need distinct value from and
+ # default handling.
+ value_from = input_dict.get("default")
+ step_input.value_from = value_from
+
# Create the model class for the step
steps.append(step)
external_id = step_dict["id"]
@@ -998,6 +1120,16 @@ def __module_from_dict(self, trans, steps, steps_by_external_id, step_dict, **kw
label=label,
)
trans.sa_session.add(m)
+
+ if "in" in step_dict:
+ for input_name, input_dict in step_dict["in"].items():
+ step_input = step.get_or_add_input(input_name)
+ NO_DEFAULT_DEFINED = object()
+ default = input_dict.get("default", NO_DEFAULT_DEFINED)
+ if default is not NO_DEFAULT_DEFINED:
+ step_input.default_value = default
+ step_input.default_value_set = True
+
return module, step
def __load_subworkflow_from_step_dict(self, trans, step_dict, subworkflow_id_map, **kwds):
@@ -1023,8 +1155,9 @@ def __load_subworkflow_from_step_dict(self, trans, step_dict, subworkflow_id_map
return subworkflow
def __build_embedded_subworkflow(self, trans, data, **kwds):
- subworkflow = self.build_workflow_from_dict(
- trans, data, create_stored_workflow=False, fill_defaults=kwds.get("fill_defaults", False)
+ raw_workflow_description = self.ensure_raw_description(data)
+ subworkflow = self.build_workflow_from_raw_description(
+ trans, raw_workflow_description, create_stored_workflow=False, fill_defaults=kwds.get("fill_defaults", False)
).workflow
return subworkflow
@@ -1041,23 +1174,21 @@ def __connect_workflow_steps(self, steps, steps_by_external_id):
continue
if not isinstance(conn_list, list): # Older style singleton connection
conn_list = [conn_list]
+
for conn_dict in conn_list:
if 'output_name' not in conn_dict or 'id' not in conn_dict:
template = "Invalid connection [%s] - must be dict with output_name and id fields."
message = template % conn_dict
raise exceptions.MessageException(message)
- conn = model.WorkflowStepConnection()
- conn.input_step = step
- conn.input_name = input_name
- conn.output_name = conn_dict['output_name']
external_id = conn_dict['id']
if external_id not in steps_by_external_id:
raise KeyError("Failed to find external id %s in %s" % (external_id, steps_by_external_id.keys()))
- conn.output_step = steps_by_external_id[external_id]
+ output_step = steps_by_external_id[external_id]
+ output_name = conn_dict["output_name"]
input_subworkflow_step_index = conn_dict.get('input_subworkflow_step_id', None)
- if input_subworkflow_step_index is not None:
- conn.input_subworkflow_step = step.subworkflow.step_by_index(input_subworkflow_step_index)
+
+ step.add_connection(input_name, output_name, output_step, input_subworkflow_step_index)
del step.temp_input_connections
@@ -1079,6 +1210,13 @@ def __init__(self, workflow, errors):
self.errors = errors
+class RawWorkflowDescription(object):
+
+ def __init__(self, as_dict, workflow_path=None):
+ self.as_dict = as_dict
+ self.workflow_path = workflow_path
+
+
class Format2ConverterGalaxyInterface(ImporterGalaxyInterface):
def import_workflow(self, workflow, **kwds):
diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index 887f49c48184..957426421c4b 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -519,6 +519,27 @@ def __init__(self, user, token=None):
self.expiration_time = galaxy.model.orm.now.now() + timedelta(hours=24)
+class DynamicTool(Dictifiable):
+ dict_collection_visible_keys = ('id', 'tool_id', 'tool_format', 'tool_version', 'uuid', 'active', 'hidden', 'tool_hash')
+ dict_element_visible_keys = ('id', 'tool_id', 'tool_format', 'tool_version', 'uuid', 'active', 'hidden', 'tool_hash')
+
+ def __init__(self, tool_format=None, tool_id=None, tool_version=None, tool_hash=None, tool_path=None, tool_directory=None,
+ uuid=None, active=True, hidden=True, value=None):
+ self.tool_format = tool_format
+ self.tool_id = tool_id
+ self.tool_version = tool_version
+ self.tool_hash = tool_hash
+ self.tool_path = tool_path
+ self.tool_directory = tool_directory
+ self.active = active
+ self.hidden = hidden
+ self.value = value
+ if uuid is None:
+ self.uuid = uuid4()
+ else:
+ self.uuid = UUID(str(uuid))
+
+
class BaseJobMetric(object):
def __init__(self, plugin, metric_name, metric_value):
@@ -584,6 +605,7 @@ def __init__(self):
self.user_id = None
self.tool_id = None
self.tool_version = None
+ self.tool_hash = None
self.copied_from_job_id = None
self.command_line = None
self.dependencies = []
@@ -883,6 +905,8 @@ def to_dict(self, view='collection', system_details=False):
# System level details that only admins should have.
rval['external_id'] = self.job_runner_external_id
rval['command_line'] = self.command_line
+ rval['cwl_command_state'] = self.cwl_command_state
+ rval['cwl_command_state_version'] = self.cwl_command_state_version
if view == 'element':
param_dict = dict([(p.name, p.value) for p in self.parameters])
@@ -2269,6 +2293,19 @@ def has_data(self):
"""Detects whether there is any data"""
return self.dataset.has_data()
+ def get_cwl_filename(self):
+ return self.dataset.cwl_filename
+
+ def set_cwl_filename(self, cwl_filename):
+ # This should be a write-once property intrinsic to the underlying
+ # dataset for pure CWL workflows. We may wish to revisit that for
+ # usability longer term.
+ if self.dataset.cwl_filename is not None:
+ raise Exception("Underlying dataset already has a cwlfilename set.")
+ self.dataset.cwl_filename = cwl_filename
+
+ cwl_filename = property(get_cwl_filename, set_cwl_filename)
+
def get_raw_data(self):
"""Returns the full data. To stream it open the file_name and read/write as needed"""
return self.datatype.get_raw_data(self)
@@ -3298,10 +3335,13 @@ def __init__(
id=None,
collection_type=None,
populated=True,
+ fields=None,
element_count=None
):
self.id = id
self.collection_type = collection_type
+ # TODO: persist fields...
+ self.fields = fields
if not populated:
self.populated_state = DatasetCollection.populated_states.NEW
self.element_count = element_count
@@ -3378,6 +3418,10 @@ def populated_optimized(self):
return self._populated_optimized
+ @property
+ def allow_implicit_mapping(self):
+ return self.collection_type != "record"
+
@property
def populated(self):
top_level_populated = self.populated_state == DatasetCollection.populated_states.OK
@@ -4063,6 +4107,11 @@ def log_str(self):
class WorkflowStep(RepresentById):
+ STEP_TYPE_TO_INPUT_TYPE = {
+ "data_input": "dataset",
+ "data_collection_input": "dataset_collection",
+ "parameter_input": "parameter",
+ }
def __init__(self):
self.id = None
@@ -4071,12 +4120,59 @@ def __init__(self):
self.tool_inputs = None
self.tool_errors = None
self.position = None
- self.input_connections = []
+ self.inputs = []
self.config = None
self.label = None
self.uuid = uuid4()
self.workflow_outputs = []
self._input_connections_by_name = None
+ self._inputs_by_name = None
+
+ @property
+ def input_default_value(self):
+ tool_inputs = self.tool_inputs
+ tool_state = tool_inputs
+ default_value = tool_state.get("default_value")
+ if default_value:
+ default_value = json.loads(default_value)["value"]
+ return default_value
+
+ @property
+ def input_type(self):
+ assert self.type and self.type in self.STEP_TYPE_TO_INPUT_TYPE, "step.input_type can only be called on input step types"
+ return self.STEP_TYPE_TO_INPUT_TYPE[self.type]
+
+ def get_input(self, input_name):
+ for step_input in self.inputs:
+ if step_input.name == input_name:
+ return step_input
+
+ return None
+
+ def get_or_add_input(self, input_name):
+ step_input = self.get_input(input_name)
+
+ if step_input is None:
+ step_input = WorkflowStepInput(self)
+ step_input.name = input_name
+ return step_input
+
+ def add_connection(self, input_name, output_name, output_step, input_subworkflow_step_index=None):
+ step_input = self.get_or_add_input(input_name)
+
+ conn = WorkflowStepConnection()
+ conn.input_step_input = step_input
+ conn.output_name = output_name
+ conn.output_step = output_step
+ if input_subworkflow_step_index is not None:
+ input_subworkflow_step = self.subworkflow.step_by_index(input_subworkflow_step_index)
+ conn.input_subworkflow_step = input_subworkflow_step
+ return conn
+
+ @property
+ def input_connections(self):
+ connections = [_ for step_input in self.inputs for _ in step_input.connections]
+ return connections
@property
def unique_workflow_outputs(self):
@@ -4112,6 +4208,12 @@ def input_connections_by_name(self):
self.setup_input_connections_by_name()
return self._input_connections_by_name
+ @property
+ def inputs_by_name(self):
+ if self._inputs_by_name is None:
+ self.setup_inputs_by_name()
+ return self._inputs_by_name
+
def setup_input_connections_by_name(self):
# Ensure input_connections has already been set.
@@ -4124,6 +4226,17 @@ def setup_input_connections_by_name(self):
input_connections_by_name[input_name].append(conn)
self._input_connections_by_name = input_connections_by_name
+ def setup_inputs_by_name(self):
+ # Ensure input_connections has already been set.
+
+ # Make connection information available on each step by input name.
+ inputs_by_name = {}
+ for step_input in self.inputs:
+ input_name = step_input.name
+ assert input_name not in inputs_by_name
+ inputs_by_name[input_name] = step_input
+ self._inputs_by_name = inputs_by_name
+
def create_or_update_workflow_output(self, output_name, label, uuid):
output = self.workflow_output_for(output_name)
if output is None:
@@ -4151,7 +4264,7 @@ def copy_to(self, copied_step, step_mapping):
copied_step.position = self.position
copied_step.config = self.config
copied_step.label = self.label
- copied_step.input_connections = copy_list(self.input_connections)
+ copied_step.inputs = copy_list(self.inputs, copied_step)
subworkflow_step_mapping = {}
subworkflow = self.subworkflow
@@ -4162,8 +4275,7 @@ def copy_to(self, copied_step, step_mapping):
subworkflow_step_mapping[subworkflow_step.id] = copied_subworkflow_step
for old_conn, new_conn in zip(self.input_connections, copied_step.input_connections):
- # new_conn.input_step = new_
- new_conn.input_step = step_mapping[old_conn.input_step_id]
+ new_conn.input_step_input = copied_step.get_or_add_input(old_conn.input_name)
new_conn.output_step = step_mapping[old_conn.output_step_id]
if old_conn.input_subworkflow_step_id:
new_conn.input_subworkflow_step = subworkflow_step_mapping[old_conn.input_subworkflow_step_id]
@@ -4175,7 +4287,31 @@ def copy_to(self, copied_step, step_mapping):
copied_step.workflow_outputs = copy_list(self.workflow_outputs, copied_step)
def log_str(self):
- return "WorkflowStep[index=%d,type=%s]" % (self.order_index, self.type)
+ return "WorkflowStep[index=%d,type=%s,label=%s]" % (self.order_index, self.type, self.label)
+
+
+class WorkflowStepInput(RepresentById):
+    """Persisted description of a single named input of a workflow step."""
+
+    default_merge_type = "merge_flattened"
+    default_scatter_type = "dotproduct"
+
+    def __init__(self, workflow_step):
+        self.workflow_step = workflow_step
+        self.name = None
+        self.default_value = None
+        # Track explicitly whether a default was supplied, because None is
+        # itself a legitimate default value.
+        self.default_value_set = False
+        self.merge_type = self.default_merge_type
+        self.scatter_type = self.default_scatter_type
+
+    def copy(self, copied_step):
+        """Return a copy of this input attached to ``copied_step``."""
+        copied_step_input = WorkflowStepInput(copied_step)
+        copied_step_input.name = self.name
+        copied_step_input.default_value = self.default_value
+        copied_step_input.default_value_set = self.default_value_set
+        copied_step_input.merge_type = self.merge_type
+        copied_step_input.scatter_type = self.scatter_type
+
+        copied_step_input.connections = copy_list(self.connections)
+        return copied_step_input
class WorkflowStepConnection(RepresentById):
@@ -4189,20 +4325,36 @@ class WorkflowStepConnection(RepresentById):
def __init__(self):
self.output_step_id = None
self.output_name = None
- self.input_step_id = None
- self.input_name = None
+ self.input_step_input_id = None
@property
def non_data_connection(self):
return (self.output_name == self.input_name == WorkflowStepConnection.NON_DATA_CONNECTION)
+ @property
+ def input_name(self):
+ return self.input_step_input.name
+
+ @property
+ def input_step(self):
+ return self.input_step_input and self.input_step_input.workflow_step
+
+ @property
+ def input_step_id(self):
+ input_step = self.input_step
+ return input_step and input_step.id
+
def copy(self):
# TODO: handle subworkflow ids...
copied_connection = WorkflowStepConnection()
copied_connection.output_name = self.output_name
- copied_connection.input_name = self.input_name
return copied_connection
+ def log_str(self):
+ return "WorkflowStepConnection[output_step_id=%s,output_name=%s,input_step_id=%s,input_name=%s]" % (
+ self.output_step_id, self.output_name, self.input_step_id, self.input_name
+ )
+
class WorkflowOutput(RepresentById):
diff --git a/lib/galaxy/model/mapping.py b/lib/galaxy/model/mapping.py
index 0fc1203b8411..2d61f1658e0d 100644
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -151,6 +151,25 @@
Column("expiration_time", DateTime),
Column("user_id", Integer, ForeignKey("galaxy_user.id"), index=True))
+
+model.DynamicTool.table = Table(
+ "dynamic_tool", metadata,
+ Column("id", Integer, primary_key=True),
+ Column("uuid", UUIDType()),
+ Column("create_time", DateTime, default=now),
+ Column("update_time", DateTime, index=True, default=now, onupdate=now),
+ Column("tool_id", Unicode(255)),
+ Column("tool_version", Unicode(255)),
+ Column("tool_format", Unicode(255)),
+ Column("tool_hash", Unicode(500), unique=True),
+ Column("tool_path", Unicode(255), unique=True),
+ Column("tool_directory", Unicode(255), unique=False),
+ Column("hidden", Boolean),
+ Column("active", Boolean),
+ Column("value", JSONType()),
+)
+
+
model.History.table = Table(
"history", metadata,
Column("id", Integer, primary_key=True),
@@ -229,6 +248,7 @@
Column("object_store_id", TrimmedString(255), index=True),
Column("external_filename", TEXT),
Column("_extra_files_path", TEXT),
+ Column("cwl_filename", TEXT),
Column('file_size', Numeric(15, 0)),
Column('total_size', Numeric(15, 0)),
Column('uuid', UUIDType()))
@@ -552,6 +572,8 @@
Column("object_store_id", TrimmedString(255), index=True),
Column("imported", Boolean, default=False, index=True),
Column("params", TrimmedString(255), index=True),
+ Column("cwl_command_state", JSONType, nullable=True),
+ Column("cwl_command_state_version", Integer, default=1),
Column("handler", TrimmedString(255), index=True))
model.JobStateHistory.table = Table(
@@ -874,6 +896,7 @@
Column("deleted", Boolean, default=False),
Column("importable", Boolean, default=False),
Column("slug", TEXT, index=True),
+ Column("from_path", TEXT, index=True),
Column("published", Boolean, index=True, default=False))
model.Workflow.table = Table(
@@ -899,6 +922,7 @@
Column("type", String(64)),
Column("tool_id", TEXT),
Column("tool_version", TEXT),
+ Column("tool_hash", TEXT),
Column("tool_inputs", JSONType),
Column("tool_errors", JSONType),
Column("position", JSONType),
@@ -908,6 +932,21 @@
# Column( "input_connections", JSONType ),
Column("label", Unicode(255)))
+
+model.WorkflowStepInput.table = Table(
+ "workflow_step_input", metadata,
+ Column("id", Integer, primary_key=True),
+ Column("workflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
+ Column("name", TEXT),
+ Column("merge_type", TEXT),
+ Column("scatter_type", TEXT),
+ Column("value_from", JSONType),
+ Column("value_from_type", TEXT),
+ Column("default_value", JSONType),
+ Column("default_value_set", Boolean, default=False),
+ Column("runtime_value", Boolean))
+
+
model.WorkflowRequestStepState.table = Table(
"workflow_request_step_states", metadata,
Column("id", Integer, primary_key=True),
@@ -953,9 +992,8 @@
"workflow_step_connection", metadata,
Column("id", Integer, primary_key=True),
Column("output_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
- Column("input_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
+ Column("input_step_input_id", Integer, ForeignKey("workflow_step_input.id"), index=True),
Column("output_name", TEXT),
- Column("input_name", TEXT),
Column("input_subworkflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
)
@@ -1496,6 +1534,8 @@ def simple_mapping(model, **kwds):
mapper(model.ValidationError, model.ValidationError.table)
+simple_mapping(model.DynamicTool)
+
simple_mapping(model.HistoryDatasetAssociation,
dataset=relation(model.Dataset,
primaryjoin=(model.Dataset.table.c.id == model.HistoryDatasetAssociation.table.c.dataset_id), lazy=False),
@@ -2215,7 +2255,14 @@ def simple_mapping(model, **kwds):
backref="workflow_steps"),
annotations=relation(model.WorkflowStepAnnotationAssociation,
order_by=model.WorkflowStepAnnotationAssociation.table.c.id,
- backref="workflow_steps")
+ backref="workflow_steps"),
+))
+
+mapper(model.WorkflowStepInput, model.WorkflowStepInput.table, properties=dict(
+ workflow_step=relation(model.WorkflowStep,
+ backref=backref("inputs", uselist=True),
+ cascade="all",
+ primaryjoin=(model.WorkflowStep.table.c.id == model.WorkflowStepInput.table.c.workflow_step_id))
))
mapper(model.WorkflowOutput, model.WorkflowOutput.table, properties=dict(
@@ -2225,10 +2272,10 @@ def simple_mapping(model, **kwds):
))
mapper(model.WorkflowStepConnection, model.WorkflowStepConnection.table, properties=dict(
- input_step=relation(model.WorkflowStep,
- backref="input_connections",
+ input_step_input=relation(model.WorkflowStepInput,
+ backref="connections",
cascade="all",
- primaryjoin=(model.WorkflowStepConnection.table.c.input_step_id == model.WorkflowStep.table.c.id)),
+ primaryjoin=(model.WorkflowStepConnection.table.c.input_step_input_id == model.WorkflowStepInput.table.c.id)),
input_subworkflow_step=relation(model.WorkflowStep,
backref=backref("parent_workflow_input_connections", uselist=True),
primaryjoin=(model.WorkflowStepConnection.table.c.input_subworkflow_step_id == model.WorkflowStep.table.c.id),
diff --git a/lib/galaxy/model/migrate/versions/0136_collection_and_workflow_state.py b/lib/galaxy/model/migrate/versions/0136_collection_and_workflow_state.py
index 0a787a5f7bb1..d2597359041a 100644
--- a/lib/galaxy/model/migrate/versions/0136_collection_and_workflow_state.py
+++ b/lib/galaxy/model/migrate/versions/0136_collection_and_workflow_state.py
@@ -93,14 +93,6 @@ def upgrade(migrate_engine):
for table in tables.values():
__create(table)
- def nextval(table, col='id'):
- if migrate_engine.name in ['postgres', 'postgresql']:
- return "nextval('%s_%s_seq')" % (table, col)
- elif migrate_engine.name in ['mysql', 'sqlite']:
- return "null"
- else:
- raise Exception("Unhandled database type")
-
# Set default for creation to scheduled, actual mapping has new as default.
workflow_invocation_step_state_column = Column("state", TrimmedString(64), default="scheduled")
if migrate_engine.name in ['postgres', 'postgresql']:
diff --git a/lib/galaxy/model/migrate/versions/0145_add_workflow_step_input.py b/lib/galaxy/model/migrate/versions/0145_add_workflow_step_input.py
new file mode 100644
index 000000000000..01187b3bd171
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0145_add_workflow_step_input.py
@@ -0,0 +1,127 @@
+"""
+Migration script for workflow step input table.
+"""
+from __future__ import print_function
+
+import logging
+
+from sqlalchemy import Boolean, Column, ForeignKey, Integer, MetaData, Table, TEXT
+
+from galaxy.model.custom_types import JSONType
+
+log = logging.getLogger(__name__)
+metadata = MetaData()
+
+from_path_column = Column("from_path", TEXT, nullable=True)
+
+
+def get_new_tables():
+
+ WorkflowStepInput_table = Table(
+ "workflow_step_input", metadata,
+ Column("id", Integer, primary_key=True),
+ Column("workflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
+ Column("name", TEXT),
+ Column("merge_type", TEXT),
+ Column("scatter_type", TEXT),
+ Column("value_from", JSONType),
+ Column("value_from_type", TEXT),
+ Column("default_value", JSONType),
+ Column("default_value_set", Boolean, default=False),
+ Column("runtime_value", Boolean, default=False),
+ )
+
+ WorkflowStepConnection_table = Table(
+ "workflow_step_connection", metadata,
+ Column("id", Integer, primary_key=True),
+ Column("output_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
+ Column("input_step_input_id", Integer, ForeignKey("workflow_step_input.id"), index=True),
+ Column("output_name", TEXT),
+ Column("input_subworkflow_step_id", Integer, ForeignKey("workflow_step.id"), index=True),
+ )
+
+ return [
+ WorkflowStepInput_table, WorkflowStepConnection_table
+ ]
+
+
+def upgrade(migrate_engine):
+ metadata.bind = migrate_engine
+ print(__doc__)
+ metadata.reflect()
+
+ LegacyWorkflowStepConnection_table = Table("workflow_step_connection", metadata, autoload=True)
+ for index in LegacyWorkflowStepConnection_table.indexes:
+ index.drop()
+ LegacyWorkflowStepConnection_table.rename("workflow_step_connection_premigrate145")
+ # Try to deregister that table to work around some caching problems it seems.
+ LegacyWorkflowStepConnection_table.deregister()
+ metadata._remove_table("workflow_step_connection", metadata.schema)
+
+ metadata.reflect()
+ tables = get_new_tables()
+ for table in tables:
+ __create(table)
+
+ insert_step_inputs_cmd = \
+ "INSERT INTO workflow_step_input (workflow_step_id, name) " + \
+ "SELECT id, input_name FROM workflow_step_connection_premigrate145"
+
+ migrate_engine.execute(insert_step_inputs_cmd)
+
+ # TODO: verify order here.
+ insert_step_connections_cmd = \
+ "INSERT INTO workflow_step_connection (output_step_id, input_step_input_id, output_name, input_subworkflow_step_id) " + \
+ "SELECT wsc.output_step_id, wsi.id, wsc.output_name, wsc.input_subworkflow_step_id " + \
+ "FROM workflow_step_connection_premigrate145 as wsc left outer join workflow_step_input as wsi on wsc.input_step_id = wsi.workflow_step_id and wsc.input_name = wsi.name ORDER BY wsc.id"
+
+ migrate_engine.execute(insert_step_connections_cmd)
+
+ __add_column(from_path_column, "stored_workflow", metadata)
+
+
+def downgrade(migrate_engine):
+ metadata.bind = migrate_engine
+
+ tables = get_new_tables()
+ for table in tables:
+ __drop(table)
+
+ metadata._remove_table("workflow_step_connection", metadata.schema)
+ metadata.reflect()
+
+ # Drop new workflow invocation step and job association table and restore legacy data.
+ LegacyWorkflowStepConnection_table = Table("workflow_step_connection_premigrate145", metadata, autoload=True)
+ LegacyWorkflowStepConnection_table.rename("workflow_step_connection")
+
+ __drop_column(from_path_column, "stored_workflow", metadata)
+
+
+def __add_column(column, table_name, metadata, **kwds):
+ try:
+ table = Table(table_name, metadata, autoload=True)
+ column.create(table, **kwds)
+ except Exception:
+ log.exception("Adding column %s failed.", column)
+
+
+def __drop_column(column_name, table_name, metadata):
+ try:
+ table = Table(table_name, metadata, autoload=True)
+ getattr(table.c, column_name).drop()
+ except Exception:
+ log.exception("Dropping column %s failed.", column_name)
+
+
+def __create(table):
+ try:
+ table.create()
+ except Exception:
+ log.exception("Creating %s table failed.", table.name)
+
+
+def __drop(table):
+ try:
+ table.drop()
+ except Exception:
+ log.exception("Dropping %s table failed.", table.name)
diff --git a/lib/galaxy/model/migrate/versions/0146_dynamic_tools.py b/lib/galaxy/model/migrate/versions/0146_dynamic_tools.py
new file mode 100644
index 000000000000..862cc7d58d3c
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0146_dynamic_tools.py
@@ -0,0 +1,95 @@
+"""
+"""
+import datetime
+import logging
+
+from sqlalchemy import Boolean, Column, DateTime, Integer, MetaData, Table, Unicode, Text
+
+from galaxy.model.custom_types import JSONType, UUIDType
+
+now = datetime.datetime.utcnow
+log = logging.getLogger(__name__)
+metadata = MetaData()
+
+now = datetime.datetime.utcnow
+log = logging.getLogger(__name__)
+metadata = MetaData()
+
+
+DynamicTool_table = Table(
+ "dynamic_tool", metadata,
+ Column("id", Integer, primary_key=True),
+ Column("uuid", UUIDType()),
+ Column("create_time", DateTime, default=now),
+ Column("update_time", DateTime, default=now, onupdate=now),
+ Column("tool_id", Unicode(255)),
+ Column("tool_version", Unicode(255)),
+ Column("tool_format", Unicode(255)),
+ Column("tool_hash", Unicode(500)),
+ Column("tool_path", Unicode(255)),
+ Column("tool_directory", Unicode(255)),
+ Column("hidden", Boolean),
+ Column("active", Boolean),
+ Column("value", JSONType()),
+)
+
+TABLES = [
+ DynamicTool_table,
+]
+
+
+def upgrade(migrate_engine):
+    """Create the dynamic_tool table and add tool_hash columns."""
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    for table in TABLES:
+        __create(table)
+
+    # Record the hash of the (possibly dynamic) tool used by steps and jobs.
+    __add_column(Column("tool_hash", Text), "workflow_step", metadata)
+    __add_column(Column("tool_hash", Text), "job", metadata)
+
+
+def downgrade(migrate_engine):
+    """Drop the dynamic_tool table and the tool_hash columns added above."""
+    metadata.bind = migrate_engine
+    metadata.reflect()
+
+    for table in TABLES:
+        __drop(table)
+
+    __drop_column("tool_hash", "workflow_step", metadata)
+    __drop_column("tool_hash", "job", metadata)
+
+
+def __add_column(column, table_name, metadata, **kwds):
+    """Add ``column`` to ``table_name``, logging (not raising) on failure."""
+    try:
+        table = Table(table_name, metadata, autoload=True)
+        column.create(table, **kwds)
+    except Exception:
+        # log.exception captures the traceback; no separate print needed.
+        log.exception("Adding column %s failed.", column)
+
+
+def __drop_column(column_name, table_name, metadata):
+    """Drop ``column_name`` from ``table_name``, logging (not raising) on failure."""
+    try:
+        table = Table(table_name, metadata, autoload=True)
+        getattr(table.c, column_name).drop()
+    except Exception:
+        # log.exception captures the traceback; no separate print needed.
+        log.exception("Dropping column %s failed.", column_name)
+
+
+def __create(table):
+    """Create ``table``, logging (not raising) on failure."""
+    try:
+        table.create()
+    except Exception:
+        # log.exception captures the traceback; no separate print needed.
+        log.exception("Creating %s table failed.", table.name)
+
+
+def __drop(table):
+    """Drop ``table``, logging (not raising) on failure."""
+    try:
+        table.drop()
+    except Exception:
+        # log.exception captures the traceback; no separate print needed.
+        log.exception("Dropping %s table failed.", table.name)
diff --git a/lib/galaxy/model/migrate/versions/0147_cwl_state.py b/lib/galaxy/model/migrate/versions/0147_cwl_state.py
new file mode 100644
index 000000000000..a938d1db7da4
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0147_cwl_state.py
@@ -0,0 +1,55 @@
+"""
+Migration script to allow invalidation of job external output metadata temp files
+"""
+import datetime
+import logging
+
+from sqlalchemy import Integer, Column, MetaData, Table, TEXT
+from galaxy.model.custom_types import JSONType
+
+now = datetime.datetime.utcnow
+log = logging.getLogger(__name__)
+metadata = MetaData()
+
+
+def upgrade(migrate_engine):
+    """Add CWL state columns to the job and dataset tables."""
+    metadata.bind = migrate_engine
+    print(__doc__)
+    metadata.reflect()
+
+    # Defaults mirror the model mapping: state is absent (NULL) until set,
+    # and the state version starts at 1 — not ``default=True``, which is
+    # meaningless for a JSON column and wrong for an Integer version.
+    cwl_command_column = Column("cwl_command_state", JSONType, nullable=True)
+    cwl_command_version_column = Column("cwl_command_state_version", Integer, default=1)
+
+    cwl_file_name = Column("cwl_filename", TEXT, default=None)
+
+    __add_column(cwl_command_column, "job", metadata)
+    __add_column(cwl_command_version_column, "job", metadata)
+    __add_column(cwl_file_name, "dataset", metadata)
+
+
+def downgrade(migrate_engine):
+ metadata.bind = migrate_engine
+ metadata.reflect()
+
+ __drop_column("cwl_command_state", "job", metadata)
+ __drop_column("cwl_command_state_version", "job", metadata)
+
+ __drop_column("cwl_filename", "dataset", metadata)
+
+
+def __add_column(column, table_name, metadata, **kwds):
+    """Add ``column`` to ``table_name``, logging (not raising) on failure."""
+    try:
+        table = Table(table_name, metadata, autoload=True)
+        column.create(table, **kwds)
+    except Exception:
+        # log.exception captures the traceback; no separate print needed.
+        log.exception("Adding column %s failed.", column)
+
+
+def __drop_column(column_name, table_name, metadata):
+    """Drop ``column_name`` from ``table_name``, logging (not raising) on failure."""
+    try:
+        table = Table(table_name, metadata, autoload=True)
+        getattr(table.c, column_name).drop()
+    except Exception:
+        # log.exception captures the traceback; no separate print needed.
+        log.exception("Dropping column %s failed.", column_name)
diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py
index b45621ec217c..938e8dc8e683 100755
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -5,6 +5,7 @@
import logging
import os
import re
+import uuid
import tarfile
import tempfile
import threading
@@ -36,6 +37,7 @@
from galaxy.tools.deps import (
CachedDependencyManager,
)
+from galaxy.tools import expressions
from galaxy.tools.fetcher import ToolLocationFetcher
from galaxy.tools.parameters import (
check_param,
@@ -62,10 +64,13 @@
from galaxy.tools.parameters.grouping import Conditional, ConditionalWhen, Repeat, Section, UploadDataset
from galaxy.tools.parameters.input_translation import ToolInputTranslator
from galaxy.tools.parameters.meta import expand_meta_parameters
+from galaxy.tools.parameters.wrapped_json import json_wrap
from galaxy.tools.parser import (
get_tool_source,
+ get_tool_source_from_representation,
ToolOutputCollectionPart
)
+from galaxy.tools.cwl import needs_shell_quoting, shellescape, to_galaxy_parameters
from galaxy.tools.parser.xml import XmlPageSource
from galaxy.tools.test import parse_tests
from galaxy.tools.toolbox import BaseGalaxyToolBox
@@ -75,6 +80,7 @@
listify,
Params,
rst_to_html,
+ safe_makedirs,
string_as_bool,
unicodify
)
@@ -102,6 +108,12 @@
log = logging.getLogger(__name__)
+REQUIRES_JS_RUNTIME_MESSAGE = ("The tool [%s] requires a nodejs runtime to execute "
+ "but node nor nodejs could be found on Galaxy's PATH and "
+ "no runtime was configured using the nodejs_path option in "
+ "galaxy.ini.")
+
+
HELP_UNINITIALIZED = threading.Lock()
MODEL_TOOLS_PATH = os.path.abspath(os.path.dirname(__file__))
# Tools that require Galaxy's Python environment to be preserved.
@@ -259,20 +271,49 @@ def tools_by_id(self):
return self._tools_by_id
def create_tool(self, config_file, **kwds):
- try:
+ tool_source = get_tool_source(
+ config_file,
+ enable_beta_formats=getattr(self.app.config, "enable_beta_tool_formats", False),
+ tool_location_fetcher=self.tool_location_fetcher,
+ strict_cwl_validation=getattr(self.app.config, "strict_cwl_validation", True),
+ )
+ return self._create_tool_from_source(tool_source, config_file=config_file, **kwds)
+
+ def _create_tool_from_source(self, tool_source, **kwds):
+ return create_tool_from_source(self.app, tool_source, **kwds)
+
+ def create_dynamic_tool(self, dynamic_tool, **kwds):
+ tool_format = dynamic_tool.tool_format
+ tool_representation = dynamic_tool.value
+ get_source_kwds = dict(
+ tool_format=tool_format,
+ tool_representation=tool_representation,
+ )
+ if dynamic_tool.tool_directory:
+ get_source_kwds["tool_directory"] = dynamic_tool.tool_directory
+ if dynamic_tool.tool_path:
+ config_file = dynamic_tool.tool_path
tool_source = get_tool_source(
config_file,
- enable_beta_formats=getattr(self.app.config, "enable_beta_tool_formats", False),
+ enable_beta_formats=getattr(self.app.config, "enable_beta_tool_formats", True),
tool_location_fetcher=self.tool_location_fetcher,
+ strict_cwl_validation=getattr(self.app.config, "strict_cwl_validation", True),
)
- except Exception as e:
- # capture and log parsing errors
- global_tool_errors.add_error(config_file, "Tool XML parsing", e)
- raise e
- return self._create_tool_from_source(tool_source, config_file=config_file, **kwds)
+ else:
+ tool_source = get_tool_source_from_representation(**get_source_kwds)
+ kwds["dynamic"] = True
+ tool = self._create_tool_from_source(tool_source, **kwds)
+ if dynamic_tool.tool_hash:
+ tool.tool_hash = dynamic_tool.tool_hash
+ else:
+ from galaxy.tools.hash import build_tool_hash
+ tool.tool_hash = build_tool_hash(tool._cwl_tool_proxy.to_persistent_representation())
- def _create_tool_from_source(self, tool_source, **kwds):
- return create_tool_from_source(self.app, tool_source, **kwds)
+ if not tool.id:
+ tool.id = dynamic_tool.tool_id
+ if not tool.name:
+ tool.name = tool.id
+ return tool
def get_tool_components(self, tool_id, tool_version=None, get_loaded_tools_by_lineage=False, set_selected=False):
"""
@@ -389,12 +430,18 @@ class Tool(Dictifiable):
requires_setting_metadata = True
default_tool_action = DefaultToolAction
dict_collection_visible_keys = ['id', 'name', 'version', 'description', 'labels']
+ may_use_container_entry_point = False
- def __init__(self, config_file, tool_source, app, guid=None, repository_id=None, allow_code_files=True):
+ def __init__(self, config_file, tool_source, app, guid=None, repository_id=None, allow_code_files=True, dynamic=False):
"""Load a tool from the config named by `config_file`"""
# Determine the full path of the directory where the tool config is
- self.config_file = config_file
- self.tool_dir = os.path.dirname(config_file)
+ if config_file is not None:
+ self.config_file = config_file
+ self.tool_dir = os.path.dirname(config_file)
+ else:
+ self.config_file = None
+ self.tool_dir = None
+
self.app = app
self.repository_id = repository_id
self._allow_code_files = allow_code_files
@@ -414,6 +461,7 @@ def __init__(self, config_file, tool_source, app, guid=None, repository_id=None,
self.display_interface = True
self.require_login = False
self.rerun = False
+ self.tool_hash = None
# Define a place to keep track of all input These
# differ from the inputs dictionary in that inputs can be page
# elements like conditionals, but input_params are basic form
@@ -442,7 +490,7 @@ def __init__(self, config_file, tool_source, app, guid=None, repository_id=None,
self.tool_errors = None
# Parse XML element containing configuration
try:
- self.parse(tool_source, guid=guid)
+ self.parse(tool_source, guid=guid, dynamic=dynamic)
except Exception as e:
global_tool_errors.add_error(config_file, "Tool Loading", e)
raise e
@@ -589,7 +637,7 @@ def allow_user_access(self, user, attempting_access=True):
return False
return True
- def parse(self, tool_source, guid=None):
+ def parse(self, tool_source, guid=None, dynamic=False):
"""
Read tool configuration from the element `root` and fill in `self`.
"""
@@ -600,7 +648,8 @@ def parse(self, tool_source, guid=None):
self.id = self.old_id
else:
self.id = guid
- if not self.id:
+
+ if not dynamic and not self.id:
raise Exception("Missing tool 'id' for tool at '%s'" % tool_source)
profile = packaging.version.parse(str(self.profile))
@@ -611,7 +660,9 @@ def parse(self, tool_source, guid=None):
# Get the (user visible) name of the tool
self.name = tool_source.parse_name()
- if not self.name:
+ if not self.name and dynamic:
+ self.name = self.id
+ if not dynamic and not self.name:
raise Exception("Missing tool 'name' for tool with id '%s' at '%s'" % (self.id, tool_source))
self.version = tool_source.parse_version()
@@ -695,10 +746,13 @@ def parse(self, tool_source, guid=None):
# a 'default' will be provided that uses the 'default' handler and
# 'default' destination. I thought about moving this to the
# job_config, but it makes more sense to store here. -nate
- self_ids = [self.id.lower()]
- if self.old_id != self.id:
- # Handle toolshed guids
- self_ids = [self.id.lower(), self.id.lower().rsplit('/', 1)[0], self.old_id.lower()]
+ if self.id:
+ self_ids = [self.id.lower()]
+ if self.old_id != self.id:
+ # Handle toolshed guids
+ self_ids = [self.id.lower(), self.id.lower().rsplit('/', 1)[0], self.old_id.lower()]
+ else:
+ self_ids = []
self.all_ids = self_ids
# In the toolshed context, there is no job config.
@@ -745,6 +799,10 @@ def parse(self, tool_source, guid=None):
module, cls = action
mod = __import__(module, globals(), locals(), [cls])
self.tool_action = getattr(mod, cls)()
+ if getattr(self.tool_action, "requires_js_runtime", False):
+ if expressions.find_engine(self.app.config) is None:
+ message = REQUIRES_JS_RUNTIME_MESSAGE % self.tool_id
+ raise Exception(message)
# Tests
self.__parse_tests(tool_source)
@@ -761,6 +819,8 @@ def parse(self, tool_source, guid=None):
# Record macro paths so we can reload a tool if any of its macro has changes
self._macro_paths = tool_source.macro_paths()
+ self.cores_min = tool_source.parse_cores_min()
+
def __parse_legacy_features(self, tool_source):
self.code_namespace = dict()
self.hook_map = {}
@@ -1285,6 +1345,18 @@ def check_workflow_compatible(self, tool_source):
# outputs?
return True
+ def inputs_from_dict(self, as_dict):
+ """Extra inputs from input dictionary (e.g. API payload).
+
+ Translate for tool type as needed.
+ """
+ inputs = as_dict.get('inputs', {})
+ inputs_representation = as_dict.get('inputs_representation', 'galaxy')
+ if inputs_representation != "galaxy":
+ raise exceptions.RequestParameterInvalidException("Only galaxy inputs representation is allowed for normal tools.")
+ # TODO: Consider <>.
+ return inputs
+
def new_state(self, trans):
"""
Create a new `DefaultToolState` for this tool. It will be initialized
@@ -1672,7 +1744,7 @@ def call_hook(self, hook_name, *args, **kwargs):
def exec_before_job(self, app, inp_data, out_data, param_dict={}):
pass
- def exec_after_process(self, app, inp_data, out_data, param_dict, job=None):
+ def exec_after_process(self, app, inp_data, out_data, param_dict, job=None, **kwds):
pass
def job_failed(self, job_wrapper, message, exception=False):
@@ -1816,7 +1888,8 @@ def to_dict(self, trans, link_details=False, io_details=False):
# If an admin user, expose the path to the actual tool config XML file.
if trans.user_is_admin:
- tool_dict['config_file'] = os.path.abspath(self.config_file)
+ config_file = None if not self.config_file else os.path.abspath(self.config_file)
+ tool_dict['config_file'] = config_file
# Add link details.
if link_details:
@@ -1839,7 +1912,7 @@ def to_dict(self, trans, link_details=False, io_details=False):
tool_dict['panel_section_id'], tool_dict['panel_section_name'] = self.get_panel_section()
tool_class = self.__class__
- regular_form = tool_class == Tool or isinstance(self, DatabaseOperationTool)
+ regular_form = tool_class == Tool or isinstance(self, DatabaseOperationTool) or tool_class == CwlTool or tool_class == GalacticCwlTool
tool_dict["form_style"] = "regular" if regular_form else "special"
return tool_dict
@@ -2160,6 +2233,71 @@ def exec_before_job(self, app, inp_data, out_data, param_dict=None):
out.close()
+class ExpressionTool(Tool):
+ tool_type = 'expression'
+ EXPRESSION_INPUTS_NAME = "_expression_inputs_.json"
+
+ def parse_command(self, tool_source):
+ self.command = expressions.EXPRESSION_SCRIPT_CALL
+ self.interpreter = None
+ self._expression = tool_source.parse_expression().strip()
+
+ def parse_outputs(self, tool_source):
+ # Setup self.outputs and self.output_collections
+ super(ExpressionTool, self).parse_outputs(tool_source)
+
+ # Validate these outputs for expression tools.
+ if len(self.output_collections) != 0:
+ message = "Expression tools may not declare output collections at this time."
+ raise Exception(message)
+ for output in self.outputs.values():
+ if not hasattr(output, "from_expression"):
+ message = "Expression tools may not declare output datasets at this time."
+ raise Exception(message)
+
+ def exec_before_job(self, app, inp_data, out_data, param_dict=None):
+ super(ExpressionTool, self).exec_before_job(app, inp_data, out_data, param_dict=param_dict)
+ local_working_directory = param_dict["__local_working_directory__"]
+ expression_inputs_path = os.path.join(local_working_directory, 'working', ExpressionTool.EXPRESSION_INPUTS_NAME)
+
+ outputs = []
+ for i, (out_name, data) in enumerate(out_data.iteritems()):
+ output_def = self.outputs[out_name]
+ wrapped_data = param_dict.get(out_name)
+ file_name = str(wrapped_data)
+
+ outputs.append(dict(
+ name=out_name,
+ from_expression=output_def.from_expression,
+ path=file_name,
+ ))
+
+ if param_dict is None:
+ raise Exception("Internal error - param_dict is empty.")
+
+ job = {}
+ json_wrap(self.inputs, param_dict, job, handle_files='OBJECT')
+ expression_inputs = {
+ 'job': job,
+ 'script': self._expression,
+ 'outputs': outputs,
+ }
+ expressions.write_evalute_script(os.path.join(local_working_directory, 'working'))
+ with open(expression_inputs_path, "w") as f:
+ json.dump(expression_inputs, f)
+
+ def parse_environment_variables(self, tool_source):
+ """ Setup environment variable for inputs file.
+ """
+ environmnt_variables_raw = super(ExpressionTool, self).parse_environment_variables(tool_source)
+ expression_script_inputs = dict(
+ name="GALAXY_EXPRESSION_INPUTS",
+ template=ExpressionTool.EXPRESSION_INPUTS_NAME,
+ )
+ environmnt_variables_raw.append(expression_script_inputs)
+ return environmnt_variables_raw
+
+
class DataSourceTool(OutputParameterJSONTool):
"""
Alternate implementation of Tool for data_source tools -- those that
@@ -2244,7 +2382,7 @@ class SetMetadataTool(Tool):
tool_type = 'set_metadata'
requires_setting_metadata = False
- def exec_after_process(self, app, inp_data, out_data, param_dict, job=None):
+ def exec_after_process(self, app, inp_data, out_data, param_dict, job=None, **kwds):
for name, dataset in inp_data.items():
external_metadata = JobExternalOutputMetadataWrapper(job)
if external_metadata.external_metadata_set_successfully(dataset, app.model.context):
@@ -2285,6 +2423,157 @@ class ImportHistoryTool(Tool):
tool_type = 'import_history'
+class CwlCommandBindingTool(Tool):
+ """Tools that use CWL to bind parameters to command-line descriptions."""
+
+ def exec_before_job(self, app, inp_data, out_data, param_dict=None):
+ super(CwlCommandBindingTool, self).exec_before_job(app, inp_data, out_data, param_dict=param_dict)
+ # Working directory on Galaxy server (instead of remote compute).
+ local_working_directory = param_dict["__local_working_directory__"]
+ log.info("exec_before_job for CWL tool")
+ if param_dict is None:
+ raise Exception("Internal error - param_dict is empty.")
+
+ input_json = self.param_dict_to_cwl_inputs(param_dict, local_working_directory)
+
+ output_dict = {}
+ for name, dataset in out_data.items():
+ output_dict[name] = {
+ "id": dataset.dataset.id,
+ "path": dataset.file_name,
+ }
+
+        # prevent an unset optional file from triggering a 'ValidationException'
+ input_json = {k:v for k, v in input_json.iteritems() if not (isinstance(v, dict) and v.get('class') == 'File' and v.get('location') == 'None')}
+
+ # prevent empty string
+ input_json = {k:v for k, v in input_json.iteritems() if v != ''}
+
+ # handle 'Directory' type (uncompress tar file)
+ for k, v in input_json.iteritems():
+ if isinstance(v, dict) and v['class'] == 'Directory':
+ if 'archive_nameext' in v and v['archive_nameext'] == '.tar':
+
+ tar_file_location = v['archive_location']
+ directory_name = v['name']
+
+ assert os.path.exists(tar_file_location), tar_file_location
+
+ tmp_dir = os.path.join(local_working_directory, 'direx', str(uuid.uuid4())) # direx for "DIR EXtract"
+ directory_location = os.path.join(tmp_dir, directory_name)
+
+ os.makedirs(tmp_dir)
+
+ assert os.path.exists(tmp_dir), tmp_dir
+
+ bkp_cwd = os.getcwd(); os.chdir(tmp_dir)
+ tar = tarfile.open(tar_file_location); tar.extractall(); tar.close()
+ os.chdir(bkp_cwd)
+
+ assert os.path.exists(directory_location), directory_location
+
+ v['location'] = directory_location
+ v['nameext'] = 'None'
+ v['nameroot'] = directory_name
+ v['basename'] = directory_name
+ #v['size'] =
+
+ cwl_job_proxy = self._cwl_tool_proxy.job_proxy(
+ input_json,
+ output_dict,
+ local_working_directory,
+ )
+ cwl_command_line = cwl_job_proxy.command_line
+ cwl_stdin = cwl_job_proxy.stdin
+ cwl_stdout = cwl_job_proxy.stdout
+ cwl_stderr = cwl_job_proxy.stderr
+ env = cwl_job_proxy.environment
+
+ def needs_shell_quoting_hack(arg):
+ if arg == "$GALAXY_SLOTS":
+ return False
+ else:
+ return needs_shell_quoting(arg)
+
+ command_line = " ".join([shellescape.quote(arg) if needs_shell_quoting_hack(arg) else arg for arg in cwl_command_line])
+ if cwl_stdin:
+ command_line += ' < "' + cwl_stdin + '"'
+ if cwl_stdout:
+ command_line += ' > "' + cwl_stdout + '"'
+ if cwl_stderr:
+ command_line += ' 2> "' + cwl_stderr + '"'
+ cwl_job_state = {
+ 'args': cwl_command_line,
+ 'stdin': cwl_stdin,
+ 'stdout': cwl_stdout,
+ 'stderr': cwl_stderr,
+ 'env': env,
+ }
+ tool_working_directory = os.path.join(local_working_directory, 'working')
+ # Move to prepare...
+ safe_makedirs(tool_working_directory)
+ cwl_job_proxy.stage_files()
+
+ cwl_job_proxy.rewrite_inputs_for_staging()
+ log.debug("REWRITTEN INPUTS_DICT %s" % cwl_job_proxy._input_dict)
+ # Write representation to disk that can be reloaded at runtime
+ # and outputs collected before Galaxy metadata is gathered.
+ cwl_job_proxy.save_job()
+
+ param_dict["__cwl_command"] = command_line
+ log.info("__cwl_command is %s" % command_line)
+ param_dict["__cwl_command_state"] = cwl_job_state
+ param_dict["__cwl_command_version"] = 1
+ log.info("CwlTool.exec_before_job() generated command_line %s" % command_line)
+
+ def parse(self, tool_source, **kwds):
+ super(CwlCommandBindingTool, self).parse(tool_source, **kwds)
+ cwl_tool_proxy = getattr(tool_source, 'tool_proxy', None)
+ if cwl_tool_proxy is None:
+ raise Exception("parse() called on tool source not defining a proxy object to underlying CWL tool.")
+ self._cwl_tool_proxy = cwl_tool_proxy
+
+ def param_dict_to_cwl_inputs(self, param_dict, local_working_directory):
+ """Map Galaxy API inputs description to a CWL job json."""
+ raise NotImplementedError()
+
+
+class GalacticCwlTool(CwlCommandBindingTool):
+ """A CWL tool with a gx:Interface defined so Galaxy tool state can be used."""
+ tool_type = 'galactic_cwl'
+
+ def param_dict_to_cwl_inputs(self, param_dict, local_working_directory):
+ from galaxy.tools.cwl.representation import galactic_flavored_to_cwl_job
+ input_json = galactic_flavored_to_cwl_job(self, param_dict, local_working_directory)
+ return input_json
+
+
+class CwlTool(CwlCommandBindingTool):
+ tool_type = 'cwl'
+ may_use_container_entry_point = True
+
+ def param_dict_to_cwl_inputs(self, param_dict, local_working_directory):
+ """Map Galaxy API inputs description to a CWL job json."""
+ from galaxy.tools.cwl import to_cwl_job
+ input_json = to_cwl_job(self, param_dict, local_working_directory)
+ return input_json
+
+ def inputs_from_dict(self, as_dict):
+        """Extract inputs from input dictionary (e.g. API payload).
+
+ Translate for tool type as needed.
+ """
+ inputs = as_dict.get('inputs', {})
+ inputs_representation = as_dict.get('inputs_representation', 'galaxy')
+ if inputs_representation not in ["galaxy", "cwl"]:
+ raise exceptions.RequestParameterInvalidException("Inputs representation must be galaxy or cwl.")
+
+ if inputs_representation == "cwl":
+ inputs = to_galaxy_parameters(self, inputs)
+
+ return inputs
+
+
class DataManagerTool(OutputParameterJSONTool):
tool_type = 'manage_data'
default_tool_action = DataManagerToolAction
@@ -2413,6 +2702,33 @@ def _outputs_dict(self):
return odict()
+class UsesExpressions:
+ requires_js_runtime = True
+
+ def _expression_environment(self, hda):
+        # TODO: use json_wrap HDA stuff for this...
+ raw_as_dict = hda.to_dict()
+ filtered_as_dict = {}
+ # We are more conservative with the API provided to tools
+ # than the API exposed via the web API, so cut down on what
+ # is supplied to the tool. Also, no reason to leak unneeded
+ # data prematurely regardless.
+ for key, value in raw_as_dict.iteritems():
+ include = False
+ if key.startswith("metadata_"):
+ include = True
+ elif key in FilterTool.exposed_hda_keys:
+ include = True
+ if include:
+ filtered_as_dict[key] = value
+ return filtered_as_dict
+
+ def _eval_expression(self, expression, environment_dict):
+ environment = expressions.jshead([], environment_dict)
+ result = expressions.execjs(self.app.config, expression, environment)
+ return result
+
+
class UnzipCollectionTool(DatabaseOperationTool):
tool_type = 'unzip_collection'
@@ -2649,6 +2965,28 @@ def element_is_valid(self, element):
return element.element_object.has_data()
+class FilterTool(DatabaseOperationTool, UsesExpressions):
+ exposed_hda_keys = ['file_size', 'file_ext', 'genome_build']
+ tool_type = 'filter_collection'
+
+ def produce_outputs(self, trans, out_data, output_collections, incoming, history):
+ hdca = incoming["input"]
+ expression = incoming["expression"]
+ assert hdca.collection.collection_type == "list"
+ new_elements = odict()
+ for dce in hdca.collection.elements:
+ element = dce.element_object
+ environment_dict = self._expression_environment(element)
+ result = self._eval_expression(expression, environment_dict)
+ if result:
+ element_identifier = dce.element_identifier
+ new_elements[element_identifier] = element.copy()
+
+ output_collections.create_collection(
+ self.outputs.values()[0], "output", elements=new_elements
+ )
+
+
class FlattenTool(DatabaseOperationTool):
tool_type = 'flatten_collection'
@@ -2896,13 +3234,42 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
)
+class GroupTool(DatabaseOperationTool, UsesExpressions):
+ tool_type = 'group_collection'
+
+ def produce_outputs(self, trans, out_data, output_collections, incoming, history):
+ hdca = incoming["input"]
+ expression = incoming["expression"]
+ new_elements = odict()
+ for dce in hdca.collection.elements:
+ element = dce.element_object
+ environment_dict = self._expression_environment(element)
+ result = str(self._eval_expression(expression, environment_dict))
+ if not result:
+ continue
+
+ if result not in new_elements:
+ result_elements = {}
+ result_elements["src"] = "new_collection"
+ result_elements["collection_type"] = "list"
+ result_elements["elements"] = odict()
+ new_elements[result] = result_elements
+
+ new_elements[result]["elements"][dce.element_identifier] = element.copy()
+
+ output_collections.create_collection(
+ self.outputs.values()[0], "output", elements=new_elements
+ )
+
+
# Populate tool_type to ToolClass mappings
tool_types = {}
-for tool_class in [Tool, SetMetadataTool, OutputParameterJSONTool,
+for tool_class in [Tool, SetMetadataTool, OutputParameterJSONTool, ExpressionTool,
DataManagerTool, DataSourceTool, AsyncDataSourceTool,
UnzipCollectionTool, ZipCollectionTool, MergeCollectionTool, RelabelFromFileTool, FilterFromFileTool,
BuildListCollectionTool, ExtractDatasetCollectionTool,
- DataDestinationTool]:
+ DataDestinationTool,
+ CwlTool, GalacticCwlTool]:
tool_types[tool_class.tool_type] = tool_class
diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py
index da9abcfd2749..64b288ad0499 100644
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -77,7 +77,9 @@ def record_permission(action, role_id):
def visitor(input, value, prefix, parent=None, **kwargs):
def process_dataset(data, formats=None):
- if not data or isinstance(data, RuntimeValue):
+ # default file coming from a workflow
+ is_workflow_default = isinstance(data, dict) and data.get("class") == "File"
+ if not data or isinstance(data, RuntimeValue) or is_workflow_default:
return None
if formats is None:
formats = input.formats
@@ -480,7 +482,10 @@ def handle_output(name, output, hidden=None):
assert not element_identifiers # known_outputs must have been empty
element_kwds = dict(elements=collections_manager.ELEMENTS_UNINITIALIZED)
else:
- element_kwds = dict(element_identifiers=element_identifiers)
+ element_kwds = dict(
+ element_identifiers=element_identifiers,
+ fields=output.structure.fields,
+ )
output_collections.create_collection(
output=output,
name=name,
@@ -670,6 +675,7 @@ def _new_job_for_session(self, trans, tool, history):
job.tool_version = tool.version
except AttributeError:
job.tool_version = "1.0.0"
+ job.tool_hash = tool.tool_hash
return job, galaxy_session
def _record_inputs(self, trans, tool, job, incoming, inp_data, inp_dataset_collections):
@@ -684,6 +690,9 @@ def _record_inputs(self, trans, tool, job, incoming, inp_data, inp_dataset_colle
reductions[name] = []
reductions[name].append(dataset_collection)
+ if getattr(dataset_collection, "ephemeral", False):
+ dataset_collection = dataset_collection.persistent_object
+
# TODO: verify can have multiple with same name, don't want to lose traceability
if isinstance(dataset_collection, model.HistoryDatasetCollectionAssociation):
# FIXME: when recording inputs for special tools (e.g. ModelOperationToolAction),
diff --git a/lib/galaxy/tools/actions/metadata.py b/lib/galaxy/tools/actions/metadata.py
index e60cd9fd5f88..49d9860b5e4c 100644
--- a/lib/galaxy/tools/actions/metadata.py
+++ b/lib/galaxy/tools/actions/metadata.py
@@ -62,6 +62,7 @@ def execute_via_app(self, tool, app, session_id, history_id, user=None,
job.tool_version = tool.version
except AttributeError:
job.tool_version = "1.0.1"
+ job.tool_hash = tool.tool_hash
job.state = job.states.WAITING # we need to set job state to something other than NEW, or else when tracking jobs in db it will be picked up before we have added input / output parameters
job.set_handler(tool.get_job_handler(job_params))
sa_session.add(job)
diff --git a/lib/galaxy/tools/actions/upload_common.py b/lib/galaxy/tools/actions/upload_common.py
index 5becfe0a775d..bd1d2e41c44e 100644
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -398,6 +398,7 @@ def create_job(trans, params, tool, json_file_path, outputs, folder=None, histor
job.history_id = history.id
job.tool_id = tool.id
job.tool_version = tool.version
+ job.tool_hash = tool.tool_hash
job.set_state(job.states.UPLOAD)
trans.sa_session.add(job)
trans.sa_session.flush()
diff --git a/lib/galaxy/tools/cwl/cwltool_deps.py b/lib/galaxy/tools/cwl/cwltool_deps.py
index 345e4c3080da..fe540e732361 100644
--- a/lib/galaxy/tools/cwl/cwltool_deps.py
+++ b/lib/galaxy/tools/cwl/cwltool_deps.py
@@ -29,8 +29,10 @@
try:
from cwltool.context import LoadingContext # Introduced in cwltool 1.0.20180615183820
+ from cwltool.context import RuntimeContext
except (ImportError, SyntaxError):
LoadingContext = None
+ RuntimeContext = None
try:
from cwltool import load_tool
@@ -56,6 +58,7 @@
needs_shell_quoting = re.compile(r"""(^$|[\s|&;()<>\'"$@])""").search
+beta_relaxed_fmt_check = True  # if set to true, file format checking is not performed.
def ensure_cwltool_available():
"""Assert optional dependencies proxied via this module are available at runtime.
diff --git a/lib/galaxy/tools/cwl/parser.py b/lib/galaxy/tools/cwl/parser.py
index 9d7de0d0bf4b..8b58e02e228c 100644
--- a/lib/galaxy/tools/cwl/parser.py
+++ b/lib/galaxy/tools/cwl/parser.py
@@ -19,9 +19,11 @@
from galaxy.util.bunch import Bunch
from galaxy.util.odict import odict
from .cwltool_deps import (
+ beta_relaxed_fmt_check,
ensure_cwltool_available,
pathmapper,
process,
+ RuntimeContext,
)
from .representation import (
field_to_field_type,
@@ -32,7 +34,7 @@
USE_STEP_PARAMETERS,
)
from .schema import non_strict_schema_loader, schema_loader
-from .util import SECONDARY_FILES_EXTRA_PREFIX
+from .util import guess_artifact_type, SECONDARY_FILES_EXTRA_PREFIX
log = logging.getLogger(__name__)
@@ -45,8 +47,10 @@
"EnvVarRequirement",
"InitialWorkDirRequirement",
"InlineJavascriptRequirement",
+ "ResourceRequirement",
"ShellCommandRequirement",
"ScatterFeatureRequirement",
+ "SchemaDefRequirement",
"SubworkflowFeatureRequirement",
"StepInputExpressionRequirement",
"MultipleInputFeatureRequirement",
@@ -56,8 +60,10 @@
SUPPORTED_WORKFLOW_REQUIREMENTS = SUPPORTED_TOOL_REQUIREMENTS + [
]
+PERSISTED_REPRESENTATION = "cwl_tool_object"
-def tool_proxy(tool_path=None, tool_object=None, strict_cwl_validation=True):
+
+def tool_proxy(tool_path=None, tool_object=None, strict_cwl_validation=True, tool_directory=None):
""" Provide a proxy object to cwltool data structures to just
grab relevant data.
"""
@@ -65,14 +71,19 @@ def tool_proxy(tool_path=None, tool_object=None, strict_cwl_validation=True):
tool = to_cwl_tool_object(
tool_path=tool_path,
tool_object=tool_object,
- strict_cwl_validation=strict_cwl_validation
+ strict_cwl_validation=strict_cwl_validation,
+ tool_directory=tool_directory,
)
return tool
-def tool_proxy_from_persistent_representation(persisted_tool, strict_cwl_validation=True):
+def tool_proxy_from_persistent_representation(persisted_tool, strict_cwl_validation=True, tool_directory=None):
ensure_cwltool_available()
- tool = to_cwl_tool_object(persisted_tool=persisted_tool, strict_cwl_validation=strict_cwl_validation)
+ if PERSISTED_REPRESENTATION == "cwl_tool_object":
+ kwds = {"cwl_tool_object": ToolProxy.from_persistent_representation(persisted_tool)}
+ else:
+ kwds = {"raw_process_reference": ToolProxy.from_persistent_representation(raw_process_reference)}
+ tool = to_cwl_tool_object(strict_cwl_validation=strict_cwl_validation, tool_directory=tool_directory, **kwds)
return tool
@@ -94,18 +105,25 @@ def load_job_proxy(job_directory, strict_cwl_validation=True):
cwl_tool = tool_proxy(tool_path, strict_cwl_validation=strict_cwl_validation)
else:
persisted_tool = job_objects["tool_representation"]
- cwl_tool = tool_proxy_from_persistent_representation(persisted_tool, strict_cwl_validation=strict_cwl_validation)
+ cwl_tool = tool_proxy_from_persistent_representation(persisted_tool=persisted_tool, strict_cwl_validation=strict_cwl_validation)
cwl_job = cwl_tool.job_proxy(job_inputs, output_dict, job_directory=job_directory)
return cwl_job
-def to_cwl_tool_object(tool_path=None, tool_object=None, persisted_tool=None, strict_cwl_validation=True):
+def to_cwl_tool_object(tool_path=None, tool_object=None, cwl_tool_object=None, raw_process_reference=None, strict_cwl_validation=True, tool_directory=None):
schema_loader = _schema_loader(strict_cwl_validation)
- if tool_path is not None:
+ if raw_process_reference is None and tool_path is not None:
+ assert cwl_tool_object is None
+ assert tool_object is None
+
+ raw_process_reference = schema_loader.raw_process_reference(tool_path)
cwl_tool = schema_loader.tool(
- path=tool_path
+ raw_process_reference=raw_process_reference,
)
elif tool_object is not None:
+ assert raw_process_reference is None
+ assert cwl_tool_object is None
+
# Allow loading tools from YAML...
from ruamel import yaml as ryaml
import json
@@ -113,9 +131,12 @@ def to_cwl_tool_object(tool_path=None, tool_object=None, persisted_tool=None, st
tool_object = ryaml.round_trip_load(as_str)
from schema_salad import sourceline
from schema_salad.ref_resolver import file_uri
- uri = file_uri(os.getcwd()) + "/"
+ path = tool_directory
+ if path is None:
+ path = os.getcwd()
+ uri = file_uri(path) + "/"
sourceline.add_lc_filename(tool_object, uri)
- tool_object, _ = schema_loader.raw_document_loader.resolve_all(tool_object, uri)
+ # tool_object, _ = schema_loader.raw_document_loader.resolve_all(tool_object, uri, checklinks=False)
raw_process_reference = schema_loader.raw_process_reference_for_object(
tool_object,
uri=uri
@@ -124,7 +145,7 @@ def to_cwl_tool_object(tool_path=None, tool_object=None, persisted_tool=None, st
raw_process_reference=raw_process_reference,
)
else:
- cwl_tool = ToolProxy.from_persistent_representation(persisted_tool)
+ cwl_tool = cwl_tool_object
if isinstance(cwl_tool, int):
raise Exception("Failed to load tool.")
@@ -134,10 +155,10 @@ def to_cwl_tool_object(tool_path=None, tool_object=None, persisted_tool=None, st
# between Galaxy and cwltool.
_hack_cwl_requirements(cwl_tool)
check_requirements(raw_tool)
- return cwl_tool_object_to_proxy(cwl_tool, tool_path=tool_path)
+ return cwl_tool_object_to_proxy(cwl_tool, raw_process_reference=raw_process_reference, tool_path=tool_path)
-def cwl_tool_object_to_proxy(cwl_tool, tool_path=None):
+def cwl_tool_object_to_proxy(cwl_tool, raw_process_reference=None, tool_path=None):
raw_tool = cwl_tool.tool
if "class" not in raw_tool:
raise Exception("File does not declare a class, not a valid Draft 3+ CWL tool.")
@@ -153,7 +174,7 @@ def cwl_tool_object_to_proxy(cwl_tool, tool_path=None):
if top_level_object and ("cwlVersion" not in raw_tool):
raise Exception("File does not declare a CWL version, pre-draft 3 CWL tools are not supported.")
- proxy = proxy_class(cwl_tool, tool_path)
+ proxy = proxy_class(cwl_tool, raw_process_reference, tool_path)
return proxy
@@ -203,9 +224,15 @@ def check_requirements(rec, tool=True):
@six.add_metaclass(ABCMeta)
class ToolProxy(object):
- def __init__(self, tool, tool_path=None):
+ def __init__(self, tool, raw_process_reference=None, tool_path=None):
self._tool = tool
self._tool_path = tool_path
+ self._raw_process_reference = raw_process_reference
+ # remove input parameter formats from CWL files so that cwltool
+ # does not complain they are missing in the input data
+ for input_field in self._tool.inputs_record_schema["fields"]:
+ if 'format' in input_field:
+ del input_field['format']
def job_proxy(self, input_dict, output_dict, job_directory="."):
""" Build a cwltool.job.Job describing computation using a input_json
@@ -223,8 +250,9 @@ def galaxy_id(self):
raw_id = self.id
tool_id = None
# don't reduce "search.cwl#index" to search
- if raw_id and "#" not in raw_id:
- tool_id = os.path.splitext(os.path.basename(raw_id))[0]
+ if raw_id:
+ tool_id = os.path.basename(raw_id)
+ # tool_id = os.path.splitext(os.path.basename(raw_id))[0]
if not tool_id:
from galaxy.tools.hash import build_tool_hash
tool_id = build_tool_hash(self.to_persistent_representation())
@@ -256,9 +284,15 @@ def to_persistent_representation(self):
over the wire, but serialization in a database."""
# TODO: Replace this with some more readable serialization,
# I really don't like using pickle here.
+ # print("with removed...")
+ # print(remove_pickle_problems(self._tool).tool)
+ if PERSISTED_REPRESENTATION == "cwl_tool_object":
+ persisted_obj = remove_pickle_problems(self._tool)
+ else:
+ persisted_obj = self._raw_process_reference
return {
"class": self._class,
- "pickle": base64.b64encode(pickle.dumps(remove_pickle_problems(self._tool), -1)),
+ "pickle": base64.b64encode(pickle.dumps(persisted_obj, pickle.HIGHEST_PROTOCOL)),
}
@staticmethod
@@ -268,38 +302,59 @@ def from_persistent_representation(as_object):
raise Exception("Failed to deserialize tool proxy from JSON object - no class found.")
if "pickle" not in as_object:
raise Exception("Failed to deserialize tool proxy from JSON object - no pickle representation found.")
- return pickle.loads(base64.b64decode(as_object["pickle"]))
+ to_unpickle = base64.b64decode(as_object["pickle"])
+ return pickle.loads(to_unpickle)
class CommandLineToolProxy(ToolProxy):
_class = "CommandLineTool"
def description(self):
- return self._tool.tool.get('doc')
+ return ''
+ #return self._tool.tool.get('doc')
def label(self):
- return self._tool.tool.get('label')
+ label = self._tool.tool.get('label')
- def input_fields(self):
- input_records_schema = self._tool.inputs_record_schema
- schema_type = input_records_schema["type"]
- if schema_type in self._tool.schemaDefs:
- input_records_schema = self._tool.schemaDefs[schema_type]
+ if label is not None:
+ return label.partition(":")[0] # return substring before ':'
+ else:
+ return ''
+ def input_fields(self):
+ input_records_schema = self._eval_schema(self._tool.inputs_record_schema)
if input_records_schema["type"] != "record":
raise Exception("Unhandled CWL tool input structure")
- return input_records_schema["fields"]
+ # TODO: handle this somewhere else?
+ # schemadef_req_tool_param
+ rval = []
+ for input in input_records_schema["fields"]:
+ import copy
+ input_copy = copy.deepcopy(input)
+ input_type = input.get("type")
+ if isinstance(input_type, list) or isinstance(input_type, dict):
+ rval.append(input_copy)
+ continue
+
+ if input_type in self._tool.schemaDefs:
+ input_copy["type"] = self._tool.schemaDefs[input_type]
+
+ rval.append(input_copy)
+ return rval
+
+ def _eval_schema(self, io_schema):
+ schema_type = io_schema.get("type")
+ if schema_type in self._tool.schemaDefs:
+ io_schema = self._tool.schemaDefs[schema_type]
+ return io_schema
def input_instances(self):
return [_outer_field_to_input_instance(_) for _ in self.input_fields()]
def output_instances(self):
- outputs_schema = self._tool.outputs_record_schema
- schema_type = outputs_schema["type"]
- if schema_type in self._tool.schemaDefs:
- outputs_schema = self._tool.schemaDefs[schema_type]
-
+
+ outputs_schema = self._eval_schema(self._tool.outputs_record_schema)
if outputs_schema["type"] != "record":
raise Exception("Unhandled CWL tool output structure")
@@ -310,15 +365,20 @@ def output_instances(self):
return rval
def docker_identifier(self):
+ for hint in self.hints_or_requirements_of_class("DockerRequirement"):
+ if "dockerImageId" in hint:
+ return hint["dockerImageId"]
+ else:
+ return hint["dockerPull"]
+
+ return None
+
+ def hints_or_requirements_of_class(self, class_name):
tool = self._tool.tool
reqs_and_hints = tool.get("requirements", []) + tool.get("hints", [])
for hint in reqs_and_hints:
- if hint["class"] == "DockerRequirement":
- if "dockerImageId" in hint:
- return hint["dockerImageId"]
- else:
- return hint["dockerPull"]
- return None
+ if hint["class"] == class_name:
+ yield hint
def software_requirements(self):
# Roughest imaginable pass at parsing requirements, really need to take in specs, handle
@@ -371,16 +431,25 @@ def is_command_line_job(self):
def _ensure_cwl_job_initialized(self):
if self._cwl_job is None:
-
- self._cwl_job = next(self._tool_proxy._tool.job(
- self._input_dict,
- self._output_callback,
+ job_args = dict(
basedir=self._job_directory,
select_resources=self._select_resources,
outdir=os.path.join(self._job_directory, "working"),
tmpdir=os.path.join(self._job_directory, "cwltmp"),
stagedir=os.path.join(self._job_directory, "cwlstagedir"),
use_container=False,
+ beta_relaxed_fmt_check=beta_relaxed_fmt_check,
+ )
+ args = []
+ kwargs = {}
+ if RuntimeContext is not None:
+ args.append(RuntimeContext(job_args))
+ else:
+ kwargs = job_args
+ self._cwl_job = next(self._tool_proxy._tool.job(
+ self._input_dict,
+ self._output_callback,
+ *args, **kwargs
))
self._is_command_line_job = hasattr(self._cwl_job, "command_line")
@@ -437,7 +506,7 @@ def stage_recursive(value):
log.info("skipping simple value...")
stage_recursive(self._input_dict)
- def _select_resources(self, request):
+ def _select_resources(self, request, runtime_context=None):
new_request = request.copy()
new_request["cores"] = "$GALAXY_SLOTS"
return new_request
@@ -463,6 +532,13 @@ def stdout(self):
else:
return None
+ @property
+ def stderr(self):
+ if self.is_command_line_job:
+ return self.cwl_job().stderr
+ else:
+ return None
+
@property
def environment(self):
if self.is_command_line_job:
@@ -489,8 +565,12 @@ def _output_callback(self, out, process_status):
def collect_outputs(self, tool_working_directory):
if not self.is_command_line_job:
cwl_job = self.cwl_job()
- cwl_job.run(
- )
+ if RuntimeContext is not None:
+ cwl_job.run(
+ RuntimeContext({})
+ )
+ else:
+ cwl_job.run()
if not self._ok:
raise Exception("Final process state not ok, [%s]" % self._process_status)
return self._final_output
@@ -570,6 +650,27 @@ def __init__(self, workflow, workflow_path=None):
def cwl_id(self):
return self._workflow.tool["id"]
+ def get_outputs_for_label(self, label):
+ outputs = []
+ for output in self._workflow.tool['outputs']:
+ step, output_name = split_step_references(
+ output["outputSource"],
+ multiple=False,
+ workflow_id=self.cwl_id,
+ )
+ if step == label:
+ output_id = output["id"]
+ if "#" not in self.cwl_id:
+ _, output_label = output_id.rsplit("#", 1)
+ else:
+ _, output_label = output_id.rsplit("/", 1)
+
+ outputs.append({
+ "output_name": output_name,
+ "label": output_label,
+ })
+ return outputs
+
def tool_references(self):
"""Fetch tool source definitions for all referenced tools."""
references = []
@@ -660,6 +761,7 @@ def to_dict(self):
for i, step_proxy in enumerate(step_proxies):
input_connections = input_connections_by_step[i]
steps[index] = step_proxy.to_dict(input_connections)
+ print("Adding label %s" % steps[index]["label"])
index += 1
return {
@@ -683,12 +785,14 @@ def jsonld_id_to_label(self, id):
def cwl_input_to_galaxy_step(self, input, i):
input_type = input["type"]
+ label = self.jsonld_id_to_label(input["id"])
input_as_dict = {
"id": i,
- "label": self.jsonld_id_to_label(input["id"]),
+ "label": label,
"position": {"left": 0, "top": 0},
"annotation": self.cwl_object_to_annotation(input),
"input_connections": {}, # Should the Galaxy API really require this? - Seems to.
+ "workflow_outputs": self.get_outputs_for_label(label),
}
if input_type == "File" and "default" not in input:
@@ -806,25 +910,7 @@ def label(self):
return label
def galaxy_workflow_outputs_list(self):
- outputs = []
- for output in self._workflow_proxy._workflow.tool['outputs']:
- step, output_name = split_step_references(
- output["outputSource"],
- multiple=False,
- workflow_id=self._workflow_proxy.cwl_id,
- )
- if step == self.label:
- output_id = output["id"]
- if "#" not in self._workflow_proxy.cwl_id:
- _, output_label = output_id.rsplit("#", 1)
- else:
- _, output_label = output_id.rsplit("/", 1)
-
- outputs.append({
- "output_name": output_name,
- "label": output_label,
- })
- return outputs
+ return self._workflow_proxy.get_outputs_for_label(self.label)
@property
def cwl_tool_object(self):
@@ -1180,6 +1266,38 @@ def __init__(self, name, output_data_type, output_type, path=None, fields=None):
self.fields = fields
+def get_outputs(path):
+ tool_or_workflow = guess_artifact_type(path)
+ if tool_or_workflow == "tool":
+ from galaxy.tools.parser import get_tool_source
+ tool_source = get_tool_source(path)
+ output_datasets, _ = tool_source.parse_outputs(None)
+ outputs = [ToolOutput(o) for o in output_datasets.values()]
+ return outputs
+ else:
+ workflow = workflow_proxy(path, strict_cwl_validation=False)
+ return [CwlWorkflowOutput(label) for label in workflow.output_labels]
+
+
+# Lighter-weight variant of Planemo runnable outputs.
+class CwlWorkflowOutput(object):
+
+ def __init__(self, label):
+ self._label = label
+
+ def get_id(self):
+ return self._label
+
+
+class ToolOutput(object):
+
+ def __init__(self, tool_output):
+ self._tool_output = tool_output
+
+ def get_id(self):
+ return self._tool_output.name
+
+
__all__ = (
'tool_proxy',
'load_job_proxy',
diff --git a/lib/galaxy/tools/cwl/representation.py b/lib/galaxy/tools/cwl/representation.py
index 304dcbb06838..175b14819a78 100644
--- a/lib/galaxy/tools/cwl/representation.py
+++ b/lib/galaxy/tools/cwl/representation.py
@@ -13,6 +13,9 @@
from galaxy.util.bunch import Bunch
from .util import set_basename_and_derived_properties
+from galaxy.util.none_like import NoneDataset
+from galaxy.util.object_wrapper import SafeStringWrapper
+
log = logging.getLogger(__name__)
NOT_PRESENT = object()
@@ -117,7 +120,8 @@ def type_descriptions_for_field_types(field_types):
type_representation_names_for_field_type = CWL_TYPE_TO_REPRESENTATIONS.get(field_type)
except TypeError:
raise Exception("Failed to convert field_type %s" % field_type)
- assert type_representation_names_for_field_type is not None, field_type
+ if type_representation_names_for_field_type is None:
+ raise Exception("Failed to convert type %s" % field_type)
type_representation_names.update(type_representation_names_for_field_type)
type_representations = []
for type_representation in TYPE_REPRESENTATIONS:
@@ -157,7 +161,10 @@ def dataset_wrapper_to_file_json(inputs_dir, dataset_wrapper):
path = new_input_path
raw_file_object["location"] = path
- raw_file_object["size"] = int(dataset_wrapper.get_size())
+
+ if not isinstance(dataset_wrapper.unsanitized, NoneDataset):
+ raw_file_object["size"] = int(dataset_wrapper.get_size())
+
set_basename_and_derived_properties(raw_file_object, str(dataset_wrapper.cwl_filename or dataset_wrapper.name))
return raw_file_object
@@ -165,8 +172,30 @@ def dataset_wrapper_to_file_json(inputs_dir, dataset_wrapper):
def dataset_wrapper_to_directory_json(inputs_dir, dataset_wrapper):
assert dataset_wrapper.ext == "directory"
- return {"location": dataset_wrapper.extra_files_path,
- "class": "Directory"}
+ # get directory name
+ archive_name = str(dataset_wrapper.cwl_filename or dataset_wrapper.name)
+ nameroot, nameext = os.path.splitext(archive_name)
+ directory_name = nameroot # assume archive file name contains the directory name
+
+ # get archive location
+ #
+ # note
+ # when user uploads a tar file with 'directory' type,
+ # tar file location ends up in dataset_wrapper.unsanitized.file_name
+ #
+ try:
+ archive_location = dataset_wrapper.unsanitized.file_name
+ except:
+ archive_location = None
+
+ directory_json = {"location": dataset_wrapper.extra_files_path,
+ "class": "Directory",
+ "name": directory_name,
+ "archive_location": archive_location,
+ "archive_nameext": nameext,
+ "archive_nameroot": nameroot}
+
+ return directory_json
def collection_wrapper_to_array(inputs_dir, wrapped_value):
@@ -183,6 +212,106 @@ def collection_wrapper_to_record(inputs_dir, wrapped_value):
return rval
+def galactic_flavored_to_cwl_job(tool, param_dict, local_working_directory):
+
+ def simple_value(input, param_dict_value, type_representation_name=None):
+ type_representation = type_representation_from_name(type_representation_name)
+ # NOTE: type_representation is not always the true CWL type —
+ # e.g. "json" is a Galaxy-side representation, not a CWL type.
+
+ if type_representation.galaxy_param_type == NO_GALAXY_INPUT:
+ assert param_dict_value is None
+ return None
+
+ if type_representation.name == "file":
+ dataset_wrapper = param_dict_value
+ return dataset_wrapper_to_file_json(inputs_dir, dataset_wrapper)
+ elif type_representation.name == "directory":
+ dataset_wrapper = param_dict_value
+ return dataset_wrapper_to_directory_json(inputs_dir, dataset_wrapper)
+ elif type_representation.name == "integer":
+ return int(str(param_dict_value))
+ elif type_representation.name == "long":
+ return int(str(param_dict_value))
+ elif type_representation.name in ["float", "double"]:
+ return float(str(param_dict_value))
+ elif type_representation.name == "boolean":
+ return string_as_bool(param_dict_value)
+ elif type_representation.name == "text":
+ return str(param_dict_value)
+ elif type_representation.name == "enum":
+ return str(param_dict_value)
+ elif type_representation.name == "json":
+ raw_value = param_dict_value.value
+ return json.loads(raw_value)
+ elif type_representation.name == "field":
+ if param_dict_value is None:
+ return None
+ if hasattr(param_dict_value, "value"):
+ # Is InputValueWrapper
+ return param_dict_value.value
+ elif not param_dict_value.is_collection:
+ # Is DatasetFilenameWrapper
+ return dataset_wrapper_to_file_json(inputs_dir, param_dict_value)
+ else:
+ # Is DatasetCollectionWrapper
+ hdca_wrapper = param_dict_value
+ if hdca_wrapper.collection_type == "list":
+ # TODO: generalize to lists of lists and lists of non-files...
+ return collection_wrapper_to_array(inputs_dir, hdca_wrapper)
+ elif hdca_wrapper.collection_type.collection_type == "record":
+ return collection_wrapper_to_record(inputs_dir, hdca_wrapper)
+
+ elif type_representation.name == "array":
+ # TODO: generalize to lists of lists and lists of non-files...
+ return collection_wrapper_to_array(inputs_dir, param_dict_value)
+ elif type_representation.name == "record":
+ return collection_wrapper_to_record(inputs_dir, param_dict_value)
+ else:
+ return str(param_dict_value)
+
+
+ inputs_dir = os.path.join(local_working_directory, "_inputs")
+
+ inputs = {}
+
+ # TODO: walk tree
+ for input_name, input_param in tool.inputs.items():
+ if input_param.type == "data":
+ # Probably need to be passing in the wrappers and using them - this seems to be
+ # an HDA.
+ map_to = input_param.map_to
+ inputs_at_depth = inputs
+ if map_to:
+
+ while "/" in map_to:
+ first, map_to = map_to.split("/", 1)
+ if first not in inputs_at_depth:
+ inputs_at_depth[first] = {}
+ inputs_at_depth = inputs_at_depth[first]
+ else:
+ map_to = input_param.name
+ inputs_at_depth[map_to] = dataset_wrapper_to_file_json(inputs_dir, param_dict[input_name])
+ else:
+ matched_field = None
+ for field in tool._cwl_tool_proxy.input_fields():
+ if field["name"] == input_name: # CWL <=> Galaxy
+ matched_field = field
+ field_type = field_to_field_type(matched_field)
+ if isinstance(field_type, list):
+ assert USE_FIELD_TYPES
+ type_descriptions = [FIELD_TYPE_REPRESENTATION]
+ else:
+ type_descriptions = type_descriptions_for_field_types([field_type])
+ assert len(type_descriptions) == 1
+ type_description_name = type_descriptions[0].name
+
+ inputs[input_name] = simple_value(input_param, param_dict[input_name], type_description_name)
+
+ log.info("job inputs is %s" % inputs)
+ return inputs
+
+
def to_cwl_job(tool, param_dict, local_working_directory):
""" tool is Galaxy's representation of the tool and param_dict is the
parameter dictionary with wrapped values.
@@ -229,7 +358,11 @@ def simple_value(input, param_dict_value, type_representation_name=None):
return None
if hasattr(param_dict_value, "value"):
# Is InputValueWrapper
- return param_dict_value.value["value"]
+ rval = param_dict_value.value
+ if isinstance(rval, dict) and "src" in rval and rval["src"] == "json":
+ # needed for wf_step_connect_undeclared_param, so non-file defaults?
+ return rval["value"]
+ return rval
elif not param_dict_value.is_collection:
# Is DatasetFilenameWrapper
return dataset_wrapper_to_file_json(inputs_dir, param_dict_value)
diff --git a/lib/galaxy/tools/cwl/runtime_actions.py b/lib/galaxy/tools/cwl/runtime_actions.py
index 0618e9ea2a47..5b2060ff6b81 100644
--- a/lib/galaxy/tools/cwl/runtime_actions.py
+++ b/lib/galaxy/tools/cwl/runtime_actions.py
@@ -47,12 +47,57 @@ def _possible_uri_to_path(location):
def file_dict_to_description(file_dict):
- assert file_dict["class"] == "File", file_dict
+ output_class = file_dict["class"]
+ assert output_class in ["File", "Directory"], file_dict
location = file_dict["location"]
if location.startswith("_:"):
+ assert output_class == "File"
return LiteralFileDescription(file_dict["contents"])
- else:
+ elif output_class == "File":
return PathFileDescription(_possible_uri_to_path(location))
+ else:
+ return PathDirectoryDescription(_possible_uri_to_path(location))
+
+
+class FileDescription(object):
+ pass
+
+
+class PathFileDescription(object):
+
+ def __init__(self, path):
+ self.path = path
+
+ def write_to(self, destination):
+ # TODO: Move if we can be sure this is in the working directory for instance...
+ shutil.copy(self.path, destination)
+
+
+class PathDirectoryDescription(object):
+
+ def __init__(self, path):
+ self.path = path
+
+ def write_to(self, destination):
+ shutil.copytree(self.path, destination)
+
+
+class LiteralFileDescription(object):
+
+ def __init__(self, content):
+ self.content = content
+
+ def write_to(self, destination):
+ with open(destination, "wb") as f:
+ f.write(self.content.encode("UTF-8"))
+
+
+def _possible_uri_to_path(location):
+ if location.startswith("file://"):
+ path = ref_resolver.uri_file_path(location)
+ else:
+ path = location
+ return path
def handle_outputs(job_directory=None):
@@ -157,14 +202,29 @@ def handle_known_output(output, output_key, output_name):
raise Exception("Unknown output type [%s] encountered" % output)
provided_metadata[output_name] = file_metadata
+ def handle_known_output_json(output, output_name):
+ target_path = job_proxy.output_path(output_name)
+ with open(target_path, "w") as f:
+ f.write(json.dumps(output))
+ provided_metadata[output_name] = {
+ "ext": "expression.json",
+ }
+
+ handled_outputs = []
for output_name, output in outputs.items():
+ handled_outputs.append(output_name)
if isinstance(output, dict) and "location" in output:
handle_known_output(output, output_name, output_name)
elif isinstance(output, dict):
prefix = "%s|__part__|" % output_name
for record_key, record_value in output.items():
record_value_output_key = "%s%s" % (prefix, record_key)
- handle_known_output(record_value, record_value_output_key, output_name)
+ if isinstance(record_value, dict) and "class" in record_value:
+ handle_known_output(record_value, record_value_output_key, output_name)
+ else:
+ # param_evaluation_noexpr
+ handle_known_output_json(output, output_name)
+
elif isinstance(output, list):
elements = []
for index, el in enumerate(output):
@@ -178,12 +238,12 @@ def handle_known_output(output, output_key, output_name):
elements.append({"name": str(index), "filename": target_path, "ext": "expression.json"})
provided_metadata[output_name] = {"elements": elements}
else:
- target_path = job_proxy.output_path(output_name)
- with open(target_path, "w") as f:
- f.write(json.dumps(output))
- provided_metadata[output_name] = {
- "ext": "expression.json",
- }
+ handle_known_output_json(output, output_name)
+
+ for output_instance in job_proxy._tool_proxy.output_instances():
+ output_name = output_instance.name
+ if output_name not in handled_outputs:
+ handle_known_output_json(None, output_name)
with open("galaxy.json", "w") as f:
json.dump(provided_metadata, f)
diff --git a/lib/galaxy/tools/cwl/schema.py b/lib/galaxy/tools/cwl/schema.py
index f7be6580a3fb..25253eaa3a6d 100644
--- a/lib/galaxy/tools/cwl/schema.py
+++ b/lib/galaxy/tools/cwl/schema.py
@@ -20,18 +20,18 @@ class SchemaLoader(object):
def __init__(self, strict=True):
self._strict = strict
- self._raw_document_loader = None
@property
def raw_document_loader(self):
ensure_cwltool_available()
- from cwltool.load_tool import jobloaderctx
- return schema_salad.ref_resolver.Loader(jobloaderctx)
+ from cwltool.load_tool import default_loader
+ return default_loader(None)
def raw_process_reference(self, path):
uri = "file://" + os.path.abspath(path)
fileuri, _ = urldefrag(uri)
- return RawProcessReference(self.raw_document_loader.fetch(fileuri), uri)
+ process_object = self.raw_document_loader.fetch(fileuri)
+ return RawProcessReference(process_object, uri)
def raw_process_reference_for_object(self, object, uri=None):
if uri is None:
@@ -43,6 +43,7 @@ def process_definition(self, raw_reference):
self.raw_document_loader,
raw_reference.process_object,
raw_reference.uri,
+ strict=False,
)
process_def = ProcessDefinition(
process_object,
@@ -67,7 +68,7 @@ def tool(self, **kwds):
raw_process_reference = self.raw_process_reference(kwds["path"])
process_definition = self.process_definition(raw_process_reference)
- args = {"strict": self._strict}
+ args = {"strict": self._strict, "do_validate": False}
make_tool = kwds.get("make_tool", default_make_tool)
if LoadingContext is not None:
args["construct_tool_object"] = make_tool
diff --git a/lib/galaxy/tools/cwl/util.py b/lib/galaxy/tools/cwl/util.py
index 50d26580d726..6fc6a5276ad5 100644
--- a/lib/galaxy/tools/cwl/util.py
+++ b/lib/galaxy/tools/cwl/util.py
@@ -7,6 +7,7 @@
import os
import tarfile
import tempfile
+import yaml
from collections import namedtuple
from six import (
@@ -69,6 +70,14 @@ def abs_path_or_uri(path_or_uri, relative_to):
return path_or_uri
+def abs_path(path_or_uri, relative_to):
+ path_or_uri = abs_path_or_uri(path_or_uri, relative_to)
+ if path_or_uri.startswith("file://"):
+ path_or_uri = path_or_uri[len("file://"):]
+
+ return path_or_uri
+
+
def path_or_uri_to_uri(path_or_uri):
if "://" not in path_or_uri:
return "file://%s" % path_or_uri
@@ -91,31 +100,36 @@ def galactic_job_json(
datasets = []
dataset_collections = []
- def upload_file(file_path, secondary_files, **kwargs):
- file_path = abs_path_or_uri(file_path, test_data_directory)
- target = FileUploadTarget(file_path, secondary_files, **kwargs)
- upload_response = upload_func(target)
+ def response_to_hda(target, upload_response):
+ assert isinstance(upload_response, dict), upload_response
+ assert "outputs" in upload_response, upload_response
+ assert len(upload_response["outputs"]) > 0, upload_response
dataset = upload_response["outputs"][0]
datasets.append((dataset, target))
dataset_id = dataset["id"]
return {"src": "hda", "id": dataset_id}
+ def upload_file(file_path, secondary_files, **kwargs):
+ file_path = abs_path_or_uri(file_path, test_data_directory)
+ target = FileUploadTarget(file_path, secondary_files, **kwargs)
+ upload_response = upload_func(target)
+ return response_to_hda(target, upload_response)
+
+ def upload_file_literal(contents):
+ target = FileLiteralTarget(contents)
+ upload_response = upload_func(target)
+ return response_to_hda(target, upload_response)
+
def upload_tar(file_path):
file_path = abs_path_or_uri(file_path, test_data_directory)
target = DirectoryUploadTarget(file_path)
upload_response = upload_func(target)
- dataset = upload_response["outputs"][0]
- datasets.append((dataset, target))
- dataset_id = dataset["id"]
- return {"src": "hda", "id": dataset_id}
+ return response_to_hda(target, upload_response)
def upload_object(the_object):
target = ObjectUploadTarget(the_object)
upload_response = upload_func(target)
- dataset = upload_response["outputs"][0]
- datasets.append((dataset, target))
- dataset_id = dataset["id"]
- return {"src": "hda", "id": dataset_id}
+ return response_to_hda(target, upload_response)
def replacement_item(value, force_to_file=False):
is_dict = isinstance(value, dict)
@@ -151,6 +165,10 @@ def replacement_item(value, force_to_file=False):
def replacement_file(value):
file_path = value.get("location", None) or value.get("path", None)
if file_path is None:
+ contents = value.get("contents", None)
+ if contents is not None:
+ return upload_file_literal(contents)
+
return value
filetype = value.get('filetype', None)
@@ -231,11 +249,11 @@ def replacement_collection(value):
def replacement_record(value):
collection_element_identifiers = []
for record_key, record_value in value.items():
- if record_value.get("class") != "File":
+ if not isinstance(record_value, dict) or record_value.get("class") != "File":
dataset = replacement_item(record_value, force_to_file=True)
collection_element = dataset.copy()
else:
- dataset = upload_file(record_value["location"])
+ dataset = upload_file(record_value["location"], [])
collection_element = dataset.copy()
collection_element["name"] = record_key
@@ -267,6 +285,16 @@ def _ensure_file_exists(file_path):
raise Exception(message)
+@python_2_unicode_compatible
+class FileLiteralTarget(object):
+
+ def __init__(self, contents, **kwargs):
+ self.contents = contents
+
+ def __str__(self):
+ return "FileLiteralTarget[path=%s] with %s" % (self.path, self.properties)
+
+
@python_2_unicode_compatible
class FileUploadTarget(object):
@@ -341,7 +369,7 @@ def element_to_cwl_json(element):
element["object"]["history_content_type"],
element["object"]["id"],
)
- return output_to_cwl_json(element_output, get_metadata, get_dataset, get_extra_files)
+ return output_to_cwl_json(element_output, get_metadata, get_dataset, get_extra_files, pseduo_location=pseduo_location)
output_metadata = get_metadata(galaxy_output.history_content_type, galaxy_output.history_content_id)
@@ -360,6 +388,8 @@ def dataset_dict_to_json_content(dataset_dict):
return dataset_dict_to_json_content(dataset_dict)
else:
file_or_directory = "Directory" if ext == "directory" else "File"
+ secondary_files = []
+
if file_or_directory == "File":
dataset_dict = get_dataset(output_metadata)
properties = output_properties(pseduo_location=pseduo_location, **dataset_dict)
@@ -375,21 +405,57 @@ def dataset_dict_to_json_content(dataset_dict):
if found_index:
ec = get_dataset(output_metadata, filename=SECONDARY_FILES_INDEX_PATH)
index = dataset_dict_to_json_content(ec)
+
+ def dir_listing(dir_path):
+ listing = []
+ for extra_file in extra_files:
+ path = extra_file["path"]
+ extra_file_class = extra_file["class"]
+ extra_file_basename = os.path.basename(path)
+ if os.path.join(dir_path, extra_file_basename) != path:
+ continue
+
+ if extra_file_class == "File":
+ ec = get_dataset(output_metadata, filename=path)
+ ec["basename"] = extra_file_basename
+ ec_properties = output_properties(pseduo_location=pseduo_location, **ec)
+ elif extra_file_class == "Directory":
+ ec_properties = {}
+ ec_properties["class"] = "Directory"
+ ec_properties["location"] = ec_basename
+ ec_properties["listing"] = dir_listing(path)
+ else:
+ raise Exception("Unknown output type encountered....")
+ listing.append(ec_properties)
+ return listing
+
for basename in index["order"]:
for extra_file in extra_files:
- if extra_file["class"] == "File":
- path = extra_file["path"]
- if path == os.path.join(SECONDARY_FILES_EXTRA_PREFIX, basename):
- ec = get_dataset(output_metadata, filename=path)
- if not STORE_SECONDARY_FILES_WITH_BASENAME:
- ec["basename"] = basename + os.path.basename(path)
- else:
- ec["basename"] = os.path.basename(path)
- ec_properties = output_properties(pseduo_location=pseduo_location, **ec)
- if "secondaryFiles" not in properties:
- properties["secondaryFiles"] = []
-
- properties["secondaryFiles"].append(ec_properties)
+ path = extra_file["path"]
+ if path != os.path.join(SECONDARY_FILES_EXTRA_PREFIX, basename):
+ continue
+
+ extra_file_class = extra_file["class"]
+
+ # FIXME: this basename-mangling scheme is suspect — revisit before relying on it
+ if not STORE_SECONDARY_FILES_WITH_BASENAME:
+ ec_basename = basename + os.path.basename(path)
+ else:
+ ec_basename = os.path.basename(path)
+
+ if extra_file_class == "File":
+ ec = get_dataset(output_metadata, filename=path)
+ ec["basename"] = ec_basename
+ ec_properties = output_properties(pseduo_location=pseduo_location, **ec)
+ elif extra_file_class == "Directory":
+ ec_properties = {}
+ ec_properties["class"] = "Directory"
+ ec_properties["location"] = ec_basename
+ ec_properties["listing"] = dir_listing(path)
+ else:
+ raise Exception("Unknown output type encountered....")
+ secondary_files.append(ec_properties)
+
else:
basename = output_metadata.get("cwl_file_name")
if not basename:
@@ -411,6 +477,8 @@ def dataset_dict_to_json_content(dataset_dict):
ec_properties = output_properties(pseduo_location=pseduo_location, **ec)
listing.append(ec_properties)
+ if secondary_files:
+ properties["secondaryFiles"] = secondary_files
return properties
elif output_metadata["history_content_type"] == "dataset_collection":
@@ -425,3 +493,24 @@ def dataset_dict_to_json_content(dataset_dict):
return rval
else:
raise NotImplementedError("Unknown history content type encountered")
+
+def download_output(galaxy_output, get_metadata, get_dataset, get_extra_files, output_path):
+ output_metadata = get_metadata(galaxy_output.history_content_type, galaxy_output.history_content_id)
+ dataset_dict = get_dataset(output_metadata)
+ with open(output_path, 'wb') as fh:
+ fh.write(dataset_dict['content'])
+
+
+def guess_artifact_type(path):
+ # TODO: Handle IDs within files.
+ tool_or_workflow = "workflow"
+ try:
+ with open(path, "r") as f:
+ artifact = yaml.load(f)
+
+ tool_or_workflow = "tool" if artifact["class"] != "Workflow" else "workflow"
+
+ except Exception as e:
+ print(e)
+
+ return tool_or_workflow
diff --git a/lib/galaxy/tools/deps/containers.py b/lib/galaxy/tools/deps/containers.py
index a43516e615d0..6410536a3f4d 100644
--- a/lib/galaxy/tools/deps/containers.py
+++ b/lib/galaxy/tools/deps/containers.py
@@ -423,9 +423,14 @@ def add_var(name, value):
if self.job_info.job_directory and self.job_info.job_directory_type == "pulsar":
# We have a Pulsar job directory, so everything needed (excluding index
# files) should be available in job_directory...
- defaults = "$job_directory:default_ro,$tool_directory:default_ro,$job_directory/outputs:rw,$working_directory:rw"
+ defaults = "$job_directory:default_ro"
+ if self.job_info.tool_directory:
+ defaults += ",$tool_directory:default_ro"
+ defaults += ",$job_directory/outputs:rw,$working_directory:rw"
else:
- defaults = "$galaxy_root:default_ro,$tool_directory:default_ro"
+ defaults = "$galaxy_root:default_ro"
+ if self.job_info.tool_directory:
+ defaults += ",$tool_directory:default_ro"
if self.job_info.job_directory:
defaults += ",$job_directory:default_ro"
if self.job_info.tmp_directory is not None:
@@ -444,7 +449,18 @@ def add_var(name, value):
# index data without deployer worrying about above details.
variables["defaults"] = string.Template(defaults).safe_substitute(variables)
- return template.safe_substitute(variables)
+ volumes_str = template.safe_substitute(variables)
+
+ # Not all tools have a tool_directory - strip this out if supplied by
+ # job_conf.
+ tool_directory_index = volumes_str.find("$tool_directory")
+ if tool_directory_index > 0:
+ end_index = volumes_str.find(",", tool_directory_index)
+ if end_index < 0:
+ end_index = len(volumes_str)
+ volumes_str = volumes_str[0:tool_directory_index] + volumes_str[end_index:len(volumes_str)]
+
+ return volumes_str
class DockerContainer(Container, HasDockerLikeVolumes):
diff --git a/lib/galaxy/tools/evaluation.py b/lib/galaxy/tools/evaluation.py
index b244364ac6e7..6ae7080de5ed 100644
--- a/lib/galaxy/tools/evaluation.py
+++ b/lib/galaxy/tools/evaluation.py
@@ -16,6 +16,7 @@
from galaxy.tools.parameters.basic import (
DataCollectionToolParameter,
DataToolParameter,
+ FieldTypeToolParameter,
SelectToolParameter,
)
from galaxy.tools.parameters.grouping import (
@@ -28,6 +29,7 @@
DatasetFilenameWrapper,
DatasetListWrapper,
ElementIdentifierMapper,
+ FilenameWrapper,
InputValueWrapper,
RawObjectWrapper,
SelectToolParameterWrapper,
@@ -195,7 +197,8 @@ def wrap_input(input_values, input):
datatypes_registry=self.app.datatypes_registry,
tool=self.tool,
name=input.name)
-
+ elif isinstance(input, DataToolParameter) and value is None and input.default:
+ input_values[input.name] = FilenameWrapper(os.path.abspath(input.default))
elif isinstance(input, DataToolParameter):
# FIXME: We're populating param_dict with conversions when
# wrapping values, this should happen as a separate
@@ -258,6 +261,23 @@ def wrap_input(input_values, input):
**wrapper_kwds
)
input_values[input.name] = wrapper
+ elif isinstance(input, FieldTypeToolParameter):
+ if value is None:
+ field_wrapper = None
+ else:
+ assert "value" in value, value
+ assert "src" in value
+ src = value["src"]
+ if src == "json":
+ field_wrapper = InputValueWrapper(input, value, param_dict)
+ elif src == "hda":
+ field_wrapper = DatasetFilenameWrapper(value["value"],
+ datatypes_registry=self.app.datatypes_registry,
+ tool=self,
+ name=input.name)
+ else:
+ assert False
+ input_values[input.name] = field_wrapper
elif isinstance(input, SelectToolParameter):
input_values[input.name] = SelectToolParameterWrapper(
input, value, other_values=param_dict, path_rewriter=self.unstructured_path_rewriter)
@@ -297,7 +317,7 @@ def __populate_input_dataset_wrappers(self, param_dict, input_datasets, input_da
wrapper = wrappers[0]
param_dict[name] = wrapper
continue
- if not isinstance(param_dict_value, (DatasetFilenameWrapper, DatasetListWrapper)):
+ if not isinstance(param_dict_value, (DatasetFilenameWrapper, DatasetListWrapper, FilenameWrapper)):
wrapper_kwds = dict(
datatypes_registry=self.app.datatypes_registry,
tool=self,
@@ -380,7 +400,7 @@ def get_data_table_entry(table_name, query_attr, query_val, return_attr):
param_dict['__tool_directory__'] = self.compute_environment.tool_directory()
param_dict['__get_data_table_entry__'] = get_data_table_entry
-
+ param_dict['__local_working_directory__'] = self.local_working_directory
# We add access to app here, this allows access to app.config, etc
param_dict['__app__'] = RawObjectWrapper(self.app)
# More convienent access to app.config.new_file_path; we don't need to
@@ -471,20 +491,27 @@ def __build_command_line(self):
command_line = None
if not command:
return
- try:
- # Substituting parameters into the command
- command_line = fill_template(command, context=param_dict)
- cleaned_command_line = []
- # Remove leading and trailing whitespace from each line for readability.
- for line in command_line.split('\n'):
- cleaned_command_line.append(line.strip())
- command_line = '\n'.join(cleaned_command_line)
- # Remove newlines from command line, and any leading/trailing white space
- command_line = command_line.replace("\n", " ").replace("\r", " ").strip()
- except Exception:
- # Modify exception message to be more clear
- # e.args = ( 'Error substituting into command line. Params: %r, Command: %s' % ( param_dict, self.command ), )
- raise
+
+ # TODO: do not allow normal jobs to set this in this fashion
+ # TODO: this approach specifies a command block as $__cwl_command
+ # and that other approach needs to be unraveled.
+ if "__cwl_command" in param_dict:
+ command_line = param_dict["__cwl_command"]
+ else:
+ try:
+ # Substituting parameters into the command
+ command_line = fill_template(command, context=param_dict)
+ cleaned_command_line = []
+ # Remove leading and trailing whitespace from each line for readability.
+ for line in command_line.split('\n'):
+ cleaned_command_line.append(line.strip())
+ command_line = '\n'.join(cleaned_command_line)
+ # Remove newlines from command line, and any leading/trailing white space
+ command_line = command_line.replace("\n", " ").replace("\r", " ").strip()
+ except Exception:
+ # Modify exception message to be more clear
+ # e.args = ( 'Error substituting into command line. Params: %r, Command: %s' % ( param_dict, self.command ), )
+ raise
if interpreter:
# TODO: path munging for cluster/dataset server relocatability
executable = command_line.split()[0]
@@ -517,7 +544,12 @@ def __build_config_files(self):
def __build_environment_variables(self):
param_dict = self.param_dict
environment_variables = []
- for environment_variable_def in self.tool.environment_variables:
+ environment_variables_raw = self.tool.environment_variables
+ for key, value in param_dict.get("__cwl_command_state", {}).get("env", {}).items():
+ environment_variable = dict(name=key, template=value)
+ environment_variables_raw.append(environment_variable)
+
+ for environment_variable_def in environment_variables_raw:
directory = self.local_working_directory
environment_variable = environment_variable_def.copy()
environment_variable_template = environment_variable_def["template"]
diff --git a/lib/galaxy/tools/execute.py b/lib/galaxy/tools/execute.py
index 54794f8ea07b..aa62449a80d2 100644
--- a/lib/galaxy/tools/execute.py
+++ b/lib/galaxy/tools/execute.py
@@ -192,9 +192,11 @@ def record_error(self, error):
@property
def on_text(self):
if self._on_text is None:
- collection_names = ["collection %d" % c.hid for c in self.collection_info.collections.values()]
- self._on_text = on_text_for_names(collection_names)
-
+ if not self.collection_info.uses_ephemeral_collections:
+ collection_names = ["collection %d" % c.hid for c in self.collection_info.collections.values()]
+ self._on_text = on_text_for_names(collection_names)
+ else:
+ self._on_text = "implicitly created collection from inputs"
return self._on_text
def output_name(self, trans, history, params, output):
diff --git a/lib/galaxy/tools/expressions/__init__.py b/lib/galaxy/tools/expressions/__init__.py
new file mode 100644
index 000000000000..b731044825aa
--- /dev/null
+++ b/lib/galaxy/tools/expressions/__init__.py
@@ -0,0 +1,20 @@
+from .evaluation import evaluate
+from .sandbox import execjs, interpolate
+from .util import jshead, find_engine
+from .script import (
+ write_evalute_script,
+ EXPRESSION_SCRIPT_CALL,
+ EXPRESSION_SCRIPT_NAME,
+)
+
+
+__all__ = (
+ 'evaluate',
+ 'execjs',
+ 'EXPRESSION_SCRIPT_CALL',
+ 'EXPRESSION_SCRIPT_NAME',
+ 'find_engine',
+ 'interpolate',
+ 'jshead',
+ 'write_evalute_script',
+)
diff --git a/lib/galaxy/tools/expressions/cwlNodeEngine.js b/lib/galaxy/tools/expressions/cwlNodeEngine.js
new file mode 100644
index 000000000000..1129584977f0
--- /dev/null
+++ b/lib/galaxy/tools/expressions/cwlNodeEngine.js
@@ -0,0 +1,46 @@
+#!/usr/bin/env nodejs
+
+"use strict";
+
+process.stdin.setEncoding('utf8');
+
+var incoming = "";
+
+process.stdin.on('readable', function() {
+ var chunk = process.stdin.read();
+ if (chunk !== null) {
+ incoming += chunk;
+ }
+});
+
+process.stdin.on('end', function() {
+ var j = JSON.parse(incoming);
+ var exp = ""
+
+ if (j.script[0] == "{") {
+ exp = "{return function()" + j.script + "();}";
+ }
+ else {
+ exp = "{return " + j.script + ";}";
+ }
+
+ var fn = '"use strict";\n';
+
+ if (j.engineConfig) {
+ for (var index = 0; index < j.engineConfig.length; ++index) {
+ fn += j.engineConfig[index] + "\n";
+ }
+ }
+
+ fn += "var $job = " + JSON.stringify(j.job) + ";\n";
+ fn += "var $self = " + JSON.stringify(j.context) + ";\n"
+
+ fn += "var $runtime = " + JSON.stringify(j.runtime) + ";\n"
+ fn += "var $tmpdir = " + JSON.stringify(j.tmpdir) + ";\n"
+ fn += "var $outdir = " + JSON.stringify(j.outdir) + ";\n"
+
+
+ fn += "(function()" + exp + ")()";
+
+ process.stdout.write(JSON.stringify(require("vm").runInNewContext(fn, {})));
+});
diff --git a/lib/galaxy/tools/expressions/evaluation.py b/lib/galaxy/tools/expressions/evaluation.py
new file mode 100644
index 000000000000..89becc1f8628
--- /dev/null
+++ b/lib/galaxy/tools/expressions/evaluation.py
@@ -0,0 +1,37 @@
+import json
+import os
+import subprocess
+
+from .util import find_engine
+
+FILE_DIRECTORY = os.path.normpath(os.path.dirname(os.path.join(__file__)))
+NODE_ENGINE = os.path.join(FILE_DIRECTORY, "cwlNodeEngine.js")
+
+
+def evaluate(config, input):
+ application = find_engine(config)
+
+ default_context = {
+ "engineConfig": [],
+ "job": {},
+ "context": None,
+ "outdir": None,
+ "tmpdir": None,
+ }
+
+ new_input = default_context
+ new_input.update(input)
+
+ sp = subprocess.Popen([application, NODE_ENGINE],
+ shell=False,
+ close_fds=True,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE)
+
+ (stdoutdata, stderrdata) = sp.communicate(json.dumps(new_input) + "\n\n")
+ if sp.returncode != 0:
+ args = (json.dumps(new_input, indent=4), stdoutdata, stderrdata)
+ message = "Expression engine returned non-zero exit code on evaluation of\n%s%s%s" % args
+ raise Exception(message)
+
+ return json.loads(stdoutdata)
diff --git a/lib/galaxy/tools/expressions/sandbox.py b/lib/galaxy/tools/expressions/sandbox.py
new file mode 100644
index 000000000000..a3646e100d4b
--- /dev/null
+++ b/lib/galaxy/tools/expressions/sandbox.py
@@ -0,0 +1,151 @@
+import subprocess
+import json
+import threading
+
+from .util import find_engine
+
+
+class JavascriptException(Exception):
+ pass
+
+
+def execjs(config, js, jslib):
+ application = find_engine(config)
+ try:
+ nodejs = subprocess.Popen([application], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except OSError as e:
+ if e.errno == 2:
+ nodejs = subprocess.Popen(["docker", "run",
+ "--attach=STDIN", "--attach=STDOUT", "--attach=STDERR",
+ "--interactive",
+ "--rm",
+ "commonworkflowlanguage/nodejs-engine", "nodejs"],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ else:
+ raise
+
+ fn = "\"use strict\";%s\n(function()%s)()" % (jslib, js if isinstance(js, basestring) and len(js) > 1 and js[0] == '{' else ("{return (%s);}" % js))
+ script = "console.log(JSON.stringify(require(\"vm\").runInNewContext(%s, {})));\n" % json.dumps(fn)
+
+ def term():
+ try:
+ nodejs.terminate()
+ except OSError:
+ pass
+
+ # Time out after 5 seconds
+ tm = threading.Timer(5, term)
+ tm.start()
+
+ stdoutdata, stderrdata = nodejs.communicate(script)
+ tm.cancel()
+
+ if nodejs.returncode != 0:
+ raise JavascriptException("Returncode was: %s\nscript was: %s\nstdout was: '%s'\nstderr was: '%s'\n" % (nodejs.returncode, script, stdoutdata, stderrdata))
+ else:
+ return json.loads(stdoutdata)
+
+
+class SubstitutionError(Exception):
+ pass
+
+
+DEFAULT = 0
+DOLLAR = 1
+PAREN = 2
+BRACE = 3
+SINGLE_QUOTE = 4
+DOUBLE_QUOTE = 5
+BACKSLASH = 6
+
+
+def scanner(scan):
+
+ i = 0
+ stack = [DEFAULT]
+ start = 0
+ while i < len(scan):
+ state = stack[-1]
+ c = scan[i]
+
+ if state == DEFAULT:
+ if c == '$':
+ stack.append(DOLLAR)
+ elif c == '\\':
+ stack.append(BACKSLASH)
+ elif state == BACKSLASH:
+ stack.pop()
+ if stack[-1] == DEFAULT:
+ return [i - 1, i + 1]
+ elif state == DOLLAR:
+ if c == '(':
+ start = i - 1
+ stack.append(PAREN)
+ elif c == '{':
+ start = i - 1
+ stack.append(BRACE)
+ elif state == PAREN:
+ if c == '(':
+ stack.append(PAREN)
+ elif c == ')':
+ stack.pop()
+ if stack[-1] == DOLLAR:
+ return [start, i + 1]
+ elif c == "'":
+ stack.append(SINGLE_QUOTE)
+ elif c == '"':
+ stack.append(DOUBLE_QUOTE)
+ elif state == BRACE:
+ if c == '{':
+ stack.append(BRACE)
+ elif c == '}':
+ stack.pop()
+ if stack[-1] == DOLLAR:
+ return [start, i + 1]
+ elif c == "'":
+ stack.append(SINGLE_QUOTE)
+ elif c == '"':
+ stack.append(DOUBLE_QUOTE)
+ elif state == SINGLE_QUOTE:
+ if c == "'":
+ stack.pop()
+ elif c == '\\':
+ stack.append(BACKSLASH)
+ elif state == DOUBLE_QUOTE:
+ if c == '"':
+ stack.pop()
+ elif c == '\\':
+ stack.append(BACKSLASH)
+ i += 1
+
+ if len(stack) > 1:
+ raise SubstitutionError("Substitution error, unfinished block starting at position {}: {}".format(start, scan[start:]))
+ else:
+ return None
+
+
+def interpolate(scan, jslib):
+ scan = scan.strip()
+ parts = []
+ w = scanner(scan)
+ while w:
+ parts.append(scan[0:w[0]])
+
+ if scan[w[0]] == '$':
+ e = execjs(scan[w[0] + 1:w[1]], jslib)
+ if w[0] == 0 and w[1] == len(scan):
+ return e
+ leaf = json.dumps(e, sort_keys=True)
+ if leaf[0] == '"':
+ leaf = leaf[1:-1]
+ parts.append(leaf)
+ elif scan[w[0]] == '\\':
+ e = scan[w[1] - 1]
+ parts.append(e)
+
+ scan = scan[w[1]:]
+ w = scanner(scan)
+ parts.append(scan)
+ return ''.join(parts)
diff --git a/lib/galaxy/tools/expressions/script.py b/lib/galaxy/tools/expressions/script.py
new file mode 100644
index 000000000000..0cb18ae74648
--- /dev/null
+++ b/lib/galaxy/tools/expressions/script.py
@@ -0,0 +1,15 @@
+import os
+
+EXPRESSION_SCRIPT_NAME = "_evaluate_expression_.py"
+EXPRESSION_SCRIPT_CALL = "python %s" % EXPRESSION_SCRIPT_NAME
+
+
+def write_evalute_script(in_directory):
+ """ Responsible for writing the script that evaluates expressions
+ in Galaxy jobs.
+ """
+ script = os.path.join(in_directory, EXPRESSION_SCRIPT_NAME)
+ with open(script, "w") as f:
+ f.write('from galaxy_ext.expressions.handle_job import run; run()')
+
+ return script
diff --git a/lib/galaxy/tools/expressions/util.py b/lib/galaxy/tools/expressions/util.py
new file mode 100644
index 000000000000..b7ea1bf090aa
--- /dev/null
+++ b/lib/galaxy/tools/expressions/util.py
@@ -0,0 +1,13 @@
+import json
+from galaxy.tools.deps.commands import which
+
+
+def find_engine(config):
+ nodejs_path = getattr(config, "nodejs_path", None)
+ if nodejs_path is None:
+ nodejs_path = which("nodejs") or which("node") or None
+ return nodejs_path
+
+
+def jshead(engine_config, root_vars):
+ return "\n".join(engine_config + ["var %s = %s;" % (k, json.dumps(v)) for k, v in root_vars.items()])
diff --git a/lib/galaxy/tools/filter_collection.xml b/lib/galaxy/tools/filter_collection.xml
new file mode 100644
index 000000000000..c775de22403a
--- /dev/null
+++ b/lib/galaxy/tools/filter_collection.xml
@@ -0,0 +1,40 @@
+
+ to a dataset collection
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/galaxy/tools/group_collection.xml b/lib/galaxy/tools/group_collection.xml
new file mode 100644
index 000000000000..7dea81248fad
--- /dev/null
+++ b/lib/galaxy/tools/group_collection.xml
@@ -0,0 +1,46 @@
+
+ into a list of lists of datasets
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/galaxy/tools/hash.py b/lib/galaxy/tools/hash.py
new file mode 100644
index 000000000000..5ff1abcc20bc
--- /dev/null
+++ b/lib/galaxy/tools/hash.py
@@ -0,0 +1,12 @@
+import hashlib
+import json
+
+
+def build_tool_hash(as_dict):
+ # http://stackoverflow.com/a/22003440
+ as_str = json.dumps(as_dict, sort_keys=True)
+
+ m = hashlib.sha256()
+    m.update(as_str.encode("utf-8"))
+ hash = m.hexdigest()
+ return hash
diff --git a/lib/galaxy/tools/imp_exp/__init__.py b/lib/galaxy/tools/imp_exp/__init__.py
index f1c73789cf13..2bc456f3acd8 100644
--- a/lib/galaxy/tools/imp_exp/__init__.py
+++ b/lib/galaxy/tools/imp_exp/__init__.py
@@ -214,6 +214,7 @@ def as_hda(obj_dct):
imported_job.imported = True
imported_job.tool_id = job_attrs['tool_id']
imported_job.tool_version = job_attrs['tool_version']
+    imported_job.tool_hash = job_attrs.get('tool_hash', None)
imported_job.set_state(job_attrs['state'])
imported_job.info = job_attrs.get('info', None)
imported_job.exit_code = job_attrs.get('exit_code', None)
diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py
index cb3c5ae00b05..20699141f736 100644
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -108,6 +108,10 @@ def __init__(self, tool, input_source, context=None):
self.validators = []
for elem in input_source.parse_validator_elems():
self.validators.append(validation.Validator.from_element(self, elem))
+ if hasattr(input_source, "parse_map_to"):
+ self.map_to = input_source.parse_map_to()
+ else:
+ self.map_to = None
@property
def visible(self):
@@ -1603,6 +1607,13 @@ def get_initial_value(self, trans, other_values):
return hdca
def to_json(self, value, app, use_security):
+ if getattr(value, "ephemeral", False):
+ # wf_wc_scatter_multiple_flattened
+ value = value.persistent_object
+ if value.id is None:
+ app.model.context.add(value)
+ app.model.context.flush()
+
def single_to_json(value):
src = None
if isinstance(value, dict) and 'src' in value and 'id' in value:
@@ -1703,6 +1714,12 @@ def __init__(self, tool, input_source, trans=None):
if not self.multiple and (self.max is not None):
raise ValueError("Cannot specify max property on single data parameter '%s'. Set multiple=\"true\" to enable this option." % self.name)
self.is_dynamic = True
+ default = input_source.get('default')
+ if default is not None:
+ default = os.path.join(tool.tool_dir, default)
+ self.optional = True
+ self.default = default
+
self._parse_options(input_source)
# Load conversions required for the dataset input
self.conversions = []
@@ -2299,6 +2316,76 @@ def to_text(self, value):
return ""
+class FieldTypeToolParameter(ToolParameter):
+ """CWL field type defined parameter source."""
+
+ def __init__(self, tool, input_source, context=None):
+ input_source = ensure_input_source(input_source)
+ ToolParameter.__init__(self, tool, input_source)
+ # self.field_type = input_source.parse_field_type()
+
+ def from_json(self, value, trans, other_values={}):
+ if trans.workflow_building_mode is workflow_building_modes.ENABLED:
+ return None
+
+ if value is None:
+ return None
+
+ if not isinstance(value, dict) or "src" not in value:
+ value = {"src": "json", "value": value}
+ return self.to_python(value, trans.app)
+
+ def to_json(self, value, app, use_security):
+ """Convert a value to a string representation suitable for persisting"""
+ assert isinstance(value, dict)
+ assert "src" in value
+ return value
+
+ def to_python(self, value, app):
+ """Convert a value created with to_json back to an object representation"""
+ if value is None:
+ return None
+ # return super(FieldTypeToolParameter, self).to_python(value, app)
+ if not isinstance(value, dict):
+ value = json.loads(value)
+ assert isinstance(value, dict)
+ assert "src" in value
+ src = value["src"]
+ if "value" in value:
+ # We have an expanded value, not an ID
+ return value
+ elif src in ["hda", "hdca", "dce"]:
+ id = value['id'] if isinstance(value['id'], int) else app.security.decode_id(value['id'])
+ if src == 'dce':
+ value = app.model.context.query(app.model.DatasetCollectionElement).get(id)
+ elif src == 'hdca':
+ value = app.model.context.query(app.model.HistoryDatasetCollectionAssociation).get(id)
+ else:
+ value = app.model.context.query(app.model.HistoryDatasetAssociation).get(id)
+
+ return {"src": src, "value": value}
+
+ def value_to_basic(self, value, app, use_security=False):
+ if is_runtime_value(value):
+ return runtime_to_json(value)
+
+ if value is None:
+ return None
+
+ assert isinstance(value, dict), "value [%s] is not valid for [%s]" % (value, self)
+ assert "src" in value
+ src = value["src"]
+ if src in ["hda", "hdca", "dce"]:
+ id = value["value"].id if not use_security else app.security.encode_id(value["value"].id)
+            value = {"src": src, "id": id}
+
+ return json.dumps(value)
+
+ def value_from_basic(self, value, app, ignore_errors=False):
+ return super(FieldTypeToolParameter, self).value_from_basic(value, app, ignore_errors)
+ # return json.loads(value)
+
+
parameter_types = dict(
text=TextToolParameter,
integer=IntegerToolParameter,
@@ -2319,8 +2406,11 @@ def to_text(self, value):
data_collection=DataCollectionToolParameter,
library_data=LibraryDatasetToolParameter,
rules=RulesListToolParameter,
+ field=FieldTypeToolParameter,
drill_down=DrillDownSelectToolParameter
)
+#directory=DataToolParameter
+#directory=FileToolParameter
def runtime_to_json(runtime_value):
diff --git a/lib/galaxy/tools/parameters/grouping.py b/lib/galaxy/tools/parameters/grouping.py
index b722e78e9186..131d6250ecba 100644
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -217,11 +217,14 @@ def get_composite_dataset_name(self, context):
if dataset_name is None:
filenames = list()
for composite_file in context.get('files', []):
+ if dataset_name is None and composite_file.get("NAME", None) is not None:
+ dataset_name = composite_file.get("NAME")
if not composite_file.get('ftp_files', ''):
filenames.append((composite_file.get('file_data') or {}).get('filename', ''))
else:
filenames.append(composite_file.get('ftp_files', [])[0])
- dataset_name = os.path.commonprefix(filenames).rstrip('.') or None
+ if dataset_name is None:
+ dataset_name = os.path.commonprefix(filenames).rstrip('.') or None
if dataset_name is None:
dataset_name = 'Uploaded Composite Dataset (%s)' % self.get_file_type(context)
return dataset_name
@@ -709,6 +712,10 @@ def nested_to_dict(input):
cond_dict["test_param"] = nested_to_dict(self.test_param)
return cond_dict
+ @property
+ def case_strings(self):
+ return [c.value for c in self.cases]
+
class ConditionalWhen(Dictifiable):
dict_collection_visible_keys = ['value']
diff --git a/lib/galaxy/tools/parameters/output_collect.py b/lib/galaxy/tools/parameters/output_collect.py
index e3adcaa5d25d..c295cee91559 100644
--- a/lib/galaxy/tools/parameters/output_collect.py
+++ b/lib/galaxy/tools/parameters/output_collect.py
@@ -403,6 +403,8 @@ def populate_collection_elements(self, collection, root_collection_builder, file
if dbkey == INPUT_DBKEY_TOKEN:
dbkey = self.input_dbkey
+ cwl_filename = fields_match.cwl_filename or None
+
# Create new primary dataset
dataset_name = fields_match.name or designation
@@ -419,6 +421,9 @@ def populate_collection_elements(self, collection, root_collection_builder, file
link_data=link_data,
tag_list=tag_list,
)
+ if cwl_filename:
+ dataset.cwl_filename = cwl_filename
+
log.debug(
"(%s) Created dynamic collection dataset for path [%s] with element identifier [%s] for output [%s] %s",
self.job.id,
@@ -889,6 +894,10 @@ def tag_list(self):
def object_id(self):
return self.as_dict.get("object_id", None)
+ @property
+ def cwl_filename(self):
+ return self.as_dict.get("cwl_filename")
+
class RegexCollectedDatasetMatch(JsonCollectedDatasetMatch):
diff --git a/lib/galaxy/tools/parameters/wrapped.py b/lib/galaxy/tools/parameters/wrapped.py
index 309cdf7fdc37..755e4b43061c 100644
--- a/lib/galaxy/tools/parameters/wrapped.py
+++ b/lib/galaxy/tools/parameters/wrapped.py
@@ -1,6 +1,9 @@
+import os
+
from galaxy.tools.parameters.basic import (
DataCollectionToolParameter,
DataToolParameter,
+ FieldTypeToolParameter,
SelectToolParameter
)
from galaxy.tools.parameters.grouping import (
@@ -13,6 +16,7 @@
DatasetFilenameWrapper,
DatasetListWrapper,
ElementIdentifierMapper,
+ FilenameWrapper,
InputValueWrapper,
SelectToolParameterWrapper
)
@@ -77,6 +81,9 @@ def wrap_values(self, inputs, input_values, skip_missing_values=False):
datatypes_registry=trans.app.datatypes_registry,
tool=tool,
name=input.name)
+ elif isinstance(input, DataToolParameter) and input.default and value is None:
+ input_values[input.name] = \
+ FilenameWrapper(os.path.abspath(input.default))
elif isinstance(input, DataToolParameter):
wrapper_kwds = dict(
datatypes_registry=trans.app.datatypes_registry,
@@ -98,6 +105,31 @@ def wrap_values(self, inputs, input_values, skip_missing_values=False):
tool=tool,
name=input.name,
)
+            elif isinstance(input, FieldTypeToolParameter):
+                if value is None:
+                    input_values[input.name] = None
+                elif not isinstance(value, dict):
+                    raise Exception("Simple values [%s] need to be wrapped in a JSON envelope" % input)
+                else:
+                    assert "value" in value, value
+                    assert "src" in value
+                    src = value["src"]
+                    if src == "json":
+                        input_values[input.name] = InputValueWrapper(input, value, incoming)
+                    elif src == "hda":
+                        input_values[input.name] = DatasetFilenameWrapper(value["value"],
+                                                                          datatypes_registry=trans.app.datatypes_registry,
+                                                                          tool=tool,
+                                                                          name=input.name)
+                    elif src == "hdca":
+                        input_values[input.name] = DatasetCollectionWrapper(None,
+                                                                            value["value"],
+                                                                            datatypes_registry=trans.app.datatypes_registry,
+                                                                            tool=tool,
+                                                                            name=input.name)
+                    else:
+                        assert False, "Unknown src encountered [%s] for field type value [%s]" % (src, value)
else:
input_values[input.name] = InputValueWrapper(input, value, incoming)
diff --git a/lib/galaxy/tools/parameters/wrapped_json.py b/lib/galaxy/tools/parameters/wrapped_json.py
index c045c9595ef0..69549943143f 100644
--- a/lib/galaxy/tools/parameters/wrapped_json.py
+++ b/lib/galaxy/tools/parameters/wrapped_json.py
@@ -62,8 +62,14 @@ def _data_input_to_path(v):
json_value = _data_input_to_path(value)
elif handle_files == "skip":
return SKIP_INPUT
- else:
- raise NotImplementedError()
+ elif handle_files == "OBJECT":
+ if value:
+ if isinstance(value, list):
+ value = value[0]
+ return _hda_to_object(value)
+ else:
+ return None
+ raise NotImplementedError()
elif input_type == "data_collection":
if handle_files == "skip":
return SKIP_INPUT
@@ -88,6 +94,21 @@ def _data_input_to_path(v):
return json_value
+def _hda_to_object(hda):
+ hda_dict = hda.to_dict()
+ metadata_dict = {}
+
+ for key, value in hda_dict.items():
+ if key.startswith("metadata_"):
+ metadata_dict[key[len("metadata_"):]] = value
+
+ return {
+ 'file_ext': hda_dict['file_ext'],
+ 'name': hda_dict['name'],
+ 'metadata': metadata_dict,
+ }
+
+
def _cast_if_not_none(value, cast_to, empty_to_none=False):
# log.debug("value [%s], type[%s]" % (value, type(value)))
if value is None or (empty_to_none and str(value) == ''):
diff --git a/lib/galaxy/tools/parser/__init__.py b/lib/galaxy/tools/parser/__init__.py
index 7f7fdf15579e..771fbaf3202d 100644
--- a/lib/galaxy/tools/parser/__init__.py
+++ b/lib/galaxy/tools/parser/__init__.py
@@ -1,6 +1,6 @@
""" Package responsible for parsing tools from files/abstract tool sources.
"""
-from .factory import get_input_source, get_tool_source
+from .factory import get_input_source, get_tool_source, get_tool_source_from_representation
from .interface import ToolSource
from .output_objects import (
ToolOutputCollectionPart,
@@ -9,6 +9,7 @@
__all__ = (
"get_input_source",
"get_tool_source",
+ "get_tool_source_from_representation",
"ToolOutputCollectionPart",
"ToolSource",
)
diff --git a/lib/galaxy/tools/parser/cwl.py b/lib/galaxy/tools/parser/cwl.py
index 576023848b52..b18534750869 100644
--- a/lib/galaxy/tools/parser/cwl.py
+++ b/lib/galaxy/tools/parser/cwl.py
@@ -1,7 +1,6 @@
import logging
-import os
-from galaxy.tools.cwl import tool_proxy
+from galaxy.tools.cwl import tool_proxy, tool_proxy_from_persistent_representation
from galaxy.tools.deps import requirements
from galaxy.util.odict import odict
from .error_level import StdioErrorLevel
@@ -9,35 +8,58 @@
PageSource,
PagesSource,
ToolSource,
- ToolStdioExitCode
+ ToolStdioExitCode,
)
+from .output_collection_def import dataset_collector_descriptions_from_list
from .output_actions import ToolOutputActionGroup
+from .output_objects import ToolOutputCollection
+from .output_objects import ToolOutputCollectionStructure
from .output_objects import ToolOutput
-from .yaml import YamlInputSource
+from .yaml import YamlInputSource, YamlPageSource
+
+GX_INTERFACE_NAMESPACE = "http://galaxyproject.org/cwl#interface"
+
+CWL_DEFAULT_FILE_OUTPUT = "data" # set to _sniff_ to sniff output types automatically.
log = logging.getLogger(__name__)
class CwlToolSource(ToolSource):
- def __init__(self, tool_file, strict_cwl_validation=True):
- self._cwl_tool_file = tool_file
- self._id, _ = os.path.splitext(os.path.basename(tool_file))
- self._tool_proxy = None
+ def __init__(self, tool_file=None, tool_object=None, strict_cwl_validation=True, tool_directory=None):
self._source_path = tool_file
+ self._source_object = tool_object
+ self._tool_proxy = None
self._strict_cwl_validation = strict_cwl_validation
+ self._tool_directory = tool_directory
@property
def tool_proxy(self):
if self._tool_proxy is None:
- self._tool_proxy = tool_proxy(self._source_path, strict_cwl_validation=self._strict_cwl_validation)
+ if self._source_path is not None:
+ self._tool_proxy = tool_proxy(self._source_path, strict_cwl_validation=self._strict_cwl_validation, tool_directory=self._tool_directory)
+ else:
+ if "pickle" not in self._source_object:
+ self._tool_proxy = tool_proxy(tool_object=self._source_object, strict_cwl_validation=self._strict_cwl_validation, tool_directory=self._tool_directory)
+ else:
+ self._tool_proxy = tool_proxy_from_persistent_representation(self._source_object, strict_cwl_validation=self._strict_cwl_validation, tool_directory=self._tool_directory)
return self._tool_proxy
+ def _get_gx_interface(self):
+ rval = None
+ for h in self.tool_proxy.hints_or_requirements_of_class(GX_INTERFACE_NAMESPACE):
+ rval = strip_namespace(h, GX_INTERFACE_NAMESPACE[:-len("interface")])
+
+ return rval
+
def parse_tool_type(self):
- return 'cwl'
+ if self._get_gx_interface() is not None:
+ return 'galactic_cwl'
+ else:
+ return 'cwl'
def parse_id(self):
- return self._id
+ return self.tool_proxy.galaxy_id()
def parse_name(self):
return self.tool_proxy.label() or self.parse_id()
@@ -78,16 +100,37 @@ def parse_strict_shell(self):
def parse_stdio(self):
# TODO: remove duplication with YAML
- # New format - starting out just using exit code.
- exit_code_lower = ToolStdioExitCode()
- exit_code_lower.range_start = float("-inf")
- exit_code_lower.range_end = -1
- exit_code_lower.error_level = StdioErrorLevel.FATAL
- exit_code_high = ToolStdioExitCode()
- exit_code_high.range_start = 1
- exit_code_high.range_end = float("inf")
- exit_code_lower.error_level = StdioErrorLevel.FATAL
- return [exit_code_lower, exit_code_high], []
+ exit_codes = []
+
+ success_codes = sorted(set(self.tool_proxy._tool.tool.get("successCodes") or [0]))
+
+ last_range_start = None
+ last_range_end = None
+ last_success_code = None
+
+ for success_code in success_codes:
+ if last_success_code is not None and success_code == last_success_code + 1:
+ last_success_code = success_code
+ continue
+
+ exit_code = ToolStdioExitCode()
+            exit_code.range_start = float("-inf")
+            if last_success_code is not None:
+                exit_code.range_start = last_success_code + 1
+
+ exit_code.range_end = success_code - 1
+ exit_code.error_level = StdioErrorLevel.FATAL
+ exit_codes.append(exit_code)
+
+ last_success_code = success_code
+
+ exit_code = ToolStdioExitCode()
+ exit_code.range_start = last_success_code + 1
+ exit_code.range_end = float("inf")
+ exit_code.error_level = StdioErrorLevel.FATAL
+ exit_codes.append(exit_code)
+
+ return exit_codes, []
def parse_interpreter(self):
return None
@@ -99,26 +142,48 @@ def parse_description(self):
return self.tool_proxy.description()
def parse_input_pages(self):
- page_source = CwlPageSource(self.tool_proxy)
+ gx_interface = self._get_gx_interface()
+ if gx_interface is None:
+ page_source = CwlPageSource(self.tool_proxy)
+ else:
+ print(gx_interface)
+ page_source = YamlPageSource(gx_interface["inputs"])
return PagesSource([page_source])
def parse_outputs(self, tool):
output_instances = self.tool_proxy.output_instances()
outputs = odict()
+ output_collections = odict()
output_defs = []
for output_instance in output_instances:
output_defs.append(self._parse_output(tool, output_instance))
+
# TODO: parse outputs collections
for output_def in output_defs:
- outputs[output_def.name] = output_def
- return outputs, odict()
+ if isinstance(output_def, ToolOutput):
+ outputs[output_def.name] = output_def
+ else:
+ outputs[output_def.name] = output_def
+ output_collections[output_def.name] = output_def
+ return outputs, output_collections
def _parse_output(self, tool, output_instance):
+ output_type = output_instance.output_data_type
+ if isinstance(output_type, dict) and output_type.get("type") == "record":
+ return self._parse_output_record(tool, output_instance)
+ elif isinstance(output_type, dict) and output_type.get("type") == "array":
+ return self._parse_output_array(tool, output_instance)
+ else:
+ return self._parse_output_data(tool, output_instance)
+
+ def _parse_output_data(self, tool, output_instance):
name = output_instance.name
# TODO: handle filters, actions, change_format
output = ToolOutput(name)
if "File" in output_instance.output_data_type:
- output.format = "_sniff_"
+ output.format = CWL_DEFAULT_FILE_OUTPUT
+ elif "Directory" in output_instance.output_data_type:
+ output.format = "directory"
else:
output.format = "expression.json"
output.change_format = []
@@ -134,6 +199,35 @@ def _parse_output(self, tool, output_instance):
output.actions = ToolOutputActionGroup(output, None)
return output
+ def _parse_output_record(self, tool, output_instance):
+ name = output_instance.name
+ # TODO: clean output bindings and other non-structure information
+ # from this.
+ fields = output_instance.output_data_type.get("fields")
+ output_collection = ToolOutputCollection(
+ name,
+ ToolOutputCollectionStructure(
+ collection_type="record",
+ fields=fields,
+ ),
+ )
+ return output_collection
+
+ def _parse_output_array(self, tool, output_instance):
+ name = output_instance.name
+ # TODO: Handle nested arrays and such...
+ dataset_collector_descriptions = dataset_collector_descriptions_from_list(
+ [{"from_provided_metadata": True}],
+ )
+ output_collection = ToolOutputCollection(
+ name,
+ ToolOutputCollectionStructure(
+ collection_type="list",
+ dataset_collector_descriptions=dataset_collector_descriptions,
+ ),
+ )
+ return output_collection
+
def parse_requirements_and_containers(self):
containers = []
docker_identifier = self.tool_proxy.docker_identifier()
@@ -148,7 +242,31 @@ def parse_requirements_and_containers(self):
))
def parse_profile(self):
- return "16.04"
+ return "17.09"
+
+ def parse_provided_metadata_style(self):
+ return "default"
+
+ def parse_cores_min(self):
+ for h in self.tool_proxy.hints_or_requirements_of_class("ResourceRequirement"):
+ cores_min = h.get("coresMin")
+ if cores_min:
+ return cores_min
+
+ return 1
+
+
+def strip_namespace(ordered_dict, namespace):
+ if isinstance(ordered_dict, dict):
+ value = odict()
+ for k, v in ordered_dict.items():
+ if k.startswith(namespace):
+ k = k[len(namespace):]
+ value[k] = strip_namespace(v, namespace)
+ return value
+ elif isinstance(ordered_dict, list):
+ return list(map(lambda v: strip_namespace(v, namespace), ordered_dict))
+ return ordered_dict
class CwlPageSource(PageSource):
diff --git a/lib/galaxy/tools/parser/factory.py b/lib/galaxy/tools/parser/factory.py
index 28c3e8772574..e8c7c8f3469d 100644
--- a/lib/galaxy/tools/parser/factory.py
+++ b/lib/galaxy/tools/parser/factory.py
@@ -16,7 +16,7 @@
log = logging.getLogger(__name__)
-def get_tool_source(config_file=None, xml_tree=None, enable_beta_formats=True, tool_location_fetcher=None):
+def get_tool_source(config_file=None, xml_tree=None, enable_beta_formats=True, tool_location_fetcher=None, strict_cwl_validation=True):
"""Return a ToolSource object corresponding to supplied source.
The supplied source may be specified as a file path (using the config_file
@@ -41,8 +41,8 @@ def get_tool_source(config_file=None, xml_tree=None, enable_beta_formats=True, t
as_dict = ordered_load(f)
return YamlToolSource(as_dict, source_path=config_file)
elif config_file.endswith(".json") or config_file.endswith(".cwl"):
- log.info("Loading CWL tool - this is experimental - tool likely will not function in future at least in same way.")
- return CwlToolSource(config_file)
+ log.info("Loading CWL tool [%s]. This is experimental - tool likely will not function in future at least in same way." % config_file)
+ return CwlToolSource(config_file, strict_cwl_validation=strict_cwl_validation)
else:
tree, macro_paths = load_tool_with_refereces(config_file)
return XmlToolSource(tree, source_path=config_file, macro_paths=macro_paths)
@@ -63,6 +63,24 @@ def construct_mapping(loader, node):
return yaml.load(stream, OrderedLoader)
+def get_tool_source_from_representation(tool_format, tool_representation, strict_cwl_validation=True, tool_directory=None):
+ # TODO: PRE-MERGE - ensure strict_cwl_validation is being set on caller - ignored right now.
+ # TODO: make sure whatever is consuming this method uses ordered load.
+ log.info("Loading dynamic tool - this is experimental - tool may not function in future.")
+ if tool_format == "GalaxyTool":
+ if "version" not in tool_representation:
+ tool_representation["version"] = "1.0.0" # Don't require version for embedded tools.
+ return YamlToolSource(tool_representation)
+ elif tool_format in ["CommandLineTool", "ExpressionTool"]:
+ return CwlToolSource(
+ tool_object=tool_representation,
+ strict_cwl_validation=strict_cwl_validation,
+ tool_directory=tool_directory
+ )
+ else:
+ raise Exception("Unknown tool representation format [%s]." % tool_format)
+
+
def get_input_source(content):
"""Wrap an XML element in a XmlInputSource if needed.
diff --git a/lib/galaxy/tools/parser/interface.py b/lib/galaxy/tools/parser/interface.py
index 2cd31a1aa6f0..3d27fea65348 100644
--- a/lib/galaxy/tools/parser/interface.py
+++ b/lib/galaxy/tools/parser/interface.py
@@ -88,6 +88,11 @@ def parse_command(self):
""" Return string contianing command to run.
"""
+ def parse_expression(self):
+        """ Return string containing the expression to evaluate, if any.
+ """
+ return None
+
@abstractmethod
def parse_environment_variables(self):
""" Return environment variable templates to expose.
@@ -213,6 +218,11 @@ def parse_profile(self):
""" Return tool profile version as Galaxy major e.g. 16.01 or 16.04.
"""
+ def parse_cores_min(self):
+ """Return minimum number of cores required to run this tool.
+ """
+ return 1
+
def macro_paths(self):
return []
diff --git a/lib/galaxy/tools/parser/output_objects.py b/lib/galaxy/tools/parser/output_objects.py
index bfe51934ff9b..97ed2b3775c9 100644
--- a/lib/galaxy/tools/parser/output_objects.py
+++ b/lib/galaxy/tools/parser/output_objects.py
@@ -24,12 +24,13 @@ class ToolOutput(ToolOutputBase):
(format, metadata_source, parent)
"""
- dict_collection_visible_keys = ['name', 'format', 'label', 'hidden']
+ dict_collection_visible_keys = ['name', 'format', 'label', 'hidden', 'output_type']
def __init__(self, name, format=None, format_source=None, metadata_source=None,
parent=None, label=None, filters=None, actions=None, hidden=False,
implicit=False):
super(ToolOutput, self).__init__(name, label=label, filters=filters, hidden=hidden)
+ self.output_type = "data"
self.format = format
self.format_source = format_source
self.metadata_source = metadata_source
@@ -70,6 +71,27 @@ def to_dict(self, view='collection', value_mapper=None, app=None):
return as_dict
+class ToolExpressionOutput(ToolOutputBase):
+ dict_collection_visible_keys = ('name', 'format', 'label', 'hidden', 'output_type')
+
+ def __init__(self, name, output_type, from_expression,
+ label=None, filters=None, actions=None, hidden=False):
+ super(ToolExpressionOutput, self).__init__(name, label=label, filters=filters, hidden=hidden)
+ self.output_type = output_type # JSON type...
+ self.from_expression = from_expression
+ self.format = "expression.json" # galaxy.datatypes.text.ExpressionJson.file_ext
+
+ self.format_source = None
+ self.metadata_source = None
+ self.parent = None
+ self.actions = actions
+
+ # Initialize default values
+ self.change_format = []
+ self.implicit = False
+ self.from_work_dir = None
+
+
class ToolOutputCollection(ToolOutputBase):
"""
Represents a HistoryDatasetCollectionAssociation of output datasets produced
@@ -85,6 +107,7 @@ class ToolOutputCollection(ToolOutputBase):
"""
+ dict_collection_visible_keys = ('name', 'format', 'label', 'hidden', 'output_type')
dict_collection_visible_keys = ['name', 'default_format', 'label', 'hidden', 'inherit_format', 'inherit_metadata']
@@ -102,6 +125,7 @@ def __init__(
inherit_metadata=False
):
super(ToolOutputCollection, self).__init__(name, label=label, filters=filters, hidden=hidden)
+ self.output_type = "collection"
self.collection = True
self.default_format = default_format
self.structure = structure
@@ -183,18 +207,24 @@ def __init__(
collection_type_from_rules=None,
structured_like=None,
dataset_collector_descriptions=None,
+ fields=None,
):
self.collection_type = collection_type
self.collection_type_source = collection_type_source
self.collection_type_from_rules = collection_type_from_rules
self.structured_like = structured_like
self.dataset_collector_descriptions = dataset_collector_descriptions
+ self.fields = fields
if collection_type and collection_type_source:
raise ValueError("Cannot set both type and type_source on collection output.")
if collection_type is None and structured_like is None and dataset_collector_descriptions is None and collection_type_source is None and collection_type_from_rules is None:
raise ValueError("Output collection types must specify source of collection type information (e.g. structured_like or type_source).")
if dataset_collector_descriptions and (structured_like or collection_type_from_rules):
raise ValueError("Cannot specify dynamic structure (discovered_datasets) and collection type attributes structured_like or collection_type_from_rules.")
+ if collection_type == "record" and fields is None:
+ raise ValueError("If record outputs are defined, fields must be defined as well.")
+ if fields is not None and collection_type != "record":
+ raise ValueError("If fields are specified for outputs, the collection type must be record.")
self.dynamic = dataset_collector_descriptions is not None
def collection_prototype(self, inputs, type_registry):
@@ -204,7 +234,7 @@ def collection_prototype(self, inputs, type_registry):
else:
collection_type = self.collection_type
assert collection_type
- collection_prototype = type_registry.prototype(collection_type)
+ collection_prototype = type_registry.prototype(collection_type, fields=self.fields)
collection_prototype.collection_type = collection_type
return collection_prototype
diff --git a/lib/galaxy/tools/parser/xml.py b/lib/galaxy/tools/parser/xml.py
index 0e5bd18d8e06..c6b14afdba0e 100644
--- a/lib/galaxy/tools/parser/xml.py
+++ b/lib/galaxy/tools/parser/xml.py
@@ -22,6 +22,7 @@
from .output_actions import ToolOutputActionGroup
from .output_collection_def import dataset_collector_descriptions_from_elem
from .output_objects import (
+ ToolExpressionOutput,
ToolOutput,
ToolOutputCollection,
ToolOutputCollectionStructure
@@ -111,6 +112,12 @@ def parse_command(self):
command_el = self._command_el
return ((command_el is not None) and command_el.text) or None
+ def parse_expression(self):
+        """ Return string containing the expression to evaluate, if any.
+ """
+ expression_el = self.root.find("expression")
+ return ((expression_el is not None) and expression_el.text) or None
+
def parse_environment_variables(self):
environment_variables_el = self.root.find("environment_variables")
if environment_variables_el is None:
@@ -256,7 +263,12 @@ def _parse(data_elem, **kwds):
for _ in out_elem.findall("data"):
_parse(_)
- for collection_elem in out_elem.findall("collection"):
+ def _parse_expression(output_elem, **kwds):
+ output_def = self._parse_expression_output(output_elem, tool, **kwds)
+ data_dict[output_def.name] = output_def
+ return output_def
+
+ def _parse_collection(collection_elem):
name = collection_elem.get("name")
label = xml_text(collection_elem, "label")
default_format = collection_elem.get("format", "data")
@@ -312,6 +324,22 @@ def _parse(data_elem, **kwds):
output_collection.outputs[output_name] = data
output_collections[name] = output_collection
+ for out_child in out_elem.getchildren():
+ if out_child.tag == "data":
+ _parse(out_child)
+ elif out_child.tag == "collection":
+ _parse_collection(out_child)
+ elif out_child.tag == "output":
+ output_type = out_child.get("type")
+ if output_type == "data":
+ _parse(out_child)
+ elif output_type == "collection":
+ _parse_collection(out_child)
+ else:
+ _parse_expression(out_child)
+ else:
+ log.warn("Unknown output tag encountered [%s]" % out_child.tag)
+
for output_def in data_dict.values():
outputs[output_def.name] = output_def
return outputs, output_collections
@@ -323,6 +351,7 @@ def _parse_output(
default_format="data",
default_format_source=None,
default_metadata_source="",
+ expression_type=None,
):
output = ToolOutput(data_elem.get("name"))
output_format = data_elem.get("format", default_format)
@@ -347,6 +376,22 @@ def _parse_output(
output.dataset_collector_descriptions = dataset_collector_descriptions_from_elem(data_elem, legacy=self.legacy_defaults)
return output
+ def _parse_expression_output(self, output_elem, tool, **kwds):
+ output_type = output_elem.get("type")
+ from_expression = output_elem.get("from")
+ output = ToolExpressionOutput(
+ output_elem.get("name"),
+ output_type,
+ from_expression,
+ )
+ output.path = output_elem.get("value")
+ output.label = xml_text(output_elem, "label")
+
+ output.hidden = string_as_bool(output_elem.get("hidden", ""))
+ output.actions = ToolOutputActionGroup(output, output_elem.find('actions'))
+ output.dataset_collector_descriptions = []
+ return output
+
def parse_stdio(self):
command_el = self._command_el
detect_errors = None
diff --git a/lib/galaxy/tools/parser/yaml.py b/lib/galaxy/tools/parser/yaml.py
index 4a8d796e2735..bd731fc1387a 100644
--- a/lib/galaxy/tools/parser/yaml.py
+++ b/lib/galaxy/tools/parser/yaml.py
@@ -54,6 +54,9 @@ def parse_require_login(self, default):
def parse_command(self):
return self.root_dict.get("command")
+ def parse_expression(self):
+ return self.root_dict.get("expression")
+
def parse_environment_variables(self):
return []
@@ -342,6 +345,9 @@ def parse_static_options(self):
static_options.append((label, value, selected))
return static_options
+ def parse_map_to(self):
+ return self.input_dict.get("mapTo")
+
def _ensure_has(dict, defaults):
for key, value in defaults.items():
diff --git a/lib/galaxy/tools/toolbox/base.py b/lib/galaxy/tools/toolbox/base.py
index 651540254c2e..4edaeb8e96aa 100644
--- a/lib/galaxy/tools/toolbox/base.py
+++ b/lib/galaxy/tools/toolbox/base.py
@@ -52,6 +52,7 @@ def __init__(self, config_filenames, tool_root_dir, app):
# shed_tool_conf.xml file.
self._dynamic_tool_confs = []
self._tools_by_id = {}
+ self._tools_by_hash = {}
self._integrated_section_by_tool = {}
# Tool lineages can contain chains of related tools with different ids
# so each will be present once in the above dictionary. The following
@@ -97,6 +98,9 @@ def handle_panel_update(self, section_dict):
def create_tool(self, config_file, repository_id=None, guid=None, **kwds):
raise NotImplementedError()
+ def create_dynamic_tool(self, dynamic_tool):
+ raise NotImplementedError()
+
def _init_tools_from_configs(self, config_filenames):
""" Read through all tool config files and initialize tools in each
with init_tools_from_config below.
@@ -175,6 +179,26 @@ def _init_tools_from_config(self, config_filename):
config_elems=config_elems)
self._dynamic_tool_confs.append(shed_tool_conf_dict)
+ def _get_tool_by_hash(self, tool_hash):
+ if tool_hash in self._tools_by_hash:
+ return self._tools_by_hash[tool_hash]
+
+ dynamic_tool = self.app.dynamic_tool_manager.get_tool_by_hash(tool_hash)
+ if dynamic_tool:
+ return self.load_dynamic_tool(dynamic_tool)
+
+ return None
+
+ def load_dynamic_tool(self, dynamic_tool):
+ if not dynamic_tool.active:
+ return None
+
+ tool = self.create_dynamic_tool(dynamic_tool)
+ self.register_tool(tool)
+ assert tool.tool_hash
+ self._tools_by_hash[tool.tool_hash] = tool
+ return tool
+
def load_item(self, item, tool_path, panel_dict=None, integrated_panel_dict=None, load_panel_dict=True, guid=None, index=None, internal=False):
with self.app._toolbox_lock:
item = ensure_tool_conf_item(item)
@@ -402,7 +426,7 @@ def _load_integrated_tool_panel_keys(self):
elif elem.tag == 'label':
self._integrated_tool_panel.stub_label(key)
- def get_tool(self, tool_id, tool_version=None, get_all_versions=False, exact=False):
+ def get_tool(self, tool_id, tool_version=None, get_all_versions=False, exact=False, tool_hash=None):
"""Attempt to locate a tool in the tool box. Note that `exact` only refers to the `tool_id`, not the `tool_version`."""
if tool_version:
tool_version = str(tool_version)
@@ -410,6 +434,12 @@ def get_tool(self, tool_id, tool_version=None, get_all_versions=False, exact=Fal
if get_all_versions and exact:
raise AssertionError("Cannot specify get_tool with both get_all_versions and exact as True")
+ if tool_id is None:
+ if tool_hash is not None:
+ tool_id = self._get_tool_by_hash(tool_hash).id
+ if tool_id is None:
+ raise AssertionError("get_tool called with tool_id as None")
+
if "/repos/" in tool_id: # test if tool came from a toolshed
tool_id_without_tool_shed = tool_id.split("/repos/")[1]
available_tool_sheds = [urlparse(_) for _ in self.app.tool_shed_registry.tool_sheds.values()]
diff --git a/lib/galaxy/tools/wrappers.py b/lib/galaxy/tools/wrappers.py
index 94818e938a54..2a32124efb7c 100644
--- a/lib/galaxy/tools/wrappers.py
+++ b/lib/galaxy/tools/wrappers.py
@@ -176,6 +176,32 @@ def __iter__(self):
return self.value.__iter__()
+class FilenameWrapper(ToolParameterValueWrapper):
+
+ def __init__(self, file_name, dataset_path=None, identifier=None):
+ self.file_name = file_name
+ self.false_path = getattr(dataset_path, "false_path", None)
+ self.false_extra_files_path = getattr(dataset_path, "false_extra_files_path", None)
+ self._element_identifier = identifier
+
+ def __str__(self):
+ if self.false_path is not None:
+ return self.false_path
+ else:
+ return self.file_name
+
+ @property
+ def element_identifier(self):
+ identifier = self._element_identifier
+ if identifier is None:
+ identifier = self.name
+ return identifier
+
+ @property
+ def is_collection(self):
+ return False
+
+
class DatasetFilenameWrapper(ToolParameterValueWrapper):
"""
Wraps a dataset so that __str__ returns the filename, but all other
@@ -393,7 +419,7 @@ def __init__(self, job_working_directory, has_collection, dataset_paths=[], **kw
else:
self.__input_supplied = True
- if hasattr(has_collection, "name"):
+ if hasattr(has_collection, "history_content_type"):
# It is a HistoryDatasetCollectionAssociation
collection = has_collection.collection
self.name = has_collection.name
@@ -440,6 +466,9 @@ def keys(self):
return []
return self.__element_instances.keys()
+ def items(self):
+ return self.__element_instances.items()
+
@property
def is_collection(self):
return True
diff --git a/lib/galaxy/web/base/controller.py b/lib/galaxy/web/base/controller.py
index c222ff247415..07d2a9770b8f 100644
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -1243,9 +1243,10 @@ def _workflow_from_dict(self, trans, data, source=None, add_to_menu=False, publi
"""
# TODO: replace this method with direct access to manager.
workflow_contents_manager = workflows.WorkflowContentsManager(self.app)
- created_workflow = workflow_contents_manager.build_workflow_from_dict(
+ raw_workflow_description = workflow_contents_manager.ensure_raw_description(data)
+ created_workflow = workflow_contents_manager.build_workflow_from_raw_description(
trans,
- data,
+ raw_workflow_description,
source=source,
add_to_menu=add_to_menu,
publish=publish,
diff --git a/lib/galaxy/webapps/galaxy/api/dynamic_tools.py b/lib/galaxy/webapps/galaxy/api/dynamic_tools.py
new file mode 100644
index 000000000000..663994657ec1
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/dynamic_tools.py
@@ -0,0 +1,68 @@
+from galaxy import web, util
+from galaxy.exceptions import ObjectNotFound
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous_and_sessionless as expose_api_anonymous_and_sessionless
+from galaxy.web.base.controller import BaseAPIController
+
+
+import logging
+log = logging.getLogger(__name__)
+
+
+class DynamicToolsController(BaseAPIController):
+ """
+ RESTful controller for interactions with dynamic tools.
+
+ Dynamic tools are tools defined in the database. Use the tools controller
+ to run these tools and view functional information.
+ """
+
+ def __init__(self, app):
+ super(DynamicToolsController, self).__init__(app)
+
+ @expose_api_anonymous_and_sessionless
+ def index(self, trans, **kwds):
+ """
+ GET /api/dynamic_tools: returns a list of dynamic tools.
+
+ This returns meta-information about the dynamic tool, such as
+ tool_hash. To use the tool or view functional information such as
+ inputs and outputs, use the standard tools API indexed by the
+ ID (and optionally version) returned from this endpoint.
+ """
+ manager = self.app.dynamic_tools_manager
+ return list(
+ map(lambda t: t.to_dict(), manager.list_tools())
+ )
+
+ @expose_api_anonymous_and_sessionless
+ def show(self, trans, id, **kwd):
+ """
+ GET /api/dynamic_tools/{tool_id|tool_hash|uuid}
+ """
+ manager = self.app.dynamic_tools_manager
+ if util.is_uuid(id):
+ dynamic_tool = manager.get_by_uuid(id)
+ else:
+ dynamic_tool = manager.get_by_id_or_hash(id)
+ if dynamic_tool is None:
+ raise ObjectNotFound()
+
+ return dynamic_tool.to_dict()
+
+ @web.require_admin
+ @expose_api
+ def create(self, trans, payload, **kwd):
+ """
+ POST /api/dynamic_tools
+
+ The payload is expected to be a tool definition to dynamically
+ load into Galaxy's toolbox (this endpoint requires admin access).
+ Use the standard tools API, with the returned id or tool_hash,
+ to execute the newly created tool.
+ """
+ dynamic_tool = self.app.dynamic_tools_manager.create_tool(
+ trans, payload, allow_load=util.asbool(kwd.get("allow_load", True))
+ )
+ rval = dynamic_tool.to_dict()
+ return rval
diff --git a/lib/galaxy/webapps/galaxy/api/jobs.py b/lib/galaxy/webapps/galaxy/api/jobs.py
index ac51424c2438..b9aa5b312d74 100644
--- a/lib/galaxy/webapps/galaxy/api/jobs.py
+++ b/lib/galaxy/webapps/galaxy/api/jobs.py
@@ -140,6 +140,8 @@ def show(self, trans, id, **kwd):
job_dict['user_email'] = job.user.email
else:
job_dict['user_email'] = None
+ job_dict['cwl_command_state'] = job.cwl_command_state
+ job_dict['cwl_command_state_version'] = job.cwl_command_state_version
def metric_to_dict(metric):
metric_name = metric.metric_name
diff --git a/lib/galaxy/webapps/galaxy/api/tools.py b/lib/galaxy/webapps/galaxy/api/tools.py
index 4175723548d1..313694a562a1 100644
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -15,6 +15,7 @@
from galaxy.web.base.controller import UsesVisualizationMixin
from ._fetch_util import validate_and_normalize_targets
+
log = logging.getLogger(__name__)
# Do not allow these tools to be called directly - they (it) enforces extra security and
@@ -408,12 +409,17 @@ def fetch(self, trans, payload, **kwd):
def create(self, trans, payload, **kwd):
"""
POST /api/tools
- Executes tool using specified inputs and returns tool's outputs.
+
+ If ``tool_id`` appears in the payload this executes tool using
+ specified inputs and returns tool's outputs. Otherwise, the payload
+ is expected to be a tool definition to dynamically load into Galaxy's
+ toolbox.
"""
tool_id = payload.get("tool_id")
+ tool_hash = payload.get("tool_hash")
if tool_id in PROTECTED_TOOLS:
raise exceptions.RequestParameterInvalidException("Cannot execute tool [%s] directly, must use alternative endpoint." % tool_id)
- if tool_id is None:
+ if tool_id is None and tool_hash is None:
raise exceptions.RequestParameterInvalidException("Must specify a valid tool_id to use this endpoint.")
return self._create(trans, payload, **kwd)
@@ -427,7 +433,18 @@ def _create(self, trans, payload, **kwd):
# Get tool.
tool_version = payload.get('tool_version', None)
- tool = trans.app.toolbox.get_tool(payload['tool_id'], tool_version) if 'tool_id' in payload else None
+ tool_id = payload.get('tool_id', None)
+ tool_hash = payload.get('tool_hash', None)
+ get_kwds = dict(
+ tool_id=tool_id,
+ tool_hash=tool_hash,
+ tool_version=tool_version,
+ )
+ if tool_id is None and tool_hash is None:
+ raise exceptions.RequestParameterMissingException("Must specify either a tool_id or a tool_hash.")
+
+ tool = trans.app.toolbox.get_tool(**get_kwds)
+ log.info("found tool [%s] with kwds [%s]" % (tool, get_kwds))
if not tool or not tool.allow_user_access(trans.user):
raise exceptions.MessageException('Tool not found or not accessible.')
if trans.app.config.user_activation_on:
@@ -446,8 +463,8 @@ def _create(self, trans, payload, **kwd):
else:
target_history = None
- # Set up inputs.
- inputs = payload.get('inputs', {})
+ inputs = tool.inputs_from_dict(payload)
+
# Find files coming in as multipart file data and add to inputs.
for k, v in payload.items():
if k.startswith('files_') or k.startswith('__files_'):
diff --git a/lib/galaxy/webapps/galaxy/api/workflows.py b/lib/galaxy/webapps/galaxy/api/workflows.py
index 481b553f50b0..775f36d8c3c0 100644
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -284,6 +284,7 @@ def create(self, trans, payload, **kwd):
'workflow_id',
'installed_repository_file',
'from_history_id',
+ 'from_path',
'shared_workflow_id',
'workflow',
])
@@ -313,10 +314,17 @@ def create(self, trans, payload, **kwd):
archive_file = payload.get('archive_file')
archive_data = None
if archive_source:
- try:
- archive_data = requests.get(archive_source).text
- except Exception:
- raise exceptions.MessageException("Failed to open URL '%s'." % escape(archive_source))
+ if archive_source.startswith("file://"):
+ if not trans.user_is_admin:
+ raise exceptions.AdminRequiredException()
+ workflow_src = {"src": "from_path", "path": archive_source[len("file://"):]}
+ payload["workflow"] = workflow_src
+ return self.__api_import_new_workflow(trans, payload, **kwd)
+ else:
+ try:
+ archive_data = requests.get(archive_source).text
+ except Exception:
+ raise exceptions.MessageException("Failed to open URL '%s'." % escape(archive_source))
elif hasattr(archive_file, 'file'):
uploaded_file = archive_file.file
uploaded_file_name = uploaded_file.name
@@ -326,7 +334,7 @@ def create(self, trans, payload, **kwd):
raise exceptions.MessageException("You attempted to upload an empty file.")
else:
raise exceptions.MessageException("Please provide a URL or file.")
- return self.__api_import_from_archive(trans, archive_data, "uploaded file")
+ return self.__api_import_from_archive(trans, archive_data, "uploaded file", from_path=os.path.abspath(uploaded_file_name))
if 'from_history_id' in payload:
from_history_id = payload.get('from_history_id')
@@ -350,6 +358,15 @@ def create(self, trans, payload, **kwd):
item['url'] = url_for('workflow', id=item['id'])
return item
+ if 'from_path' in payload:
+ from_path = payload.get('from_path')
+ object_id = payload.get("object_id")
+ workflow_src = {"src": "from_path", "path": from_path}
+ if object_id is not None:
+ workflow_src["object_id"] = object_id
+ payload["workflow"] = workflow_src
+ return self.__api_import_new_workflow(trans, payload, **kwd)
+
if 'shared_workflow_id' in payload:
workflow_id = payload['shared_workflow_id']
return self.__api_import_shared_workflow(trans, workflow_id, payload)
@@ -492,7 +509,8 @@ def update(self, trans, id, payload, **kwds):
stored_workflow = self.__get_stored_workflow(trans, id)
workflow_dict = payload.get('workflow') or payload
if workflow_dict:
- workflow_dict = self.__normalize_workflow(workflow_dict)
+ raw_workflow_description = self.__normalize_workflow(trans, workflow_dict)
+ workflow_dict = raw_workflow_description.as_dict
new_workflow_name = workflow_dict.get('name') or workflow_dict.get('name')
if new_workflow_name and new_workflow_name != stored_workflow.name:
sanitized_name = sanitize_html(new_workflow_name)
@@ -524,10 +542,10 @@ def update(self, trans, id, payload, **kwds):
if 'steps' in workflow_dict:
try:
from_dict_kwds = self.__import_or_update_kwds(payload)
- workflow, errors = self.workflow_contents_manager.update_workflow_from_dict(
+ workflow, errors = self.workflow_contents_manager.update_workflow_from_raw_description(
trans,
stored_workflow,
- workflow_dict,
+ raw_workflow_description,
**from_dict_kwds
)
except workflows.MissingToolsException:
@@ -566,15 +584,17 @@ def build_module(self, trans, payload={}):
#
# -- Helper methods --
#
- def __api_import_from_archive(self, trans, archive_data, source=None):
+ def __api_import_from_archive(self, trans, archive_data, source=None, from_path=None):
try:
data = json.loads(archive_data)
+ if from_path is not None:
+ data.update({"src": "from_path", "path": from_path})
except Exception:
raise exceptions.MessageException("The data content does not appear to be a valid workflow.")
if not data:
raise exceptions.MessageException("The data content is missing.")
- data = self.__normalize_workflow(data)
- workflow, missing_tool_tups = self._workflow_from_dict(trans, data, source=source)
+ raw_workflow_description = self.__normalize_workflow(trans, data)
+ workflow, missing_tool_tups = self._workflow_from_dict(trans, raw_workflow_description, source=source)
workflow = workflow.latest_workflow
if workflow.has_errors:
return {"message": "Imported, but some steps in this workflow have validation errors.", "status": "error"}
@@ -586,7 +606,8 @@ def __api_import_from_archive(self, trans, archive_data, source=None):
def __api_import_new_workflow(self, trans, payload, **kwd):
data = payload['workflow']
- data = self.__normalize_workflow(data)
+ raw_workflow_description = self.__normalize_workflow(trans, data)
+ data = raw_workflow_description.as_dict
import_tools = util.string_as_bool(payload.get("import_tools", False))
if import_tools and not trans.user_is_admin:
raise exceptions.AdminRequiredException()
@@ -601,7 +622,7 @@ def __api_import_new_workflow(self, trans, payload, **kwd):
raise exceptions.RequestParameterInvalidException("Published workflow must be importable.")
from_dict_kwds["publish"] = publish
- workflow, missing_tool_tups = self._workflow_from_dict(trans, data, **from_dict_kwds)
+ workflow, missing_tool_tups = self._workflow_from_dict(trans, raw_workflow_description, **from_dict_kwds)
if importable:
self._make_item_accessible(trans.sa_session, workflow)
trans.sa_session.flush()
@@ -654,8 +675,8 @@ def __import_or_update_kwds(self, payload):
'fill_defaults': fill_defaults,
}
- def __normalize_workflow(self, as_dict):
- return self.workflow_contents_manager.normalize_workflow_format(as_dict)
+ def __normalize_workflow(self, trans, as_dict):
+ return self.workflow_contents_manager.normalize_workflow_format(trans, as_dict)
@expose_api
def import_shared_workflow_deprecated(self, trans, payload, **kwd):
diff --git a/lib/galaxy/webapps/galaxy/buildapp.py b/lib/galaxy/webapps/galaxy/buildapp.py
index 20f7f3612580..f73ee60e92b8 100644
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -334,6 +334,7 @@ def populate_api_routes(webapp, app):
webapp.mapper.connect('/api/tools/{id:.+?}/build_dependency_cache', action='build_dependency_cache', controller="tools", conditions=dict(method=["POST"]))
webapp.mapper.connect('/api/tools/{id:.+?}', action='show', controller="tools")
webapp.mapper.resource('tool', 'tools', path_prefix='/api')
+ webapp.mapper.resource('dynamic_tools', 'dynamic_tools', path_prefix='/api')
webapp.mapper.connect('/api/dependency_resolvers/clean', action="clean", controller="tool_dependencies", conditions=dict(method=["POST"]))
webapp.mapper.connect('/api/dependency_resolvers/dependency', action="manager_dependency", controller="tool_dependencies", conditions=dict(method=["GET"]))
diff --git a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
index 5a456179d4c9..16f61e1c80e3 100644
--- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -61,7 +61,7 @@ def __tool_404__():
redirect=redirect))
if not tool.allow_user_access(trans.user):
return __tool_404__()
- if tool.tool_type == 'default':
+ if tool.tool_type in ['default', 'cwl', 'galactic_cwl']:
return trans.response.send_redirect(url_for(controller='root', tool_id=tool_id))
# execute tool without displaying form (used for datasource tools)
diff --git a/lib/galaxy/webapps/tool_shed/model/__init__.py b/lib/galaxy/webapps/tool_shed/model/__init__.py
index e2720dc25186..bf3ec3b30db0 100644
--- a/lib/galaxy/webapps/tool_shed/model/__init__.py
+++ b/lib/galaxy/webapps/tool_shed/model/__init__.py
@@ -479,10 +479,34 @@ def __init__(self):
self.tool_inputs = None
self.tool_errors = None
self.position = None
- self.input_connections = []
+ self.inputs = []
self.config = None
self.label = None
+ def get_or_add_input(self, input_name):
+ for step_input in self.inputs:
+ if step_input.name == input_name:
+ return step_input
+
+ step_input = WorkflowStepInput()
+ step_input.workflow_step = self
+ step_input.name = input_name
+ self.inputs.append(step_input)
+ return step_input
+
+ @property
+ def input_connections(self):
+ connections = [_ for step_input in self.inputs for _ in step_input.connections]
+ return connections
+
+
+class WorkflowStepInput(object):
+
+ def __init__(self):
+ self.id = None
+ self.name = None
+ self.connections = []
+
class WorkflowStepConnection(object):
diff --git a/lib/galaxy/workflow/extract.py b/lib/galaxy/workflow/extract.py
index 44bdda300a21..bf2f41657871 100644
--- a/lib/galaxy/workflow/extract.py
+++ b/lib/galaxy/workflow/extract.py
@@ -124,10 +124,10 @@ def extract_steps(trans, history=None, job_ids=None, dataset_ids=None, dataset_c
else:
log.info("Cannot find implicit input collection for %s" % input_name)
if other_hid in hid_to_output_pair:
+ step_input = step.get_or_add_input(input_name)
other_step, other_name = hid_to_output_pair[other_hid]
conn = model.WorkflowStepConnection()
- conn.input_step = step
- conn.input_name = input_name
+ conn.input_step_input = step_input
# Should always be connected to an earlier step
conn.output_step = other_step
conn.output_name = other_name
diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py
index 9743d4f42c9a..aaf451402dd7 100644
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -22,6 +22,7 @@
DefaultToolState,
ToolInputsNotReadyException
)
+from galaxy.tools.cwl.util import set_basename_and_derived_properties
from galaxy.tools.execute import execute, MappingParameters, PartialJobExecution
from galaxy.tools.parameters import (
check_param,
@@ -34,6 +35,7 @@
DataCollectionToolParameter,
DataToolParameter,
is_runtime_value,
+ FieldTypeToolParameter,
parameter_types,
runtime_to_json,
SelectToolParameter,
@@ -72,6 +74,7 @@ class WorkflowModule(object):
def __init__(self, trans, content_id=None, **kwds):
self.trans = trans
+ self.app = trans.app
self.content_id = content_id
self.state = DefaultToolState()
@@ -197,11 +200,14 @@ def get_runtime_inputs(self, **kwds):
"""
return {}
- def compute_runtime_state(self, trans, step_updates=None):
+ def compute_runtime_state(self, trans, step=None, step_updates=None):
""" Determine the runtime state (potentially different from self.state
which describes configuration state). This (again unlike self.state) is
currently always a `DefaultToolState` object.
+ If `step` is not `None`, it will be used to search for default values
+ defined in workflow input steps.
+
If `step_updates` is `None`, this is likely for rendering the run form
for instance and no runtime properties are available and state must be
solely determined by the default runtime state described by the step.
@@ -211,6 +217,21 @@ def compute_runtime_state(self, trans, step_updates=None):
"""
state = self.get_runtime_state()
step_errors = {}
+
+ if step is not None:
+
+ def update_value(input, context, prefixed_name, **kwargs):
+ step_input = step.get_input(prefixed_name)
+ if step_input is None:
+ return NO_REPLACEMENT
+
+ if step_input.default_value_set:
+ return step_input.default_value
+
+ return NO_REPLACEMENT
+
+ visit_input_values(self.get_runtime_inputs(), state.inputs, update_value, no_replacement_value=NO_REPLACEMENT)
+
if step_updates:
def update_value(input, context, prefixed_name, **kwargs):
@@ -303,12 +324,31 @@ def _find_collections_to_match(self, progress, step, all_inputs):
if multiple:
continue
+ step_input = step.inputs_by_name.get(name, None)
+ scatter_type = "dotproduct"
+ if step_input:
+ scatter_type = step_input.scatter_type
+ assert scatter_type in ["dotproduct", "disabled"], "Unimplemented scatter type [%s]" % scatter_type
+
+ workflow_mapping_structure = progress.workflow_mapping_structure
+ if workflow_mapping_structure and workflow_mapping_structure.is_leaf and scatter_type == "disabled":
+ continue
+
data = progress.replacement_for_input(step, input_dict)
- can_map_over = hasattr(data, "collection") # and data.collection.allow_implicit_mapping
+ can_map_over = hasattr(data, "collection") and data.collection.allow_implicit_mapping
if not can_map_over:
continue
+ if workflow_mapping_structure and not workflow_mapping_structure.is_leaf:
+ # TODO: replace assert with a proper exception
+ if not workflow_mapping_structure.collection_type_description.is_subcollection_of_type(
+ data.collection.collection_type, proper=False
+ ):
+ template = "Workflow input replacement of collection type [%s] is not a super collection of workflow collection type [%s]."
+ message = template % (data.collection.collection_type, workflow_mapping_structure.collection_type_description)
+ raise Exception(message)
+
is_data_param = input_dict["input_type"] == "dataset"
if is_data_param:
collections_to_match.add(name, data)
@@ -371,28 +411,23 @@ def get_name(self):
def get_all_inputs(self, data_only=False, connectable_only=False):
""" Get configure time data input descriptions. """
# Filter subworkflow steps and get inputs
- step_to_input_type = {
- "data_input": "dataset",
- "data_collection_input": "dataset_collection",
- "parameter_input": "parameter",
- }
inputs = []
if hasattr(self.subworkflow, 'input_steps'):
for step in self.subworkflow.input_steps:
name = step.label
+ step_module = module_factory.from_workflow_step(self.trans, step)
if not name:
- step_module = module_factory.from_workflow_step(self.trans, step)
name = "%s:%s" % (step.order_index, step_module.get_name())
- step_type = step.type
- assert step_type in step_to_input_type
input = dict(
input_subworkflow_step_id=step.order_index,
name=name,
label=name,
multiple=False,
extensions=["data"],
- input_type=step_to_input_type[step_type],
+ input_type=step.input_type,
)
+ if hasattr(step_module, "collection_type"):
+ input["collection_types"] = [step_module.collection_type]
inputs.append(input)
return inputs
@@ -433,7 +468,12 @@ def execute(self, trans, progress, invocation_step, use_cached_job=False):
inputs, etc...
"""
step = invocation_step.workflow_step
- subworkflow_invoker = progress.subworkflow_invoker(trans, step, use_cached_job=use_cached_job)
+ all_inputs = self.get_all_inputs()
+ collection_info = self.compute_collection_info(progress, step, all_inputs)
+ subworkflow_mapping_structure = collection_info.structure if collection_info else None
+ subworkflow_invoker = progress.subworkflow_invoker(
+ trans, step, subworkflow_mapping_structure, use_cached_job=use_cached_job
+ )
subworkflow_invoker.invoke()
subworkflow = subworkflow_invoker.workflow
subworkflow_progress = subworkflow_invoker.progress
@@ -599,41 +639,56 @@ class InputParameterModule(WorkflowModule):
name = "Input parameter"
default_parameter_type = "text"
default_optional = False
+ default_default_value = ''
parameter_type = default_parameter_type
optional = default_optional
+ default_value = default_default_value
def get_inputs(self):
# TODO: Use an external xml or yaml file to load the parameter definition
parameter_type = self.state.inputs.get("parameter_type", self.default_parameter_type)
optional = self.state.inputs.get("optional", self.default_optional)
+ default_value = self.state.inputs.get("default_value", self.default_value) or ''
input_parameter_type = SelectToolParameter(None, XML(
'''
-
+
+
- '''))
+ ''' % parameter_type))
for i, option in enumerate(input_parameter_type.static_options):
option = list(option)
if option[1] == parameter_type:
# item 0 is option description, item 1 is value, item 2 is "selected"
option[2] = True
input_parameter_type.static_options[i] = tuple(option)
+ input_default_value = FieldTypeToolParameter(None, XML(
+ '''
+
+
+ '''
+ % default_value))
return odict([("parameter_type", input_parameter_type),
- ("optional", BooleanToolParameter(None, Element("param", name="optional", label="Optional", type="boolean", value=optional)))])
+ ("optional", BooleanToolParameter(None, Element("param", name="optional", label="Optional", type="boolean", value=optional))),
+ ("default_value", input_default_value)])
def get_runtime_inputs(self, **kwds):
parameter_type = self.state.inputs.get("parameter_type", self.default_parameter_type)
optional = self.state.inputs.get("optional", self.default_optional)
- if parameter_type not in ["text", "boolean", "integer", "float", "color"]:
+ if parameter_type not in ["text", "boolean", "integer", "float", "color", "field"]:
raise ValueError("Invalid parameter type for workflow parameters encountered.")
parameter_class = parameter_types[parameter_type]
parameter_kwds = {}
- if parameter_type in ["integer", "float"]:
- parameter_kwds["value"] = str(0)
+ default_value = self.state.inputs.get("default_value", self.default_default_value)
+ if default_value:
+ parameter_kwds["value"] = str(default_value)
+ else:
+ if parameter_type in ["integer", "float"]:
+ parameter_kwds["value"] = str(0)
# TODO: Use a dict-based description from YAML tool source
element = Element("param", name="input", label=self.label, type=parameter_type, optional=str(optional), **parameter_kwds)
@@ -658,7 +713,77 @@ def get_all_outputs(self, data_only=False):
def execute(self, trans, progress, invocation_step, use_cached_job=False):
step = invocation_step.workflow_step
- step_outputs = dict(output=step.state.inputs['input'])
+ input_value = step.state.inputs['input']
+ if input_value is None:
+ default_value = loads(step.tool_inputs.get("default_value", "{}"))
+ input_value = default_value.get("value")
+ step_outputs = dict(output=input_value)
+ progress.set_outputs_for_input(invocation_step, step_outputs)
+
+
+# TODO: Implementation of this was for older framework - need to redo it now
+# that everything is treated like a tool.
+class ExpressionModule(WorkflowModule):
+ default_expression = "true"
+ default_inputs = []
+ type = "expression"
+ expression = default_expression
+ inputs = default_inputs
+ state_fields = [
+ "expression",
+ "inputs",
+ ]
+
+ @classmethod
+ def default_state(Class):
+ return dict(
+ expression=Class.default_expression,
+ inputs=Class.default_inputs[:],
+ )
+
+ def _abstract_config_form(self):
+ # TODO: Redo this with the new framework.
+ # TODO: add ability to specify input...
+ return None
+
+ def get_runtime_inputs(self, **kwds):
+ input_defs = odict.odict()
+ for input in self.state.get("inputs", self.default_inputs):
+ name = input.get("name")
+ label = input.get("label", name)
+ parameter_type = input.get("parameter_type")
+ optional = input.get("optional", False)
+ if parameter_type not in ["text", "boolean", "integer", "float", "color"]:
+ raise ValueError("Invalid parameter type for workflow parameters encountered.")
+ parameter_class = parameter_types[parameter_type]
+ parameter_kwds = {}
+ if parameter_type in ["integer", "float"]:
+ parameter_kwds["value"] = str(0)
+
+ # TODO: Use a dict-based description from YAML tool source
+ element = Element("param", name=name, label=label, type=parameter_type, optional=str(optional), **parameter_kwds)
+ input_def = parameter_class(None, element)
+ input_defs[name] = input_def
+ input_defs
+
+ def get_runtime_state(self):
+ state = DefaultToolState()
+
+ state.inputs = odict.odict()
+ for input in self.state.get("inputs", self.default_inputs):
+ name = input.get("name")
+ state.inputs[name] = None
+
+ return state
+
+ def get_runtime_input_dicts(self, step_annotation):
+ return [dict(description=step_annotation)]
+
+ def get_data_inputs(self):
+ return []
+
+ def execute(self, trans, progress, invocation_step):
+ step_outputs = None, dict(output=invocation_step.step.state.inputs['input'])
progress.set_outputs_for_input(invocation_step, step_outputs)
@@ -720,11 +845,12 @@ class ToolModule(WorkflowModule):
type = "tool"
name = "Tool"
- def __init__(self, trans, tool_id, tool_version=None, exact_tools=True, **kwds):
+ def __init__(self, trans, tool_id, tool_version=None, exact_tools=True, tool_hash=None, **kwds):
super(ToolModule, self).__init__(trans, content_id=tool_id, **kwds)
self.tool_id = tool_id
self.tool_version = tool_version
- self.tool = trans.app.toolbox.get_tool(tool_id, tool_version=tool_version, exact=exact_tools)
+ self.tool_hash = tool_hash
+ self.tool = trans.app.toolbox.get_tool(tool_id, tool_version=tool_version, exact=exact_tools, tool_hash=tool_hash)
if self.tool and tool_version and exact_tools and str(self.tool.version) != str(tool_version):
log.info("Exact tool specified during workflow module creation for [%s] but couldn't find correct version [%s]." % (tool_id, tool_version))
self.tool = None
@@ -738,12 +864,13 @@ def __init__(self, trans, tool_id, tool_version=None, exact_tools=True, **kwds):
@classmethod
def from_dict(Class, trans, d, **kwds):
tool_id = d.get('content_id') or d.get('tool_id')
- if tool_id is None:
- raise exceptions.RequestParameterInvalidException("No tool id could be located for step [%s]." % d)
tool_version = d.get('tool_version')
if tool_version:
tool_version = str(tool_version)
- module = super(ToolModule, Class).from_dict(trans, d, tool_id=tool_id, tool_version=tool_version, **kwds)
+ tool_hash = d.get('tool_hash', None)
+ if tool_id is None and tool_hash is None:
+            raise exceptions.RequestParameterInvalidException("No content id could be located for step [%s]" % d)
+ module = super(ToolModule, Class).from_dict(trans, d, tool_id=tool_id, tool_version=tool_version, tool_hash=tool_hash, **kwds)
module.post_job_actions = d.get('post_job_actions', {})
module.workflow_outputs = d.get('workflow_outputs', [])
if module.tool:
@@ -759,16 +886,20 @@ def from_dict(Class, trans, d, **kwds):
@classmethod
def from_workflow_step(Class, trans, step, **kwds):
- tool_id = trans.app.toolbox.get_tool_id(step.tool_id) or step.tool_id
+ if step.tool_id is not None:
+ tool_id = trans.app.toolbox.get_tool_id(step.tool_id) or step.tool_id
+ else:
+ tool_id = None
tool_version = step.tool_version
- module = super(ToolModule, Class).from_workflow_step(trans, step, tool_id=tool_id, tool_version=tool_version, **kwds)
+ tool_hash = step.tool_hash
+ module = super(ToolModule, Class).from_workflow_step(trans, step, tool_id=tool_id, tool_version=tool_version, tool_hash=tool_hash, **kwds)
module.workflow_outputs = step.workflow_outputs
module.post_job_actions = {}
for pja in step.post_job_actions:
module.post_job_actions[pja.action_type] = pja
if module.tool:
message = ""
- if step.tool_id != module.tool_id: # This means the exact version of the tool is not installed. We inform the user.
+ if step.tool_id and step.tool_id != module.tool_id: # This means the exact version of the tool is not installed. We inform the user.
old_tool_shed = step.tool_id.split("/repos/")[0]
if old_tool_shed not in tool_id: # Only display the following warning if the tool comes from a different tool shed
old_tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(trans.app, old_tool_shed)
@@ -793,7 +924,11 @@ def from_workflow_step(Class, trans, step, **kwds):
def save_to_step(self, step):
super(ToolModule, self).save_to_step(step)
step.tool_id = self.tool_id
- step.tool_version = self.get_version()
+ if self.tool:
+ step.tool_version = self.get_version()
+ else:
+ step.tool_version = self.tool_version
+ step.tool_hash = self.tool_hash
for k, v in self.post_job_actions.items():
pja = self.__to_pja(k, v, step)
self.trans.sa_session.add(pja)
@@ -972,7 +1107,7 @@ def recover_state(self, state, **kwds):
"""
super(ToolModule, self).recover_state(state, **kwds)
if kwds.get("fill_defaults", False) and self.tool:
- self.compute_runtime_state(self.trans, step_updates=None)
+ self.compute_runtime_state(self.trans, step=None, step_updates=None)
self.augment_tool_state_for_input_connections(**kwds)
self.tool.check_and_update_param_values(self.state.inputs, self.trans, workflow_building_mode=True)
@@ -1042,14 +1177,14 @@ def get_runtime_state(self):
def get_runtime_inputs(self, **kwds):
return self.get_inputs()
- def compute_runtime_state(self, trans, step_updates=None):
+ def compute_runtime_state(self, trans, step=None, step_updates=None):
# Warning: This method destructively modifies existing step state.
if self.tool:
step_errors = {}
state = self.state
self.runtime_post_job_actions = {}
+ state, step_errors = super(ToolModule, self).compute_runtime_state(trans, step, step_updates)
if step_updates:
- state, step_errors = super(ToolModule, self).compute_runtime_state(trans, step_updates)
self.runtime_post_job_actions = step_updates.get(RUNTIME_POST_JOB_ACTIONS_KEY, {})
step_metadata_runtime_state = self.__step_meta_runtime_state()
if step_metadata_runtime_state:
@@ -1070,10 +1205,98 @@ def decode_runtime_state(self, runtime_state):
else:
raise ToolMissingException("Tool %s missing. Cannot recover runtime state." % self.tool_id)
+ def evaluate_value_from_expressions(self, progress, step, execution_state):
+ value_from_expressions = {}
+ replacements = {}
+
+ for key, value in execution_state.inputs.items():
+ step_input = step.inputs_by_name.get(key, None)
+ if step_input and step_input.value_from is not None:
+ value_from_expressions[key] = step_input.value_from
+
+ if not value_from_expressions:
+ return replacements
+
+ hda_references = []
+
+ def to_cwl(value):
+ if isinstance(value, model.HistoryDatasetAssociation):
+                # NOTE(review): these two readiness checks guard against using
+                # unfinished datasets in valueFrom expressions; confirm before removing.
+ if not value.dataset.in_ready_state():
+ why = "dataset [%s] is needed for valueFrom expression and is non-ready" % value.id
+ raise DelayedWorkflowEvaluation(why=why)
+
+ if not value.is_ok:
+ raise CancelWorkflowEvaluation()
+
+ hda_references.append(value)
+ if value.ext == "expression.json":
+ with open(value.file_name, "r") as f:
+ return loads(f.read())
+ else:
+ properties = {
+ "class": "File",
+ "location": "step_input://%d" % len(hda_references),
+ }
+ set_basename_and_derived_properties(
+ properties, value.dataset.cwl_filename or value.name
+ )
+ return properties
+
+ elif hasattr(value, "collection"):
+ collection = value.collection
+ if collection.collection_type == "list":
+                    return list(map(to_cwl, collection.dataset_instances))
+ elif collection.collection_type == "record":
+ rval = {}
+ for element in collection.elements:
+ rval[element.element_identifier] = to_cwl(element.element_object)
+ return rval
+ else:
+ return value
+
+ def from_cwl(value):
+ # TODO: turn actual files into HDAs here ... somehow I suppose. Things with
+ # file:// locations for instance.
+ if isinstance(value, dict) and "class" in value and "location" in value:
+            if value["class"] == "File":  # FIXME: File dicts with step_input:// locations never reach the deref below
+ # This is going to re-file -> HDA this each iteration I think, not a good
+ # implementation.
+ return progress.raw_to_galaxy(value)
+
+ assert value["location"].startswith("step_input://"), "Invalid location %s" % value
+ return hda_references[int(value["location"][len("step_input://"):]) - 1]
+ elif isinstance(value, dict):
+ raise NotImplementedError()
+ else:
+ return value
+
+ step_state = {}
+ for key, value in execution_state.inputs.items():
+ step_state[key] = to_cwl(value)
+
+ replacements = {}
+ for key, value_from in value_from_expressions.items():
+ from cwltool.expression import do_eval
+ as_cwl_value = do_eval(
+ value_from,
+ step_state,
+ [{"class": "InlineJavascriptRequirement"}],
+ None,
+ None,
+ {},
+ context=step_state[key],
+ )
+ new_val = from_cwl(as_cwl_value)
+ replacements[key] = new_val
+
+ return replacements
+
def execute(self, trans, progress, invocation_step, use_cached_job=False):
invocation = invocation_step.workflow_invocation
step = invocation_step.workflow_step
- tool = trans.app.toolbox.get_tool(step.tool_id, tool_version=step.tool_version)
+ tool = trans.app.toolbox.get_tool(step.tool_id, tool_version=step.tool_version, tool_hash=step.tool_hash)
if not tool.is_workflow_compatible:
message = "Specified tool [%s] in workflow is not workflow-compatible." % tool.id
raise Exception(message)
@@ -1110,7 +1333,7 @@ def callback(input, prefixed_name, **kwargs):
replacement = NO_REPLACEMENT
if iteration_elements and prefixed_name in iteration_elements:
- if isinstance(input, DataToolParameter):
+ if not isinstance(input, DataCollectionToolParameter):
# Pull out dataset instance from element.
replacement = iteration_elements[prefixed_name].dataset_instance
if hasattr(iteration_elements[prefixed_name], u'element_identifier') and iteration_elements[prefixed_name].element_identifier:
@@ -1124,6 +1347,28 @@ def callback(input, prefixed_name, **kwargs):
if replacement is not NO_REPLACEMENT:
found_replacement_keys.add(prefixed_name)
+ is_data = isinstance(input, DataToolParameter) or isinstance(input, DataCollectionToolParameter) or isinstance(input, FieldTypeToolParameter)
+ if not is_data and getattr(replacement, "history_content_type", None) == "dataset" and getattr(replacement, "ext", None) == "expression.json":
+ if isinstance(replacement, model.HistoryDatasetAssociation):
+ if not replacement.dataset.in_ready_state():
+ why = "dataset [%s] is needed for non-data connection and is non-ready" % replacement.id
+ raise DelayedWorkflowEvaluation(why=why)
+
+ if not replacement.is_ok:
+ raise CancelWorkflowEvaluation()
+
+ with open(replacement.file_name, "r") as f:
+ replacement = loads(f.read())
+
+ if isinstance(input, FieldTypeToolParameter):
+ if isinstance(replacement, model.HistoryDatasetAssociation):
+ replacement = {"src": "hda", "value": replacement}
+ elif isinstance(replacement, model.HistoryDatasetCollectionAssociation):
+ replacement = {"src": "hdca", "value": replacement}
+ elif replacement is not NO_REPLACEMENT:
+ replacement = {"src": "json", "value": replacement}
+
+ log.info("replacement for [%s] is [%s]" % (prefixed_name, replacement))
return replacement
try:
@@ -1134,10 +1379,38 @@ def callback(input, prefixed_name, **kwargs):
message = message_template % (tool.name, k.message)
raise exceptions.MessageException(message)
+ extra_step_state = {}
+ for step_input in step.inputs:
+ step_input_name = step_input.name
+ if step_input_name not in execution_state.inputs and step_input_name in all_inputs_by_name:
+ value = progress.replacement_for_input(step, all_inputs_by_name[step_input_name])
+ # TODO: only do this for values... is everything with a default
+ # this way a field parameter? I guess not?
+ extra_step_state[step_input.name] = value
+
unmatched_input_connections = expected_replacement_keys - found_replacement_keys
if unmatched_input_connections:
log.warn("Failed to use input connections for inputs [%s]" % unmatched_input_connections)
+ expression_replacements = self.evaluate_value_from_expressions(
+ progress, step, execution_state
+ )
+
+ def expression_callback(input, prefixed_name, **kwargs):
+ replacement = NO_REPLACEMENT
+
+ if prefixed_name in expression_replacements:
+ expression_replacement = expression_replacements[prefixed_name]
+ if isinstance(input, FieldTypeToolParameter):
+ replacement = {"src": "json", "value": expression_replacement}
+ else:
+ replacement = expression_replacement
+
+ return replacement
+
+ # Replace expression values with those calculated...
+ visit_input_values(tool.inputs, execution_state.inputs, expression_callback, no_replacement_value=NO_REPLACEMENT)
+
param_combinations.append(execution_state.inputs)
complete = False
@@ -1351,6 +1624,30 @@ def load_module_sections(trans):
return module_sections
+class EphemeralCollection(object):
+ """Interface for collecting datasets together in workflows and treating as collections.
+
+    These aren't real collections in the database - just datasets grouped together
+    in some way by workflows for passing data around as collections.
+ """
+
+ # Used to distinguish between datasets and collections frequently.
+ ephemeral = True
+ history_content_type = "dataset_collection"
+ name = "Dynamically generated collection"
+
+ def __init__(self, collection, history):
+ self.collection = collection
+ self.history = history
+
+ hdca = model.HistoryDatasetCollectionAssociation(
+ collection=collection,
+ history=history,
+ )
+ hdca.history.add_dataset_collection(hdca)
+ self.persistent_object = hdca
+
+
class DelayedWorkflowEvaluation(Exception):
def __init__(self, why=None):
@@ -1385,6 +1682,7 @@ def inject(self, step, step_args=None, steps=None, **kwargs):
step.upgrade_messages = {}
# Make connection information available on each step by input name.
+ step.setup_inputs_by_name()
step.setup_input_connections_by_name()
# Populate module.
@@ -1404,7 +1702,7 @@ def inject(self, step, step_args=None, steps=None, **kwargs):
subworkflow = step.subworkflow
populate_module_and_state(self.trans, subworkflow, param_map=unjsonified_subworkflow_param_map)
- state, step_errors = module.compute_runtime_state(self.trans, step_args)
+ state, step_errors = module.compute_runtime_state(self.trans, step, step_args)
step.state = state
# Fix any missing parameters
@@ -1422,6 +1720,8 @@ def populate_module_and_state(trans, workflow, param_map, allow_tool_state_corre
for step in workflow.steps:
step_args = param_map.get(step.id, {})
step_errors = module_injector.inject(step, step_args=step_args)
+        # TODO: restrict this clearing to connection-related upgrade messages only
+ step.upgrade_messages = []
if step_errors:
raise exceptions.MessageException(step_errors, err_data={step.order_index: step_errors})
if step.upgrade_messages:
diff --git a/lib/galaxy/workflow/run.py b/lib/galaxy/workflow/run.py
index b75bee7ecac3..27cccb86d664 100644
--- a/lib/galaxy/workflow/run.py
+++ b/lib/galaxy/workflow/run.py
@@ -1,4 +1,5 @@
import logging
+import os
import uuid
from galaxy import model
@@ -11,6 +12,7 @@
WorkflowRunConfig
)
+
log = logging.getLogger(__name__)
@@ -272,7 +274,7 @@ def _invoke_step(self, invocation_step):
class WorkflowProgress(object):
- def __init__(self, workflow_invocation, inputs_by_step_id, module_injector, param_map, jobs_per_scheduling_iteration=-1):
+ def __init__(self, workflow_invocation, inputs_by_step_id, module_injector, param_map, workflow_mapping_structure=None, jobs_per_scheduling_iteration=-1):
self.outputs = odict()
self.module_injector = module_injector
self.workflow_invocation = workflow_invocation
@@ -280,6 +282,7 @@ def __init__(self, workflow_invocation, inputs_by_step_id, module_injector, para
self.param_map = param_map
self.jobs_per_scheduling_iteration = jobs_per_scheduling_iteration
self.jobs_scheduled_this_iteration = 0
+ self.workflow_mapping_structure = workflow_mapping_structure
@property
def maximum_jobs_to_schedule_or_none(self):
@@ -288,6 +291,10 @@ def maximum_jobs_to_schedule_or_none(self):
else:
return None
+ @property
+ def trans(self):
+ return self.module_injector.trans
+
def record_executed_job_count(self, job_count):
self.jobs_scheduled_this_iteration += job_count
@@ -318,6 +325,106 @@ def remaining_steps(self):
remaining_steps.append((step, invocation_step))
return remaining_steps
+ def replacement_for_input_connections(self, step, input_dict, connections):
+ replacement = modules.NO_REPLACEMENT
+
+ prefixed_name = input_dict["name"]
+ step_input = step.inputs_by_name.get(prefixed_name, None)
+
+ merge_type = model.WorkflowStepInput.default_merge_type
+ if step_input:
+ merge_type = step_input.merge_type
+
+ is_data = input_dict["input_type"] in ["dataset", "dataset_collection"]
+ if len(connections) == 1:
+ replacement = self.replacement_for_connection(connections[0], is_data=is_data)
+ else:
+ # We've mapped multiple individual inputs to a single parameter,
+ # promote output to a collection.
+ inputs = []
+ input_history_content_type = None
+ input_collection_type = None
+ for i, c in enumerate(connections):
+ input_from_connection = self.replacement_for_connection(c, is_data=is_data)
+ is_data = hasattr(input_from_connection, "history_content_type")
+ if is_data:
+ input_history_content_type = input_from_connection.history_content_type
+ if i == 0:
+ if input_history_content_type == "dataset_collection":
+ input_collection_type = input_from_connection.collection.collection_type
+ else:
+ input_collection_type = None
+ else:
+ if input_collection_type is None:
+ if input_history_content_type != "dataset":
+ raise Exception("Cannot map over a combination of datasets and collections.")
+ else:
+ if input_history_content_type != "dataset_collection":
+ raise Exception("Cannot merge over combinations of datasets and collections.")
+ elif input_from_connection.collection.collection_type != input_collection_type:
+ raise Exception("Cannot merge collections of different collection types.")
+
+ inputs.append(input_from_connection)
+
+ if input_dict["input_type"] == "dataset_collection":
+ # TODO: Implement more nested types here...
+            assert input_dict["collection_types"] == ["list"], input_dict["collection_types"]
+
+ collection = model.DatasetCollection()
+            # If individual datasets provided (type is None) - promote to a list.
+ collection.collection_type = input_collection_type or "list"
+ elements = []
+
+ next_index = 0
+ if input_collection_type is None:
+
+ if merge_type == "merge_nested":
+ raise NotImplementedError()
+
+ for input in inputs:
+ element = model.DatasetCollectionElement(
+ element=input,
+ element_index=next_index,
+ element_identifier=str(next_index),
+ )
+ elements.append(element)
+ next_index += 1
+
+ elif input_collection_type == "list":
+ if merge_type == "merge_flattened":
+ for input in inputs:
+ for dataset_instance in input.dataset_instances:
+ element = model.DatasetCollectionElement(
+ element=dataset_instance,
+ element_index=next_index,
+ element_identifier=str(next_index),
+ )
+ elements.append(element)
+ next_index += 1
+ elif merge_type == "merge_nested":
+ # Increase nested level of collection
+ collection.collection_type = "list:%s" % input_collection_type
+ for input in inputs:
+ element = model.DatasetCollectionElement(
+ element=input.collection,
+ element_index=next_index,
+ element_identifier=str(next_index),
+ )
+ elements.append(element)
+ next_index += 1
+ else:
+ raise NotImplementedError()
+
+ collection.elements = elements
+
+ ephemeral_collection = modules.EphemeralCollection(
+ collection=collection,
+ history=self.workflow_invocation.history,
+ )
+ replacement = ephemeral_collection
+
+ return replacement
+
def replacement_for_input(self, step, input_dict):
replacement = modules.NO_REPLACEMENT
prefixed_name = input_dict["name"]
@@ -333,8 +440,9 @@ def replacement_for_input(self, step, input_dict):
if isinstance(replacement[0], model.HistoryDatasetCollectionAssociation):
replacement = replacement[0]
else:
- is_data = input_dict["input_type"] in ["dataset", "dataset_collection"]
- replacement = self.replacement_for_connection(connection[0], is_data=is_data)
+ replacement = self.replacement_for_input_connections(
+ step, input_dict, connection,
+ )
return replacement
@@ -367,6 +475,22 @@ def replacement_for_connection(self, connection, is_data=True):
delayed_why = "dependent collection [%s] not yet populated with datasets" % replacement.id
raise modules.DelayedWorkflowEvaluation(why=delayed_why)
+
+ is_hda = isinstance(replacement, model.HistoryDatasetAssociation)
+ is_hdca = isinstance(replacement, model.HistoryDatasetCollectionAssociation)
+ if not is_data and (is_hda or is_hdca):
+ dataset_instances = []
+ if is_hda:
+ dataset_instances = [replacement]
+ else:
+ dataset_instances = replacement.dataset_instances
+
+ for dataset_instance in dataset_instances:
+ if dataset_instance.is_pending:
+                    raise modules.DelayedWorkflowEvaluation(why="dataset [%s] is pending" % dataset_instance.id)
+ elif not dataset_instance.is_ok:
+ raise modules.CancelWorkflowEvaluation()
+
return replacement
def get_replacement_workflow_output(self, workflow_output):
@@ -379,6 +503,40 @@ def get_replacement_workflow_output(self, workflow_output):
else:
return step_outputs[output_name]
+ def raw_to_galaxy(self, as_dict_value):
+ trans = self.trans
+ app = trans.app
+ history = self.workflow_invocation.history
+
+ from galaxy.tools.cwl.util import abs_path
+ relative_to = "/" # TODO
+ path = abs_path(as_dict_value.get("location"), relative_to)
+
+ name = os.path.basename(path)
+ primary_data = model.HistoryDatasetAssociation(
+ name=name,
+ extension="data", # TODO: cwl default...
+ designation=None,
+ visible=True,
+ dbkey="?",
+ create_dataset=True,
+ flush=False,
+ sa_session=trans.sa_session
+ )
+ log.info("path is %s" % path)
+ primary_data.link_to(path)
+ permissions = app.security_agent.history_get_default_permissions(history)
+ app.security_agent.set_all_dataset_permissions(primary_data.dataset, permissions, new=True, flush=False)
+ trans.sa_session.add(primary_data)
+ trans.sa_session.flush()
+ history.add_dataset(primary_data)
+ primary_data.init_meta()
+ primary_data.set_meta()
+ primary_data.set_peek()
+ primary_data.raw_set_dataset_state('ok')
+ trans.sa_session.flush()
+ return primary_data
+
def set_outputs_for_input(self, invocation_step, outputs=None):
step = invocation_step.workflow_step
@@ -388,12 +546,24 @@ def set_outputs_for_input(self, invocation_step, outputs=None):
if self.inputs_by_step_id:
step_id = step.id
if step_id not in self.inputs_by_step_id and 'output' not in outputs:
- template = "Step with id %s not found in inputs_step_id (%s)"
- message = template % (step_id, self.inputs_by_step_id)
- raise ValueError(message)
+ default_value = step.input_default_value
+ if default_value:
+ outputs['output'] = default_value
+ else:
+ template = "Step with id %s not found in inputs_step_id (%s)"
+ message = template % (step.log_str(), self.inputs_by_step_id)
+ raise ValueError(message)
elif step_id in self.inputs_by_step_id:
outputs['output'] = self.inputs_by_step_id[step_id]
+ output = outputs.get('output')
+ # TODO: handle extra files and directory types and collections and all the stuff...
+ if output and isinstance(output, dict) and output.get("class") == "File":
+ primary_data = self.raw_to_galaxy(output)
+ outputs['output'] = primary_data
+
+ log.info("outputs are")
+ log.info(outputs)
self.set_step_outputs(invocation_step, outputs)
def set_step_outputs(self, invocation_step, outputs, already_persisted=False):
@@ -444,10 +614,10 @@ def _subworkflow_invocation(self, step):
raise Exception("Failed to find persisted workflow invocation for step [%s]" % step.id)
return subworkflow_invocation
- def subworkflow_invoker(self, trans, step, use_cached_job=False):
+ def subworkflow_invoker(self, trans, step, structure, use_cached_job=False):
subworkflow_invocation = self._subworkflow_invocation(step)
workflow_run_config = workflow_request_to_run_config(trans, subworkflow_invocation)
- subworkflow_progress = self.subworkflow_progress(subworkflow_invocation, step, workflow_run_config.param_map)
+ subworkflow_progress = self.subworkflow_progress(subworkflow_invocation, step, workflow_run_config.param_map, structure)
subworkflow_invocation = subworkflow_progress.workflow_invocation
return WorkflowInvoker(
trans,
@@ -456,31 +626,35 @@ def subworkflow_invoker(self, trans, step, use_cached_job=False):
progress=subworkflow_progress,
)
- def subworkflow_progress(self, subworkflow_invocation, step, param_map):
+ def subworkflow_progress(self, subworkflow_invocation, step, param_map, structure):
subworkflow = subworkflow_invocation.workflow
subworkflow_inputs = {}
for input_subworkflow_step in subworkflow.input_steps:
- connection_found = False
+ subworkflow_step_id = input_subworkflow_step.id
+ connections = []
for input_connection in step.input_connections:
if input_connection.input_subworkflow_step == input_subworkflow_step:
- subworkflow_step_id = input_subworkflow_step.id
- is_data = input_connection.output_step.type != "parameter_input"
- replacement = self.replacement_for_connection(
- input_connection,
- is_data=is_data,
- )
- subworkflow_inputs[subworkflow_step_id] = replacement
- connection_found = True
- break
+ connections.append(input_connection)
- if not connection_found:
+ if not connections:
raise Exception("Could not find connections for all subworkflow inputs.")
+ replacement = self.replacement_for_input_connections(
+ step,
+ dict(
+ name=input_subworkflow_step.label, # TODO: only module knows this unfortunately
+ input_type=input_subworkflow_step.input_type,
+ ),
+ connections,
+ )
+ subworkflow_inputs[subworkflow_step_id] = replacement
+
return WorkflowProgress(
subworkflow_invocation,
subworkflow_inputs,
self.module_injector,
- param_map=param_map
+ param_map=param_map,
+ workflow_mapping_structure=structure,
)
def _recover_mapping(self, step_invocation):
diff --git a/lib/galaxy/workflow/run_request.py b/lib/galaxy/workflow/run_request.py
index ff4950194388..9d0d87dc8fd0 100644
--- a/lib/galaxy/workflow/run_request.py
+++ b/lib/galaxy/workflow/run_request.py
@@ -82,10 +82,13 @@ def _normalize_inputs(steps, inputs, inputs_by):
for possible_input_key in possible_input_keys:
if possible_input_key in inputs:
inputs_key = possible_input_key
- if not inputs_key:
- message = "Workflow cannot be run because an expected input step '%s' has no input dataset." % step.id
+ default_value = json.loads(step.tool_inputs.get("default_value") or 'null')
+ optional = json.loads(step.tool_inputs.get("optional") or 'false')
+ if not inputs_key and not default_value and not optional:
+ message = "Workflow cannot be run because an expected input step '%s' (%s) has no input dataset." % (step.id, step.label)
raise exceptions.MessageException(message)
- normalized_inputs[step.id] = inputs[inputs_key]
+ if inputs_key:
+ normalized_inputs[step.id] = inputs[inputs_key]
return normalized_inputs
@@ -274,13 +277,17 @@ def build_workflow_run_configs(trans, workflow, payload):
if step.type == "parameter_input":
if normalized_key in param_map:
value = param_map.pop(normalized_key)
- normalized_inputs[normalized_key] = value["input"]
+ input_value = value["input"]
+ if isinstance(input_value, dict) and input_value.get("src") == "json":
+ input_value = input_value.get("value")
+ normalized_inputs[normalized_key] = input_value
+
steps_by_id = workflow.steps_by_id
# Set workflow inputs.
for key, input_dict in normalized_inputs.items():
step = steps_by_id[key]
- if step.type == 'parameter_input':
+ if step.type == 'parameter_input' and (step.tool_inputs["parameter_type"] != "field" or not isinstance(input_dict, dict) or "id" not in input_dict):
continue
if 'src' not in input_dict:
raise exceptions.RequestParameterInvalidException("No input source type defined for input '%s'." % input_dict)
diff --git a/lib/galaxy/workflow/steps.py b/lib/galaxy/workflow/steps.py
index 87a0d6d0a6f3..f19dd9ab3b61 100644
--- a/lib/galaxy/workflow/steps.py
+++ b/lib/galaxy/workflow/steps.py
@@ -9,18 +9,24 @@
topsort_levels
)
+import logging
+log = logging.getLogger(__name__)
+
def attach_ordered_steps(workflow, steps):
""" Attempt to topologically order steps and attach to workflow. If this
fails - the workflow contains cycles so it mark it as such.
"""
+ log.info("unordered steps %s" % [s.label for s in steps])
ordered_steps = order_workflow_steps(steps)
+ log.info("ordered steps %s" % [s.label for s in ordered_steps])
workflow.has_cycles = True
if ordered_steps:
workflow.has_cycles = False
workflow.steps = ordered_steps
for i, step in enumerate(workflow.steps):
step.order_index = i
+ log.info("i is %s" % i)
return workflow.has_cycles
diff --git a/lib/galaxy_ext/cwl/__init__.py b/lib/galaxy_ext/cwl/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/lib/galaxy_ext/cwl/handle_outputs.py b/lib/galaxy_ext/cwl/handle_outputs.py
new file mode 100644
index 000000000000..4ded2dd1297f
--- /dev/null
+++ b/lib/galaxy_ext/cwl/handle_outputs.py
@@ -0,0 +1,21 @@
+"""
+"""
+
+import logging
+import os
+import sys
+
+# insert *this* galaxy before all others on sys.path
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)))
+
+from galaxy.tools.cwl import handle_outputs
+
+# ensure supported version
+assert sys.version_info[:2] >= (2, 6) and sys.version_info[:2] <= (2, 7), 'Python version must be 2.6 or 2.7, this is: %s' % sys.version
+
+logging.basicConfig()
+log = logging.getLogger(__name__)
+
+
+def relocate_dynamic_outputs():
+ handle_outputs()
diff --git a/lib/galaxy_ext/expressions/__init__.py b/lib/galaxy_ext/expressions/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/lib/galaxy_ext/expressions/handle_job.py b/lib/galaxy_ext/expressions/handle_job.py
new file mode 100644
index 000000000000..fdb175f2e641
--- /dev/null
+++ b/lib/galaxy_ext/expressions/handle_job.py
@@ -0,0 +1,49 @@
+"""
+Execute an external process to evaluate expressions for Galaxy jobs.
+
+Galaxy should be importable on sys.path .
+"""
+
+import json
+import logging
+import os
+import sys
+
+# insert *this* galaxy before all others on sys.path
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)))
+
+# ensure supported version
+assert sys.version_info[:2] == (2, 7), 'Python version must be 2.7, this is: %s' % sys.version
+
+logging.basicConfig()
+log = logging.getLogger(__name__)
+
+from galaxy.tools.expressions import evaluate
+
+try:
+ from cwltool import expression
+except ImportError:
+ expression = None
+
+
+def run(environment_path=None):
+ if expression is None:
+ raise Exception("Python library cwltool must available to evaluate expressions.")
+
+ if environment_path is None:
+ environment_path = os.environ.get("GALAXY_EXPRESSION_INPUTS")
+ with open(environment_path, "r") as f:
+ raw_inputs = json.load(f)
+
+ outputs = raw_inputs["outputs"]
+ inputs = raw_inputs.copy()
+ del inputs["outputs"]
+
+ result = evaluate(None, inputs)
+
+ for output in outputs:
+ path = output["path"]
+ from_expression = "$(" + output["from_expression"] + ")"
+ output_value = expression.param_interpolate(from_expression, result)
+ with open(path, "w") as f:
+ json.dump(output_value, f)
diff --git a/lib/tool_shed/util/workflow_util.py b/lib/tool_shed/util/workflow_util.py
index 9de41eb748b2..976220cfc3e4 100644
--- a/lib/tool_shed/util/workflow_util.py
+++ b/lib/tool_shed/util/workflow_util.py
@@ -306,13 +306,12 @@ def get_workflow_from_dict(trans, workflow_dict, tools_metadata, repository_id,
# Input connections.
for input_name, conn_dict in step.temp_input_connections.items():
if conn_dict:
+ step_input = step.get_or_add_input(input_name)
output_step = steps_by_external_id[conn_dict['id']]
conn = trans.model.WorkflowStepConnection()
- conn.input_step = step
- conn.input_name = input_name
+ conn.input_step_input = step_input
conn.output_step = output_step
conn.output_name = conn_dict['output_name']
- step.input_connections.append(conn)
del step.temp_input_connections
# Order the steps if possible.
attach_ordered_steps(workflow, steps)
diff --git a/scripts/cwl-runner b/scripts/cwl-runner
new file mode 100755
index 000000000000..36931e87b375
--- /dev/null
+++ b/scripts/cwl-runner
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+CWD=`pwd`
+cd `dirname $0`/..
+. ./scripts/common_startup_functions.sh >&2
+setup_python >&2
+python ./scripts/run_cwl.py --cwd="$CWD" "$@"
diff --git a/scripts/end_cwl_conformance.bash b/scripts/end_cwl_conformance.bash
new file mode 100644
index 000000000000..2962bbfd2578
--- /dev/null
+++ b/scripts/end_cwl_conformance.bash
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set +x
+
+: ${GALAXY_BOOTSTRAP_DATABASE:=1}
+
+if [ $GALAXY_BOOTSTRAP_DATABASE -eq 1 ]; then
+ docker stop "${GALAXY_BOOTSTRAP_DATABASE_CONTAINER}" || /bin/true
+ docker rm "${GALAXY_BOOTSTRAP_DATABASE_CONTAINER}" || /bin/true
+fi
+
+export GALAXY_SKIP_CLIENT_BUILD=1
+export GALAXY_PID=cwl.pid
+./run.sh --stop-daemon
diff --git a/scripts/run_cwl.py b/scripts/run_cwl.py
new file mode 100644
index 000000000000..d1b941e342d2
--- /dev/null
+++ b/scripts/run_cwl.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+from __future__ import absolute_import, print_function
+
+import argparse
+import json
+import os
+import sys
+import tempfile
+
+from bioblend import galaxy
+
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'test')))
+sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))
+
+from galaxy.tools.cwl.parser import get_outputs
+from galaxy.version import VERSION
+
+from base.populators import ( # noqa: I100,I202
+ CwlPopulator,
+ GiDatasetPopulator,
+ GiPostGetMixin,
+ GiWorkflowPopulator,
+)
+
+DESCRIPTION = """Simple CWL runner script."""
+
+def collect_outputs(cwl_run, output_names, output_directory=None, outdir=os.getcwd()):
+
+ def get_dataset(dataset_details, filename=None):
+ parent_basename = dataset_details.get("cwl_file_name")
+ if not parent_basename:
+ parent_basename = dataset_details.get("name")
+ file_ext = dataset_details["file_ext"]
+ if file_ext == "directory":
+ # TODO: rename output_directory to outputs_directory because we can have output directories
+ # and this is confusing...
+ the_output_directory = os.path.join(output_directory, parent_basename)
+ safe_makedirs(the_output_directory)
+ destination = self.download_output_to(dataset_details, the_output_directory, filename=filename)
+ else:
+ destination = self.download_output_to(dataset_details, output_directory, filename=filename)
+ if filename is None:
+ basename = parent_basename
+ else:
+ basename = os.path.basename(filename)
+ return {"path": destination, "basename": basename}
+
+ outputs = {}
+ for output_name in output_names:
+ cwl_output = cwl_run.get_output_as_object(output_name, download_folder=outdir)
+ outputs[output_name] = cwl_output
+ return outputs
+
+def main(argv=None):
+ """Entry point for workflow driving."""
+ arg_parser = argparse.ArgumentParser(description=DESCRIPTION)
+ arg_parser.add_argument("--api_key", default="testmasterapikey")
+ arg_parser.add_argument("--host", default="http://localhost:8080/")
+ arg_parser.add_argument("--outdir", default=".")
+ arg_parser.add_argument("--quiet", action="store_true")
+ arg_parser.add_argument("--version", action='version', version='%(prog)s {}~CWL'.format(VERSION))
+ arg_parser.add_argument("--cwd", default=os.getcwd())
+ arg_parser.add_argument('tool', metavar='TOOL', help='tool or workflow')
+ arg_parser.add_argument('job', metavar='JOB', help='job')
+
+ args = arg_parser.parse_args(argv)
+
+ gi = galaxy.GalaxyInstance(args.host, args.api_key)
+ i = GiPostGetMixin()
+ i._gi = gi
+ response = i._get("whoami")
+ if response.json() is None:
+ email = "cwluser@example.com"
+ all_users = i._get('users').json()
+ try:
+ test_user = [user for user in all_users if user["email"] == email][0]
+ except IndexError:
+ data = dict(
+ email=email,
+ password="testpass",
+ username="cwluser",
+ )
+ test_user = i._post('users', data).json()
+
+ api_key = i._post("users/%s/api_key" % test_user['id']).json()
+ gi = galaxy.GalaxyInstance(args.host, api_key)
+
+ dataset_populator = GiDatasetPopulator(gi)
+ workflow_populator = GiWorkflowPopulator(gi)
+ cwl_populator = CwlPopulator(dataset_populator, workflow_populator)
+
+ abs_cwd = os.path.abspath(args.cwd)
+
+ tool = args.tool
+ if not os.path.isabs(tool):
+ tool = os.path.join(abs_cwd, tool)
+
+ job = args.job
+ if not os.path.isabs(job):
+ job = os.path.join(abs_cwd, job)
+
+ run = cwl_populator.run_cwl_job(tool, job)
+
+ outputs = get_outputs(tool)
+ output_names = [o.get_id() for o in outputs]
+ outputs = collect_outputs(run, output_names, outdir=args.outdir)
+ print(json.dumps(outputs, indent=4))
+ #for output_dataset in output_datasets.values():
+ # name = output_dataset.name
+ # print(run.get_output_as_object(name))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/run_cwl_conformance.py b/scripts/run_cwl_conformance.py
new file mode 100755
index 000000000000..a929e3b13477
--- /dev/null
+++ b/scripts/run_cwl_conformance.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+from __future__ import absolute_import, print_function
+
+import argparse
+import os
+import sys
+import subprocess
+
+DESCRIPTION = """This script runs CWL conformity tests over a running Galaxy server."""
+SCRIPT_DIRECTORY = os.path.abspath(os.path.dirname(__file__))
+ROOT_DIRECTORY = os.path.dirname(SCRIPT_DIRECTORY)
+CWL_TOOL_DIRECTORY_DEFAULT = os.path.join(ROOT_DIRECTORY, "test", "functional", "tools", "cwl_tools")
+
+EPILOG_MSG="Note: this script needs the Galaxy server \
+to be started with the following command: \
+CWL_TOOL_DIRECTORY=<path-to-cwl-tools> GALAXY_RUN_WITH_TEST_TOOLS=1 sh run.sh"
+
+def path_inference(path):
+ if os.path.isdir(path):
+ return path
+ else:
+ return os.path.join(ROOT_DIRECTORY,path)
+
+def main(argv=None):
+ arg_parser = argparse.ArgumentParser(description=DESCRIPTION,
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ epilog=EPILOG_MSG)
+ arg_parser.add_argument("--api_key", required=True, help="Galaxy API key")
+ arg_parser.add_argument("--host", default="localhost", help="Galaxy server host")
+ arg_parser.add_argument("--port", default="8080", help="Galaxy server port")
+ arg_parser.add_argument("--test_path", default="test/api/test_cwl_conformance_v1_0.py:CwlConformanceTestCase.test_conformance_v1_0_17", help="Test path")
+ arg_parser.add_argument("--cwl_tool_directory",default=CWL_TOOL_DIRECTORY_DEFAULT)
+
+ args = arg_parser.parse_args(argv)
+
+ cwl_tool_directory = path_inference(args.cwl_tool_directory)
+ test_path = path_inference(args.test_path)
+
+ os.environ['GALAXY_TEST_HOST']=args.host
+ os.environ['GALAXY_TEST_PORT']=args.port
+ os.environ['GALAXY_TEST_EXTERNAL']='http://{}:{}'.format(args.host,args.port)
+ os.environ['GALAXY_CONFIG_MASTER_API_KEY']=args.api_key
+ os.environ['GALAXY_TEST_USER_API_KEY']=args.api_key
+ os.environ['CWL_TOOL_DIRECTORY']=cwl_tool_directory
+
+ test_runner_script='{}/run_tests.sh'.format(ROOT_DIRECTORY)
+ cmd=[test_runner_script,'-api',test_path]
+ subprocess.call(cmd)
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/run_for_cwl_conformance.bash b/scripts/run_for_cwl_conformance.bash
new file mode 100644
index 000000000000..1497a1672bb2
--- /dev/null
+++ b/scripts/run_for_cwl_conformance.bash
@@ -0,0 +1,41 @@
+#!/bin/bash
+
+set +e
+
+: ${GALAXY_BOOTSTRAP_DATABASE_CONTAINER:="gxpostgres"}
+: ${GALAXY_BOOTSTRAP_DATABASE_PASSWORD:="mysecretpassword"}
+: ${GALAXY_BOOTSTRAP_DATABASE_NAME:="galaxy"}
+: ${GALAXY_BOOTSTRAP_DATABASE:=1}
+: ${GALAXY_PORT:=8080}
+: ${POSTGRES_PORT:=5432}
+
+if [ $GALAXY_BOOTSTRAP_DATABASE -eq 1 ]; then
+ docker stop "${GALAXY_BOOTSTRAP_DATABASE_CONTAINER}" || /bin/true
+ docker rm "${GALAXY_BOOTSTRAP_DATABASE_CONTAINER}" || /bin/true
+ docker run -p "${POSTGRES_PORT}:${POSTGRES_PORT}" --name "${GALAXY_BOOTSTRAP_DATABASE_CONTAINER}" -e POSTGRES_PASSWORD="${GALAXY_BOOTSTRAP_DATABASE_PASSWORD}" -d postgres
+fi
+
+export GALAXY_SKIP_CLIENT_BUILD=1
+export GALAXY_PID="cwl.pid"
+export GALAXY_CONFIG_OVERRIDE_MASTER_API_KEY=testmasterapikey
+export GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION="postgres://postgres:${GALAXY_BOOTSTRAP_DATABASE_PASSWORD}@localhost:${POSTGRES_PORT}/${GALAXY_BOOTSTRAP_DATABASE_NAME}"
+export GALAXY_CONFIG_OVERRIDE_DATABASE_AUTO_MIGRATE="true"
+export GALAXY_CONFIG_OVERRIDE_ALLOW_PATH_PASTE="true"
+export GALAXY_CONFIG_OVERRIDE_ADMIN_USERS="cwluser@example.com"
+
+export GALAXY_CONFIG_OVERRIDE_ENABLE_BETA_WORKFLOW_MODULES="true"
+export GALAXY_CONFIG_OVERRIDE_ENABLE_BETA_WORKFLOW_FORMAT="true"
+export GALAXY_CONFIG_OVERRIDE_ENABLE_BETA_TOOL_FORMATS="true"
+export GALAXY_CONFIG_OVERRIDE_CHECK_UPLOAD_CONTENT="false"
+export GALAXY_CONFIG_OVERRIDE_STRICT_CWL_VALIDATION="false"
+
+export GALAXY_PORT
+
+./run.sh --daemon
+
+export GALAXY_TEST_EXTERNAL="http://localhost:$GALAXY_PORT/"
+while ! curl -s "${GALAXY_TEST_EXTERNAL}api/version";
+do
+ printf "."
+ sleep 4;
+done;
diff --git a/test/api/ebiwftest/__init__.py b/test/api/ebiwftest/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/api/ebiwftest/ebiwftest.py b/test/api/ebiwftest/ebiwftest.py
new file mode 100644
index 000000000000..a890b24c7051
--- /dev/null
+++ b/test/api/ebiwftest/ebiwftest.py
@@ -0,0 +1,24 @@
+
+"""Test CWL conformance for version $version."""
+
+from ..test_workflows_cwl import BaseCwlWorklfowTestCase
+
+
+class CwlConformanceTestCase(BaseCwlWorklfowTestCase):
+ """Test case mapping to CWL conformance tests for version $version."""
+
+ def test_conformance_v1_0_17(self):
+ """Test command execution in with stdin and stdout redirection
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ output:
+ output:
+ checksum: sha1$3596ea087bfdaf52380eae441077572ed289d657
+ class: File
+ location: output
+ size: 3
+ tool: v1.0/wc-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in with stdin and stdout redirection""")
diff --git a/test/api/ebiwftest/v1.0/conformance_tests.yaml b/test/api/ebiwftest/v1.0/conformance_tests.yaml
new file mode 100644
index 000000000000..989a5bd51585
--- /dev/null
+++ b/test/api/ebiwftest/v1.0/conformance_tests.yaml
@@ -0,0 +1,9 @@
+- job: v1.0/wc-job.json
+ output:
+ output:
+ checksum: sha1$3596ea087bfdaf52380eae441077572ed289d657
+ class: File
+ location: output
+ size: 3
+ tool: v1.0/wc-tool.cwl
+ doc: Test command execution in with stdin and stdout redirection
diff --git a/test/api/ebiwftest/v1.0/wc-job.json b/test/api/ebiwftest/v1.0/wc-job.json
new file mode 100644
index 000000000000..598568d385f2
--- /dev/null
+++ b/test/api/ebiwftest/v1.0/wc-job.json
@@ -0,0 +1,6 @@
+{
+ "file1": {
+ "class": "File",
+ "location": "whale.txt"
+ }
+}
diff --git a/test/api/ebiwftest/v1.0/wc-tool.cwl b/test/api/ebiwftest/v1.0/wc-tool.cwl
new file mode 100644
index 000000000000..165585494089
--- /dev/null
+++ b/test/api/ebiwftest/v1.0/wc-tool.cwl
@@ -0,0 +1,17 @@
+#!/usr/bin/env cwl-runner
+
+class: CommandLineTool
+cwlVersion: v1.0
+
+inputs:
+ file1: File
+
+outputs:
+ output:
+ type: File
+ outputBinding: { glob: output }
+
+baseCommand: [wc, -l]
+
+stdin: $(inputs.file1.path)
+stdout: output
diff --git a/test/api/ebiwftest/v1.0/whale.txt b/test/api/ebiwftest/v1.0/whale.txt
new file mode 100644
index 000000000000..425d1ed02c8d
--- /dev/null
+++ b/test/api/ebiwftest/v1.0/whale.txt
@@ -0,0 +1,16 @@
+Call me Ishmael. Some years ago--never mind how long precisely--having
+little or no money in my purse, and nothing particular to interest me on
+shore, I thought I would sail about a little and see the watery part of
+the world. It is a way I have of driving off the spleen and regulating
+the circulation. Whenever I find myself growing grim about the mouth;
+whenever it is a damp, drizzly November in my soul; whenever I find
+myself involuntarily pausing before coffin warehouses, and bringing up
+the rear of every funeral I meet; and especially whenever my hypos get
+such an upper hand of me, that it requires a strong moral principle to
+prevent me from deliberately stepping into the street, and methodically
+knocking people's hats off--then, I account it high time to get to sea
+as soon as I can. This is my substitute for pistol and ball. With a
+philosophical flourish Cato throws himself upon his sword; I quietly
+take to the ship. There is nothing surprising in this. If they but knew
+it, almost all men in their degree, some time or other, cherish very
+nearly the same feelings towards the ocean with me.
diff --git a/test/api/embed_test_1.gxwf.yml b/test/api/embed_test_1.gxwf.yml
new file mode 100644
index 000000000000..76461dff6fd2
--- /dev/null
+++ b/test/api/embed_test_1.gxwf.yml
@@ -0,0 +1,20 @@
+class: GalaxyWorkflow
+inputs:
+ - label: input1
+
+steps:
+ - tool_id: cat1
+ label: first_cat
+ state:
+ input1:
+ $link: input1
+ - label: embed1
+ run:
+ "@import": "embed_test_1_tool.gxtool.yml"
+ - tool_id: cat1
+ state:
+ input1:
+ $link: first_cat#out_file1
+ queries:
+ - input2:
+ $link: embed1#output1
diff --git a/test/api/embed_test_1_tool.gxtool.yml b/test/api/embed_test_1_tool.gxtool.yml
new file mode 100644
index 000000000000..ff99796a65cf
--- /dev/null
+++ b/test/api/embed_test_1_tool.gxtool.yml
@@ -0,0 +1,5 @@
+class: GalaxyTool
+command: echo 'hello world 2' > $output1
+outputs:
+ output1:
+ format: txt
diff --git a/test/api/test_cwl_conformance_green_v1_0.py b/test/api/test_cwl_conformance_green_v1_0.py
new file mode 100644
index 000000000000..d352bd149136
--- /dev/null
+++ b/test/api/test_cwl_conformance_green_v1_0.py
@@ -0,0 +1,1868 @@
+
+"""Test CWL conformance for version $version."""
+
+from .test_workflows_cwl import BaseCwlWorklfowTestCase
+
+
+class CwlConformanceTestCase(BaseCwlWorklfowTestCase):
+ """Test case mapping to CWL conformance tests for version $version."""
+
+ def test_conformance_v1_0_cl_basic_generation(self):
+ """General test of command line generation
+
+ Generated from::
+
+ job: v1.0/bwa-mem-job.json
+ label: cl_basic_generation
+ output:
+ args:
+ - bwa
+ - mem
+ - -t
+ - '2'
+ - -I
+ - 1,2,3,4
+ - -m
+ - '3'
+ - chr20.fa
+ - example_human_Illumina.pe_1.fastq
+ - example_human_Illumina.pe_2.fastq
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/bwa-mem-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """General test of command line generation""")
+
+ def test_conformance_v1_0_nested_prefixes_arrays(self):
+ """Test nested prefixes with arrays
+
+ Generated from::
+
+ job: v1.0/bwa-mem-job.json
+ label: nested_prefixes_arrays
+ output:
+ args:
+ - bwa
+ - mem
+ - chr20.fa
+ - -XXX
+ - -YYY
+ - example_human_Illumina.pe_1.fastq
+ - -YYY
+ - example_human_Illumina.pe_2.fastq
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/binding-test.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test nested prefixes with arrays""")
+
+ def test_conformance_v1_0_nested_cl_bindings(self):
+ """Test nested command line bindings
+
+ Generated from::
+
+ job: v1.0/tmap-job.json
+ label: nested_cl_bindings
+ output:
+ args:
+ - tmap
+ - mapall
+ - stage1
+ - map1
+ - --min-seq-length
+ - '20'
+ - map2
+ - --min-seq-length
+ - '20'
+ - stage2
+ - map1
+ - --max-seq-length
+ - '20'
+ - --min-seq-length
+ - '10'
+ - --seed-length
+ - '16'
+ - map2
+ - --max-seed-hits
+ - '-1'
+ - --max-seq-length
+ - '20'
+ - --min-seq-length
+ - '10'
+ tags:
+ - schema_def
+ - command_line_tool
+ tool: v1.0/tmap-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test nested command line bindings""")
+
+ def test_conformance_v1_0_cl_optional_inputs_missing(self):
+ """Test command line with optional input (missing)
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: cl_optional_inputs_missing
+ output:
+ args:
+ - cat
+ - hello.txt
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat1-testcli.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with optional input (missing)""")
+
+ def test_conformance_v1_0_cl_optional_bindings_provided(self):
+ """Test command line with optional input (provided)
+
+ Generated from::
+
+ job: v1.0/cat-n-job.json
+ label: cl_optional_bindings_provided
+ output:
+ args:
+ - cat
+ - -n
+ - hello.txt
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat1-testcli.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with optional input (provided)""")
+
+ def test_conformance_v1_0_initworkdir_expreng_requirements(self):
+ """Test InitialWorkDirRequirement ExpressionEngineRequirement.engineConfig feature
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: initworkdir_expreng_requirements
+ output:
+ foo:
+ checksum: sha1$63da67422622fbf9251a046d7a34b7ea0fd4fead
+ class: File
+ location: foo.txt
+ size: 22
+ tags:
+ - initial_work_dir
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/template-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test InitialWorkDirRequirement ExpressionEngineRequirement.engineConfig feature""")
+
+ def test_conformance_v1_0_stdout_redirect_docker(self):
+ """Test command execution in Docker with stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdout_redirect_docker
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with stdout redirection""")
+
+ def test_conformance_v1_0_stdout_redirect_shortcut_docker(self):
+ """Test command execution in Docker with shortcut stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdout_redirect_shortcut_docker
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: Any
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool-shortcut.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with shortcut stdout redirection""")
+
+ def test_conformance_v1_0_stdout_redirect_mediumcut_docker(self):
+ """Test command execution in Docker with mediumcut stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdout_redirect_mediumcut_docker
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: cat-out
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool-mediumcut.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with mediumcut stdout redirection""")
+
+ def test_conformance_v1_0_stderr_redirect(self):
+ """Test command line with stderr redirection
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: stderr_redirect
+ output:
+ output_file:
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ class: File
+ location: error.txt
+ size: 4
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/stderr.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with stderr redirection""")
+
+ def test_conformance_v1_0_stderr_redirect_shortcut(self):
+ """Test command line with stderr redirection, brief syntax
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: stderr_redirect_shortcut
+ output:
+ output_file:
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ class: File
+ location: Any
+ size: 4
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/stderr-shortcut.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with stderr redirection, brief syntax""")
+
+ def test_conformance_v1_0_stderr_redirect_mediumcut(self):
+ """Test command line with stderr redirection, named brief syntax
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: stderr_redirect_mediumcut
+ output:
+ output_file:
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ class: File
+ location: std.err
+ size: 4
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/stderr-mediumcut.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with stderr redirection, named brief syntax""")
+
+ def test_conformance_v1_0_stdinout_redirect_docker(self):
+ """Test command execution in Docker with stdin and stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdinout_redirect_docker
+ output:
+ output_txt:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat4-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with stdin and stdout redirection""")
+
+ def test_conformance_v1_0_expression_any(self):
+ """Test default usage of Any in expressions.
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: expression_any
+ output:
+ output: 1
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression1-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test default usage of Any in expressions.""")
+
+ def test_conformance_v1_0_expression_any_null(self):
+ """Test explicitly passing null to Any type inputs with default values.
+
+ Generated from::
+
+ job: v1.0/null-expression1-job.json
+ label: expression_any_null
+ output:
+ output: 1
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression1-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test explicitly passing null to Any type inputs with default values.""")
+
+ def test_conformance_v1_0_expression_any_string(self):
+ """Testing the string 'null' does not trip up an Any with a default value.
+
+ Generated from::
+
+ job: v1.0/null-expression2-job.json
+ label: expression_any_string
+ output:
+ output: 2
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression1-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Testing the string 'null' does not trip up an Any with a default value.""")
+
+ def test_conformance_v1_0_expression_any_nodefaultany(self):
+ """Test Any without defaults cannot be unspecified.
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: expression_any_nodefaultany
+ should_fail: true
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression2-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any without defaults cannot be unspecified.""")
+
+ def test_conformance_v1_0_expression_any_null_nodefaultany(self):
+ """Test explicitly passing null to Any type without a default value.
+
+ Generated from::
+
+ job: v1.0/null-expression1-job.json
+ label: expression_any_null_nodefaultany
+ should_fail: true
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression2-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test explicitly passing null to Any type without a default value.""")
+
+ def test_conformance_v1_0_expression_any_nullstring_nodefaultany(self):
+ """Testing the string 'null' does not trip up an Any without a default value.
+
+ Generated from::
+
+ job: v1.0/null-expression2-job.json
+ label: expression_any_nullstring_nodefaultany
+ output:
+ output: 2
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression2-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Testing the string 'null' does not trip up an Any without a default value.""")
+
+ def test_conformance_v1_0_any_outputSource_compatibility(self):
+ """Testing Any type compatibility in outputSource
+
+ Generated from::
+
+ job: v1.0/any-type-job.json
+ label: any_outputSource_compatibility
+ output:
+ output1:
+ - hello
+ - world
+ output2:
+ - foo
+ - bar
+ output3: hello
+ tags:
+ - required
+ - workflow
+ tool: v1.0/any-type-compat.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Testing Any type compatibility in outputSource""")
+
+ def test_conformance_v1_0_stdinout_redirect(self):
+ """Test command execution in with stdin and stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdinout_redirect
+ output:
+ output:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in with stdin and stdout redirection""")
+
+ def test_conformance_v1_0_expression_parseint(self):
+ """Test ExpressionTool with Docker-based expression engine
+
+ Generated from::
+
+ job: v1.0/parseInt-job.json
+ label: expression_parseint
+ output:
+ output: 42
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/parseInt-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test ExpressionTool with Docker-based expression engine""")
+
+ def test_conformance_v1_0_expression_outputEval(self):
+ """Test outputEval to transform output
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: expression_outputEval
+ output:
+ output: 16
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/wc2-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test outputEval to transform output""")
+
+ def test_conformance_v1_0_wf_wc_parseInt(self):
+ """Test two step workflow with imported tools
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: wf_wc_parseInt
+ output:
+ count_output: 16
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines1-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test two step workflow with imported tools""")
+
+ def test_conformance_v1_0_wf_wc_expressiontool(self):
+ """Test two step workflow with inline tools
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: wf_wc_expressiontool
+ output:
+ count_output: 16
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines2-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test two step workflow with inline tools""")
+
+ def test_conformance_v1_0_wf_wc_scatter(self):
+ """Test single step workflow with Scatter step
+
+ Generated from::
+
+ job: v1.0/count-lines3-job.json
+ label: wf_wc_scatter
+ output:
+ count_output:
+ - 16
+ - 1
+ tags:
+ - scatter
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines3-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test single step workflow with Scatter step""")
+
+ def test_conformance_v1_0_wf_wc_scatter_multiple_merge(self):
+ """Test single step workflow with Scatter step and two data links connected to
+same input, default merge behavior
+
+
+ Generated from::
+
+ job: v1.0/count-lines4-job.json
+ label: wf_wc_scatter_multiple_merge
+ output:
+ count_output:
+ - 16
+ - 1
+ tags:
+ - scatter
+ - multiple_input
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines4-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test single step workflow with Scatter step and two data links connected to
+same input, default merge behavior
+""")
+
+ def test_conformance_v1_0_wf_wc_scatter_multiple_flattened(self):
+ """Test single step workflow with Scatter step and two data links connected to
+same input, flattened merge behavior
+
+
+ Generated from::
+
+ job: v1.0/count-lines6-job.json
+ label: wf_wc_scatter_multiple_flattened
+ output:
+ count_output: 34
+ tags:
+ - multiple_input
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines7-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test single step workflow with Scatter step and two data links connected to
+same input, flattened merge behavior
+""")
+
+ def test_conformance_v1_0_wf_wc_nomultiple(self):
+ """Test that no MultipleInputFeatureRequirement is necessary when
+workflow step source is a single-item list
+
+
+ Generated from::
+
+ job: v1.0/count-lines6-job.json
+ label: wf_wc_nomultiple
+ output:
+ count_output: 32
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines13-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that no MultipleInputFeatureRequirement is necessary when
+workflow step source is a single-item list
+""")
+
+ def test_conformance_v1_0_wf_input_default_missing(self):
+ """Test workflow with default value for input parameter (missing)
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_input_default_missing
+ output:
+ count_output: 1
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines5-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow with default value for input parameter (missing)""")
+
+ def test_conformance_v1_0_wf_input_default_provided(self):
+ """Test workflow with default value for input parameter (provided)
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: wf_input_default_provided
+ output:
+ count_output: 16
+ tags:
+            - inline_javascript
+ - workflow
+ tool: v1.0/count-lines5-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow with default value for input parameter (provided)""")
+
+ def test_conformance_v1_0_wf_default_tool_default(self):
+ """Test that workflow defaults override tool defaults
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_default_tool_default
+ output:
+ default_output: workflow_default
+ tags:
+ - required
+ - workflow
+ tool: v1.0/echo-wf-default.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that workflow defaults override tool defaults""")
+
+ def test_conformance_v1_0_envvar_req(self):
+ """Test EnvVarRequirement
+
+ Generated from::
+
+ job: v1.0/env-job.json
+ label: envvar_req
+ output:
+ out:
+ checksum: sha1$b3ec4ed1749c207e52b3a6d08c59f31d83bff519
+ class: File
+ location: out
+ size: 15
+ tags:
+ - env_var
+ - command_line_tool
+ tool: v1.0/env-tool1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test EnvVarRequirement""")
+
+ def test_conformance_v1_0_wf_scatter_emptylist(self):
+ """Test workflow scatter with single empty list parameter
+
+ Generated from::
+
+ job: v1.0/scatter-empty-job1.json
+ label: wf_scatter_emptylist
+ output:
+ out: []
+ tags:
+ - scatter
+ - workflow
+ tool: v1.0/scatter-wf1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with single empty list parameter""")
+
+ def test_conformance_v1_0_wf_scatter_dotproduct_twoempty(self):
+ """Test workflow scatter with two empty scatter parameters and dotproduct join method
+
+ Generated from::
+
+ job: v1.0/scatter-empty-job4.json
+ label: wf_scatter_dotproduct_twoempty
+ output:
+ out: []
+ tags:
+ - scatter
+ - workflow
+ tool: v1.0/scatter-wf4.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with two empty scatter parameters and dotproduct join method""")
+
+ def test_conformance_v1_0_any_input_param(self):
+ """Test Any type input parameter
+
+ Generated from::
+
+ job: v1.0/env-job.json
+ label: any_input_param
+ output:
+ out: 'hello test env
+
+ '
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/echo-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any type input parameter""")
+
+ def test_conformance_v1_0_nested_workflow(self):
+ """Test nested workflow
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: nested_workflow
+ output:
+ count_output: 16
+ tags:
+ - subworkflow
+ - workflow
+ tool: v1.0/count-lines8-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test nested workflow""")
+
+ def test_conformance_v1_0_requirement_priority(self):
+ """Test requirement priority
+
+ Generated from::
+
+ job: v1.0/env-job.json
+ label: requirement_priority
+ output:
+ out:
+ checksum: sha1$b3ec4ed1749c207e52b3a6d08c59f31d83bff519
+ class: File
+ location: out
+ size: 15
+ tags:
+ - env_var
+ - workflow
+ tool: v1.0/env-wf1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test requirement priority""")
+
+ def test_conformance_v1_0_requirement_override_hints(self):
+ """Test requirements override hints
+
+ Generated from::
+
+ job: v1.0/env-job.json
+ label: requirement_override_hints
+ output:
+ out:
+ checksum: sha1$cdc1e84968261d6a7575b5305945471f8be199b6
+ class: File
+ location: out
+ size: 9
+ tags:
+ - env_var
+ - workflow
+ tool: v1.0/env-wf2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test requirements override hints""")
+
+ def test_conformance_v1_0_requirement_workflow_steps(self):
+ """Test requirements on workflow steps
+
+ Generated from::
+
+ job: v1.0/env-job.json
+ label: requirement_workflow_steps
+ output:
+ out:
+ checksum: sha1$cdc1e84968261d6a7575b5305945471f8be199b6
+ class: File
+ location: out
+ size: 9
+ tags:
+ - env_var
+ - workflow
+ tool: v1.0/env-wf3.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test requirements on workflow steps""")
+
+ def test_conformance_v1_0_step_input_default_value(self):
+ """Test default value on step input parameter
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: step_input_default_value
+ output:
+ count_output: 16
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines9-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test default value on step input parameter""")
+
+ def test_conformance_v1_0_step_input_default_value_nosource(self):
+ """Test use default value on step input parameter with empty source
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: step_input_default_value_nosource
+ output:
+ count_output: 16
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines11-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test use default value on step input parameter with empty source""")
+
+ def test_conformance_v1_0_step_input_default_value_nullsource(self):
+ """Test use default value on step input parameter with null source
+
+ Generated from::
+
+ job: v1.0/file1-null.json
+ label: step_input_default_value_nullsource
+ output:
+ count_output: 16
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines11-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test use default value on step input parameter with null source""")
+
+ def test_conformance_v1_0_hints_unknown_ignored(self):
+ """Test unknown hints are ignored.
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: hints_unknown_ignored
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat5-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test unknown hints are ignored.""")
+
+ def test_conformance_v1_0_initial_workdir_secondary_files_expr(self):
+ """Test InitialWorkDirRequirement linking input files and capturing secondaryFiles
+on input and output. Also tests the use of a variety of parameter references
+and expressions in the secondaryFiles field.
+
+
+ Generated from::
+
+ job: v1.0/search-job.json
+ label: initial_workdir_secondary_files_expr
+ output:
+ indexedfile:
+ checksum: sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376
+ class: File
+ location: input.txt
+ secondaryFiles:
+ - checksum: sha1$553f3a09003a9f69623f03bec13c0b078d706023
+ class: File
+ location: input.txt.idx1
+ size: 1500
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.idx2
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.txt.idx3
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.txt.idx4
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.txt.idx5
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.idx6.txt
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.txt.idx7
+ size: 0
+ - checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: hello.txt
+ size: 13
+ - class: Directory
+ listing:
+ - basename: index
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: index
+ size: 0
+ location: input.txt_idx8
+ size: 1111
+ outfile:
+ checksum: sha1$e2dc9daaef945ac15f01c238ed2f1660f60909a0
+ class: File
+ location: result.txt
+ size: 142
+ tags:
+ - initial_work_dir
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/search.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test InitialWorkDirRequirement linking input files and capturing secondaryFiles
+on input and output. Also tests the use of a variety of parameter references
+and expressions in the secondaryFiles field.
+""")
+
+ def test_conformance_v1_0_rename(self):
+ """Test InitialWorkDirRequirement with expression in filename.
+
+
+ Generated from::
+
+ job: v1.0/rename-job.json
+ label: rename
+ output:
+ outfile:
+ checksum: sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376
+ class: File
+ location: fish.txt
+ size: 1111
+ tags:
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/rename.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test InitialWorkDirRequirement with expression in filename.
+""")
+
+ def test_conformance_v1_0_initial_workdir_trailingnl(self):
+ """Test if trailing newline is present in file entry in InitialWorkDir
+
+ Generated from::
+
+ job: v1.0/string-job.json
+ label: initial_workdir_trailingnl
+ output:
+ out:
+ checksum: sha1$6a47aa22b2a9d13a66a24b3ee5eaed95ce4753cf
+ class: File
+ location: example.conf
+ size: 16
+ tags:
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/iwdr-entry.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test if trailing newline is present in file entry in InitialWorkDir""")
+
+ def test_conformance_v1_0_inline_expressions(self):
+ """Test inline expressions
+
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: inline_expressions
+ output:
+ output: 16
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/wc4-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test inline expressions
+""")
+
+ def test_conformance_v1_0_schemadef_req_tool_param(self):
+ """Test SchemaDefRequirement definition used in tool parameter
+
+
+ Generated from::
+
+ job: v1.0/schemadef-job.json
+ label: schemadef_req_tool_param
+ output:
+ output:
+ checksum: sha1$f12e6cfe70f3253f70b0dbde17c692e7fb0f1e5e
+ class: File
+ location: output.txt
+ size: 12
+ tags:
+ - schema_def
+ - command_line_tool
+ tool: v1.0/schemadef-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test SchemaDefRequirement definition used in tool parameter
+""")
+
+ def test_conformance_v1_0_schemadef_req_wf_param(self):
+ """Test SchemaDefRequirement definition used in workflow parameter
+
+
+ Generated from::
+
+ job: v1.0/schemadef-job.json
+ label: schemadef_req_wf_param
+ output:
+ output:
+ checksum: sha1$f12e6cfe70f3253f70b0dbde17c692e7fb0f1e5e
+ class: File
+ location: output.txt
+ size: 12
+ tags:
+ - schema_def
+ - workflow
+ tool: v1.0/schemadef-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test SchemaDefRequirement definition used in workflow parameter
+""")
+
+ def test_conformance_v1_0_param_evaluation_noexpr(self):
+ """Test parameter evaluation, no support for JS expressions
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: param_evaluation_noexpr
+ output:
+ t1:
+ bar:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t10: true
+ t11: true
+ t12: null
+ t13: -zab1
+ t14: -zab1
+ t15: -zab1
+ t16: -zab1
+ t17: zab1 zab1
+ t18: zab1 zab1
+ t19: zab1 zab1
+ t2:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t20: zab1 zab1
+ t21: 2 2
+ t22: true true
+ t23: true true
+ t24: null null
+ t25: b
+ t26: b b
+ t27: null
+ t28: 3
+ t3:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t4:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t5: zab1
+ t6: zab1
+ t7: zab1
+ t8: zab1
+ t9: 2
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/params.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test parameter evaluation, no support for JS expressions
+""")
+
+ def test_conformance_v1_0_param_evaluation_expr(self):
+ """Test parameter evaluation, with support for JS expressions
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: param_evaluation_expr
+ output:
+ t1:
+ bar:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t10: true
+ t11: true
+ t12: null
+ t13: -zab1
+ t14: -zab1
+ t15: -zab1
+ t16: -zab1
+ t17: zab1 zab1
+ t18: zab1 zab1
+ t19: zab1 zab1
+ t2:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t20: zab1 zab1
+ t21: 2 2
+ t22: true true
+ t23: true true
+ t24: null null
+ t25: b
+ t26: b b
+ t27: null
+ t28: 3
+ t3:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t4:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t5: zab1
+ t6: zab1
+ t7: zab1
+ t8: zab1
+ t9: 2
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/params2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test parameter evaluation, with support for JS expressions
+""")
+
+ def test_conformance_v1_0_metadata(self):
+ """Test metadata
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: metadata
+ output: {}
+ tags:
+ - required
+ tool: v1.0/metadata.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test metadata""")
+
+ def test_conformance_v1_0_valuefrom_secondexpr_ignored(self):
+ """Test that second expression in concatenated valueFrom is not ignored
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: valuefrom_secondexpr_ignored
+ output:
+ out: 'a string
+
+ '
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/vf-concat.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that second expression in concatenated valueFrom is not ignored""")
+
+ def test_conformance_v1_0_docker_json_output_path(self):
+ """Test support for reading cwl.output.json when running in a Docker container
+and just 'path' is provided.
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: docker_json_output_path
+ output:
+ foo:
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ class: File
+ location: foo
+ size: 4
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/test-cwl-out.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test support for reading cwl.output.json when running in a Docker container
+and just 'path' is provided.
+""")
+
+ def test_conformance_v1_0_docker_json_output_location(self):
+ """Test support for reading cwl.output.json when running in a Docker container
+and just 'location' is provided.
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: docker_json_output_location
+ output:
+ foo:
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ class: File
+ location: foo
+ size: 4
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/test-cwl-out2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test support for reading cwl.output.json when running in a Docker container
+and just 'location' is provided.
+""")
+
+ def test_conformance_v1_0_wf_two_inputfiles_namecollision(self):
+ """Test workflow two input files with same name.
+
+ Generated from::
+
+ job: v1.0/conflict-job.json
+ label: wf_two_inputfiles_namecollision
+ output:
+ fileout:
+ checksum: sha1$a2d8d6e7b28295dc9977dc3bdb652ddd480995f0
+ class: File
+ location: out.txt
+ size: 25
+ tags:
+ - required
+ - workflow
+ tool: v1.0/conflict-wf.cwl#collision
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow two input files with same name.""")
+
+ def test_conformance_v1_0_directory_input_docker(self):
+ """Test directory input in Docker
+
+ Generated from::
+
+ job: v1.0/dir-job.yml
+ label: directory_input_docker
+ output:
+ outlist:
+ checksum: sha1$13cda8661796ae241da3a18668fb552161a72592
+ class: File
+ location: output.txt
+ size: 20
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/dir2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test directory input in Docker""")
+
+ def test_conformance_v1_0_directory_output(self):
+ """Test directory output
+
+ Generated from::
+
+ job: v1.0/dir3-job.yml
+ label: directory_output
+ output:
+ outdir:
+ class: Directory
+ listing:
+ - checksum: sha1$dd0a4c4c49ba43004d6611771972b6cf969c1c01
+ class: File
+ location: goodbye.txt
+ size: 24
+ - checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: hello.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/dir3.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test directory output""")
+
+ def test_conformance_v1_0_writable_stagedfiles(self):
+ """Test writable staged files.
+
+ Generated from::
+
+ job: v1.0/stagefile-job.yml
+ label: writable_stagedfiles
+ output:
+ outfile:
+ checksum: sha1$b769c7b2e316edd4b5eb2d24799b2c1f9d8c86e6
+ class: File
+ location: bob.txt
+ size: 1111
+ tags:
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/stagefile.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test writable staged files.""")
+
+ def test_conformance_v1_0_input_file_literal(self):
+ """Test file literal as input
+
+ Generated from::
+
+ job: v1.0/file-literal.yml
+ label: input_file_literal
+ output:
+ output_file:
+ checksum: sha1$d0e04ff6c413c7d57f9a0ca0a33cd3ab52e2dd9c
+ class: File
+ location: output.txt
+ size: 18
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test file literal as input""")
+
+ def test_conformance_v1_0_initial_workdir_expr(self):
+ """Test expression in InitialWorkDir listing
+
+ Generated from::
+
+ job: v1.0/arguments-job.yml
+ label: initial_workdir_expr
+ output:
+ classfile:
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: Hello.class
+ size: 0
+ tags:
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/linkfile.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test expression in InitialWorkDir listing""")
+
+ def test_conformance_v1_0_nameroot_nameext_stdout_expr(self):
+ """Test nameroot/nameext expression in arguments, stdout
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: nameroot_nameext_stdout_expr
+ output:
+ b:
+ checksum: sha1$c4cfd130e7578714e3eef91c1d6d90e0e0b9db3e
+ class: File
+ location: whale.xtx
+ size: 21
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/nameroot.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test nameroot/nameext expression in arguments, stdout""")
+
+ def test_conformance_v1_0_cl_gen_arrayofarrays(self):
+ """Test command line generation of array-of-arrays
+
+ Generated from::
+
+ job: v1.0/nested-array-job.yml
+ label: cl_gen_arrayofarrays
+ output:
+ echo:
+ checksum: sha1$3f786850e387550fdab836ed7e6dc881de23001b
+ class: File
+ location: echo.txt
+ size: 2
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/nested-array.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line generation of array-of-arrays""")
+
+ def test_conformance_v1_0_initial_workdir_output(self):
+ """Test output of InitialWorkDir
+
+ Generated from::
+
+ job: v1.0/initialworkdirrequirement-docker-out-job.json
+ label: initial_workdir_output
+ output:
+ OUTPUT:
+ checksum: sha1$aeb3d11bdf536511649129f4077d5cda6a324118
+ class: File
+ location: ref.fasta
+ secondaryFiles:
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: ref.fasta.fai
+ size: 0
+ size: 12010
+ tags:
+ - docker
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/initialworkdirrequirement-docker-out.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test output of InitialWorkDir""")
+
+ def test_conformance_v1_0_exprtool_directory_literal(self):
+ """Test directory literal output created by ExpressionTool
+
+ Generated from::
+
+ job: v1.0/dir7.yml
+ label: exprtool_directory_literal
+ output:
+ dir:
+ class: Directory
+ listing:
+ - checksum: sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376
+ class: File
+ location: whale.txt
+ size: 1111
+ - checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: hello.txt
+ size: 13
+ location: a_directory
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/dir7.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test directory literal output created by ExpressionTool""")
+
+ def test_conformance_v1_0_exprtool_file_literal(self):
+ """Test file literal output created by ExpressionTool
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: exprtool_file_literal
+ output:
+ lit:
+ checksum: sha1$fea23663b9c8ed71968f86415b5ec091bb111448
+ class: File
+ location: a_file
+ size: 19
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/file-literal-ex.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test file literal output created by ExpressionTool""")
+
+ def test_conformance_v1_0_hints_import(self):
+ """Test hints with $import
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: hints_import
+ output:
+ out:
+ checksum: sha1$b3ec4ed1749c207e52b3a6d08c59f31d83bff519
+ class: File
+ location: out
+ size: 15
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/imported-hint.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test hints with $import""")
+
+ def test_conformance_v1_0_default_path_notfound_warning(self):
+ """Test warning instead of error when default path is not found
+
+ Generated from::
+
+ job: v1.0/default_path_job.yml
+ label: default_path_notfound_warning
+ output: {}
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/default_path.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test warning instead of error when default path is not found""")
+
+ def test_conformance_v1_0_null_missing_params(self):
+ """Test that missing parameters are null (not undefined) in expression
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: null_missing_params
+ output:
+ out: 't
+
+ '
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/null-defined.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that missing parameters are null (not undefined) in expression""")
+
+ def test_conformance_v1_0_wf_compound_doc(self):
+ """Test compound workflow document
+
+ Generated from::
+
+ job: v1.0/revsort-job.json
+ label: wf_compound_doc
+ output:
+ output:
+ checksum: sha1$b9214658cc453331b62c2282b772a5c063dbd284
+ class: File
+ location: output.txt
+ size: 1111
+ tags:
+ - required
+ - workflow
+ tool: v1.0/revsort-packed.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test compound workflow document""")
+
+ def test_conformance_v1_0_initialworkpath_output(self):
+ """Test that file path in $(inputs) for initialworkdir is in $(outdir).
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: initialworkpath_output
+ output: {}
+ tags:
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/initialwork-path.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that file path in $(inputs) for initialworkdir is in $(outdir).""")
+
+ def test_conformance_v1_0_shelldir_notinterpreted(self):
+ """Test that shell directives are not interpreted.
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: shelldir_notinterpreted
+ output:
+ stderr_file:
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: Any
+ size: 0
+ stdout_file:
+ checksum: sha1$1555252d52d4ec3262538a4426a83a99cfff4402
+ class: File
+ location: Any
+ size: 9
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/shellchar.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that shell directives are not interpreted.""")
+
+ def test_conformance_v1_0_initial_workdir_empty_writable(self):
+ """Test empty writable dir with InitialWorkDirRequirement
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: initial_workdir_empty_writable
+ output:
+ out:
+ basename: emptyWritableDir
+ class: Directory
+ listing:
+ - basename: blurg
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: blurg
+ size: 0
+ location: emptyWritableDir
+ tags:
+ - inline_javascript
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/writable-dir.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test empty writable dir with InitialWorkDirRequirement""")
+
+ def test_conformance_v1_0_initial_workdir_empty_writable_docker(self):
+ """Test empty writable dir with InitialWorkDirRequirement inside Docker
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: initial_workdir_empty_writable_docker
+ output:
+ out:
+ basename: emptyWritableDir
+ class: Directory
+ listing:
+ - basename: blurg
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: blurg
+ size: 0
+ location: emptyWritableDir
+ tags:
+ - inline_javascript
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/writable-dir-docker.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test empty writable dir with InitialWorkDirRequirement inside Docker""")
+
+ def test_conformance_v1_0_fileliteral_input_docker(self):
+ """Test file literal as input without Docker
+
+ Generated from::
+
+ job: v1.0/file-literal.yml
+ label: fileliteral_input_docker
+ output:
+ output_file:
+ checksum: sha1$d0e04ff6c413c7d57f9a0ca0a33cd3ab52e2dd9c
+ class: File
+ location: output.txt
+ size: 18
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-nodocker.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test file literal as input without Docker""")
+
+ def test_conformance_v1_0_outputbinding_glob_sorted(self):
+ """Test that OutputBinding.glob is sorted as specified by POSIX
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: outputbinding_glob_sorted
+ output:
+ letters:
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: a
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: b
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: c
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: w
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: x
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: y
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: z
+ size: 0
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/glob_test.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that OutputBinding.glob is sorted as specified by POSIX""")
+
+ def test_conformance_v1_0_booleanflags_cl_noinputbinding(self):
+ """Test that boolean flags do not appear on command line if inputBinding is empty and not null
+
+ Generated from::
+
+ job: v1.0/bool-empty-inputbinding-job.json
+ label: booleanflags_cl_noinputbinding
+ output:
+ args: []
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/bool-empty-inputbinding.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that boolean flags do not appear on command line if inputBinding is empty and not null""")
+
+ def test_conformance_v1_0_expr_reference_self_noinput(self):
+ """Test that expression engine does not fail to evaluate reference to self with unprovided input
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: expr_reference_self_noinput
+ output:
+ args: []
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/stage-unprovided-file.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that expression engine does not fail to evaluate reference to self with unprovided input""")
+
+ def test_conformance_v1_0_success_codes(self):
+ """Test successCodes
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: success_codes
+ output: {}
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/exit-success.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test successCodes""")
+
+ def test_conformance_v1_0_cl_empty_array_input(self):
+ """Test that empty array input does not add anything to command line
+
+ Generated from::
+
+ job: v1.0/empty-array-job.json
+ label: cl_empty_array_input
+ output:
+ args: []
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/empty-array-input.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that empty array input does not add anything to command line""")
+
+ def test_conformance_v1_0_resreq_step_overrides_wf(self):
+ """Test that ResourceRequirement on a step level redefines requirement on the workflow level
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: resreq_step_overrides_wf
+ output:
+ out:
+ checksum: sha1$e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e
+ class: File
+ location: cores.txt
+ size: 2
+ tags:
+ - resource
+ - workflow
+ tool: v1.0/steplevel-resreq.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that ResourceRequirement on a step level redefines requirement on the workflow level""")
+
+ def test_conformance_v1_0_valuefrom_constant_overrides_inputs(self):
+ """Test valueFrom with constant value overriding provided array inputs
+
+ Generated from::
+
+ job: v1.0/array-of-strings-job.yml
+ label: valuefrom_constant_overrides_inputs
+ output:
+ args:
+ - replacementValue
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/valueFrom-constant.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test valueFrom with constant value overriding provided array inputs""")
+
+ def test_conformance_v1_0_wf_step_connect_undeclared_param(self):
+ """Test that it is not an error to connect a parameter to a workflow
+step, even if the parameter doesn't appear in the `run` process
+inputs.
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_step_connect_undeclared_param
+ output:
+ out: 'hello inp1
+
+ '
+ tags:
+ - required
+ - workflow
+ tool: v1.0/pass-unconnected.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that it is not an error to connect a parameter to a workflow
+step, even if the parameter doesn't appear in the `run` process
+inputs.
+""")
+
+ def test_conformance_v1_0_wf_step_access_undeclared_param(self):
+ """Test that parameters that don't appear in the `run` process
+inputs are not present in the input object used to run the tool.
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_step_access_undeclared_param
+ should_fail: true
+ tags:
+ - required
+ - workflow
+ tool: v1.0/fail-unconnected.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that parameters that don't appear in the `run` process
+inputs are not present in the input object used to run the tool.
+""")
+
diff --git a/test/api/test_cwl_conformance_required_v1_0.py b/test/api/test_cwl_conformance_required_v1_0.py
new file mode 100644
index 000000000000..92c94848e87b
--- /dev/null
+++ b/test/api/test_cwl_conformance_required_v1_0.py
@@ -0,0 +1,875 @@
+
+"""Test CWL conformance for version v1.0."""
+
+from .test_workflows_cwl import BaseCwlWorklfowTestCase
+
+
+class CwlConformanceTestCase(BaseCwlWorklfowTestCase):
+    """Test case mapping to CWL conformance tests for version v1.0."""
+
+ def test_conformance_v1_0_cl_basic_generation(self):
+ """General test of command line generation
+
+ Generated from::
+
+ job: v1.0/bwa-mem-job.json
+ label: cl_basic_generation
+ output:
+ args:
+ - bwa
+ - mem
+ - -t
+ - '2'
+ - -I
+ - 1,2,3,4
+ - -m
+ - '3'
+ - chr20.fa
+ - example_human_Illumina.pe_1.fastq
+ - example_human_Illumina.pe_2.fastq
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/bwa-mem-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """General test of command line generation""")
+
+ def test_conformance_v1_0_nested_prefixes_arrays(self):
+ """Test nested prefixes with arrays
+
+ Generated from::
+
+ job: v1.0/bwa-mem-job.json
+ label: nested_prefixes_arrays
+ output:
+ args:
+ - bwa
+ - mem
+ - chr20.fa
+ - -XXX
+ - -YYY
+ - example_human_Illumina.pe_1.fastq
+ - -YYY
+ - example_human_Illumina.pe_2.fastq
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/binding-test.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test nested prefixes with arrays""")
+
+ def test_conformance_v1_0_cl_optional_inputs_missing(self):
+ """Test command line with optional input (missing)
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: cl_optional_inputs_missing
+ output:
+ args:
+ - cat
+ - hello.txt
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat1-testcli.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with optional input (missing)""")
+
+ def test_conformance_v1_0_cl_optional_bindings_provided(self):
+ """Test command line with optional input (provided)
+
+ Generated from::
+
+ job: v1.0/cat-n-job.json
+ label: cl_optional_bindings_provided
+ output:
+ args:
+ - cat
+ - -n
+ - hello.txt
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat1-testcli.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with optional input (provided)""")
+
+ def test_conformance_v1_0_stdout_redirect_docker(self):
+ """Test command execution in Docker with stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdout_redirect_docker
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with stdout redirection""")
+
+ def test_conformance_v1_0_stdout_redirect_shortcut_docker(self):
+ """Test command execution in Docker with shortcut stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdout_redirect_shortcut_docker
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: Any
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool-shortcut.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with shortcut stdout redirection""")
+
+ def test_conformance_v1_0_stdout_redirect_mediumcut_docker(self):
+ """Test command execution in Docker with mediumcut stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdout_redirect_mediumcut_docker
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: cat-out
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool-mediumcut.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with mediumcut stdout redirection""")
+
+ def test_conformance_v1_0_stdinout_redirect_docker(self):
+ """Test command execution in Docker with stdin and stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdinout_redirect_docker
+ output:
+ output_txt:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat4-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with stdin and stdout redirection""")
+
+ def test_conformance_v1_0_any_outputSource_compatibility(self):
+ """Testing Any type compatibility in outputSource
+
+ Generated from::
+
+ job: v1.0/any-type-job.json
+ label: any_outputSource_compatibility
+ output:
+ output1:
+ - hello
+ - world
+ output2:
+ - foo
+ - bar
+ output3: hello
+ tags:
+ - required
+ - workflow
+ tool: v1.0/any-type-compat.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Testing Any type compatibility in outputSource""")
+
+ def test_conformance_v1_0_stdinout_redirect(self):
+ """Test command execution in with stdin and stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdinout_redirect
+ output:
+ output:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in with stdin and stdout redirection""")
+
+ def test_conformance_v1_0_wf_default_tool_default(self):
+ """Test that workflow defaults override tool defaults
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_default_tool_default
+ output:
+ default_output: workflow_default
+ tags:
+ - required
+ - workflow
+ tool: v1.0/echo-wf-default.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that workflow defaults override tool defaults""")
+
+ def test_conformance_v1_0_any_input_param(self):
+ """Test Any type input parameter
+
+ Generated from::
+
+ job: v1.0/env-job.json
+ label: any_input_param
+ output:
+ out: 'hello test env
+
+ '
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/echo-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any type input parameter""")
+
+ def test_conformance_v1_0_wf_simple(self):
+ """Test simple workflow
+
+ Generated from::
+
+ job: v1.0/revsort-job.json
+ label: wf_simple
+ output:
+ output:
+ checksum: sha1$b9214658cc453331b62c2282b772a5c063dbd284
+ class: File
+ location: output.txt
+ size: 1111
+ tags:
+ - required
+ - workflow
+ tool: v1.0/revsort.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test simple workflow""")
+
+ def test_conformance_v1_0_hints_unknown_ignored(self):
+ """Test unknown hints are ignored.
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: hints_unknown_ignored
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat5-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test unknown hints are ignored.""")
+
+ def test_conformance_v1_0_param_evaluation_noexpr(self):
+ """Test parameter evaluation, no support for JS expressions
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: param_evaluation_noexpr
+ output:
+ t1:
+ bar:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t10: true
+ t11: true
+ t12: null
+ t13: -zab1
+ t14: -zab1
+ t15: -zab1
+ t16: -zab1
+ t17: zab1 zab1
+ t18: zab1 zab1
+ t19: zab1 zab1
+ t2:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t20: zab1 zab1
+ t21: 2 2
+ t22: true true
+ t23: true true
+ t24: null null
+ t25: b
+ t26: b b
+ t27: null
+ t28: 3
+ t3:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t4:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t5: zab1
+ t6: zab1
+ t7: zab1
+ t8: zab1
+ t9: 2
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/params.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test parameter evaluation, no support for JS expressions
+""")
+
+ def test_conformance_v1_0_metadata(self):
+ """Test metadata
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: metadata
+ output: {}
+ tags:
+ - required
+ tool: v1.0/metadata.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test metadata""")
+
+ def test_conformance_v1_0_format_checking(self):
+ """Test simple format checking.
+
+
+ Generated from::
+
+ job: v1.0/formattest-job.json
+ label: format_checking
+ output:
+ output:
+ checksum: sha1$97fe1b50b4582cebc7d853796ebd62e3e163aa3f
+ class: File
+ format: http://edamontology.org/format_2330
+ location: output.txt
+ size: 1111
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/formattest.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test simple format checking.
+""")
+
+ def test_conformance_v1_0_format_checking_subclass(self):
+ """Test format checking against ontology using subclassOf.
+
+
+ Generated from::
+
+ job: v1.0/formattest2-job.json
+ label: format_checking_subclass
+ output:
+ output:
+ checksum: sha1$971d88faeda85a796752ecf752b7e2e34f1337ce
+ class: File
+ format: http://edamontology.org/format_1929
+ location: output.txt
+ size: 12010
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/formattest2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test format checking against ontology using subclassOf.
+""")
+
+ def test_conformance_v1_0_format_checking_equivalentclass(self):
+ """Test format checking against ontology using equivalentClass.
+
+
+ Generated from::
+
+ job: v1.0/formattest2-job.json
+ label: format_checking_equivalentclass
+ output:
+ output:
+ checksum: sha1$971d88faeda85a796752ecf752b7e2e34f1337ce
+ class: File
+ format: http://edamontology.org/format_1929
+ location: output.txt
+ size: 12010
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/formattest3.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test format checking against ontology using equivalentClass.
+""")
+
+ def test_conformance_v1_0_multiple_glob_expr_list(self):
+ """Test support for returning multiple glob patterns from expression
+
+ Generated from::
+
+ job: v1.0/abc.json
+ label: multiple_glob_expr_list
+ output:
+ files:
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: a
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: b
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: c
+ size: 0
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/glob-expr-list.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test support for returning multiple glob patterns from expression""")
+
+ def test_conformance_v1_0_wf_two_inputfiles_namecollision(self):
+ """Test workflow two input files with same name.
+
+ Generated from::
+
+ job: v1.0/conflict-job.json
+ label: wf_two_inputfiles_namecollision
+ output:
+ fileout:
+ checksum: sha1$a2d8d6e7b28295dc9977dc3bdb652ddd480995f0
+ class: File
+ location: out.txt
+ size: 25
+ tags:
+ - required
+ - workflow
+ tool: v1.0/conflict-wf.cwl#collision
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow two input files with same name.""")
+
+ def test_conformance_v1_0_directory_input_docker(self):
+ """Test directory input in Docker
+
+ Generated from::
+
+ job: v1.0/dir-job.yml
+ label: directory_input_docker
+ output:
+ outlist:
+ checksum: sha1$13cda8661796ae241da3a18668fb552161a72592
+ class: File
+ location: output.txt
+ size: 20
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/dir2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test directory input in Docker""")
+
+ def test_conformance_v1_0_directory_output(self):
+ """Test directory output
+
+ Generated from::
+
+ job: v1.0/dir3-job.yml
+ label: directory_output
+ output:
+ outdir:
+ class: Directory
+ listing:
+ - checksum: sha1$dd0a4c4c49ba43004d6611771972b6cf969c1c01
+ class: File
+ location: goodbye.txt
+ size: 24
+ - checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: hello.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/dir3.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test directory output""")
+
+ def test_conformance_v1_0_input_file_literal(self):
+ """Test file literal as input
+
+ Generated from::
+
+ job: v1.0/file-literal.yml
+ label: input_file_literal
+ output:
+ output_file:
+ checksum: sha1$d0e04ff6c413c7d57f9a0ca0a33cd3ab52e2dd9c
+ class: File
+ location: output.txt
+ size: 18
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test file literal as input""")
+
+ def test_conformance_v1_0_nameroot_nameext_stdout_expr(self):
+ """Test nameroot/nameext expression in arguments, stdout
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: nameroot_nameext_stdout_expr
+ output:
+ b:
+ checksum: sha1$c4cfd130e7578714e3eef91c1d6d90e0e0b9db3e
+ class: File
+ location: whale.xtx
+ size: 21
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/nameroot.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test nameroot/nameext expression in arguments, stdout""")
+
+ def test_conformance_v1_0_cl_gen_arrayofarrays(self):
+ """Test command line generation of array-of-arrays
+
+ Generated from::
+
+ job: v1.0/nested-array-job.yml
+ label: cl_gen_arrayofarrays
+ output:
+ echo:
+ checksum: sha1$3f786850e387550fdab836ed7e6dc881de23001b
+ class: File
+ location: echo.txt
+ size: 2
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/nested-array.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line generation of array-of-arrays""")
+
+ def test_conformance_v1_0_hints_import(self):
+ """Test hints with $import
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: hints_import
+ output:
+ out:
+ checksum: sha1$b3ec4ed1749c207e52b3a6d08c59f31d83bff519
+ class: File
+ location: out
+ size: 15
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/imported-hint.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test hints with $import""")
+
+ def test_conformance_v1_0_default_path_notfound_warning(self):
+ """Test warning instead of error when default path is not found
+
+ Generated from::
+
+ job: v1.0/default_path_job.yml
+ label: default_path_notfound_warning
+ output: {}
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/default_path.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test warning instead of error when default path is not found""")
+
+ def test_conformance_v1_0_wf_compound_doc(self):
+ """Test compound workflow document
+
+ Generated from::
+
+ job: v1.0/revsort-job.json
+ label: wf_compound_doc
+ output:
+ output:
+ checksum: sha1$b9214658cc453331b62c2282b772a5c063dbd284
+ class: File
+ location: output.txt
+ size: 1111
+ tags:
+ - required
+ - workflow
+ tool: v1.0/revsort-packed.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test compound workflow document""")
+
+ def test_conformance_v1_0_shelldir_notinterpreted(self):
+ """Test that shell directives are not interpreted.
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: shelldir_notinterpreted
+ output:
+ stderr_file:
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: Any
+ size: 0
+ stdout_file:
+ checksum: sha1$1555252d52d4ec3262538a4426a83a99cfff4402
+ class: File
+ location: Any
+ size: 9
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/shellchar.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that shell directives are not interpreted.""")
+
+ def test_conformance_v1_0_fileliteral_input_docker(self):
+ """Test file literal as input without Docker
+
+ Generated from::
+
+ job: v1.0/file-literal.yml
+ label: fileliteral_input_docker
+ output:
+ output_file:
+ checksum: sha1$d0e04ff6c413c7d57f9a0ca0a33cd3ab52e2dd9c
+ class: File
+ location: output.txt
+ size: 18
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-nodocker.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test file literal as input without Docker""")
+
+ def test_conformance_v1_0_outputbinding_glob_sorted(self):
+ """Test that OutputBinding.glob is sorted as specified by POSIX
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: outputbinding_glob_sorted
+ output:
+ letters:
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: a
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: b
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: c
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: w
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: x
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: y
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: z
+ size: 0
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/glob_test.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that OutputBinding.glob is sorted as specified by POSIX""")
+
+ def test_conformance_v1_0_booleanflags_cl_noinputbinding(self):
+ """Test that boolean flags do not appear on command line if inputBinding is empty and not null
+
+ Generated from::
+
+ job: v1.0/bool-empty-inputbinding-job.json
+ label: booleanflags_cl_noinputbinding
+ output:
+ args: []
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/bool-empty-inputbinding.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that boolean flags do not appear on command line if inputBinding is empty and not null""")
+
+ def test_conformance_v1_0_expr_reference_self_noinput(self):
+ """Test that expression engine does not fail to evaluate reference to self with unprovided input
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: expr_reference_self_noinput
+ output:
+ args: []
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/stage-unprovided-file.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that expression engine does not fail to evaluate reference to self with unprovided input""")
+
+ def test_conformance_v1_0_success_codes(self):
+ """Test successCodes
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: success_codes
+ output: {}
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/exit-success.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test successCodes""")
+
+ def test_conformance_v1_0_cl_empty_array_input(self):
+ """Test that empty array input does not add anything to command line
+
+ Generated from::
+
+ job: v1.0/empty-array-job.json
+ label: cl_empty_array_input
+ output:
+ args: []
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/empty-array-input.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that empty array input does not add anything to command line""")
+
+ def test_conformance_v1_0_valuefrom_constant_overrides_inputs(self):
+ """Test valueFrom with constant value overriding provided array inputs
+
+ Generated from::
+
+ job: v1.0/array-of-strings-job.yml
+ label: valuefrom_constant_overrides_inputs
+ output:
+ args:
+ - replacementValue
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/valueFrom-constant.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test valueFrom with constant value overriding provided array inputs""")
+
+ def test_conformance_v1_0_wf_step_connect_undeclared_param(self):
+ """Test that it is not an error to connect a parameter to a workflow
+step, even if the parameter doesn't appear in the `run` process
+inputs.
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_step_connect_undeclared_param
+ output:
+ out: 'hello inp1
+
+ '
+ tags:
+ - required
+ - workflow
+ tool: v1.0/pass-unconnected.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that it is not an error to connect a parameter to a workflow
+step, even if the parameter doesn't appear in the `run` process
+inputs.
+""")
+
+ def test_conformance_v1_0_wf_step_access_undeclared_param(self):
+ """Test that parameters that don't appear in the `run` process
+inputs are not present in the input object used to run the tool.
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_step_access_undeclared_param
+ should_fail: true
+ tags:
+ - required
+ - workflow
+ tool: v1.0/fail-unconnected.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that parameters that don't appear in the `run` process
+inputs are not present in the input object used to run the tool.
+""")
+
diff --git a/test/api/test_cwl_conformance_v1_0.py b/test/api/test_cwl_conformance_v1_0.py
new file mode 100644
index 000000000000..1d43298242e5
--- /dev/null
+++ b/test/api/test_cwl_conformance_v1_0.py
@@ -0,0 +1,3477 @@
+
+"""Test CWL conformance for version v1.0."""
+
+from .test_workflows_cwl import BaseCwlWorklfowTestCase
+
+
+class CwlConformanceTestCase(BaseCwlWorklfowTestCase):
    +    """Test case mapping to CWL conformance tests for version v1.0."""
+
+ def test_conformance_v1_0_cl_basic_generation(self):
+ """General test of command line generation
+
+ Generated from::
+
+ job: v1.0/bwa-mem-job.json
+ label: cl_basic_generation
+ output:
+ args:
+ - bwa
+ - mem
+ - -t
+ - '2'
+ - -I
+ - 1,2,3,4
+ - -m
+ - '3'
+ - chr20.fa
+ - example_human_Illumina.pe_1.fastq
+ - example_human_Illumina.pe_2.fastq
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/bwa-mem-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """General test of command line generation""")
+
+ def test_conformance_v1_0_nested_prefixes_arrays(self):
+ """Test nested prefixes with arrays
+
+ Generated from::
+
+ job: v1.0/bwa-mem-job.json
+ label: nested_prefixes_arrays
+ output:
+ args:
+ - bwa
+ - mem
+ - chr20.fa
+ - -XXX
+ - -YYY
+ - example_human_Illumina.pe_1.fastq
+ - -YYY
+ - example_human_Illumina.pe_2.fastq
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/binding-test.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test nested prefixes with arrays""")
+
+ def test_conformance_v1_0_nested_cl_bindings(self):
+ """Test nested command line bindings
+
+ Generated from::
+
+ job: v1.0/tmap-job.json
+ label: nested_cl_bindings
+ output:
+ args:
+ - tmap
+ - mapall
+ - stage1
+ - map1
+ - --min-seq-length
+ - '20'
+ - map2
+ - --min-seq-length
+ - '20'
+ - stage2
+ - map1
+ - --max-seq-length
+ - '20'
+ - --min-seq-length
+ - '10'
+ - --seed-length
+ - '16'
+ - map2
+ - --max-seed-hits
+ - '-1'
+ - --max-seq-length
+ - '20'
+ - --min-seq-length
+ - '10'
+ tags:
+ - schema_def
+ - command_line_tool
+ tool: v1.0/tmap-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test nested command line bindings""")
+
+ def test_conformance_v1_0_cl_optional_inputs_missing(self):
+ """Test command line with optional input (missing)
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: cl_optional_inputs_missing
+ output:
+ args:
+ - cat
+ - hello.txt
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat1-testcli.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with optional input (missing)""")
+
+ def test_conformance_v1_0_cl_optional_bindings_provided(self):
+ """Test command line with optional input (provided)
+
+ Generated from::
+
+ job: v1.0/cat-n-job.json
+ label: cl_optional_bindings_provided
+ output:
+ args:
+ - cat
+ - -n
+ - hello.txt
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat1-testcli.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with optional input (provided)""")
+
+ def test_conformance_v1_0_initworkdir_expreng_requirements(self):
+ """Test InitialWorkDirRequirement ExpressionEngineRequirement.engineConfig feature
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: initworkdir_expreng_requirements
+ output:
+ foo:
+ checksum: sha1$63da67422622fbf9251a046d7a34b7ea0fd4fead
+ class: File
+ location: foo.txt
+ size: 22
+ tags:
+ - initial_work_dir
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/template-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test InitialWorkDirRequirement ExpressionEngineRequirement.engineConfig feature""")
+
+ def test_conformance_v1_0_stdout_redirect_docker(self):
+ """Test command execution in Docker with stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdout_redirect_docker
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with stdout redirection""")
+
+ def test_conformance_v1_0_stdout_redirect_shortcut_docker(self):
+ """Test command execution in Docker with shortcut stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdout_redirect_shortcut_docker
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: Any
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool-shortcut.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with shortcut stdout redirection""")
+
+ def test_conformance_v1_0_stdout_redirect_mediumcut_docker(self):
+ """Test command execution in Docker with mediumcut stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdout_redirect_mediumcut_docker
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: cat-out
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool-mediumcut.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with mediumcut stdout redirection""")
+
+ def test_conformance_v1_0_stderr_redirect(self):
+ """Test command line with stderr redirection
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: stderr_redirect
+ output:
+ output_file:
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ class: File
+ location: error.txt
+ size: 4
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/stderr.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with stderr redirection""")
+
+ def test_conformance_v1_0_stderr_redirect_shortcut(self):
+ """Test command line with stderr redirection, brief syntax
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: stderr_redirect_shortcut
+ output:
+ output_file:
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ class: File
+ location: Any
+ size: 4
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/stderr-shortcut.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with stderr redirection, brief syntax""")
+
+ def test_conformance_v1_0_stderr_redirect_mediumcut(self):
+ """Test command line with stderr redirection, named brief syntax
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: stderr_redirect_mediumcut
+ output:
+ output_file:
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ class: File
+ location: std.err
+ size: 4
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/stderr-mediumcut.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line with stderr redirection, named brief syntax""")
+
+ def test_conformance_v1_0_stdinout_redirect_docker(self):
+ """Test command execution in Docker with stdin and stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdinout_redirect_docker
+ output:
+ output_txt:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat4-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in Docker with stdin and stdout redirection""")
+
+ def test_conformance_v1_0_expression_any(self):
+ """Test default usage of Any in expressions.
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: expression_any
+ output:
+ output: 1
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression1-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test default usage of Any in expressions.""")
+
+ def test_conformance_v1_0_expression_any_null(self):
+ """Test explicitly passing null to Any type inputs with default values.
+
+ Generated from::
+
+ job: v1.0/null-expression1-job.json
+ label: expression_any_null
+ output:
+ output: 1
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression1-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test explicitly passing null to Any type inputs with default values.""")
+
+ def test_conformance_v1_0_expression_any_string(self):
+ """Testing the string 'null' does not trip up an Any with a default value.
+
+ Generated from::
+
+ job: v1.0/null-expression2-job.json
+ label: expression_any_string
+ output:
+ output: 2
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression1-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Testing the string 'null' does not trip up an Any with a default value.""")
+
+ def test_conformance_v1_0_expression_any_nodefaultany(self):
+ """Test Any without defaults cannot be unspecified.
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: expression_any_nodefaultany
+ should_fail: true
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression2-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any without defaults cannot be unspecified.""")
+
+ def test_conformance_v1_0_expression_any_null_nodefaultany(self):
+ """Test explicitly passing null to Any type without a default value.
+
+ Generated from::
+
+ job: v1.0/null-expression1-job.json
+ label: expression_any_null_nodefaultany
+ should_fail: true
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression2-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test explicitly passing null to Any type without a default value.""")
+
+ def test_conformance_v1_0_expression_any_nullstring_nodefaultany(self):
+ """Testing the string 'null' does not trip up an Any without a default value.
+
+ Generated from::
+
+ job: v1.0/null-expression2-job.json
+ label: expression_any_nullstring_nodefaultany
+ output:
+ output: 2
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/null-expression2-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Testing the string 'null' does not trip up an Any without a default value.""")
+
+ def test_conformance_v1_0_any_outputSource_compatibility(self):
+ """Testing Any type compatibility in outputSource
+
+ Generated from::
+
+ job: v1.0/any-type-job.json
+ label: any_outputSource_compatibility
+ output:
+ output1:
+ - hello
+ - world
+ output2:
+ - foo
+ - bar
+ output3: hello
+ tags:
+ - required
+ - workflow
+ tool: v1.0/any-type-compat.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Testing Any type compatibility in outputSource""")
+
+ def test_conformance_v1_0_stdinout_redirect(self):
+ """Test command execution in with stdin and stdout redirection
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: stdinout_redirect
+ output:
+ output:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command execution in with stdin and stdout redirection""")
+
+ def test_conformance_v1_0_expression_parseint(self):
+ """Test ExpressionTool with Docker-based expression engine
+
+ Generated from::
+
+ job: v1.0/parseInt-job.json
+ label: expression_parseint
+ output:
+ output: 42
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/parseInt-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test ExpressionTool with Docker-based expression engine""")
+
+ def test_conformance_v1_0_expression_outputEval(self):
+ """Test outputEval to transform output
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: expression_outputEval
+ output:
+ output: 16
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/wc2-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test outputEval to transform output""")
+
+ def test_conformance_v1_0_wf_wc_parseInt(self):
+ """Test two step workflow with imported tools
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: wf_wc_parseInt
+ output:
+ count_output: 16
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines1-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test two step workflow with imported tools""")
+
+ def test_conformance_v1_0_wf_wc_expressiontool(self):
+ """Test two step workflow with inline tools
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: wf_wc_expressiontool
+ output:
+ count_output: 16
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines2-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test two step workflow with inline tools""")
+
+ def test_conformance_v1_0_wf_wc_scatter(self):
+ """Test single step workflow with Scatter step
+
+ Generated from::
+
+ job: v1.0/count-lines3-job.json
+ label: wf_wc_scatter
+ output:
+ count_output:
+ - 16
+ - 1
+ tags:
+ - scatter
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines3-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test single step workflow with Scatter step""")
+
+ def test_conformance_v1_0_wf_wc_scatter_multiple_merge(self):
+ """Test single step workflow with Scatter step and two data links connected to
+same input, default merge behavior
+
+
+ Generated from::
+
+ job: v1.0/count-lines4-job.json
+ label: wf_wc_scatter_multiple_merge
+ output:
+ count_output:
+ - 16
+ - 1
+ tags:
+ - scatter
+ - multiple_input
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines4-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test single step workflow with Scatter step and two data links connected to
+same input, default merge behavior
+""")
+
+ def test_conformance_v1_0_wf_wc_scatter_multiple_nested(self):
+ """Test single step workflow with Scatter step and two data links connected to
+same input, nested merge behavior
+
+
+ Generated from::
+
+ job: v1.0/count-lines6-job.json
+ label: wf_wc_scatter_multiple_nested
+ output:
+ count_output:
+ - 32
+ - 2
+ tags:
+ - scatter
+ - multiple_input
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines6-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test single step workflow with Scatter step and two data links connected to
+same input, nested merge behavior
+""")
+
+ def test_conformance_v1_0_wf_wc_scatter_multiple_flattened(self):
+ """Test single step workflow with Scatter step and two data links connected to
+same input, flattened merge behavior
+
+
+ Generated from::
+
+ job: v1.0/count-lines6-job.json
+ label: wf_wc_scatter_multiple_flattened
+ output:
+ count_output: 34
+ tags:
+ - multiple_input
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines7-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test single step workflow with Scatter step and two data links connected to
+same input, flattened merge behavior
+""")
+
+ def test_conformance_v1_0_wf_wc_nomultiple(self):
+ """Test that no MultipleInputFeatureRequirement is necessary when
+workflow step source is a single-item list
+
+
+ Generated from::
+
+ job: v1.0/count-lines6-job.json
+ label: wf_wc_nomultiple
+ output:
+ count_output: 32
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines13-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that no MultipleInputFeatureRequirement is necessary when
+workflow step source is a single-item list
+""")
+
+ def test_conformance_v1_0_wf_input_default_missing(self):
+ """Test workflow with default value for input parameter (missing)
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_input_default_missing
+ output:
+ count_output: 1
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines5-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow with default value for input parameter (missing)""")
+
+ def test_conformance_v1_0_wf_input_default_provided(self):
+ """Test workflow with default value for input parameter (provided)
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: wf_input_default_provided
+ output:
+ count_output: 16
+ tags:
+            - inline_javascript
+ - workflow
+ tool: v1.0/count-lines5-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow with default value for input parameter (provided)""")
+
+ def test_conformance_v1_0_wf_default_tool_default(self):
+ """Test that workflow defaults override tool defaults
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_default_tool_default
+ output:
+ default_output: workflow_default
+ tags:
+ - required
+ - workflow
+ tool: v1.0/echo-wf-default.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that workflow defaults override tool defaults""")
+
+ def test_conformance_v1_0_envvar_req(self):
+ """Test EnvVarRequirement
+
+ Generated from::
+
+ job: v1.0/env-job.json
+ label: envvar_req
+ output:
+ out:
+ checksum: sha1$b3ec4ed1749c207e52b3a6d08c59f31d83bff519
+ class: File
+ location: out
+ size: 15
+ tags:
+ - env_var
+ - command_line_tool
+ tool: v1.0/env-tool1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test EnvVarRequirement""")
+
+ def test_conformance_v1_0_wf_scatter_single_param(self):
+ """Test workflow scatter with single scatter parameter
+
+ Generated from::
+
+ job: v1.0/scatter-job1.json
+ label: wf_scatter_single_param
+ output:
+ out:
+ - foo one
+ - foo two
+ - foo three
+ - foo four
+ tags:
+ - scatter
+ - workflow
+ tool: v1.0/scatter-wf1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with single scatter parameter""")
+
+ def test_conformance_v1_0_wf_scatter_two_nested_crossproduct(self):
+ """Test workflow scatter with two scatter parameters and nested_crossproduct join method
+
+ Generated from::
+
+ job: v1.0/scatter-job2.json
+ label: wf_scatter_two_nested_crossproduct
+ output:
+ out:
+ - - foo one three
+ - foo one four
+ - - foo two three
+ - foo two four
+ tags:
+ - scatter
+ - workflow
+ tool: v1.0/scatter-wf2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with two scatter parameters and nested_crossproduct join method""")
+
+ def test_conformance_v1_0_wf_scatter_two_flat_crossproduct(self):
+ """Test workflow scatter with two scatter parameters and flat_crossproduct join method
+
+ Generated from::
+
+ job: v1.0/scatter-job2.json
+ label: wf_scatter_two_flat_crossproduct
+ output:
+ out:
+ - foo one three
+ - foo one four
+ - foo two three
+ - foo two four
+ tags:
+ - scatter
+ - workflow
+ tool: v1.0/scatter-wf3.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with two scatter parameters and flat_crossproduct join method""")
+
+ def test_conformance_v1_0_wf_scatter_two_dotproduct(self):
+ """Test workflow scatter with two scatter parameters and dotproduct join method
+
+ Generated from::
+
+ job: v1.0/scatter-job2.json
+ label: wf_scatter_two_dotproduct
+ output:
+ out:
+ - foo one three
+ - foo two four
+ tags:
+ - scatter
+ - workflow
+ tool: v1.0/scatter-wf4.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with two scatter parameters and dotproduct join method""")
+
+ def test_conformance_v1_0_wf_scatter_emptylist(self):
+ """Test workflow scatter with single empty list parameter
+
+ Generated from::
+
+ job: v1.0/scatter-empty-job1.json
+ label: wf_scatter_emptylist
+ output:
+ out: []
+ tags:
+ - scatter
+ - workflow
+ tool: v1.0/scatter-wf1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with single empty list parameter""")
+
+ def test_conformance_v1_0_wf_scatter_nested_crossproduct_secondempty(self):
+ """Test workflow scatter with two scatter parameters and nested_crossproduct join method with second list empty
+
+ Generated from::
+
+ job: v1.0/scatter-empty-job2.json
+ label: wf_scatter_nested_crossproduct_secondempty
+ output:
+ out:
+ - []
+ - []
+ tags:
+ - scatter
+ - workflow
+ tool: v1.0/scatter-wf2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with two scatter parameters and nested_crossproduct join method with second list empty""")
+
+ def test_conformance_v1_0_wf_scatter_nested_crossproduct_firstempty(self):
+ """Test workflow scatter with two scatter parameters and nested_crossproduct join method with first list empty
+
+ Generated from::
+
+ job: v1.0/scatter-empty-job3.json
+ label: wf_scatter_nested_crossproduct_firstempty
+ output:
+ out: []
+ tags:
+ - scatter
+ - workflow
+ tool: v1.0/scatter-wf3.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with two scatter parameters and nested_crossproduct join method with first list empty""")
+
+ def test_conformance_v1_0_wf_scatter_flat_crossproduct_oneempty(self):
+ """Test workflow scatter with two scatter parameters, one of which is empty and flat_crossproduct join method
+
+ Generated from::
+
+ job: v1.0/scatter-empty-job2.json
+ label: wf_scatter_flat_crossproduct_oneempty
+ output:
+ out: []
+ tags:
+ - scatter
+ - workflow
+ tool: v1.0/scatter-wf3.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with two scatter parameters, one of which is empty and flat_crossproduct join method""")
+
+ def test_conformance_v1_0_wf_scatter_dotproduct_twoempty(self):
+ """Test workflow scatter with two empty scatter parameters and dotproduct join method
+
+ Generated from::
+
+ job: v1.0/scatter-empty-job4.json
+ label: wf_scatter_dotproduct_twoempty
+ output:
+ out: []
+ tags:
+ - scatter
+ - workflow
+ tool: v1.0/scatter-wf4.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with two empty scatter parameters and dotproduct join method""")
+
+ def test_conformance_v1_0_any_input_param(self):
+ """Test Any type input parameter
+
+ Generated from::
+
+ job: v1.0/env-job.json
+ label: any_input_param
+ output:
+ out: 'hello test env
+
+ '
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/echo-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any type input parameter""")
+
+ def test_conformance_v1_0_nested_workflow(self):
+ """Test nested workflow
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: nested_workflow
+ output:
+ count_output: 16
+ tags:
+ - subworkflow
+ - workflow
+ tool: v1.0/count-lines8-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test nested workflow""")
+
+ def test_conformance_v1_0_requirement_priority(self):
+ """Test requirement priority
+
+ Generated from::
+
+ job: v1.0/env-job.json
+ label: requirement_priority
+ output:
+ out:
+ checksum: sha1$b3ec4ed1749c207e52b3a6d08c59f31d83bff519
+ class: File
+ location: out
+ size: 15
+ tags:
+ - env_var
+ - workflow
+ tool: v1.0/env-wf1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test requirement priority""")
+
+ def test_conformance_v1_0_requirement_override_hints(self):
+ """Test requirements override hints
+
+ Generated from::
+
+ job: v1.0/env-job.json
+ label: requirement_override_hints
+ output:
+ out:
+ checksum: sha1$cdc1e84968261d6a7575b5305945471f8be199b6
+ class: File
+ location: out
+ size: 9
+ tags:
+ - env_var
+ - workflow
+ tool: v1.0/env-wf2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test requirements override hints""")
+
+ def test_conformance_v1_0_requirement_workflow_steps(self):
+ """Test requirements on workflow steps
+
+ Generated from::
+
+ job: v1.0/env-job.json
+ label: requirement_workflow_steps
+ output:
+ out:
+ checksum: sha1$cdc1e84968261d6a7575b5305945471f8be199b6
+ class: File
+ location: out
+ size: 9
+ tags:
+ - env_var
+ - workflow
+ tool: v1.0/env-wf3.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test requirements on workflow steps""")
+
+ def test_conformance_v1_0_step_input_default_value(self):
+ """Test default value on step input parameter
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: step_input_default_value
+ output:
+ count_output: 16
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines9-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test default value on step input parameter""")
+
+ def test_conformance_v1_0_step_input_default_value_nosource(self):
+ """Test use default value on step input parameter with empty source
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: step_input_default_value_nosource
+ output:
+ count_output: 16
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines11-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test use default value on step input parameter with empty source""")
+
+ def test_conformance_v1_0_step_input_default_value_nullsource(self):
+ """Test use default value on step input parameter with null source
+
+ Generated from::
+
+ job: v1.0/file1-null.json
+ label: step_input_default_value_nullsource
+ output:
+ count_output: 16
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines11-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test use default value on step input parameter with null source""")
+
+ def test_conformance_v1_0_step_input_default_value_overriden(self):
+ """Test default value on step input parameter overridden by provided source
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: step_input_default_value_overriden
+ output:
+ count_output: 1
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines11-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test default value on step input parameter overridden by provided source""")
+
+ def test_conformance_v1_0_wf_simple(self):
+ """Test simple workflow
+
+ Generated from::
+
+ job: v1.0/revsort-job.json
+ label: wf_simple
+ output:
+ output:
+ checksum: sha1$b9214658cc453331b62c2282b772a5c063dbd284
+ class: File
+ location: output.txt
+ size: 1111
+ tags:
+ - required
+ - workflow
+ tool: v1.0/revsort.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test simple workflow""")
+
+ def test_conformance_v1_0_hints_unknown_ignored(self):
+ """Test unknown hints are ignored.
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: hints_unknown_ignored
+ output:
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat5-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test unknown hints are ignored.""")
+
+ def test_conformance_v1_0_initial_workdir_secondary_files_expr(self):
+ """Test InitialWorkDirRequirement linking input files and capturing secondaryFiles
+on input and output. Also tests the use of a variety of parameter references
+and expressions in the secondaryFiles field.
+
+
+ Generated from::
+
+ job: v1.0/search-job.json
+ label: initial_workdir_secondary_files_expr
+ output:
+ indexedfile:
+ checksum: sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376
+ class: File
+ location: input.txt
+ secondaryFiles:
+ - checksum: sha1$553f3a09003a9f69623f03bec13c0b078d706023
+ class: File
+ location: input.txt.idx1
+ size: 1500
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.idx2
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.txt.idx3
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.txt.idx4
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.txt.idx5
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.idx6.txt
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: input.txt.idx7
+ size: 0
+ - checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: hello.txt
+ size: 13
+ - class: Directory
+ listing:
+ - basename: index
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: index
+ size: 0
+ location: input.txt_idx8
+ size: 1111
+ outfile:
+ checksum: sha1$e2dc9daaef945ac15f01c238ed2f1660f60909a0
+ class: File
+ location: result.txt
+ size: 142
+ tags:
+ - initial_work_dir
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/search.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test InitialWorkDirRequirement linking input files and capturing secondaryFiles
+on input and output. Also tests the use of a variety of parameter references
+and expressions in the secondaryFiles field.
+""")
+
+ def test_conformance_v1_0_rename(self):
+ """Test InitialWorkDirRequirement with expression in filename.
+
+
+ Generated from::
+
+ job: v1.0/rename-job.json
+ label: rename
+ output:
+ outfile:
+ checksum: sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376
+ class: File
+ location: fish.txt
+ size: 1111
+ tags:
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/rename.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test InitialWorkDirRequirement with expression in filename.
+""")
+
+ def test_conformance_v1_0_initial_workdir_trailingnl(self):
+ """Test if trailing newline is present in file entry in InitialWorkDir
+
+ Generated from::
+
+ job: v1.0/string-job.json
+ label: initial_workdir_trailingnl
+ output:
+ out:
+ checksum: sha1$6a47aa22b2a9d13a66a24b3ee5eaed95ce4753cf
+ class: File
+ location: example.conf
+ size: 16
+ tags:
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/iwdr-entry.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test if trailing newline is present in file entry in InitialWorkDir""")
+
+ def test_conformance_v1_0_inline_expressions(self):
+ """Test inline expressions
+
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: inline_expressions
+ output:
+ output: 16
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/wc4-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test inline expressions
+""")
+
+ def test_conformance_v1_0_schemadef_req_tool_param(self):
+ """Test SchemaDefRequirement definition used in tool parameter
+
+
+ Generated from::
+
+ job: v1.0/schemadef-job.json
+ label: schemadef_req_tool_param
+ output:
+ output:
+ checksum: sha1$f12e6cfe70f3253f70b0dbde17c692e7fb0f1e5e
+ class: File
+ location: output.txt
+ size: 12
+ tags:
+ - schema_def
+ - command_line_tool
+ tool: v1.0/schemadef-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test SchemaDefRequirement definition used in tool parameter
+""")
+
+ def test_conformance_v1_0_schemadef_req_wf_param(self):
+ """Test SchemaDefRequirement definition used in workflow parameter
+
+
+ Generated from::
+
+ job: v1.0/schemadef-job.json
+ label: schemadef_req_wf_param
+ output:
+ output:
+ checksum: sha1$f12e6cfe70f3253f70b0dbde17c692e7fb0f1e5e
+ class: File
+ location: output.txt
+ size: 12
+ tags:
+ - schema_def
+ - workflow
+ tool: v1.0/schemadef-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test SchemaDefRequirement definition used in workflow parameter
+""")
+
+ def test_conformance_v1_0_param_evaluation_noexpr(self):
+ """Test parameter evaluation, no support for JS expressions
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: param_evaluation_noexpr
+ output:
+ t1:
+ bar:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t10: true
+ t11: true
+ t12: null
+ t13: -zab1
+ t14: -zab1
+ t15: -zab1
+ t16: -zab1
+ t17: zab1 zab1
+ t18: zab1 zab1
+ t19: zab1 zab1
+ t2:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t20: zab1 zab1
+ t21: 2 2
+ t22: true true
+ t23: true true
+ t24: null null
+ t25: b
+ t26: b b
+ t27: null
+ t28: 3
+ t3:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t4:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t5: zab1
+ t6: zab1
+ t7: zab1
+ t8: zab1
+ t9: 2
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/params.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test parameter evaluation, no support for JS expressions
+""")
+
+ def test_conformance_v1_0_param_evaluation_expr(self):
+ """Test parameter evaluation, with support for JS expressions
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: param_evaluation_expr
+ output:
+ t1:
+ bar:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t10: true
+ t11: true
+ t12: null
+ t13: -zab1
+ t14: -zab1
+ t15: -zab1
+ t16: -zab1
+ t17: zab1 zab1
+ t18: zab1 zab1
+ t19: zab1 zab1
+ t2:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t20: zab1 zab1
+ t21: 2 2
+ t22: true true
+ t23: true true
+ t24: null null
+ t25: b
+ t26: b b
+ t27: null
+ t28: 3
+ t3:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t4:
+ b az: 2
+ b"az: null
+ b'az: true
+ baz: zab1
+ buz:
+ - a
+ - b
+ - c
+ t5: zab1
+ t6: zab1
+ t7: zab1
+ t8: zab1
+ t9: 2
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/params2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test parameter evaluation, with support for JS expressions
+""")
+
+ def test_conformance_v1_0_metadata(self):
+ """Test metadata
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: metadata
+ output: {}
+ tags:
+ - required
+ tool: v1.0/metadata.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test metadata""")
+
+ def test_conformance_v1_0_format_checking(self):
+ """Test simple format checking.
+
+
+ Generated from::
+
+ job: v1.0/formattest-job.json
+ label: format_checking
+ output:
+ output:
+ checksum: sha1$97fe1b50b4582cebc7d853796ebd62e3e163aa3f
+ class: File
+ format: http://edamontology.org/format_2330
+ location: output.txt
+ size: 1111
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/formattest.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test simple format checking.
+""")
+
+ def test_conformance_v1_0_format_checking_subclass(self):
+ """Test format checking against ontology using subclassOf.
+
+
+ Generated from::
+
+ job: v1.0/formattest2-job.json
+ label: format_checking_subclass
+ output:
+ output:
+ checksum: sha1$971d88faeda85a796752ecf752b7e2e34f1337ce
+ class: File
+ format: http://edamontology.org/format_1929
+ location: output.txt
+ size: 12010
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/formattest2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test format checking against ontology using subclassOf.
+""")
+
+ def test_conformance_v1_0_format_checking_equivalentclass(self):
+ """Test format checking against ontology using equivalentClass.
+
+
+ Generated from::
+
+ job: v1.0/formattest2-job.json
+ label: format_checking_equivalentclass
+ output:
+ output:
+ checksum: sha1$971d88faeda85a796752ecf752b7e2e34f1337ce
+ class: File
+ format: http://edamontology.org/format_1929
+ location: output.txt
+ size: 12010
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/formattest3.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test format checking against ontology using equivalentClass.
+""")
+
+ def test_conformance_v1_0_output_secondaryfile_optional(self):
+ """Test optional output file and optional secondaryFile on output.
+
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: output_secondaryfile_optional
+ output:
+ optional_file: null
+ output_file:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output.txt
+ size: 13
+ tags:
+ - docker
+ - command_line_tool
+ tool: v1.0/optional-output.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test optional output file and optional secondaryFile on output.
+""")
+
+ def test_conformance_v1_0_valuefrom_ignored_null(self):
+ """Test that valueFrom is ignored when the parameter is null
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: valuefrom_ignored_null
+ output:
+ out: '
+
+ '
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/vf-concat.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that valueFrom is ignored when the parameter is null""")
+
+ def test_conformance_v1_0_valuefrom_secondexpr_ignored(self):
+ """Test that second expression in concatenated valueFrom is not ignored
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: valuefrom_secondexpr_ignored
+ output:
+ out: 'a string
+
+ '
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/vf-concat.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that second expression in concatenated valueFrom is not ignored""")
+
+ def test_conformance_v1_0_valuefrom_wf_step(self):
+ """Test valueFrom on workflow step.
+
+ Generated from::
+
+ job: v1.0/step-valuefrom-wf.json
+ label: valuefrom_wf_step
+ output:
+ count_output: 16
+ tags:
+ - step_input
+ - inline_javascript
+ - workflow
+ tool: v1.0/step-valuefrom-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test valueFrom on workflow step.""")
+
+ def test_conformance_v1_0_valuefrom_wf_step_multiple(self):
+ """Test valueFrom on workflow step with multiple sources
+
+ Generated from::
+
+ job: v1.0/step-valuefrom-job.json
+ label: valuefrom_wf_step_multiple
+ output:
+ val: '3
+
+ '
+ tags:
+ - step_input
+ - inline_javascript
+ - multiple_input
+ - workflow
+ tool: v1.0/step-valuefrom2-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test valueFrom on workflow step with multiple sources""")
+
+ def test_conformance_v1_0_valuefrom_wf_step_other(self):
+ """Test valueFrom on workflow step referencing other inputs
+
+ Generated from::
+
+ job: v1.0/step-valuefrom-job.json
+ label: valuefrom_wf_step_other
+ output:
+ val: '3
+
+ '
+ tags:
+ - step_input
+ - inline_javascript
+ - workflow
+ tool: v1.0/step-valuefrom3-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test valueFrom on workflow step referencing other inputs""")
+
+ def test_conformance_v1_0_record_output_binding(self):
+ """Test record type output binding.
+
+ Generated from::
+
+ job: v1.0/record-output-job.json
+ label: record_output_binding
+ output:
+ orec:
+ obar:
+ checksum: sha1$aeb3d11bdf536511649129f4077d5cda6a324118
+ class: File
+ location: bar
+ size: 12010
+ ofoo:
+ checksum: sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376
+ class: File
+ location: foo
+ size: 1111
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/record-output.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test record type output binding.""")
+
+ def test_conformance_v1_0_docker_json_output_path(self):
+ """Test support for reading cwl.output.json when running in a Docker container
+and just 'path' is provided.
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: docker_json_output_path
+ output:
+ foo:
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ class: File
+ location: foo
+ size: 4
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/test-cwl-out.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test support for reading cwl.output.json when running in a Docker container
+and just 'path' is provided.
+""")
+
+ def test_conformance_v1_0_docker_json_output_location(self):
+ """Test support for reading cwl.output.json when running in a Docker container
+and just 'location' is provided.
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: docker_json_output_location
+ output:
+ foo:
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ class: File
+ location: foo
+ size: 4
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/test-cwl-out2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test support for reading cwl.output.json when running in a Docker container
+and just 'location' is provided.
+""")
+
+ def test_conformance_v1_0_multiple_glob_expr_list(self):
+ """Test support for returning multiple glob patterns from expression
+
+ Generated from::
+
+ job: v1.0/abc.json
+ label: multiple_glob_expr_list
+ output:
+ files:
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: a
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: b
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: c
+ size: 0
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/glob-expr-list.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test support for returning multiple glob patterns from expression""")
+
+ def test_conformance_v1_0_wf_scatter_oneparam_valuefrom(self):
+ """Test workflow scatter with single scatter parameter and two valueFrom on step input (first and current el)
+
+ Generated from::
+
+ job: v1.0/scatter-valuefrom-job1.json
+ label: wf_scatter_oneparam_valuefrom
+ output:
+ out:
+ - foo one one
+ - foo one two
+ - foo one three
+ - foo one four
+ tags:
+ - scatter
+ - step_input
+ - workflow
+ tool: v1.0/scatter-valuefrom-wf1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with single scatter parameter and two valueFrom on step input (first and current el)""")
+
+ def test_conformance_v1_0_wf_scatter_twoparam_nested_crossproduct_valuefrom(self):
+ """Test workflow scatter with two scatter parameters and nested_crossproduct join method and valueFrom on step input
+
+ Generated from::
+
+ job: v1.0/scatter-valuefrom-job2.json
+ label: wf_scatter_twoparam_nested_crossproduct_valuefrom
+ output:
+ out:
+ - - foo one one three
+ - foo one one four
+ - - foo one two three
+ - foo one two four
+ tags:
+ - scatter
+ - step_input
+ - workflow
+ tool: v1.0/scatter-valuefrom-wf2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with two scatter parameters and nested_crossproduct join method and valueFrom on step input""")
+
+ def test_conformance_v1_0_wf_scatter_twoparam_flat_crossproduct_valuefrom(self):
+ """Test workflow scatter with two scatter parameters and flat_crossproduct join method and valueFrom on step input
+
+ Generated from::
+
+ job: v1.0/scatter-valuefrom-job2.json
+ label: wf_scatter_twoparam_flat_crossproduct_valuefrom
+ output:
+ out:
+ - foo one one three
+ - foo one one four
+ - foo one two three
+ - foo one two four
+ tags:
+ - scatter
+ - step_input
+ - workflow
+ tool: v1.0/scatter-valuefrom-wf3.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with two scatter parameters and flat_crossproduct join method and valueFrom on step input""")
+
+ def test_conformance_v1_0_wf_scatter_twoparam_dotproduct_valuefrom(self):
+ """Test workflow scatter with two scatter parameters and dotproduct join method and valueFrom on step input
+
+ Generated from::
+
+ job: v1.0/scatter-valuefrom-job2.json
+ label: wf_scatter_twoparam_dotproduct_valuefrom
+ output:
+ out:
+ - foo one one three
+ - foo one two four
+ tags:
+ - scatter
+ - step_input
+ - workflow
+ tool: v1.0/scatter-valuefrom-wf4.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with two scatter parameters and dotproduct join method and valueFrom on step input""")
+
+ def test_conformance_v1_0_wf_scatter_oneparam_valuefrom_twice_current_el(self):
+ """Test workflow scatter with single scatter parameter and two valueFrom on step input (current el twice)
+
+ Generated from::
+
+ job: v1.0/scatter-valuefrom-job1.json
+ label: wf_scatter_oneparam_valuefrom_twice_current_el
+ output:
+ out:
+ - foo one one
+ - foo two two
+ - foo three three
+ - foo four four
+ tags:
+ - scatter
+ - step_input
+ - workflow
+ tool: v1.0/scatter-valuefrom-wf5.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow scatter with single scatter parameter and two valueFrom on step input (current el twice)""")
+
+ def test_conformance_v1_0_wf_scatter_oneparam_valueFrom(self):
+ """Test valueFrom eval on scattered input parameter
+
+ Generated from::
+
+ job: v1.0/scatter-valuefrom-job3.json
+ label: wf_scatter_oneparam_valueFrom
+ output:
+ out_message:
+ - checksum: sha1$98030575f6fc40e5021be5a8803a6bef94aee11f
+ class: File
+ location: Any
+ size: 16
+ - checksum: sha1$edcacd50778d98ae113015406b3195c165059dd8
+ class: File
+ location: Any
+ size: 16
+ tags:
+ - scatter
+ - step_input
+ - workflow
+ tool: v1.0/scatter-valuefrom-wf6.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test valueFrom eval on scattered input parameter""")
+
+ def test_conformance_v1_0_wf_two_inputfiles_namecollision(self):
+ """Test workflow two input files with same name.
+
+ Generated from::
+
+ job: v1.0/conflict-job.json
+ label: wf_two_inputfiles_namecollision
+ output:
+ fileout:
+ checksum: sha1$a2d8d6e7b28295dc9977dc3bdb652ddd480995f0
+ class: File
+ location: out.txt
+ size: 25
+ tags:
+ - required
+ - workflow
+ tool: v1.0/conflict-wf.cwl#collision
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test workflow two input files with same name.""")
+
+ def test_conformance_v1_0_directory_input_param_ref(self):
+ """Test directory input with parameter reference
+
+ Generated from::
+
+ job: v1.0/dir-job.yml
+ label: directory_input_param_ref
+ output:
+ outlist:
+ checksum: sha1$13cda8661796ae241da3a18668fb552161a72592
+ class: File
+ location: output.txt
+ size: 20
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/dir.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test directory input with parameter reference""")
+
+ def test_conformance_v1_0_directory_input_docker(self):
+ """Test directory input in Docker
+
+ Generated from::
+
+ job: v1.0/dir-job.yml
+ label: directory_input_docker
+ output:
+ outlist:
+ checksum: sha1$13cda8661796ae241da3a18668fb552161a72592
+ class: File
+ location: output.txt
+ size: 20
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/dir2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test directory input in Docker""")
+
+ def test_conformance_v1_0_directory_output(self):
+ """Test directory output
+
+ Generated from::
+
+ job: v1.0/dir3-job.yml
+ label: directory_output
+ output:
+ outdir:
+ class: Directory
+ listing:
+ - checksum: sha1$dd0a4c4c49ba43004d6611771972b6cf969c1c01
+ class: File
+ location: goodbye.txt
+ size: 24
+ - checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: hello.txt
+ size: 13
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/dir3.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test directory output""")
+
+ def test_conformance_v1_0_directory_secondaryfiles(self):
+ """Test directories in secondaryFiles
+
+ Generated from::
+
+ job: v1.0/dir4-job.yml
+ label: directory_secondaryfiles
+ output:
+ outlist:
+ checksum: sha1$13cda8661796ae241da3a18668fb552161a72592
+ class: File
+ location: output.txt
+ size: 20
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/dir4.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test directories in secondaryFiles""")
+
+ def test_conformance_v1_0_87(self):
+ """Test specifying secondaryFiles in subdirectories of the job input document.
+
+ Generated from::
+
+ job: v1.0/dir4-subdir-1-job.yml
+ output:
+ outlist:
+ checksum: sha1$9d9bc8f5252d39274b5dfbac64216c6e888f5dfc
+ class: File
+ location: output.txt
+ size: 14
+ tool: v1.0/dir4.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test specifying secondaryFiles in subdirectories of the job input document.""")
+
+ def test_conformance_v1_0_88(self):
+ """Test specifying secondaryFiles in same subdirectory of the job input as the primary input file.
+
+ Generated from::
+
+ job: v1.0/dir4-subdir-2-job.yml
+ output:
+ outlist:
+ checksum: sha1$9d9bc8f5252d39274b5dfbac64216c6e888f5dfc
+ class: File
+ location: output.txt
+ size: 14
+ tool: v1.0/dir4.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test specifying secondaryFiles in same subdirectory of the job input as the primary input file.""")
+
+ def test_conformance_v1_0_dynamic_initial_workdir(self):
+ """Test dynamic initial work dir
+
+ Generated from::
+
+ job: v1.0/dir-job.yml
+ label: dynamic_initial_workdir
+ output:
+ outlist:
+ checksum: sha1$13cda8661796ae241da3a18668fb552161a72592
+ class: File
+ location: output.txt
+ size: 20
+ tags:
+ - shell_command
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/dir5.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test dynamic initial work dir""")
+
+ def test_conformance_v1_0_writable_stagedfiles(self):
+ """Test writable staged files.
+
+ Generated from::
+
+ job: v1.0/stagefile-job.yml
+ label: writable_stagedfiles
+ output:
+ outfile:
+ checksum: sha1$b769c7b2e316edd4b5eb2d24799b2c1f9d8c86e6
+ class: File
+ location: bob.txt
+ size: 1111
+ tags:
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/stagefile.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test writable staged files.""")
+
+ def test_conformance_v1_0_input_file_literal(self):
+ """Test file literal as input
+
+ Generated from::
+
+ job: v1.0/file-literal.yml
+ label: input_file_literal
+ output:
+ output_file:
+ checksum: sha1$d0e04ff6c413c7d57f9a0ca0a33cd3ab52e2dd9c
+ class: File
+ location: output.txt
+ size: 18
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test file literal as input""")
+
+ def test_conformance_v1_0_initial_workdir_expr(self):
+ """Test expression in InitialWorkDir listing
+
+ Generated from::
+
+ job: v1.0/arguments-job.yml
+ label: initial_workdir_expr
+ output:
+ classfile:
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: Hello.class
+ size: 0
+ tags:
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/linkfile.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test expression in InitialWorkDir listing""")
+
+ def test_conformance_v1_0_nameroot_nameext_stdout_expr(self):
+ """Test nameroot/nameext expression in arguments, stdout
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: nameroot_nameext_stdout_expr
+ output:
+ b:
+ checksum: sha1$c4cfd130e7578714e3eef91c1d6d90e0e0b9db3e
+ class: File
+ location: whale.xtx
+ size: 21
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/nameroot.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test nameroot/nameext expression in arguments, stdout""")
+
+ def test_conformance_v1_0_input_dir_inputbinding(self):
+ """Test directory input with inputBinding
+
+ Generated from::
+
+ job: v1.0/dir-job.yml
+ label: input_dir_inputbinding
+ output:
+ outlist:
+ checksum: sha1$13cda8661796ae241da3a18668fb552161a72592
+ class: File
+ location: output.txt
+ size: 20
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/dir6.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test directory input with inputBinding""")
+
+ def test_conformance_v1_0_cl_gen_arrayofarrays(self):
+ """Test command line generation of array-of-arrays
+
+ Generated from::
+
+ job: v1.0/nested-array-job.yml
+ label: cl_gen_arrayofarrays
+ output:
+ echo:
+ checksum: sha1$3f786850e387550fdab836ed7e6dc881de23001b
+ class: File
+ location: echo.txt
+ size: 2
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/nested-array.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test command line generation of array-of-arrays""")
+
+ def test_conformance_v1_0_env_home_tmpdir(self):
+ """Test $HOME and $TMPDIR are set correctly
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: env_home_tmpdir
+ output: {}
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/envvar.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test $HOME and $TMPDIR are set correctly""")
+
+ def test_conformance_v1_0_env_home_tmpdir_docker(self):
+ """Test $HOME and $TMPDIR are set correctly in Docker
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: env_home_tmpdir_docker
+ output: {}
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/envvar2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test $HOME and $TMPDIR are set correctly in Docker""")
+
+ def test_conformance_v1_0_expressionlib_tool_wf_override(self):
+ """Test that expressionLib requirement of individual tool step overrides expressionLib of workflow.
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: expressionlib_tool_wf_override
+ output:
+ out:
+ checksum: sha1$7448d8798a4380162d4b56f9b452e2f6f9e24e7a
+ class: File
+ location: whatever.txt
+ size: 2
+ tags:
+ - inline_javascript
+ - workflow
+ tool: v1.0/js-expr-req-wf.cwl#wf
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that expressionLib requirement of individual tool step overrides expressionLib of workflow.""")
+
+ def test_conformance_v1_0_initial_workdir_output(self):
+ """Test output of InitialWorkDir
+
+ Generated from::
+
+ job: v1.0/initialworkdirrequirement-docker-out-job.json
+ label: initial_workdir_output
+ output:
+ OUTPUT:
+ checksum: sha1$aeb3d11bdf536511649129f4077d5cda6a324118
+ class: File
+ location: ref.fasta
+ secondaryFiles:
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: ref.fasta.fai
+ size: 0
+ size: 12010
+ tags:
+ - docker
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/initialworkdirrequirement-docker-out.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test output of InitialWorkDir""")
+
+ def test_conformance_v1_0_embedded_subworkflow(self):
+ """Test embedded subworkflow
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: embedded_subworkflow
+ output:
+ count_output: 16
+ tags:
+ - subworkflow
+ - workflow
+ tool: v1.0/count-lines10-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test embedded subworkflow""")
+
+ def test_conformance_v1_0_filesarray_secondaryfiles(self):
+ """Test secondaryFiles on array of files.
+
+ Generated from::
+
+ job: v1.0/docker-array-secondaryfiles-job.json
+ label: filesarray_secondaryfiles
+ output:
+ bai_list:
+ checksum: sha1$081fc0e57d6efa5f75eeb237aab1d04031132be6
+ class: File
+ location: fai.list
+ size: 386
+ tags:
+ - docker
+ - inline_javascript
+ - shell_command
+ - command_line_tool
+ tool: v1.0/docker-array-secondaryfiles.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test secondaryFiles on array of files.""")
+
+ def test_conformance_v1_0_exprtool_directory_literal(self):
+ """Test directory literal output created by ExpressionTool
+
+ Generated from::
+
+ job: v1.0/dir7.yml
+ label: exprtool_directory_literal
+ output:
+ dir:
+ class: Directory
+ listing:
+ - checksum: sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376
+ class: File
+ location: whale.txt
+ size: 1111
+ - checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: hello.txt
+ size: 13
+ location: a_directory
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/dir7.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test directory literal output created by ExpressionTool""")
+
+ def test_conformance_v1_0_exprtool_file_literal(self):
+ """Test file literal output created by ExpressionTool
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: exprtool_file_literal
+ output:
+ lit:
+ checksum: sha1$fea23663b9c8ed71968f86415b5ec091bb111448
+ class: File
+ location: a_file
+ size: 19
+ tags:
+ - inline_javascript
+ - expression_tool
+ tool: v1.0/file-literal-ex.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test file literal output created by ExpressionTool""")
+
+ def test_conformance_v1_0_dockeroutputdir(self):
+ """Test dockerOutputDirectory
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: dockeroutputdir
+ output:
+ thing:
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: thing
+ size: 0
+ tags:
+ - docker
+ - command_line_tool
+ tool: v1.0/docker-output-dir.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test dockerOutputDirectory""")
+
+ def test_conformance_v1_0_hints_import(self):
+ """Test hints with $import
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: hints_import
+ output:
+ out:
+ checksum: sha1$b3ec4ed1749c207e52b3a6d08c59f31d83bff519
+ class: File
+ location: out
+ size: 15
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/imported-hint.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test hints with $import""")
+
+ def test_conformance_v1_0_default_path_notfound_warning(self):
+ """Test warning instead of error when default path is not found
+
+ Generated from::
+
+ job: v1.0/default_path_job.yml
+ label: default_path_notfound_warning
+ output: {}
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/default_path.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test warning instead of error when default path is not found""")
+
+ def test_conformance_v1_0_inlinejs_req_expressions(self):
+ """Test InlineJavascriptRequirement with multiple expressions in the same tool
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: inlinejs_req_expressions
+ output:
+ args:
+ - -A
+ - '2'
+ - -B
+ - baz
+ - -C
+ - '10'
+ - '9'
+ - '8'
+ - '7'
+ - '6'
+ - '5'
+ - '4'
+ - '3'
+ - '2'
+ - '1'
+ - -D
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/inline-js.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test InlineJavascriptRequirement with multiple expressions in the same tool""")
+
+ def test_conformance_v1_0_input_dir_recurs_copy_writable(self):
+ """Test if a writable input directory is recursively copied and writable
+
+ Generated from::
+
+ job: v1.0/recursive-input-directory.yml
+ label: input_dir_recurs_copy_writable
+ output:
+ output_dir:
+ basename: work_dir
+ class: Directory
+ listing:
+ - basename: a
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: work_dir/a
+ size: 0
+ - basename: b
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: work_dir/b
+ size: 0
+ - basename: c
+ class: Directory
+ listing:
+ - basename: d
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: work_dir/c/d
+ size: 0
+ location: work_dir/c
+ - basename: e
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: work_dir/e
+ size: 0
+ location: work_dir
+ test_result:
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: output.txt
+ size: 0
+ tags:
+ - initial_work_dir
+ - shell_command
+ - command_line_tool
+ tool: v1.0/recursive-input-directory.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test if a writable input directory is recursively copied and writable""")
+
+ def test_conformance_v1_0_null_missing_params(self):
+ """Test that missing parameters are null (not undefined) in expression
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: null_missing_params
+ output:
+ out: 't
+
+ '
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/null-defined.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that missing parameters are null (not undefined) in expression""")
+
+ def test_conformance_v1_0_param_notnull_expr(self):
+ """Test that provided parameter is not null in expression
+
+ Generated from::
+
+ job: v1.0/cat-job.json
+ label: param_notnull_expr
+ output:
+ out: 'f
+
+ '
+ tags:
+ - inline_javascript
+ - command_line_tool
+ tool: v1.0/null-defined.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that provided parameter is not null in expression""")
+
+ def test_conformance_v1_0_wf_compound_doc(self):
+ """Test compound workflow document
+
+ Generated from::
+
+ job: v1.0/revsort-job.json
+ label: wf_compound_doc
+ output:
+ output:
+ checksum: sha1$b9214658cc453331b62c2282b772a5c063dbd284
+ class: File
+ location: output.txt
+ size: 1111
+ tags:
+ - required
+ - workflow
+ tool: v1.0/revsort-packed.cwl#main
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test compound workflow document""")
+
+ def test_conformance_v1_0_nameroot_nameext_generated(self):
+ """Test that nameroot and nameext are generated from basename at execution time by the runner
+
+ Generated from::
+
+ job: v1.0/basename-fields-job.yml
+ label: nameroot_nameext_generated
+ output:
+ extFile:
+ checksum: sha1$301a72c82a835e1737caf30f94d0eec210c4d9f1
+ class: File
+ location: Any
+ path: Any
+ size: 5
+ rootFile:
+ checksum: sha1$b4a583c391e234cf210e1d576f68f674c8ad7ecd
+ class: File
+ location: Any
+ path: Any
+ size: 10
+ tags:
+ - step_input_expression
+ - workflow
+ tool: v1.0/basename-fields-test.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that nameroot and nameext are generated from basename at execution time by the runner""")
+
+ def test_conformance_v1_0_initialworkpath_output(self):
+ """Test that file path in $(inputs) for initialworkdir is in $(outdir).
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: initialworkpath_output
+ output: {}
+ tags:
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/initialwork-path.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that file path in $(inputs) for initialworkdir is in $(outdir).""")
+
+ def test_conformance_v1_0_wf_scatter_twopar_oneinput_flattenedmerge(self):
+ """Test single step workflow with Scatter step and two data links connected to
+same input, flattened merge behavior. Workflow inputs are set as list
+
+
+ Generated from::
+
+ job: v1.0/count-lines6-job.json
+ label: wf_scatter_twopar_oneinput_flattenedmerge
+ output:
+ count_output: 34
+ tags:
+ - multiple_input
+ - inline_javascript
+ - workflow
+ tool: v1.0/count-lines12-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test single step workflow with Scatter step and two data links connected to
+same input, flattened merge behavior. Workflow inputs are set as list
+""")
+
+ def test_conformance_v1_0_wf_multiplesources_multipletypes(self):
+ """Test step input with multiple sources with multiple types
+
+ Generated from::
+
+ job: v1.0/sum-job.json
+ label: wf_multiplesources_multipletypes
+ output:
+ result: 12
+ tags:
+ - step_input
+ - inline_javascript
+ - multiple_input
+ - workflow
+ tool: v1.0/sum-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test step input with multiple sources with multiple types""")
+
+ def test_conformance_v1_0_shelldir_notinterpreted(self):
+ """Test that shell directives are not interpreted.
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: shelldir_notinterpreted
+ output:
+ stderr_file:
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: Any
+ size: 0
+ stdout_file:
+ checksum: sha1$1555252d52d4ec3262538a4426a83a99cfff4402
+ class: File
+ location: Any
+ size: 9
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/shellchar.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that shell directives are not interpreted.""")
+
+ def test_conformance_v1_0_shelldir_quoted(self):
+ """Test that shell directives are quoted.
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: shelldir_quoted
+ output:
+ stderr_file:
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: Any
+ size: 0
+ stdout_file:
+ checksum: sha1$1555252d52d4ec3262538a4426a83a99cfff4402
+ class: File
+ location: Any
+ size: 9
+ tags:
+ - shell_command
+ - command_line_tool
+ tool: v1.0/shellchar2.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that shell directives are quoted.""")
+
+ def test_conformance_v1_0_initial_workdir_empty_writable(self):
+ """Test empty writable dir with InitialWorkDirRequirement
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: initial_workdir_empty_writable
+ output:
+ out:
+ basename: emptyWritableDir
+ class: Directory
+ listing:
+ - basename: blurg
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: blurg
+ size: 0
+ location: emptyWritableDir
+ tags:
+ - inline_javascript
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/writable-dir.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test empty writable dir with InitialWorkDirRequirement""")
+
+ def test_conformance_v1_0_initial_workdir_empty_writable_docker(self):
+ """Test empty writable dir with InitialWorkDirRequirement inside Docker
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: initial_workdir_empty_writable_docker
+ output:
+ out:
+ basename: emptyWritableDir
+ class: Directory
+ listing:
+ - basename: blurg
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: blurg
+ size: 0
+ location: emptyWritableDir
+ tags:
+ - inline_javascript
+ - initial_work_dir
+ - command_line_tool
+ tool: v1.0/writable-dir-docker.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test empty writable dir with InitialWorkDirRequirement inside Docker""")
+
+ def test_conformance_v1_0_dynamic_resreq_inputs(self):
+ """Test dynamic resource reqs referencing inputs
+
+ Generated from::
+
+ job: v1.0/dynresreq-job.yaml
+ label: dynamic_resreq_inputs
+ output:
+ output:
+ checksum: sha1$7448d8798a4380162d4b56f9b452e2f6f9e24e7a
+ class: File
+ location: cores.txt
+ size: 2
+ tags:
+ - resource
+ - command_line_tool
+ tool: v1.0/dynresreq.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test dynamic resource reqs referencing inputs""")
+
+ def test_conformance_v1_0_fileliteral_input_docker(self):
+ """Test file literal as input without Docker
+
+ Generated from::
+
+ job: v1.0/file-literal.yml
+ label: fileliteral_input_docker
+ output:
+ output_file:
+ checksum: sha1$d0e04ff6c413c7d57f9a0ca0a33cd3ab52e2dd9c
+ class: File
+ location: output.txt
+ size: 18
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/cat3-nodocker.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test file literal as input without Docker""")
+
+ def test_conformance_v1_0_outputbinding_glob_sorted(self):
+ """Test that OutputBinding.glob is sorted as specified by POSIX
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: outputbinding_glob_sorted
+ output:
+ letters:
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: a
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: b
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: c
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: w
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: x
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: y
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: z
+ size: 0
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/glob_test.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that OutputBinding.glob is sorted as specified by POSIX""")
+
+ def test_conformance_v1_0_initialworkdir_nesteddir(self):
+ """Test InitialWorkDirRequirement with a nested directory structure from another step
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: initialworkdir_nesteddir
+ output:
+ ya_empty:
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: ya
+ size: 0
+ tags:
+ - initial_work_dir
+ - workflow
+ tool: v1.0/iwdr_with_nested_dirs.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test InitialWorkDirRequirement with a nested directory structure from another step""")
+
+ def test_conformance_v1_0_booleanflags_cl_noinputbinding(self):
+ """Test that boolean flags do not appear on command line if inputBinding is empty and not null
+
+ Generated from::
+
+ job: v1.0/bool-empty-inputbinding-job.json
+ label: booleanflags_cl_noinputbinding
+ output:
+ args: []
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/bool-empty-inputbinding.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that boolean flags do not appear on command line if inputBinding is empty and not null""")
+
+ def test_conformance_v1_0_expr_reference_self_noinput(self):
+ """Test that expression engine does not fail to evaluate reference to self with unprovided input
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: expr_reference_self_noinput
+ output:
+ args: []
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/stage-unprovided-file.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that expression engine does not fail to evaluate reference to self with unprovided input""")
+
+ def test_conformance_v1_0_success_codes(self):
+ """Test successCodes
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: success_codes
+ output: {}
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/exit-success.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test successCodes""")
+
+ def test_conformance_v1_0_dynamic_resreq_wf(self):
+ """Test simple workflow with a dynamic resource requirement
+
+ Generated from::
+
+ job: v1.0/dynresreq-job.yaml
+ label: dynamic_resreq_wf
+ output:
+ cores:
+ checksum: sha1$7448d8798a4380162d4b56f9b452e2f6f9e24e7a
+ class: File
+ location: output
+ size: 2
+ tags:
+ - resource
+ - workflow
+ tool: v1.0/dynresreq-workflow.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test simple workflow with a dynamic resource requirement""")
+
+ def test_conformance_v1_0_cl_empty_array_input(self):
+ """Test that empty array input does not add anything to command line
+
+ Generated from::
+
+ job: v1.0/empty-array-job.json
+ label: cl_empty_array_input
+ output:
+ args: []
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/empty-array-input.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that empty array input does not add anything to command line""")
+
+ def test_conformance_v1_0_resreq_step_overrides_wf(self):
+ """Test that ResourceRequirement on a step level redefines requirement on the workflow level
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: resreq_step_overrides_wf
+ output:
+ out:
+ checksum: sha1$e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e
+ class: File
+ location: cores.txt
+ size: 2
+ tags:
+ - resource
+ - workflow
+ tool: v1.0/steplevel-resreq.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that ResourceRequirement on a step level redefines requirement on the workflow level""")
+
+ def test_conformance_v1_0_valuefrom_constant_overrides_inputs(self):
+ """Test valueFrom with constant value overriding provided array inputs
+
+ Generated from::
+
+ job: v1.0/array-of-strings-job.yml
+ label: valuefrom_constant_overrides_inputs
+ output:
+ args:
+ - replacementValue
+ tags:
+ - required
+ - command_line_tool
+ tool: v1.0/valueFrom-constant.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test valueFrom with constant value overriding provided array inputs""")
+
+ def test_conformance_v1_0_dynamic_resreq_filesizes(self):
+ """Test dynamic resource reqs referencing the size of Files inside a Directory
+
+ Generated from::
+
+ job: v1.0/dynresreq-dir-job.yaml
+ label: dynamic_resreq_filesizes
+ output:
+ output:
+ checksum: sha1$7448d8798a4380162d4b56f9b452e2f6f9e24e7a
+ class: File
+ location: cores.txt
+ size: 2
+ tags:
+ - resource
+ - command_line_tool
+ tool: v1.0/dynresreq-dir.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test dynamic resource reqs referencing the size of Files inside a Directory""")
+
+ def test_conformance_v1_0_wf_step_connect_undeclared_param(self):
+ """Test that it is not an error to connect a parameter to a workflow
+step, even if the parameter doesn't appear in the `run` process
+inputs.
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_step_connect_undeclared_param
+ output:
+ out: 'hello inp1
+
+ '
+ tags:
+ - required
+ - workflow
+ tool: v1.0/pass-unconnected.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that it is not an error to connect a parameter to a workflow
+step, even if the parameter doesn't appear in the `run` process
+inputs.
+""")
+
+ def test_conformance_v1_0_wf_step_access_undeclared_param(self):
+ """Test that parameters that don't appear in the `run` process
+inputs are not present in the input object used to run the tool.
+
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_step_access_undeclared_param
+ should_fail: true
+ tags:
+ - required
+ - workflow
+ tool: v1.0/fail-unconnected.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test that parameters that don't appear in the `run` process
+inputs are not present in the input object used to run the tool.
+""")
+
+ def test_conformance_v1_0_wf_scatter_embedded_subwf(self):
+ """Test simple scatter over an embedded subworkflow
+
+ Generated from::
+
+ job: v1.0/count-lines3-job.json
+ label: wf_scatter_embedded_subwf
+ output:
+ count_output:
+ - 16
+ - 1
+ tool: v1.0/count-lines13-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test simple scatter over an embedded subworkflow""")
+
+ def test_conformance_v1_0_wf_multiple_param_embedded_subwf(self):
+ """Test simple multiple input scatter over an embedded subworkflow
+
+ Generated from::
+
+ job: v1.0/count-lines4-job.json
+ label: wf_multiple_param_embedded_subwf
+ output:
+ count_output:
+ - 16
+ - 1
+ tool: v1.0/count-lines14-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test simple multiple input scatter over an embedded subworkflow""")
+
+ def test_conformance_v1_0_wf_double_nested_subwf(self):
+ """Test twice nested subworkflow
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: wf_double_nested_subwf
+ output:
+ count_output: 16
+ tool: v1.0/count-lines15-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test twice nested subworkflow""")
+
+ def test_conformance_v1_0_wf_subwf_tool_then_wf(self):
+ """Test subworkflow of mixed depth with tool first
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: wf_subwf_tool_then_wf
+ output:
+ count_output: 16
+ tool: v1.0/count-lines16-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test subworkflow of mixed depth with tool first""")
+
+ def test_conformance_v1_0_wf_subwf_wf_then_tool(self):
+ """Test subworkflow of mixed depth with tool after
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: wf_subwf_wf_then_tool
+ output:
+ count_output: 16
+ tool: v1.0/count-lines17-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test subworkflow of mixed depth with tool after""")
+
+ def test_conformance_v1_0_wf_output_record(self):
+ """Test record type inputs to and outputs from workflows.
+
+ Generated from::
+
+ job: v1.0/record-output-job.json
+ label: wf_output_record
+ output:
+ orec:
+ obar:
+ checksum: sha1$aeb3d11bdf536511649129f4077d5cda6a324118
+ class: File
+ location: bar
+ size: 12010
+ ofoo:
+ checksum: sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376
+ class: File
+ location: foo
+ size: 1111
+ tool: v1.0/record-output-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test record type inputs to and outputs from workflows.""")
+
+ def test_conformance_v1_0_wf_input_output_int(self):
+ """Test integer workflow input and outputs
+
+ Generated from::
+
+ job: v1.0/io-int.json
+ label: wf_input_output_int
+ output:
+ o: 10
+ tool: v1.0/io-int-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test integer workflow input and outputs""")
+
+ def test_conformance_v1_0_wf_input_int_opt_spec(self):
+ """Test optional integer workflow inputs (specified)
+
+ Generated from::
+
+ job: v1.0/io-int.json
+ label: wf_input_int_opt_spec
+ output:
+ o: 10
+ tool: v1.0/io-int-optional-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test optional integer workflow inputs (specified)""")
+
+ def test_conformance_v1_0_wf_input_int_opt_unspec(self):
+ """Test optional integer workflow inputs (unspecified)
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_input_int_opt_unspec
+ output:
+ o: 4
+ tool: v1.0/io-int-optional-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test optional integer workflow inputs (unspecified)""")
+
+ def test_conformance_v1_0_wf_input_int_default_spec(self):
+ """Test default integer workflow inputs (specified)
+
+ Generated from::
+
+ job: v1.0/io-int.json
+ label: wf_input_int_default_spec
+ output:
+ o: 10
+ tool: v1.0/io-int-default-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test default integer workflow inputs (specified)""")
+
+ def test_conformance_v1_0_wf_input_int_default_unspec(self):
+ """Test default integer workflow inputs (unspecified)
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_input_int_default_unspec
+ output:
+ o: 8
+ tool: v1.0/io-int-default-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test default integer workflow inputs (unspecified)""")
+
+ def test_conformance_v1_0_wf_input_int_default_tool_wf_unspec(self):
+ """Test default integer tool and workflow inputs (unspecified)
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_input_int_default_tool_wf_unspec
+ output:
+ o: 13
+ tool: v1.0/io-int-default-tool-and-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test default integer tool and workflow inputs (unspecified)""")
+
+ def test_conformance_v1_0_wf_input_file_default_unspec(self):
+ """Test File input with default unspecified to workflow
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_input_file_default_unspec
+ output:
+ o:
+ basename: output
+ checksum: sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376
+ class: File
+ location: Any
+ size: 1111
+ tool: v1.0/io-file-default-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test File input with default unspecified to workflow""")
+
+ def test_conformance_v1_0_wf_input_file_default_spec(self):
+ """Test File input with default specified to workflow
+
+ Generated from::
+
+ job: v1.0/default_path_job.yml
+ label: wf_input_file_default_spec
+ output:
+ o:
+ basename: output
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: Any
+ size: 13
+ tool: v1.0/io-file-default-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test File input with default specified to workflow""")
+
+ def test_conformance_v1_0_wf_input_union_file_filearray_onefilearray(self):
+ """Test input union type or File or File array to a tool with one file in array specified.
+
+ Generated from::
+
+ job: v1.0/job-input-array-one-empty-file.json
+ label: wf_input_union_file_filearray_onefilearray
+ output:
+ output_file:
+ basename: output.txt
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: Any
+ size: 0
+ tool: v1.0/io-file-or-files.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test input union type or File or File array to a tool with one file in array specified.""")
+
+ def test_conformance_v1_0_wf_input_union_file_filearray_fewfilesarray(self):
+ """Test input union type or File or File array to a tool with a few files in array specified.
+
+ Generated from::
+
+ job: v1.0/job-input-array-few-files.json
+ label: wf_input_union_file_filearray_fewfilesarray
+ output:
+ output_file:
+ basename: output.txt
+ checksum: sha1$6d1723861ad5a1260f1c3c07c93076c5a215f646
+ class: File
+ location: Any
+ size: 1114
+ tool: v1.0/io-file-or-files.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test input union type or File or File array to a tool with a few files in array specified.""")
+
+ def test_conformance_v1_0_wf_input_union_file_filearray_onefile(self):
+ """Test input union type or File or File array to a tool with one file specified.
+
+ Generated from::
+
+ job: v1.0/job-input-one-file.json
+ label: wf_input_union_file_filearray_onefile
+ output:
+ output_file:
+ basename: output.txt
+ checksum: sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376
+ class: File
+ location: Any
+ size: 1111
+ tool: v1.0/io-file-or-files.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test input union type or File or File array to a tool with one file specified.""")
+
+ def test_conformance_v1_0_wf_input_union_file_filearray_null(self):
+ """Test input union type or File or File array to a tool with null specified.
+
+ Generated from::
+
+ job: v1.0/job-input-null.json
+ label: wf_input_union_file_filearray_null
+ output:
+ output_file:
+ basename: output.txt
+ checksum: sha1$503458abf7614be3fb26d85ff5d8f3e17aa0a552
+ class: File
+ location: Any
+ size: 10
+ tool: v1.0/io-file-or-files.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test input union type or File or File array to a tool with null specified.""")
+
+ def test_conformance_v1_0_wf_input_any_integer_tool(self):
+ """Test Any parameter with integer input to a tool
+
+ Generated from::
+
+ job: v1.0/io-any-int.json
+ label: wf_input_any_integer_tool
+ output:
+ t1: 7
+ tool: v1.0/io-any-1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any parameter with integer input to a tool""")
+
+ def test_conformance_v1_0_wf_input_any_string_tool(self):
+ """Test Any parameter with string input to a tool
+
+ Generated from::
+
+ job: v1.0/io-any-string.json
+ label: wf_input_any_string_tool
+ output:
+ t1: '7'
+ tool: v1.0/io-any-1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any parameter with string input to a tool""")
+
+ def test_conformance_v1_0_wf_input_any_file_tool(self):
+ """Test Any parameter with file input to a tool
+
+ Generated from::
+
+ job: v1.0/io-any-file.json
+ label: wf_input_any_file_tool
+ output:
+ t1: File
+ tool: v1.0/io-any-1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any parameter with file input to a tool""")
+
+ def test_conformance_v1_0_wf_input_any_array_tool(self):
+ """Test Any parameter with array input to a tool
+
+ Generated from::
+
+ job: v1.0/io-any-array.json
+ label: wf_input_any_array_tool
+ output:
+ t1:
+ - 1
+ - moocow
+ tool: v1.0/io-any-1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any parameter with array input to a tool""")
+
+ def test_conformance_v1_0_wf_input_any_record_tool(self):
+ """Test Any parameter with record input to a tool
+
+ Generated from::
+
+ job: v1.0/io-any-record.json
+ label: wf_input_any_record_tool
+ output:
+ t1:
+ cow: 5
+ moo: 1
+ tool: v1.0/io-any-1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any parameter with record input to a tool""")
+
+ def test_conformance_v1_0_wf_input_any_integer_wf(self):
+ """Test Any parameter with integer input to a workflow
+
+ Generated from::
+
+ job: v1.0/io-any-int.json
+ label: wf_input_any_integer_wf
+ output:
+ t1: 7
+ tool: v1.0/io-any-wf-1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any parameter with integer input to a workflow""")
+
+ def test_conformance_v1_0_wf_input_any_string_wf(self):
+ """Test Any parameter with string input to a workflow
+
+ Generated from::
+
+ job: v1.0/io-any-string.json
+ label: wf_input_any_string_wf
+ output:
+ t1: '7'
+ tool: v1.0/io-any-wf-1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any parameter with string input to a workflow""")
+
+ def test_conformance_v1_0_wf_input_any_file_wf(self):
+ """Test Any parameter with file input to a workflow
+
+ Generated from::
+
+ job: v1.0/io-any-file.json
+ label: wf_input_any_file_wf
+ output:
+ t1: File
+ tool: v1.0/io-any-wf-1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any parameter with file input to a workflow""")
+
+ def test_conformance_v1_0_wf_input_any_array_wf(self):
+ """Test Any parameter with array input to a workflow
+
+ Generated from::
+
+ job: v1.0/io-any-array.json
+ label: wf_input_any_array_wf
+ output:
+ t1:
+ - 1
+ - moocow
+ tool: v1.0/io-any-wf-1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any parameter with array input to a workflow""")
+
+ def test_conformance_v1_0_wf_input_any_record_wf(self):
+ """Test Any parameter with record input to a tool
+
+ Generated from::
+
+ job: v1.0/io-any-record.json
+ label: wf_input_any_record_wf
+ output:
+ t1:
+ cow: 5
+ moo: 1
+ tool: v1.0/io-any-wf-1.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Any parameter with record input to a tool""")
+
+ def test_conformance_v1_0_wf_input_union_default_unspec(self):
+ """Test union type input to workflow with default unspecified
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_input_union_default_unspec
+ output:
+ o: the default value
+ tool: v1.0/io-union-input-default-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test union type input to workflow with default unspecified""")
+
+ def test_conformance_v1_0_wf_input_union_default_file(self):
+ """Test union type input to workflow with default specified as file
+
+ Generated from::
+
+ job: v1.0/io-any-file.json
+ label: wf_input_union_default_file
+ output:
+ o: File
+ tool: v1.0/io-union-input-default-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test union type input to workflow with default specified as file""")
+
+ def test_conformance_v1_0_wf_step_valuefrom_literal(self):
+ """Test valueFrom on workflow step from literal (string).
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: wf_step_valuefrom_literal
+ output:
+ val: 'moocow
+
+ '
+ tool: v1.0/step-valuefrom4-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test valueFrom on workflow step from literal (string).""")
+
+ def test_conformance_v1_0_wf_step_valuefrom_basename(self):
+ """Test valueFrom on workflow step using basename.
+
+ Generated from::
+
+ job: v1.0/wc-job.json
+ label: wf_step_valuefrom_basename
+ output:
+ val1: 'whale.txt
+
+ '
+ val2: 'step1_out
+
+ '
+ tool: v1.0/step-valuefrom5-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test valueFrom on workflow step using basename.""")
+
+ def test_conformance_v1_0_tool_output_arrays_ints(self):
+ """Test output arrays in a tool (with ints).
+
+ Generated from::
+
+ job: v1.0/output-arrays-int-job.json
+ label: tool_output_arrays_ints
+ output:
+ o:
+ - 0
+ - 1
+ - 2
+ tool: v1.0/output-arrays-int.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test output arrays in a tool (with ints).""")
+
+ def test_conformance_v1_0_wf_output_arrays_ints(self):
+ """Test output arrays in a workflow (with ints).
+
+ Generated from::
+
+ job: v1.0/output-arrays-int-job.json
+ label: wf_output_arrays_ints
+ output:
+ o: 12
+ tool: v1.0/output-arrays-int-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test output arrays in a workflow (with ints).""")
+
+ def test_conformance_v1_0_wf_output_arrays_files(self):
+ """Test output arrays in a workflow (with Files).
+
+ Generated from::
+
+ job: v1.0/output-arrays-file-job.json
+ label: wf_output_arrays_files
+ output:
+ o:
+ - basename: moo
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: Any
+ size: 0
+ - basename: cow
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ class: File
+ location: Any
+ size: 0
+ tool: v1.0/output-arrays-file-wf.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test output arrays in a workflow (with Files).""")
+
+ def test_conformance_v1_0_tool_docker_entrypoint(self):
+ """Test Docker ENTRYPOINT usage
+
+ Generated from::
+
+ job: v1.0/empty.json
+ label: tool_docker_entrypoint
+ output:
+ cow:
+ basename: cow
+ checksum: sha1$7a788f56fa49ae0ba5ebde780efe4d6a89b5db47
+ class: File
+ location: Any
+ size: 4
+ tool: v1.0/docker-run-cmd.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test Docker ENTRYPOINT usage""")
+
+ def test_conformance_v1_0_tool_expressions_size_emptyfile(self):
+ """Test use of size in expressions for an empty file
+
+ Generated from::
+
+ job: v1.0/job-input-array-one-empty-file.json
+ label: tool_expressions_size_emptyfile
+ output:
+ output_file:
+ basename: output.txt
+ checksum: sha1$dad5a8472b87f6c5ef87d8fc6ef1458defc57250
+ class: File
+ location: Any
+ size: 11
+ tool: v1.0/size-expression-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test use of size in expressions for an empty file""")
+
+ def test_conformance_v1_0_tool_expressions_size_fewfiles(self):
+ """Test use of size in expressions for a few files
+
+ Generated from::
+
+ job: v1.0/job-input-array-few-files.json
+ label: tool_expressions_size_fewfiles
+ output:
+ output_file:
+ basename: output.txt
+ checksum: sha1$9def39730e8012bd09bf8387648982728501737d
+ class: File
+ location: Any
+ size: 31
+ tool: v1.0/size-expression-tool.cwl
+ """
+ self.cwl_populator.run_conformance_test("""v1.0""", """Test use of size in expressions for a few files""")
+
diff --git a/test/api/test_dataset_collections.py b/test/api/test_dataset_collections.py
index 946114e51af1..ca207248a989 100644
--- a/test/api/test_dataset_collections.py
+++ b/test/api/test_dataset_collections.py
@@ -91,6 +91,118 @@ def test_create_list_of_new_pairs(self):
pair_1_element_1 = pair_elements[0]
assert pair_1_element_1["element_index"] == 0
+ def test_create_record(self):
+ contents = [
+ ("condition", "1\t2\t3"),
+ ("control1", "4\t5\t6"),
+ ("control2", "7\t8\t9"),
+ ]
+ record_identifiers = self.dataset_collection_populator.list_identifiers(self.history_id, contents)
+ fields = [
+ {"name": "condition",
+ "type": "File"},
+ {"name": "control1",
+ "type": "File"},
+ {"name": "control2",
+ "type": "File"},
+ ]
+ payload = dict(
+ name="a record",
+ instance_type="history",
+ history_id=self.history_id,
+ element_identifiers=json.dumps(record_identifiers),
+ collection_type="record",
+ fields=json.dumps(fields),
+ )
+ create_response = self._post("dataset_collections", payload)
+ dataset_collection = self._check_create_response(create_response)
+ assert dataset_collection["collection_type"] == "record"
+ assert dataset_collection["name"] == "a record"
+ returned_collections = dataset_collection["elements"]
+ assert len(returned_collections) == 3, dataset_collection
+ record_pos_0_element = returned_collections[0]
+ self._assert_has_keys(record_pos_0_element, "element_index")
+ record_pos_0_object = record_pos_0_element["object"]
+ self._assert_has_keys(record_pos_0_object, "name", "history_content_type")
+
+ def test_record_requires_fields(self):
+ contents = [
+ ("condition", "1\t2\t3"),
+ ("control1", "4\t5\t6"),
+ ("control2", "7\t8\t9"),
+ ]
+ record_identifiers = self.dataset_collection_populator.list_identifiers(self.history_id, contents)
+ payload = dict(
+ name="a record",
+ instance_type="history",
+ history_id=self.history_id,
+ element_identifiers=json.dumps(record_identifiers),
+ collection_type="record",
+ )
+ create_response = self._post("dataset_collections", payload)
+ self._assert_status_code_is(create_response, 400)
+
+ def test_record_auto_fields(self):
+ contents = [
+ ("condition", "1\t2\t3"),
+ ("control1", "4\t5\t6"),
+ ("control2", "7\t8\t9"),
+ ]
+ record_identifiers = self.dataset_collection_populator.list_identifiers(self.history_id, contents)
+ payload = dict(
+ name="a record",
+ instance_type="history",
+ history_id=self.history_id,
+ element_identifiers=json.dumps(record_identifiers),
+ collection_type="record",
+ fields="auto",
+ )
+ create_response = self._post("dataset_collections", payload)
+ self._check_create_response(create_response)
+
+ def test_record_field_validation(self):
+ contents = [
+ ("condition", "1\t2\t3"),
+ ("control1", "4\t5\t6"),
+ ("control2", "7\t8\t9"),
+ ]
+ record_identifiers = self.dataset_collection_populator.list_identifiers(self.history_id, contents)
+ too_few_fields = [
+ {"name": "condition",
+ "type": "File"},
+ {"name": "control1",
+ "type": "File"},
+ ]
+ too_many_fields = [
+ {"name": "condition",
+ "type": "File"},
+ {"name": "control1",
+ "type": "File"},
+ {"name": "control2",
+ "type": "File"},
+ {"name": "control3",
+ "type": "File"},
+ ]
+ wrong_name_fields = [
+ {"name": "condition",
+ "type": "File"},
+ {"name": "control1",
+ "type": "File"},
+ {"name": "control3",
+ "type": "File"},
+ ]
+ for fields in [too_few_fields, too_many_fields, wrong_name_fields]:
+ payload = dict(
+ name="a record",
+ instance_type="history",
+ history_id=self.history_id,
+ element_identifiers=json.dumps(record_identifiers),
+ collection_type="record",
+ fields=json.dumps(fields),
+ )
+ create_response = self._post("dataset_collections", payload)
+ self._assert_status_code_is(create_response, 400)
+
def test_list_download(self):
fetch_response = self.dataset_collection_populator.create_list_in_history(self.history_id, direct_upload=True).json()
dataset_collection = self.dataset_collection_populator.wait_for_fetched_collection(fetch_response)
diff --git a/test/api/test_jobs.py b/test/api/test_jobs.py
index a1eea668508e..80ca40663d1e 100644
--- a/test/api/test_jobs.py
+++ b/test/api/test_jobs.py
@@ -131,7 +131,10 @@ def test_show_security(self, history_id):
job_id = job["id"]
show_jobs_response = self._get("jobs/%s" % job_id, admin=False)
- self._assert_not_has_keys(show_jobs_response.json(), "command_line", "external_id")
+ self._assert_not_has_keys(
+ show_jobs_response.json(),
+ "command_line", "external_id", "cwl_command_state",
+ )
# TODO: Re-activate test case when API accepts privacy settings
# with self._different_user():
@@ -139,7 +142,10 @@ def test_show_security(self, history_id):
# self._assert_status_code_is( show_jobs_response, 200 )
show_jobs_response = self._get("jobs/%s" % job_id, admin=True)
- self._assert_has_keys(show_jobs_response.json(), "command_line", "external_id")
+ self._assert_has_keys(
+ show_jobs_response.json(),
+ "command_line", "external_id", "cwl_command_state",
+ )
@skip_without_tool('detect_errors_aggressive')
def test_report_error(self):
diff --git a/test/api/test_tools.py b/test/api/test_tools.py
index 2562e12a6993..d652366c6748 100644
--- a/test/api/test_tools.py
+++ b/test/api/test_tools.py
@@ -1,4 +1,3 @@
-# Test tools API.
import contextlib
import json
import os
@@ -15,7 +14,21 @@
)
+MINIMAL_TOOL = {
+ 'id': "minimal_tool",
+ 'name': "Minimal Tool",
+ 'class': "GalaxyTool",
+ 'version': "1.0.0",
+ 'command': "echo 'Hello World' > $output1",
+ 'inputs': [],
+ 'outputs': dict(
+ output1=dict(format='txt'),
+ )
+}
+
+
class ToolsTestCase(api.ApiTestCase):
+ """Test the Galaxy Tool API."""
def setUp(self):
super(ToolsTestCase, self).setUp()
@@ -476,6 +489,25 @@ def test_apply_rules_3(self):
def test_apply_rules_4(self):
self._apply_rules_and_check(rules_test_data.EXAMPLE_4)
+ def test_filter_0(self):
+ history_id = self.dataset_populator.new_history()
+ hdca_id = self.dataset_collection_populator.create_list_in_history(history_id, contents=["a", "a\nb", "a\nb\nc", "a\nb\nc\nd"]).json()["id"]
+ self.dataset_populator.wait_for_history(history_id)
+ inputs = {
+ "input": {"src": "hdca", "id": hdca_id},
+ "expression": "metadata_data_lines % 2 == 0"
+ }
+ response = self._run("__FILTER__", history_id, inputs, assert_ok=True)
+ output_collections = response["output_collections"]
+ assert len(output_collections) == 1
+
+ filtered_hid = output_collections[0]["hid"]
+ filtered_hdca = self.dataset_populator.get_history_collection_details(history_id, hid=filtered_hid)
+ self.assertEquals(len(filtered_hdca["elements"]), 2)
+ filtered_dataset = filtered_hdca["elements"][0]["object"]
+ filtered_dataset_content = self.dataset_populator.get_history_dataset_content(history_id, dataset=filtered_dataset)
+ self.assertEquals(filtered_dataset_content.strip(), "a\nb")
+
@skip_without_tool("multi_select")
def test_multi_select_as_list(self):
with self.dataset_populator.test_history() as history_id:
@@ -840,6 +872,68 @@ def test_dynamic_list_output(self, history_id):
output_element_hda_0 = output_element_0["object"]
assert output_element_hda_0["metadata_column_types"] is not None
+ def test_nonadmin_users_cannot_create_tools(self):
+ payload = dict(
+ representation=json.dumps(MINIMAL_TOOL),
+ )
+ create_response = self._post("dynamic_tools", data=payload, admin=False)
+ self._assert_status_code_is(create_response, 403)
+
+ def test_dynamic_tool_1(self):
+ # Create tool.
+ self.dataset_populator.create_tool(MINIMAL_TOOL)
+
+ # Run tool.
+ history_id = self.dataset_populator.new_history()
+ inputs = {}
+ self._run("minimal_tool", history_id, inputs)
+
+ self.dataset_populator.wait_for_history(history_id, assert_ok=True)
+ output_content = self.dataset_populator.get_history_dataset_content(history_id)
+ self.assertEqual(output_content, "Hello World\n")
+
+ @skip_without_tool("expression_forty_two")
+ def test_galaxy_expression_tool_simplest(self):
+ history_id = self.dataset_populator.new_history()
+ inputs = {
+ }
+ run_response = self._run(
+ "expression_forty_two", history_id, inputs
+ )
+ self._assert_status_code_is(run_response, 200)
+ self.dataset_populator.wait_for_history(history_id, assert_ok=True)
+ output_content = self.dataset_populator.get_history_dataset_content(history_id)
+ self.assertEqual(output_content, "42")
+
+ @skip_without_tool("expression_parse_int")
+ def test_galaxy_expression_tool_simple(self):
+ history_id = self.dataset_populator.new_history()
+ inputs = {
+ 'input1': '7',
+ }
+ run_response = self._run(
+ "expression_parse_int", history_id, inputs
+ )
+ self._assert_status_code_is(run_response, 200)
+ self.dataset_populator.wait_for_history(history_id, assert_ok=True)
+ output_content = self.dataset_populator.get_history_dataset_content(history_id)
+ self.assertEqual(output_content, "7")
+
+ @skip_without_tool("expression_log_line_count")
+ def test_galaxy_expression_metadata(self):
+ history_id = self.dataset_populator.new_history()
+ new_dataset1 = self.dataset_populator.new_dataset(history_id, content='1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14')
+ inputs = {
+ 'input1': dataset_to_param(new_dataset1),
+ }
+ run_response = self._run(
+ "expression_log_line_count", history_id, inputs
+ )
+ self._assert_status_code_is(run_response, 200)
+ self.dataset_populator.wait_for_history(history_id, assert_ok=True)
+ output_content = self.dataset_populator.get_history_dataset_content(history_id)
+ self.assertEqual(output_content, "3")
+
@skip_without_tool("cat1")
@uses_test_history(require_new=False)
def test_run_cat1_with_two_inputs(self, history_id):
@@ -1936,11 +2030,12 @@ def _run_outputs(self, create_response):
def _run_cat1(self, history_id, inputs, assert_ok=False, **kwargs):
return self._run('cat1', history_id, inputs, assert_ok=assert_ok, **kwargs)
- def _run(self, tool_id, history_id, inputs, assert_ok=False, tool_version=None, use_cached_job=False, wait_for_job=False):
+ def _run(self, tool_id, history_id, inputs, assert_ok=False, tool_version=None, use_cached_job=False, wait_for_job=False, inputs_representation=None):
payload = self.dataset_populator.run_tool_payload(
tool_id=tool_id,
inputs=inputs,
history_id=history_id,
+ inputs_representation=inputs_representation,
)
if tool_version is not None:
payload["tool_version"] = tool_version
diff --git a/test/api/test_tools_cwl.py b/test/api/test_tools_cwl.py
new file mode 100644
index 000000000000..2b7f3c085c8d
--- /dev/null
+++ b/test/api/test_tools_cwl.py
@@ -0,0 +1,452 @@
+"""Test CWL Tool Execution via the API."""
+
+from sys import platform as _platform
+
+from base import api
+from base.populators import (
+ DatasetPopulator,
+ CwlPopulator,
+ WorkflowPopulator,
+)
+from base.populators import skip_without_tool
+
+from galaxy.tools.cwl.representation import USE_FIELD_TYPES
+
+IS_OS_X = _platform == "darwin"
+
+
+class CwlToolsTestCase(api.ApiTestCase):
+ """Test CWL Tool Execution via the API."""
+
+ def setUp(self):
+ """Setup dataset populator."""
+ super(CwlToolsTestCase, self).setUp()
+ self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
+        workflow_populator = WorkflowPopulator(self.galaxy_interactor)
+        self.cwl_populator = CwlPopulator(self.dataset_populator, workflow_populator)
+
+ @skip_without_tool("cat1-tool")
+ def test_cat1_number(self):
+ """Test execution of cat1 using the "normal" Galaxy job API representation."""
+ history_id = self.dataset_populator.new_history()
+ hda1 = _dataset_to_param(self.dataset_populator.new_dataset(history_id, content='1\n2\n3', name="test1"))
+ if not USE_FIELD_TYPES:
+ inputs = {
+ "file1": hda1,
+ "numbering|_cwl__type_": "boolean",
+ "numbering|_cwl__value_": True,
+ }
+ else:
+ inputs = {
+ "file1": hda1,
+ "numbering": {"src": "json", "value": True},
+ }
+ stdout = self._run_and_get_stdout("cat1-tool", history_id, inputs, assert_ok=True)
+ self.assertEquals(stdout, " 1\t1\n 2\t2\n 3\t3\n")
+
+ @skip_without_tool("cat1-tool")
+ def test_cat1_number_cwl_json(self):
+ """Test execution of cat1 using the "CWL" Galaxy job API representation."""
+ history_id = self.dataset_populator.new_history()
+ hda1 = _dataset_to_param(self.dataset_populator.new_dataset(history_id, content='1\n2\n3'))
+ inputs = {
+ "file1": hda1,
+ "numbering": True,
+ }
+ stdout = self._run_and_get_stdout("cat1-tool", history_id, inputs, assert_ok=True, inputs_representation="cwl")
+ self.assertEquals(stdout, " 1\t1\n 2\t2\n 3\t3\n")
+
+ @skip_without_tool("cat1-tool")
+ def test_cat1_number_cwl_json_file(self):
+ """Test execution of cat1 using the CWL job definition file."""
+ run_object = self.cwl_populator.run_cwl_artifact("cat1-tool", "test/functional/tools/cwl_tools/draft3/cat-job.json")
+ stdout = self._get_job_stdout(run_object.job_id)
+ self.assertEquals(stdout, "Hello world!\n")
+
+ @skip_without_tool("cat1-tool")
+ def test_cat1_number_cwl_n_json_file(self):
+ run_object = self.cwl_populator.run_cwl_artifact("cat1-tool", "test/functional/tools/cwl_tools/draft3/cat-n-job.json")
+ stdout = self._get_job_stdout(run_object.job_id)
+ self.assertEquals(stdout, " 1\tHello world!\n")
+
+ @skip_without_tool("cat2-tool")
+ def test_cat2(self):
+ run_object = self.cwl_populator.run_cwl_artifact("cat2-tool", "test/functional/tools/cwl_tools/draft3/cat-job.json")
+ stdout = self._get_job_stdout(run_object.job_id)
+ self.assertEquals(stdout, "Hello world!\n")
+
+ @skip_without_tool("cat4-tool")
+ def test_cat4(self):
+ run_object = self.cwl_populator.run_cwl_artifact("cat4-tool", "test/functional/tools/cwl_tools/draft3/cat-job.json")
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ self.assertEquals(output1_content, "Hello world!\n")
+
+ @skip_without_tool("cat-default")
+ def test_cat_default(self):
+ run_object = self.cwl_populator.run_cwl_artifact("cat-default", job={})
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ self.assertEquals(output1_content, "Hello world!\n")
+
+ @skip_without_tool("wc-tool")
+ def test_wc(self):
+ run_object = self.cwl_populator.run_cwl_artifact("wc-tool", "test/functional/tools/cwl_tools/draft3/wc-job.json")
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ if not IS_OS_X:
+ self.assertEquals(output1_content, " 16 198 1111\n")
+ else:
+ self.assertEquals(output1_content, " 16 198 1111\n")
+
+ @skip_without_tool("wc2-tool")
+ def test_wc2(self):
+ run_object = self.cwl_populator.run_cwl_artifact("wc2-tool", "test/functional/tools/cwl_tools/draft3/wc-job.json")
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ self.assertEquals(output1_content, "16")
+
+ @skip_without_tool("wc3-tool")
+ def test_wc3(self):
+ run_object = self.cwl_populator.run_cwl_artifact(
+            "wc3-tool",
+ job={
+ "file1": [
+ {
+ "class": "File",
+ "path": "whale.txt"
+ },
+ ],
+ },
+ test_data_directory="test/functional/tools/cwl_tools/draft3/"
+ )
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ self.assertEquals(output1_content, "16")
+
+ @skip_without_tool("wc4-tool")
+ def test_wc4(self):
+ run_object = self.cwl_populator.run_cwl_artifact("wc4-tool", "test/functional/tools/cwl_tools/draft3/wc-job.json")
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ self.assertEquals(output1_content, "16")
+
+ @skip_without_tool("galactic_cat")
+ def test_galactic_cat_1(self):
+ with self.dataset_populator.test_history() as history_id:
+ hda_id = self.dataset_populator.new_dataset(history_id, name="test_dataset.txt")["id"]
+ self.dataset_populator.wait_for_history(history_id, assert_ok=True)
+ inputs = {
+ "input1": {"src": "hda", "id": hda_id}
+ }
+ run_response = self._run("galactic_cat", history_id, inputs, assert_ok=True)
+ dataset = run_response["outputs"][0]
+ content = self.dataset_populator.get_history_dataset_content(history_id, dataset=dataset)
+ assert content.strip() == "TestData123", content
+
+ def test_galactic_record_input(self):
+ with self.dataset_populator.test_history() as history_id:
+ hda1_id = self.dataset_populator.new_dataset(history_id, content="moo", name="test_dataset.txt")["id"]
+ hda2_id = self.dataset_populator.new_dataset(history_id, content="cow dog foo", name="test_dataset.txt")["id"]
+ self.dataset_populator.wait_for_history(history_id, assert_ok=True)
+ inputs = {
+ "input1": {"src": "hda", "id": hda1_id},
+ "input2": {"src": "hda", "id": hda2_id},
+ }
+ run_response = self._run("galactic_record_input", history_id, inputs, assert_ok=True)
+ dataset = run_response["outputs"][0]
+ content = self.dataset_populator.get_history_dataset_content(history_id, dataset=dataset)
+ assert content.strip() == "moo", content
+
+ dataset = run_response["outputs"][1]
+ content = self.dataset_populator.get_history_dataset_content(history_id, dataset=dataset)
+ assert content.strip() == "cow dog foo", content
+
+ def _run_and_get_stdout(self, tool_id, history_id, inputs, **kwds):
+ response = self._run(tool_id, history_id, inputs, **kwds)
+ assert "jobs" in response
+ job = response["jobs"][0]
+ job_id = job["id"]
+ final_state = self.dataset_populator.wait_for_job(job_id)
+ assert final_state == "ok"
+ return self._get_job_stdout(job_id)
+
+ def _get_job_stdout(self, job_id):
+ job_details = self.dataset_populator.get_job_details(job_id, full=True)
+ stdout = job_details.json()["stdout"]
+ return stdout
+
+ @skip_without_tool("cat3-tool")
+ def test_cat3(self):
+ with self.dataset_populator.test_history() as history_id:
+ hda1 = _dataset_to_param(self.dataset_populator.new_dataset(history_id, content='1\t2\t3'))
+ inputs = {
+ "f1": hda1,
+ }
+ response = self._run("cat3-tool", history_id, inputs, assert_ok=True)
+ output1 = response["outputs"][0]
+ output1_details = self.dataset_populator.get_history_dataset_details(history_id, dataset=output1)
+ assert "cwl_file_name" in output1_details, output1_details.keys()
+ assert output1_details["cwl_file_name"] == "output.txt", output1_details["cwl_file_name"]
+ output1_content = self.dataset_populator.get_history_dataset_content(history_id, dataset=output1)
+ assert output1_content == "1\t2\t3\n", output1_content
+
+ @skip_without_tool("sorttool")
+ def test_sorttool(self):
+ history_id = self.dataset_populator.new_history()
+ hda1 = _dataset_to_param(self.dataset_populator.new_dataset(history_id, content='1\n2\n3'))
+ inputs = {
+ "reverse": False,
+ "input": hda1
+ }
+ response = self._run("sorttool", history_id, inputs, assert_ok=True)
+ output1 = response["outputs"][0]
+ output1_content = self.dataset_populator.get_history_dataset_content(history_id, dataset=output1)
+ assert output1_content == "1\n2\n3\n", output1_content
+
+ @skip_without_tool("sorttool")
+ def test_sorttool_reverse(self):
+ history_id = self.dataset_populator.new_history()
+ hda1 = _dataset_to_param(self.dataset_populator.new_dataset(history_id, content='1\n2\n3'))
+ inputs = {
+ "reverse": True,
+ "input": hda1
+ }
+ response = self._run("sorttool", history_id, inputs, assert_ok=True)
+ output1 = response["outputs"][0]
+ output1_content = self.dataset_populator.get_history_dataset_content(history_id, dataset=output1)
+ assert output1_content == "3\n2\n1\n", output1_content
+
+ @skip_without_tool("env-tool1")
+ def test_env_tool1(self):
+ history_id = self.dataset_populator.new_history()
+ inputs = {
+ "in": "Hello World",
+ }
+ response = self._run("env-tool1", history_id, inputs, assert_ok=True)
+ output1 = response["outputs"][0]
+ output1_content = self.dataset_populator.get_history_dataset_content(history_id, dataset=output1)
+ self.assertEquals(output1_content, "Hello World\n")
+
+ @skip_without_tool("env-tool2")
+ def test_env_tool2(self):
+ run_object = self.cwl_populator.run_cwl_artifact("env-tool2", "test/functional/tools/cwl_tools/draft3/env-job.json")
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ self.assertEquals(output1_content, "hello test env\n")
+
+ @skip_without_tool("rename")
+ def test_rename(self):
+ run_object = self.cwl_populator.run_cwl_artifact("rename", "test/functional/tools/cwl_tools/draft3/rename-job.json")
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ self.assertEquals(output1_content, whale_text())
+
+ @skip_without_tool("optional-output")
+ def test_optional_output(self):
+ run_object = self.cwl_populator.run_cwl_artifact("optional-output", "test/functional/tools/cwl_tools/draft3/cat-job.json")
+ output_file = run_object.output(0)
+ optional_file = run_object.output(1)
+ output_content = self.dataset_populator.get_history_dataset_content(run_object.history_id, dataset=output_file)
+ optional_content = self.dataset_populator.get_history_dataset_content(run_object.history_id, dataset=optional_file)
+ self.assertEquals(output_content, "Hello world!\n")
+ self.assertEquals(optional_content, "null")
+
+ @skip_without_tool("optional-output2")
+ def test_optional_output2_on(self):
+ run_object = self.cwl_populator.run_cwl_artifact(
+ "optional-output2",
+ job={
+ "produce": "do_write",
+ },
+ test_data_directory="test/functional/tools/cwl_tools/draft3/"
+ )
+ output_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ self.assertEquals(output_content, "bees\n")
+
+ @skip_without_tool("optional-output2")
+ def test_optional_output2_off(self):
+ run_object = self.cwl_populator.run_cwl_artifact(
+ "optional-output2",
+ job={
+ "produce": "dont_write",
+ },
+ test_data_directory="test/functional/tools/cwl_tools/draft3/"
+ )
+ output_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ self.assertEquals(output_content, "null")
+
+ @skip_without_tool("index1")
+ @skip_without_tool("showindex1")
+ def test_index1(self):
+ run_object = self.cwl_populator.run_cwl_artifact(
+ "index1",
+ job={
+ "file": {
+ "class": "File",
+ "path": "whale.txt"
+ },
+ },
+ test_data_directory="test/functional/tools/cwl_tools/draft3/",
+ )
+ output1 = self.dataset_populator.get_history_dataset_details(run_object.history_id)
+ run_object = self.cwl_populator.run_cwl_artifact(
+ "showindex1",
+ job={
+ "file": {
+ "src": "hda",
+ "id": output1["id"],
+ },
+ },
+ test_data_directory="test/functional/tools/cwl_tools/draft3/",
+ history_id=run_object.history_id,
+ )
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ assert "call: 1\n" in output1_content, output1_content
+
+ @skip_without_tool("any1")
+ def test_any1_0(self):
+ run_object = self.cwl_populator.run_cwl_artifact(
+ "any1",
+ job={"bar": 7},
+ test_data_directory="test/functional/tools/cwl_tools/draft3/",
+ )
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ assert output1_content == '7', output1_content
+
+ @skip_without_tool("any1")
+ def test_any1_1(self):
+ run_object = self.cwl_populator.run_cwl_artifact(
+ "any1",
+ job={"bar": "7"},
+ test_data_directory="test/functional/tools/cwl_tools/draft3/",
+ )
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ assert output1_content == '"7"', output1_content
+
+ @skip_without_tool("any1")
+ def test_any1_file(self):
+ run_object = self.cwl_populator.run_cwl_artifact(
+ "any1",
+ job={"bar": {
+ "class": "File",
+ "location": "whale.txt",
+ }},
+ test_data_directory="test/functional/tools/cwl_tools/draft3/",
+ )
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ self.dataset_populator._summarize_history_errors(run_object.history_id)
+ assert output1_content == '"File"', "[%s]" % output1_content
+
+ @skip_without_tool("any1")
+ def test_any1_2(self):
+ run_object = self.cwl_populator.run_cwl_artifact(
+ "any1",
+ job={"bar": {"Cow": ["Turkey"]}},
+ test_data_directory="test/functional/tools/cwl_tools/draft3/",
+ )
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ assert output1_content == '{"Cow": ["Turkey"]}', output1_content
+
+ @skip_without_tool("null-expression1-tool")
+ def test_null_expression_1_1(self):
+ run_object = self.cwl_populator.run_cwl_artifact("null-expression1-tool", "test/functional/tools/cwl_tools/draft3/empty.json")
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ assert output1_content == '1', output1_content
+
+ @skip_without_tool("null-expression1-tool")
+ def test_null_expression_1_2(self):
+ run_object = self.cwl_populator.run_cwl_artifact("null-expression1-tool", "test/functional/tools/cwl_tools/draft3/null-expression2-job.json")
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ assert output1_content == '2', output1_content
+
+ @skip_without_tool("null-expression2-tool")
+ def test_null_expression_any_bad_1(self):
+ """Test explicitly passing null to Any type without a default value fails."""
+ run_object = self.cwl_populator.run_cwl_artifact("null-expression2-tool", "test/functional/tools/cwl_tools/draft3/null-expression1-job.json", assert_ok=False)
+ self._assert_status_code_is(run_object.run_response, 400)
+
+ @skip_without_tool("null-expression2-tool")
+ def test_null_expression_any_bad_2(self):
+ """Test Any without defaults can be unspecified."""
+ run_object = self.cwl_populator.run_cwl_artifact("null-expression2-tool", "test/functional/tools/cwl_tools/draft3/empty.json", assert_ok=False)
+ self._assert_status_code_is(run_object.run_response, 400)
+
+ @skip_without_tool("default_path")
+ def test_default_path_override(self):
+ run_object = self.cwl_populator.run_cwl_artifact("default_path", "test/functional/tools/cwl_tools/v1.0/default_path_job.yml")
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ assert output1_content.strip() == "Hello world!", output1_content
+
+ @skip_without_tool("default_path_custom_1")
+ def test_default_path(self):
+ # produces no output - just test the job runs okay.
+ # later come back and verify standard output of the job.
+ run_object = self.cwl_populator.run_cwl_artifact("default_path_custom_1", job={})
+ stdout = self._get_job_stdout(run_object.job_id)
+ assert "this is the test file that will be used when calculating an md5sum" in stdout
+
+ @skip_without_tool("params")
+ def test_params1(self):
+ run_object = self.cwl_populator.run_cwl_artifact("params", "test/functional/tools/cwl_tools/draft3/empty.json")
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id)
+ assert output1_content == '"b b"', output1_content
+
+ @skip_without_tool("parseInt-tool")
+ def test_parse_int_tool(self):
+ run_object = self.cwl_populator.run_cwl_artifact("parseInt-tool", "test/functional/tools/cwl_tools/draft3/parseInt-job.json")
+ output1 = self.dataset_populator.get_history_dataset_details(run_object.history_id, hid=2)
+ assert output1["state"] == "ok"
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id, hid=2)
+ self.assertEquals(output1_content, '42')
+ self.assertEquals(output1["extension"], "expression.json")
+
+ @skip_without_tool("record-output")
+ def test_record_output(self):
+ run_object = self.cwl_populator.run_cwl_artifact("record-output", "test/functional/tools/cwl_tools/v1.0/record-output-job.json")
+ result_record = run_object.output_collection(0)
+ assert result_record["collection_type"] == "record"
+ record_elements = result_record["elements"]
+ first_element = record_elements[0]
+ assert first_element["element_identifier"] == "ofoo"
+ first_hda = first_element["object"]
+ output1_content = self.dataset_populator.get_history_dataset_content(run_object.history_id, hid=first_hda["hid"])
+ assert "Call me Ishmael." in output1_content, "Expected contents of whale.txt, got [%s]" % output1_content
+
+ # def test_dynamic_tool_execution( self ):
+ # workflow_tool_json = {
+ # 'inputs': [{'inputBinding': {}, 'type': 'File', 'id': 'file:///home/john/workspace/galaxy/test/unit/tools/cwl_tools/draft3/count-lines2-wf.cwl#step1/wc/wc_file1'}],
+ # 'stdout': 'output.txt',
+ # 'id': 'file:///home/john/workspace/galaxy/test/unit/tools/cwl_tools/draft3/count-lines2-wf.cwl#step1/wc',
+ # 'outputs': [{'outputBinding': {'glob': 'output.txt'}, 'type': 'File', 'id': 'file:///home/john/workspace/galaxy/test/unit/tools/cwl_tools/draft3/count-lines2-wf.cwl#step1/wc/wc_output'}],
+ # 'baseCommand': 'wc',
+ # 'class': 'CommandLineTool'
+ # }
+
+ # create_payload = dict(
+ # representation=json.dumps(workflow_tool_json),
+ # )
+ # create_response = self._post( "dynamic_tools", data=create_payload, admin=True )
+ # self._assert_status_code_is( create_response, 200 )
+
+ # TODO: Use mixin so this can be shared with tools test case.
+ def _run(self, tool_id, history_id, inputs, assert_ok=False, tool_version=None, inputs_representation=None):
+ payload = self.dataset_populator.run_tool_payload(
+ tool_id=tool_id,
+ inputs=inputs,
+ history_id=history_id,
+ inputs_representation=inputs_representation,
+ )
+ if tool_version is not None:
+ payload["tool_version"] = tool_version
+ create_response = self._post("tools", data=payload)
+ if assert_ok:
+ self._assert_status_code_is(create_response, 200)
+ create = create_response.json()
+ self._assert_has_keys(create, 'outputs')
+ return create
+ else:
+ return create_response
+
+
+def whale_text():
+ return open("test/functional/tools/cwl_tools/draft3/whale.txt", "r").read()
+
+
+def _dataset_to_param(dataset):
+ return dict(
+ src='hda',
+ id=dataset['id']
+ )
diff --git a/test/api/test_workflows.py b/test/api/test_workflows.py
index f4ac72775a2f..856ca6a3922c 100644
--- a/test/api/test_workflows.py
+++ b/test/api/test_workflows.py
@@ -2,7 +2,8 @@
import json
import time
-from json import dumps
+
+from json import dumps, loads
from uuid import uuid4
from requests import delete, put
@@ -20,6 +21,7 @@
WORKFLOW_NESTED_REPLACEMENT_PARAMETER,
WORKFLOW_NESTED_RUNTIME_PARAMETER,
WORKFLOW_NESTED_SIMPLE,
+ WORKFLOW_ONE_STEP_DEFAULT,
WORKFLOW_RENAME_ON_INPUT,
WORKFLOW_RUNTIME_PARAMETER_AFTER_PAUSE,
WORKFLOW_WITH_DYNAMIC_OUTPUT_COLLECTION,
@@ -175,13 +177,7 @@ def _assert_history_job_count(self, history_id, n):
self.assertEqual(len(jobs), n)
def _download_workflow(self, workflow_id, style=None):
- params = {}
- if style:
- params = {"style": style}
- download_response = self._get("workflows/%s/download" % workflow_id, params)
- self._assert_status_code_is(download_response, 200)
- downloaded_workflow = download_response.json()
- return downloaded_workflow
+ return self.workflow_populator.downloaded_workflow(workflow_id, style=style)
def wait_for_invocation_and_jobs(self, history_id, workflow_id, invocation_id, assert_ok=True):
state = self.workflow_populator.wait_for_invocation(workflow_id, invocation_id)
@@ -386,6 +382,46 @@ def test_import_deprecated(self):
self._assert_status_code_is(other_import_response, 200)
self._assert_user_has_workflow_with_name("imported: test_import_published_deprecated")
+ def test_import_export_dynamic(self):
+ workflow_id = self._upload_yaml_workflow("""
+class: GalaxyWorkflow
+steps:
+ - type: input
+ label: input1
+ - tool_id: cat1
+ label: first_cat
+ state:
+ input1:
+ $link: 0
+ - label: embed1
+ run:
+ class: GalaxyTool
+ command: echo 'hello world 2' > $output1
+ outputs:
+ output1:
+ format: txt
+ - tool_id: cat1
+ state:
+ input1:
+ $link: first_cat#out_file1
+ queries:
+ input2:
+ $link: embed1#output1
+test_data:
+ input1: "hello world"
+""")
+ downloaded_workflow = self._download_workflow(workflow_id)
+ downloaded_tool_step = downloaded_workflow["steps"]["1"]
+ tool_representation = downloaded_tool_step["tool_representation"]
+ import_response = self._import_tool_response(loads(tool_representation))
+ self._assert_status_code_is(import_response, 303)
+
+ response = self.workflow_populator.create_workflow_response(downloaded_workflow)
+
+ downloaded_second_workflow = self._download_workflow(response.json()["id"])
+        # TODO(review): add real assertions on the re-imported workflow content
+        assert downloaded_second_workflow
+
def test_import_annotations(self):
workflow_id = self.workflow_populator.simple_workflow("test_import_annotations", publish=True)
with self._different_user():
@@ -1180,6 +1216,28 @@ def test_workflow_output_dataset_collection(self):
elements0 = elements[0]
assert elements0["element_identifier"] == "el1"
+ def test_workflow_input_as_output(self):
+ with self.dataset_populator.test_history() as history_id:
+ summary = self._run_jobs("""
+class: GalaxyWorkflow
+inputs:
+ input1: data
+outputs:
+ wf_output_1:
+ outputSource: input1
+steps: []
+""", test_data={"input1": "hello world"}, history_id=history_id)
+ workflow_id = summary.workflow_id
+ invocation_id = summary.invocation_id
+ invocation_response = self._get("workflows/%s/invocations/%s" % (workflow_id, invocation_id))
+ self._assert_status_code_is(invocation_response, 200)
+ invocation = invocation_response.json()
+            self._assert_has_keys(invocation, "id", "outputs", "output_collections")
+ assert len(invocation["output_collections"]) == 0
+ assert len(invocation["outputs"]) == 1
+ output_content = self.dataset_populator.get_history_dataset_content(history_id, content_id=invocation["outputs"]["wf_output_1"]["id"])
+ assert output_content == "hello world\n"
+
@skip_without_tool("cat")
def test_workflow_input_mapping(self):
with self.dataset_populator.test_history() as history_id:
@@ -1388,13 +1446,14 @@ def test_subworkflow_recover_mapping_2(self):
steps:
random_lines:
tool_id: random_lines1
- state:
- num_lines: 2
- input:
- $link: inner_input
- seed_source:
- seed_source_selector: set_seed
- seed: asdf
+ in:
+ input: inner_input
+ num_lines:
+ default: 2
+ seed_source|seed_source_selector:
+ default: set_seed
+ seed_source|seed:
+ default: asdf
split:
tool_id: split
in:
@@ -1832,7 +1891,7 @@ def test_run_with_validated_parameter_connection_invalid(self):
type: raw
""", history_id=history_id, wait=True, assert_ok=False)
- def test_run_with_text_connection(self):
+ def test_run_with_text_input_connection(self):
with self.dataset_populator.test_history() as history_id:
self._run_jobs("""
class: GalaxyWorkflow
@@ -1863,6 +1922,27 @@ def test_run_with_text_connection(self):
content = self.dataset_populator.get_history_dataset_content(history_id)
self.assertEqual("chrX\t152691446\t152691471\tCCDS14735.1_cds_0_0_chrX_152691447_f\t0\t+\n", content)
+ def test_run_with_numeric_input_connection(self):
+ history_id = self.dataset_populator.new_history()
+ self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+- label: forty_two
+ tool_id: expression_forty_two
+ state: {}
+- label: consume_expression_parameter
+ tool_id: cheetah_casting
+ state:
+ floattest: 3.14
+ inttest:
+ $link: forty_two#out1
+test_data: {}
+""", history_id=history_id)
+
+ self.dataset_populator.wait_for_history(history_id, assert_ok=True)
+ content = self.dataset_populator.get_history_dataset_content(history_id)
+ self.assertEquals("43\n4.14\n", content)
+
@skip_without_tool('cat1')
def test_workflow_rerun_with_use_cached_job(self):
workflow = self.workflow_populator.load_workflow(name="test_for_run")
@@ -2846,6 +2926,70 @@ def test_run_replace_params_nested_normalized(self):
self.dataset_populator.wait_for_history(history_id, assert_ok=True)
self.assertEqual("2\n", self.dataset_populator.get_history_dataset_content(history_id))
+ @skip_without_tool("random_lines1")
+ def test_run_replace_params_over_default(self):
+ with self.dataset_populator.test_history() as history_id:
+ self._run_jobs(WORKFLOW_ONE_STEP_DEFAULT, test_data="""
+step_parameters:
+ '1':
+ num_lines: 4
+input:
+ value: 1.bed
+ type: File
+""", history_id=history_id, wait=True, assert_ok=True, round_trip_format_conversion=True)
+ result = self.dataset_populator.get_history_dataset_content(history_id)
+ assert result.count("\n") == 4
+
+ @skip_without_tool("random_lines1")
+ def test_defaults_editor(self):
+ with self.dataset_populator.test_history() as history_id:
+ workflow_id = self._upload_yaml_workflow(WORKFLOW_ONE_STEP_DEFAULT, publish=True)
+ workflow_object = self._download_workflow(workflow_id, style="editor")
+ put_response = self._update_workflow(workflow_id, workflow_object)
+ assert put_response.status_code == 200
+
+ @skip_without_tool("random_lines1")
+ def test_run_replace_params_over_default_delayed(self):
+ with self.dataset_populator.test_history() as history_id:
+ run_summary = self._run_jobs("""
+class: GalaxyWorkflow
+inputs:
+ input: data
+steps:
+ first_cat:
+ tool_id: cat1
+ in:
+ input1: input
+ the_pause:
+ type: pause
+ in:
+ input: first_cat/out_file1
+ randomlines:
+ tool_id: random_lines1
+ in:
+ input: the_pause
+ num_lines:
+ default: 6
+""", test_data="""
+step_parameters:
+ '3':
+ num_lines: 4
+input:
+ value: 1.bed
+ type: File
+""", history_id=history_id, wait=False)
+ wait_on(lambda: len(self._history_jobs(history_id)) >= 2 or None, "history jobs")
+ self.dataset_populator.wait_for_history(history_id, assert_ok=True)
+
+ workflow_id = run_summary.workflow_id
+ invocation_id = run_summary.invocation_id
+
+ self.__review_paused_steps(workflow_id, invocation_id, order_index=2, action=True)
+ self.wait_for_invocation_and_jobs(history_id, workflow_id, invocation_id)
+
+ result = self.dataset_populator.get_history_dataset_content(history_id)
+ assert result.count("\n") == 4
+
def test_pja_import_export(self):
workflow = self.workflow_populator.load_workflow(name="test_for_pja_import", add_pja=True)
uploaded_workflow_id = self.workflow_populator.create_workflow(workflow)
@@ -3077,13 +3221,7 @@ def _wait_for_invocation_state(self, workflow_id, invocation_id, target_state):
return target_state_reached
def _update_workflow(self, workflow_id, workflow_object):
- data = dict(
- workflow=workflow_object
- )
- raw_url = 'workflows/%s' % workflow_id
- url = self._api_url(raw_url, use_key=True)
- put_response = put(url, data=dumps(data))
- return put_response
+ return self.workflow_populator.update_workflow(workflow_id, workflow_object)
def _invocation_step_details(self, workflow_id, invocation_id, step_id):
invocation_step_response = self._get("workflows/%s/usage/%s/steps/%s" % (workflow_id, invocation_id, step_id))
diff --git a/test/api/test_workflows_cwl.py b/test/api/test_workflows_cwl.py
new file mode 100644
index 000000000000..f984cf14dd50
--- /dev/null
+++ b/test/api/test_workflows_cwl.py
@@ -0,0 +1,192 @@
+"""Test CWL workflow functionality."""
+import json
+import os
+import re
+
+from base.populators import (
+ CWL_TOOL_DIRECTORY,
+ CwlPopulator,
+)
+
+from .test_workflows import BaseWorkflowsApiTestCase
+
+
+class BaseCwlWorkflowTestCase(BaseWorkflowsApiTestCase):
+
+    require_admin_user = True
+
+    def setUp(self):
+        super(BaseCwlWorkflowTestCase, self).setUp()
+        self.history_id = self.dataset_populator.new_history()
+        self.cwl_populator = CwlPopulator(
+            self.dataset_populator, self.workflow_populator
+        )
+
+
+class CwlWorkflowsTestCase(BaseCwlWorkflowTestCase):
+ """Test case encompassing CWL workflow tests."""
+
+ def test_simplest_wf(self):
+ """Test simplest workflow."""
+ workflow_id = self._load_workflow("v1.0_custom/just-wc-wf.cwl")
+ workflow_content = self._download_workflow(workflow_id)
+ for step_index, step in workflow_content["steps"].items():
+ if "tool_representation" in step:
+ del step["tool_representation"]
+
+ hda1 = self.dataset_populator.new_dataset(self.history_id, content="hello world\nhello all\nhello all in world\nhello")
+ inputs_map = {
+ "file1": {"src": "hda", "id": hda1["id"]}
+ }
+ invocation_id = self._invoke(inputs_map, workflow_id)
+ self.wait_for_invocation_and_jobs(self.history_id, workflow_id, invocation_id)
+ output = self.dataset_populator.get_history_dataset_content(self.history_id, hid=2)
+ assert re.search(r"\s+4\s+9\s+47\s+", output)
+
+ def test_load_ids(self):
+ workflow_id = self._load_workflow("v1.0/search.cwl#main")
+ workflow_content = self._download_workflow(workflow_id)
+ for step_index, step in workflow_content["steps"].items():
+ if "tool_representation" in step:
+ del step["tool_representation"]
+
+ print(workflow_content)
+ steps = workflow_content["steps"]
+ step_3 = steps["3"]
+ step_4 = steps["4"]
+
+ assert step_3["label"] == "index", step_3
+ assert step_4["label"] == "search", step_4
+
+ print(step_3)
+ print(step_4)
+
+ def test_count_line1_v1(self):
+ """Test simple workflow v1.0/count-lines1-wf.cwl."""
+ self._run_count_lines_wf("v1.0/count-lines1-wf.cwl")
+
+ def test_count_line1_v1_json(self):
+ run_object = self.cwl_populator.run_workflow_job("v1.0/count-lines1-wf.cwl", "v1.0/wc-job.json", history_id=self.history_id)
+ self._check_countlines_wf(run_object.invocation_id, run_object.workflow_id, expected_count=16)
+
+ def test_count_line1_draft3(self):
+ """Test simple workflow draft3/count-lines1-wf.cwl."""
+ self._run_count_lines_wf("draft3/count-lines1-wf.cwl")
+
+ def test_count_line2_v1(self):
+ """Test simple workflow v1.0/count-lines2-wf.cwl."""
+ self._run_count_lines_wf("v1.0/count-lines2-wf.cwl")
+
+ def test_count_lines3_v1(self):
+ workflow_id = self._load_workflow("v1.0/count-lines3-wf.cwl")
+ hdca = self.dataset_collection_populator.create_list_in_history(self.history_id).json()
+ inputs_map = {
+ "file1": {"src": "hdca", "id": hdca["id"]}
+ }
+ invocation_id = self._invoke(inputs_map, workflow_id)
+ self.wait_for_invocation_and_jobs(self.history_id, workflow_id, invocation_id)
+ hdca = self.dataset_populator.get_history_collection_details(self.history_id, hid=8)
+ assert hdca["collection_type"] == "list"
+ elements = hdca["elements"]
+ assert len(elements) == 3
+ element0 = elements[0]["object"]
+ assert element0["history_content_type"] == "dataset"
+ assert element0["state"] == "ok"
+ assert element0["file_ext"] == "expression.json"
+ # TODO: ensure this looks like an int[] - it doesn't currently...
+
+ def test_count_lines3_ct(self):
+ self.run_conformance_test("v1.0", "Test single step workflow with Scatter step")
+
+ def test_count_lines4_v1(self):
+ workflow_id = self._load_workflow("v1.0/count-lines4-wf.cwl")
+ hda1 = self.dataset_populator.new_dataset(self.history_id, content="hello world\nhello all\nhello all in world\nhello")
+ hda2 = self.dataset_populator.new_dataset(self.history_id, content="moo\ncow\nthat\nis\nall")
+ inputs_map = {
+ "file1": {"src": "hda", "id": hda1["id"]},
+ "file2": {"src": "hda", "id": hda2["id"]}
+ }
+ invocation_id = self._invoke(inputs_map, workflow_id)
+ self.wait_for_invocation_and_jobs(self.history_id, workflow_id, invocation_id)
+ self.dataset_populator.get_history_collection_details(self.history_id, hid=5)
+
+ def test_count_lines4_json(self):
+ self.cwl_populator.run_workflow_job("v1.0/count-lines4-wf.cwl", "v1.0/count-lines4-job.json", history_id=self.history_id)
+ self.dataset_populator.get_history_collection_details(self.history_id, hid=5)
+
+ def test_scatter_wf1_v1(self):
+ self.cwl_populator.run_workflow_job("v1.0/scatter-wf1.cwl", "v1.0/scatter-job1.json", history_id=self.history_id)
+ self.dataset_populator.get_history_collection_details(self.history_id, hid=5)
+
+ def test_record_io(self):
+ self.run_conformance_test("v1.0_custom", "Test record type inputs to and outputs from workflows.")
+
+ def test_workflow_int_io(self):
+ self.run_conformance_test("v1.0_custom", "Test integer workflow input and outputs")
+
+ def test_workflow_int_io_opt_spec(self):
+ self.run_conformance_test("v1.0_custom", "Test optional integer workflow inputs (specified)")
+
+ def test_workflow_int_io_opt_unspec(self):
+ self.run_conformance_test("v1.0_custom", "Test optional integer workflow inputs (unspecified)")
+
+ def test_workflow_any_int(self):
+ self.run_conformance_test("v1.0_custom", "Test any parameter with integer input to a workflow")
+
+ def test_workflow_any_string(self):
+ self.run_conformance_test("v1.0_custom", "Test any parameter with string input to a workflow")
+
+ def test_workflow_any_file(self):
+ self.run_conformance_test("v1.0_custom", "Test any parameter with file input to a workflow")
+
+ def test_file_input_default_unspecified(self):
+ self.run_conformance_test("v1.0_custom", "Test File input with default unspecified")
+
+ def test_io_input_optional_unspecified(self):
+ self.run_conformance_test("v1.0_custom", "Test default integer workflow inputs (unspecified)")
+
+ def test_union_input_optional_unspecified(self):
+ self.run_conformance_test("v1.0_custom", "Test union type input to workflow with default unspecified")
+
+ def test_union_input_optional_specified_file(self):
+ self.run_conformance_test("v1.0_custom", "Test union type input to workflow with default specified as file")
+
+ def _run_count_lines_wf(self, wf_path):
+ workflow_id = self._load_workflow(wf_path)
+ hda1 = self.dataset_populator.new_dataset(self.history_id, content="hello world\nhello all\nhello all in world\nhello")
+ inputs_map = {
+ "file1": {"src": "hda", "id": hda1["id"]}
+ }
+ invocation_id = self._invoke(inputs_map, workflow_id)
+ self._check_countlines_wf(invocation_id, workflow_id)
+
+ def _check_countlines_wf(self, invocation_id, workflow_id, expected_count=4):
+ self.wait_for_invocation_and_jobs(self.history_id, workflow_id, invocation_id)
+ output = self.dataset_populator.get_history_dataset_content(self.history_id, hid=3)
+ self.dataset_populator._summarize_history_errors(self.history_id)
+ assert str(expected_count) == output, output
+
+ def _invoke(self, inputs, workflow_id):
+ workflow_request = dict(
+ history="hist_id=%s" % self.history_id,
+ workflow_id=workflow_id,
+ inputs=json.dumps(inputs),
+ inputs_by="name",
+ )
+ url = "workflows/%s/invocations" % workflow_id
+ invocation_response = self._post(url, data=workflow_request)
+ self._assert_status_code_is(invocation_response, 200)
+ invocation_id = invocation_response.json()["id"]
+ return invocation_id
+
+ def _load_workflow(self, rel_path):
+ path = os.path.join(CWL_TOOL_DIRECTORY, rel_path)
+ data = dict(
+ from_path=path,
+ )
+ route = "workflows"
+ upload_response = self._post(route, data=data)
+ self._assert_status_code_is(upload_response, 200)
+ workflow = upload_response.json()
+ workflow_id = workflow["id"]
+ return workflow_id
diff --git a/test/api/test_workflows_from_yaml.py b/test/api/test_workflows_from_yaml.py
index 71a33d7bd158..551def06475a 100644
--- a/test/api/test_workflows_from_yaml.py
+++ b/test/api/test_workflows_from_yaml.py
@@ -242,6 +242,7 @@ def test_implicit_connections(self):
""")
self.workflow_populator.dump_workflow(workflow_id)
+
@uses_test_history()
def test_conditional_ints(self, history_id):
self._run_jobs("""
@@ -275,6 +276,54 @@ def test_conditional_ints(self, history_id):
assert "no file specified" in content
assert "7 7 5" in content
+ def test_workflow_embed_tool(self):
+ history_id = self.dataset_populator.new_history()
+ self._run_jobs("""
+class: GalaxyWorkflow
+steps:
+ - type: input
+ label: input1
+ - tool_id: cat1
+ label: first_cat
+ state:
+ input1:
+ $link: 0
+ - label: embed1
+ run:
+ class: GalaxyTool
+ command: echo 'hello world 2' > $output1
+ outputs:
+ output1:
+ format: txt
+ - tool_id: cat1
+ state:
+ input1:
+ $link: first_cat#out_file1
+ queries:
+ - input2:
+ $link: embed1#output1
+test_data:
+ input1: "hello world"
+""", history_id=history_id)
+
+ content = self.dataset_populator.get_history_dataset_content(history_id)
+ self.assertEquals(content, "hello world\nhello world 2\n")
+
+ def test_workflow_import_tool(self):
+ history_id = self.dataset_populator.new_history()
+ workflow_path = os.path.join(WORKFLOWS_DIRECTORY, "embed_test_1.gxwf.yml")
+ jobs_descriptions = {
+ "test_data": {"input1": "hello world"}
+ }
+ self._run_jobs(
+ workflow_path,
+ source_type="path",
+ jobs_descriptions=jobs_descriptions,
+ history_id=history_id
+ )
+ content = self.dataset_populator.get_history_dataset_content(history_id)
+ self.assertEquals(content, "hello world\nhello world 2\n")
+
def _steps_by_label(self, workflow_as_dict):
by_label = {}
assert "steps" in workflow_as_dict, workflow_as_dict
diff --git a/test/base/driver_util.py b/test/base/driver_util.py
index fe9fae1cf786..fd6716803bfa 100644
--- a/test/base/driver_util.py
+++ b/test/base/driver_util.py
@@ -189,11 +189,13 @@ def setup_galaxy_config(
config = dict(
admin_users='test@bx.psu.edu',
allow_library_path_paste=True,
+ allow_path_paste=True,
allow_user_creation=True,
allow_user_deletion=True,
api_allow_run_as='test@bx.psu.edu',
auto_configure_logging=logging_config_file is None,
check_migrate_tools=False,
+ check_upload_content=False,
chunk_upload_size=100,
conda_prefix=conda_prefix,
conda_auto_init=conda_auto_init,
@@ -229,6 +231,7 @@ def setup_galaxy_config(
webhooks_dir=TEST_WEBHOOKS_DIR,
logging=LOGGING_CONFIG_DEFAULT,
monitor_thread_join_timeout=5,
+ strict_cwl_validation=False,
)
config.update(database_conf(tmpdir, prefer_template_database=prefer_template_database))
config.update(install_database_conf(tmpdir, default_merged=default_install_db_merged))
diff --git a/test/base/populators.py b/test/base/populators.py
index 4f2e7ef90682..d0c4aae38c02 100644
--- a/test/base/populators.py
+++ b/test/base/populators.py
@@ -9,6 +9,7 @@
from functools import wraps
from operator import itemgetter
+import cwltest
try:
from nose.tools import nottest
except ImportError:
@@ -24,16 +25,32 @@ def nottest(x):
from six import StringIO
from galaxy.tools.verify.test_data import TestDataResolver
-from galaxy.util import unicodify
+from galaxy.tools.cwl.util import (
+ FileLiteralTarget,
+ FileUploadTarget,
+ DirectoryUploadTarget,
+ download_output,
+ galactic_job_json,
+ guess_artifact_type,
+ invocation_to_output,
+ output_to_cwl_json,
+ tool_response_to_output,
+)
+from galaxy.util import galaxy_root_path, unicodify
+
from . import api_asserts
+CWL_TOOL_DIRECTORY = os.path.join(galaxy_root_path, "test", "functional", "tools", "cwl_tools")
+LOAD_TOOLS_FROM_PATH = True
+
# Simple workflow that takes an input and call cat wrapper on it.
workflow_str = unicodify(resource_string(__name__, "data/test_workflow_1.ga"))
# Simple workflow that takes an input and filters with random lines twice in a
# row - first grabbing 8 lines at random and then 6.
workflow_random_x2_str = unicodify(resource_string(__name__, "data/test_workflow_2.ga"))
+UPLOAD_VIA = "path" # or content, but content breaks down for empty uploads, tar, etc...
DEFAULT_TIMEOUT = 60 # Secs to wait for state to turn ok
@@ -160,6 +177,330 @@ def _run_tool_payload(self, tool_id, inputs, history_id, **kwds):
return DatasetPopulator(self.galaxy_interactor).run_tool_payload(tool_id, inputs, history_id, **kwds)
+class CwlRun(object):
+
+ def __init__(self, dataset_populator, history_id):
+ self.dataset_populator = dataset_populator
+ self.history_id = history_id
+
+ def get_output_as_object(self, output_name, download_folder=None):
+ galaxy_output = self._output_name_to_object(output_name)
+
+ def get_metadata(history_content_type, content_id):
+ if history_content_type == "dataset":
+ return self.dataset_populator.get_history_dataset_details(self.history_id, dataset_id=content_id)
+ else:
+ # Don't wait - we've already done that, history might be "new"
+ return self.dataset_populator.get_history_collection_details(self.history_id, content_id=content_id, wait=False)
+
+ def get_dataset(dataset_details, filename=None):
+ content = self.dataset_populator.get_history_dataset_content(self.history_id, dataset_id=dataset_details["id"], filename=filename)
+ if filename is None:
+ basename = dataset_details.get("cwl_file_name")
+ if not basename:
+ basename = dataset_details.get("name")
+ else:
+ basename = os.path.basename(filename)
+ return {"content": content, "basename": basename}
+
+ def get_extra_files(dataset_details):
+ return self.dataset_populator.get_history_dataset_extra_files(self.history_id, dataset_id=dataset_details["id"])
+ output = output_to_cwl_json(
+ galaxy_output,
+ get_metadata,
+ get_dataset,
+ get_extra_files,
+ pseduo_location=True,
+ )
+ if download_folder:
+ if isinstance(output, dict) and "basename" in output:
+ download_path = os.path.join(download_folder, output["basename"])
+ download_output(galaxy_output, get_metadata, get_dataset, get_extra_files, download_path)
+ output["path"] = download_path
+ output["location"] = "file://%s" % download_path
+ return output
+
+
+class CwlToolRun(CwlRun):
+
+ def __init__(self, dataset_populator, history_id, run_response):
+ self.dataset_populator = dataset_populator
+ self.history_id = history_id
+ self.run_response = run_response
+
+ @property
+ def job_id(self):
+ return self.run_response["jobs"][0]["id"]
+
+ def output(self, output_index):
+ return self.run_response["outputs"][output_index]
+
+ def output_collection(self, output_index):
+ return self.run_response["output_collections"][output_index]
+
+ def _output_name_to_object(self, output_name):
+ return tool_response_to_output(self.run_response, self.history_id, output_name)
+
+ def wait(self):
+ final_state = self.dataset_populator.wait_for_job(self.job_id)
+ assert final_state == "ok"
+
+
+class CwlWorkflowRun(CwlRun):
+
+ def __init__(self, dataset_populator, workflow_populator, history_id, workflow_id, invocation_id):
+ self.dataset_populator = dataset_populator
+ self.workflow_populator = workflow_populator
+ self.history_id = history_id
+ self.workflow_id = workflow_id
+ self.invocation_id = invocation_id
+
+ def _output_name_to_object(self, output_name):
+        invocation_response = self.dataset_populator._get("workflows/%s/invocations/%s" % (self.workflow_id, self.invocation_id))
+ api_asserts.assert_status_code_is(invocation_response, 200)
+ invocation = invocation_response.json()
+ return invocation_to_output(invocation, self.history_id, output_name)
+
+ def wait(self):
+ self.workflow_populator.wait_for_invocation_and_jobs(
+ self.history_id, self.workflow_id, self.invocation_id
+ )
+
+
+class CwlPopulator(object):
+
+ def __init__(self, dataset_populator, workflow_populator):
+ self.dataset_populator = dataset_populator
+ self.workflow_populator = workflow_populator
+
+ def run_cwl_artifact(
+ self, tool_id, json_path=None, job=None, test_data_directory=None, history_id=None, assert_ok=True, tool_or_workflow="tool", upload_via=UPLOAD_VIA
+ ):
+ if test_data_directory is None and json_path is not None:
+ test_data_directory = os.path.dirname(json_path)
+ if json_path is not None:
+ assert job is None
+ with open(json_path, "r") as f:
+ if json_path.endswith(".yml") or json_path.endswith(".yaml"):
+ job_as_dict = yaml.load(f)
+ else:
+ job_as_dict = json.load(f)
+ else:
+ job_as_dict = job
+ if history_id is None:
+ history_id = self.dataset_populator.new_history()
+
+ def upload_func(upload_target):
+ if isinstance(upload_target, FileUploadTarget):
+ path = upload_target.path
+
+ if upload_via == "path":
+ content = "file://%s" % path
+ else:
+ with open(path, "rb") as f:
+ content = f.read()
+
+ name = os.path.basename(path)
+
+ extra_inputs = dict()
+ if upload_target.secondary_files:
+ assert upload_via == "path"
+ extra_inputs["files_1|url_paste"] = "file://%s" % upload_target.secondary_files
+ extra_inputs["files_1|type"] = "upload_dataset"
+ extra_inputs["files_1|auto_decompress"] = True
+ extra_inputs["file_count"] = "2"
+ extra_inputs["force_composite"] = "True"
+
+ return self.dataset_populator.new_dataset_request(
+ history_id=history_id,
+ content=content,
+ file_type="auto",
+ name=name,
+ auto_decompress=False,
+ extra_inputs=extra_inputs,
+ ).json()
+ elif isinstance(upload_target, FileLiteralTarget):
+ extra_inputs = dict()
+ return self.dataset_populator.new_dataset_request(
+ history_id=history_id,
+ content=upload_target.contents,
+ file_type="auto",
+ name="filex",
+ auto_decompress=False,
+ to_posix_lines=False,
+ extra_inputs=extra_inputs,
+ ).json()
+ elif isinstance(upload_target, DirectoryUploadTarget):
+ path = upload_target.tar_path
+
+ if upload_via == "path":
+ # TODO: basename?
+ payload = self.dataset_populator.upload_payload(
+ history_id, 'file://%s' % path, ext="tar", auto_decompress=False
+ )
+ else:
+ raise NotImplementedError()
+ create_response = self.dataset_populator._post("tools", data=payload)
+ assert create_response.status_code == 200
+
+ convert_response = self.dataset_populator.run_tool(
+ tool_id="CONVERTER_tar_to_directory",
+ inputs={"input1": {"src": "hda", "id": create_response.json()["outputs"][0]["id"]}},
+ history_id=history_id,
+ )
+ assert "outputs" in convert_response, convert_response
+ return convert_response
+ else:
+ content = json.dumps(upload_target.object)
+ return self.dataset_populator.new_dataset_request(
+ history_id=history_id,
+ content=content,
+ file_type="expression.json",
+ ).json()
+
+ def create_collection_func(element_identifiers, collection_type):
+ payload = {
+ "name": "dataset collection",
+ "instance_type": "history",
+ "history_id": history_id,
+ "element_identifiers": json.dumps(element_identifiers),
+ "collection_type": collection_type,
+ "fields": None if collection_type != "record" else "auto",
+ }
+ response = self.dataset_populator._post("dataset_collections", data=payload)
+ assert response.status_code == 200
+ return response.json()
+
+ job_as_dict, datasets_uploaded = galactic_job_json(
+ job_as_dict,
+ test_data_directory,
+ upload_func,
+ create_collection_func,
+ tool_or_workflow=tool_or_workflow,
+ )
+ if datasets_uploaded:
+ self.dataset_populator.wait_for_history(history_id=history_id, assert_ok=True)
+ if tool_or_workflow == "tool":
+ tool_hash = None
+
+ if os.path.exists(tool_id):
+ raw_tool_id = os.path.basename(tool_id)
+ index = self.dataset_populator._get("tools", data=dict(in_panel=False))
+ tools = index.json()
+ # In panels by default, so flatten out sections...
+ tool_ids = [itemgetter("id")(_) for _ in tools]
+ if raw_tool_id in tool_ids:
+ galaxy_tool_id = raw_tool_id
+ tool_hash = None
+ else:
+ # Assume it is a file not a tool_id.
+ if LOAD_TOOLS_FROM_PATH:
+ dynamic_tool = self.dataset_populator.create_tool_from_path(tool_id)
+ else:
+ with open(tool_id, "r") as f:
+ representation = yaml.load(f)
+ if "id" not in representation:
+ # TODO: following line doesn't work.
+ representation["id"] = os.path.splitext(os.path.basename(tool_id))[0]
+ tool_directory = os.path.abspath(os.path.dirname(tool_id))
+
+ dynamic_tool = self.dataset_populator.create_tool(representation, tool_directory=tool_directory)
+
+ tool_id = dynamic_tool["tool_id"]
+ tool_hash = dynamic_tool["tool_hash"]
+ assert tool_id, dynamic_tool
+ galaxy_tool_id = None
+
+ run_response = self.dataset_populator.run_tool(galaxy_tool_id, job_as_dict, history_id, inputs_representation="cwl", assert_ok=assert_ok, tool_hash=tool_hash)
+ run_object = CwlToolRun(self.dataset_populator, history_id, run_response)
+ if assert_ok:
+ try:
+ final_state = self.dataset_populator.wait_for_job(run_object.job_id)
+ assert final_state == "ok"
+ except Exception:
+ self.dataset_populator._summarize_history(history_id)
+ raise
+
+ return run_object
+ else:
+ route = "workflows"
+ path = os.path.join(tool_id)
+ object_id = None
+ if "#" in tool_id:
+ path, object_id = tool_id.split("#", 1)
+ data = dict(
+ from_path=path,
+ )
+ if object_id is not None:
+ data["object_id"] = object_id
+ upload_response = self.dataset_populator._post(route, data=data)
+ api_asserts.assert_status_code_is(upload_response, 200)
+ workflow = upload_response.json()
+ workflow_id = workflow["id"]
+
+ workflow_request = dict(
+ history="hist_id=%s" % history_id,
+ workflow_id=workflow_id,
+ inputs=json.dumps(job_as_dict),
+ inputs_by="name",
+ )
+ url = "workflows/%s/invocations" % workflow_id
+ invocation_response = self.dataset_populator._post(url, data=workflow_request)
+ api_asserts.assert_status_code_is(invocation_response, 200)
+ invocation_id = invocation_response.json()["id"]
+ return CwlWorkflowRun(self.dataset_populator, self.workflow_populator, history_id, workflow_id, invocation_id)
+
+ def get_conformance_test(self, version, doc):
+ conformance_tests = yaml.load(open(os.path.join(CWL_TOOL_DIRECTORY, str(version), "conformance_tests.yaml"), "r"))
+ for test in conformance_tests:
+ if test.get("doc") == doc:
+ return test
+ raise Exception("No such doc found %s" % doc)
+
+ def run_conformance_test(self, version, doc):
+ test = self.get_conformance_test(version, doc)
+ tool = os.path.join(CWL_TOOL_DIRECTORY, test["tool"])
+ job = os.path.join(CWL_TOOL_DIRECTORY, test["job"])
+ try:
+ run = self.run_cwl_job(tool, job)
+ except Exception:
+ # Should fail so this is good!
+ if test.get("should_fail", False):
+ return True
+ raise
+
+ expected_outputs = test["output"]
+ try:
+ for key, value in expected_outputs.items():
+ actual_output = run.get_output_as_object(key)
+ cwltest.compare(value, actual_output)
+ except Exception:
+ self.dataset_populator._summarize_history(run.history_id)
+ raise
+
+ def run_cwl_job(self, tool, job):
+ tool_or_workflow = guess_artifact_type(tool)
+ run = self.run_workflow_job(tool, job, tool_or_workflow=tool_or_workflow)
+ assert run.history_id
+ return run
+
+ def run_workflow_job(self, workflow_path, job_path, history_id=None, tool_or_workflow="workflow"):
+ if history_id is None:
+ history_id = self.dataset_populator.new_history()
+ if not os.path.isabs(workflow_path):
+ workflow_path = os.path.join(CWL_TOOL_DIRECTORY, workflow_path)
+ if not os.path.isabs(job_path):
+ job_path = os.path.join(CWL_TOOL_DIRECTORY, job_path)
+ run_object = self.run_cwl_artifact(
+ workflow_path,
+ job_path,
+ history_id=history_id,
+ tool_or_workflow=tool_or_workflow,
+ )
+ run_object.wait()
+ return run_object
+
+
class BaseDatasetPopulator(object):
""" Abstract description of API operations optimized for testing
Galaxy - implementations must implement _get, _post and _delete.
@@ -203,9 +544,17 @@ def check_run(self, run_response):
job = run["jobs"][0]
return job
- def wait_for_history(self, history_id, assert_ok=False, timeout=DEFAULT_TIMEOUT):
+ def wait_for_history(self, history_id, assert_ok=False, timeout=DEFAULT_TIMEOUT, hack_allow_new=False):
try:
- return wait_on_state(lambda: self._get("histories/%s" % history_id), desc="history state", assert_ok=assert_ok, timeout=timeout)
+ skip_states = ["running", "queued", "new", "ready"]
+ ok_states = ["ok"]
+ if hack_allow_new:
+ # If there are empty collections in the history for instance with no datasets,
+ # the history summary will be new even though there are jobs in the history.
+ skip_states = ["running", "queued", "ready"]
+ ok_states = ["new", "ok"]
+
+ return wait_on_state(lambda: self._get("histories/%s" % history_id), desc="history state", assert_ok=assert_ok, timeout=timeout, skip_states=skip_states, ok_states=ok_states)
except AssertionError:
self._summarize_history(history_id)
raise
@@ -227,7 +576,9 @@ def has_active_jobs():
raise TimeoutAssertionError(message)
if assert_ok:
- return self.wait_for_history(history_id, assert_ok=True, timeout=timeout)
+ jobs = self.history_jobs(history_id)
+ hack_allow_new = True if len(jobs) > 0 else False
+ return self.wait_for_history(history_id, assert_ok=True, timeout=timeout, hack_allow_new=hack_allow_new)
def wait_for_job(self, job_id, assert_ok=False, timeout=DEFAULT_TIMEOUT):
return wait_on_state(lambda: self.get_job_details(job_id), desc="job state", assert_ok=assert_ok, timeout=timeout)
@@ -254,6 +605,32 @@ def active_history_jobs(self, history_id):
def cancel_job(self, job_id):
return self._delete("jobs/%s" % job_id)
+ def create_tool_from_path(self, tool_path):
+ tool_directory = os.path.dirname(os.path.abspath(tool_path))
+ payload = dict(
+ src="from_path",
+ path=tool_path,
+ tool_directory=tool_directory,
+ )
+ return self._create_tool_raw(payload)
+
+ def create_tool(self, representation, tool_directory=None):
+ if isinstance(representation, dict):
+ representation = json.dumps(representation)
+ payload = dict(
+ representation=representation,
+ tool_directory=tool_directory,
+ )
+ return self._create_tool_raw(payload)
+
+ def _create_tool_raw(self, payload):
+ try:
+ create_response = self._post("dynamic_tools", data=payload, admin=True)
+ except TypeError:
+ create_response = self._post("dynamic_tools", data=payload)
+ assert create_response.status_code == 200, create_response.json()
+ return create_response.json()
+
def _summarize_history(self, history_id):
pass
@@ -332,6 +709,10 @@ def run_tool_payload(self, tool_id, inputs, history_id, **kwds):
kwds["__files"][key] = value
del inputs[key]
+ ir = kwds.get("inputs_representation", None)
+ if ir is None and "inputs_representation" in kwds:
+ del kwds["inputs_representation"]
+
return dict(
tool_id=tool_id,
inputs=json.dumps(inputs),
@@ -500,7 +881,7 @@ class DatasetPopulator(BaseDatasetPopulator):
def __init__(self, galaxy_interactor):
self.galaxy_interactor = galaxy_interactor
- def _post(self, route, data=None, files=None):
+ def _post(self, route, data=None, files=None, admin=False):
if data is None:
data = {}
@@ -508,7 +889,7 @@ def _post(self, route, data=None, files=None):
if files is not None:
del data["__files"]
- return self.galaxy_interactor.post(route, data, files=files)
+ return self.galaxy_interactor.post(route, data, files=files, admin=admin)
def _get(self, route, data=None):
if data is None:
@@ -556,6 +937,14 @@ def simple_workflow(self, name, **create_kwds):
workflow = self.load_workflow(name)
return self.create_workflow(workflow, **create_kwds)
+ def import_workflow_from_path(self, from_path):
+ data = dict(
+ from_path=from_path
+ )
+ import_response = self._post("workflows", data=data)
+ api_asserts.assert_status_code_is(import_response, 200)
+ return import_response.json()["id"]
+
def create_workflow(self, workflow, **create_kwds):
upload_response = self.create_workflow_response(workflow, **create_kwds)
uploaded_workflow_id = upload_response.json()["id"]
@@ -625,6 +1014,21 @@ def download_workflow(self, workflow_id, style=None):
api_asserts.assert_status_code_is(response, 200)
return response.json()
+ def update_workflow(self, workflow_id, workflow_object):
+ data = dict(
+ workflow=workflow_object
+ )
+ raw_url = 'workflows/%s' % workflow_id
+ # url = self.galaxy_interactor._api_url(raw_url, use_key=True)
+ put_response = self.galaxy_interactor._put(raw_url, data=json.dumps(data))
+ return put_response
+
+ @contextlib.contextmanager
+ def export_for_update(self, workflow_id):
+ workflow_object = self.download_workflow(workflow_id)
+ yield workflow_object
+ self.update_workflow(workflow_id, workflow_object)
+
def run_workflow(self, has_workflow, test_data=None, history_id=None, wait=True, source_type=None, jobs_descriptions=None, expected_response=200, assert_ok=True, client_convert=None, round_trip_format_conversion=False, raw_yaml=False):
"""High-level wrapper around workflow API, etc. to invoke format 2 workflows."""
workflow_populator = self
@@ -693,6 +1097,22 @@ def dump_workflow(self, workflow_id, style=None):
else:
print(json.dumps(raw_workflow, sort_keys=True, indent=2))
+ def wait_for_invocation_and_jobs(self, history_id, workflow_id, invocation_id, assert_ok=True):
+ # TODO: replace instances in workflow test cases with this to de-dup...
+ self.wait_for_invocation(workflow_id, invocation_id)
+ url = "workflows/%s/usage/%s" % (workflow_id, invocation_id)
+ invocation = self._get(url)
+ steps = invocation.json()["steps"]
+ wait_for_jobs = False
+ for step in steps:
+ if step.get("job_id"):
+ wait_for_jobs = True
+ break
+ time.sleep(.05)
+ if wait_for_jobs:
+ self.dataset_populator.wait_for_history_jobs(history_id, assert_ok=assert_ok)
+ time.sleep(.05)
+
RunJobsSummary = namedtuple('RunJobsSummary', ['history_id', 'workflow_id', 'invocation_id', 'inputs', 'jobs', 'invocation', 'workflow_request'])
@@ -704,11 +1124,11 @@ def __init__(self, galaxy_interactor):
self.dataset_populator = DatasetPopulator(galaxy_interactor)
self.dataset_collection_populator = DatasetCollectionPopulator(galaxy_interactor)
- def _post(self, route, data=None):
+ def _post(self, route, data=None, admin=False):
if data is None:
data = {}
- return self.galaxy_interactor.post(route, data)
+ return self.galaxy_interactor.post(route, data, admin=admin)
def _get(self, route, data=None):
if data is None:
@@ -728,6 +1148,22 @@ def import_workflow(self, workflow, **kwds):
assert upload_response.status_code == 200, upload_response.content
return upload_response.json()
+ def import_tool(self, tool):
+ """ Import a workflow via POST /api/workflows or
+ comparable interface into Galaxy.
+ """
+ upload_response = self._import_tool_response(tool)
+ assert upload_response.status_code == 200, upload_response
+ return upload_response.json()
+
+ def _import_tool_response(self, tool):
+ tool_str = json.dumps(tool, indent=4)
+ data = {
+ 'representation': tool_str
+ }
+ upload_response = self._post("dynamic_tools", data=data, admin=True)
+ return upload_response
+
class LibraryPopulator(object):
@@ -1203,7 +1639,7 @@ def read_test_data(test_dict):
return inputs, label_map, has_uploads
-def wait_on_state(state_func, desc="state", skip_states=["running", "queued", "new", "ready"], assert_ok=False, timeout=DEFAULT_TIMEOUT):
+def wait_on_state(state_func, desc="state", skip_states=["running", "queued", "new", "ready"], ok_states=["ok"], assert_ok=False, timeout=DEFAULT_TIMEOUT):
def get_state():
response = state_func()
assert response.status_code == 200, "Failed to fetch state update while waiting."
@@ -1212,7 +1648,7 @@ def get_state():
return None
else:
if assert_ok:
- assert state == "ok", "Final state - %s - not okay." % state
+ assert state in ok_states, "Final state - %s - not okay." % state
return state
try:
return wait_on(get_state, desc=desc, timeout=timeout)
diff --git a/test/base/workflow_fixtures.py b/test/base/workflow_fixtures.py
index 519dd7501315..7f96b3c41db2 100644
--- a/test/base/workflow_fixtures.py
+++ b/test/base/workflow_fixtures.py
@@ -417,6 +417,19 @@
inner_input: outer_input
"""
+WORKFLOW_ONE_STEP_DEFAULT = """
+class: GalaxyWorkflow
+inputs:
+ input: data
+steps:
+ randomlines:
+ tool_id: random_lines1
+ in:
+ input: input
+ num_lines:
+ default: 6
+"""
+
WORKFLOW_WITH_OUTPUTS = """
class: GalaxyWorkflow
inputs:
diff --git a/test/functional/tools/cat.json b/test/functional/tools/cat.json
new file mode 100644
index 000000000000..ff4d73f82509
--- /dev/null
+++ b/test/functional/tools/cat.json
@@ -0,0 +1,3 @@
+input1:
+ class: File
+ location: cat.json
diff --git a/test/functional/tools/cwl_tools b/test/functional/tools/cwl_tools
new file mode 120000
index 000000000000..f7aa3669f89a
--- /dev/null
+++ b/test/functional/tools/cwl_tools
@@ -0,0 +1 @@
+../../unit/tools/cwl_tools
\ No newline at end of file
diff --git a/test/functional/tools/expression_forty_two.xml b/test/functional/tools/expression_forty_two.xml
new file mode 100644
index 000000000000..e1f40808748e
--- /dev/null
+++ b/test/functional/tools/expression_forty_two.xml
@@ -0,0 +1,15 @@
+
+ Parse Int
+
+ {return {'output':
+ 42};
+ }
+
+
+
+
+
+
+ Produces the integer 42.
+
diff --git a/test/functional/tools/expression_log_line_count.xml b/test/functional/tools/expression_log_line_count.xml
new file mode 100644
index 000000000000..938ab3a1541a
--- /dev/null
+++ b/test/functional/tools/expression_log_line_count.xml
@@ -0,0 +1,14 @@
+
+ Log Lines
+
+ {return {'output': Math.max(Math.round(Math.log(parseInt($job.input1.metadata.data_lines))), 1)};}
+
+
+
+
+
+
+
+
+
diff --git a/test/functional/tools/expression_parse_int.xml b/test/functional/tools/expression_parse_int.xml
new file mode 100644
index 000000000000..adb99dc63591
--- /dev/null
+++ b/test/functional/tools/expression_parse_int.xml
@@ -0,0 +1,14 @@
+
+ Parse Int
+
+ {return {'output': parseInt($job.input1)};}
+
+
+
+
+
+
+
+ Parse an integer from text.
+
diff --git a/test/functional/tools/optional_data_param.xml b/test/functional/tools/optional_data_param.xml
new file mode 100644
index 000000000000..7cc2d0444232
--- /dev/null
+++ b/test/functional/tools/optional_data_param.xml
@@ -0,0 +1,23 @@
+
+
+
+ cat $input1 > $out_file1;
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/functional/tools/samples_tool_conf.xml b/test/functional/tools/samples_tool_conf.xml
index b3a586e29777..115e237a450e 100644
--- a/test/functional/tools/samples_tool_conf.xml
+++ b/test/functional/tools/samples_tool_conf.xml
@@ -1,5 +1,17 @@
+
+
+
@@ -79,6 +91,7 @@
+
@@ -135,8 +148,10 @@
+
+
+
-
@@ -146,6 +161,49 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Created in
+ Version in which a concept was created.
+ true
+ concept_properties
+
+
+
+
+
+
+
+ Documentation
+ Specification
+ 'Documentation' trailing modifier (qualifier, 'documentation') of 'xref' links of 'Format' concepts. When 'true', the link is pointing to a page with explanation, description, documentation, or specification of the given data format.
+ true
+ concept_properties
+
+
+
+
+
+
+
+ Example
+ 'Example' concept property ('example' metadat tag) lists examples of valid values of types of identifiers (accessions). Applicable to some other types of data, too.
+ true
+ concept_properties
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Obsolete since
+ true
+ concept_properties
+ Version in which a concept was made obsolete.
+
+
+
+
+
+
+
+ Regular expression
+ 'Regular expression' concept property ('regex' metadata tag) specifies the allowed values of types of identifiers (accessions). Applicable to some other types of data, too.
+ concept_properties
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ has format
+ "http://purl.obolibrary.org/obo/OBI_0000298"
+ Subject A can be any concept or entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated) that is (or is in a role of) 'Data', or an input, output, input or output argument of an 'Operation'. Object B can either be a concept that is a 'Format', or in unexpected cases an entity outside of an ontology that is a 'Format' or is in the role of a 'Format'. In EDAM, 'has_format' is not explicitly defined between EDAM concepts, only the inverse 'is_format_of'.
+ false
+ OBO_REL:is_a
+ relations
+ http://www.loa-cnr.it/ontologies/DOLCE-Lite.owl#has-quality"
+ false
+ false
+ edam
+ 'A has_format B' defines for the subject A, that it has the object B as its data format.
+ false
+
+
+
+
+
+
+
+
+
+ has function
+ http://wsio.org/has_function
+ false
+ OBO_REL:is_a
+ OBO_REL:bearer_of
+ edam
+ Subject A can be any concept or entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated). Object B can either be a concept that is (or is in a role of) a function, or an entity outside of an ontology that is (or is in a role of) a function specification. In the scope of EDAM, 'has_function' serves only for relating annotated entities outside of EDAM with 'Operation' concepts.
+ false
+ http://www.loa-cnr.it/ontologies/DOLCE-Lite.owl#has-quality"
+ true
+ 'A has_function B' defines for the subject A, that it has the object B as its function.
+ "http://purl.obolibrary.org/obo/OBI_0000306"
+ relations
+ false
+
+
+
+ true
+ In very unusual cases.
+
+
+
+
+ Is defined anywhere? Not in the 'unknown' version of RO. 'OBO_REL:bearer_of' is narrower in the sense that it only relates ontological categories (concepts) that are an 'independent_continuant' (snap:IndependentContinuant) with ontological categories that are a 'specifically_dependent_continuant' (snap:SpecificallyDependentContinuant), and broader in the sense that it relates with any borne objects not just functions of the subject.
+ OBO_REL:bearer_of
+
+
+
+
+
+
+
+
+
+ has identifier
+ false
+ false
+ relations
+ OBO_REL:is_a
+ edam
+ 'A has_identifier B' defines for the subject A, that it has the object B as its identifier.
+ Subject A can be any concept or entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated). Object B can either be a concept that is an 'Identifier', or an entity outside of an ontology that is an 'Identifier' or is in the role of an 'Identifier'. In EDAM, 'has_identifier' is not explicitly defined between EDAM concepts, only the inverse 'is_identifier_of'.
+ false
+ false
+
+
+
+
+
+
+
+
+
+ has input
+ OBO_REL:has_participant
+ "http://purl.obolibrary.org/obo/OBI_0000293"
+ false
+ http://wsio.org/has_input
+ Subject A can either be concept that is or has an 'Operation' function, or an entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated) that has an 'Operation' function or is an 'Operation'. Object B can be any concept or entity. In EDAM, only 'has_input' is explicitly defined between EDAM concepts ('Operation' 'has_input' 'Data'). The inverse, 'is_input_of', is not explicitly defined.
+ relations
+ OBO_REL:is_a
+ false
+ 'A has_input B' defines for the subject A, that it has the object B as a necessary or actual input or input argument.
+ false
+ true
+ edam
+
+
+
+
+ OBO_REL:has_participant
+ 'OBO_REL:has_participant' is narrower in the sense that it only relates ontological categories (concepts) that are a 'process' (span:Process) with ontological categories that are a 'continuant' (snap:Continuant), and broader in the sense that it relates with any participating objects not just inputs or input arguments of the subject.
+
+
+
+
+ true
+ In very unusual cases.
+
+
+
+
+
+
+
+
+
+ has output
+ http://wsio.org/has_output
+ Subject A can either be concept that is or has an 'Operation' function, or an entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated) that has an 'Operation' function or is an 'Operation'. Object B can be any concept or entity. In EDAM, only 'has_output' is explicitly defined between EDAM concepts ('Operation' 'has_output' 'Data'). The inverse, 'is_output_of', is not explicitly defined.
+ edam
+ "http://purl.obolibrary.org/obo/OBI_0000299"
+ OBO_REL:is_a
+ relations
+ OBO_REL:has_participant
+ true
+ 'A has_output B' defines for the subject A, that it has the object B as a necessary or actual output or output argument.
+ false
+ false
+ false
+
+
+
+
+ 'OBO_REL:has_participant' is narrower in the sense that it only relates ontological categories (concepts) that are a 'process' (span:Process) with ontological categories that are a 'continuant' (snap:Continuant), and broader in the sense that it relates with any participating objects not just outputs or output arguments of the subject. It is also not clear whether an output (result) actually participates in the process that generates it.
+ OBO_REL:has_participant
+
+
+
+
+ In very unusual cases.
+ true
+
+
+
+
+
+
+
+
+
+ has topic
+ relations
+ true
+ Subject A can be any concept or entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated). Object B can either be a concept that is a 'Topic', or in unexpected cases an entity outside of an ontology that is a 'Topic' or is in the role of a 'Topic'. In EDAM, only 'has_topic' is explicitly defined between EDAM concepts ('Operation' or 'Data' 'has_topic' 'Topic'). The inverse, 'is_topic_of', is not explicitly defined.
+ false
+ 'A has_topic B' defines for the subject A, that it has the object B as its topic (A is in the scope of a topic B).
+ edam
+ OBO_REL:is_a
+ http://annotation-ontology.googlecode.com/svn/trunk/annotation-core.owl#hasTopic
+ false
+ "http://purl.obolibrary.org/obo/IAO_0000136"
+ false
+ http://www.loa-cnr.it/ontologies/DOLCE-Lite.owl#has-quality
+ "http://purl.obolibrary.org/obo/OBI_0000298"
+
+
+
+
+
+
+
+
+
+
+
+ In very unusual cases.
+ true
+
+
+
+
+
+
+
+
+
+ is format of
+ false
+ OBO_REL:is_a
+ false
+ false
+ false
+ 'A is_format_of B' defines for the subject A, that it is a data format of the object B.
+ edam
+ relations
+ Subject A can either be a concept that is a 'Format', or in unexpected cases an entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated) that is a 'Format' or is in the role of a 'Format'. Object B can be any concept or entity outside of an ontology that is (or is in a role of) 'Data', or an input, output, input or output argument of an 'Operation'. In EDAM, only 'is_format_of' is explicitly defined between EDAM concepts ('Format' 'is_format_of' 'Data'). The inverse, 'has_format', is not explicitly defined.
+ OBO_REL:quality_of
+ http://www.loa-cnr.it/ontologies/DOLCE-Lite.owl#inherent-in
+
+
+
+
+
+ OBO_REL:quality_of
+ Is defined anywhere? Not in the 'unknown' version of RO. 'OBO_REL:quality_of' might be seen narrower in the sense that it only relates subjects that are a 'quality' (snap:Quality) with objects that are an 'independent_continuant' (snap:IndependentContinuant), and is broader in the sense that it relates any qualities of the object.
+
+
+
+
+
+
+
+
+
+ is function of
+ Subject A can either be concept that is (or is in a role of) a function, or an entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated) that is (or is in a role of) a function specification. Object B can be any concept or entity. Within EDAM itself, 'is_function_of' is not used.
+ OBO_REL:inheres_in
+ true
+ OBO_REL:is_a
+ false
+ 'A is_function_of B' defines for the subject A, that it is a function of the object B.
+ OBO_REL:function_of
+ edam
+ http://wsio.org/is_function_of
+ relations
+ http://www.loa-cnr.it/ontologies/DOLCE-Lite.owl#inherent-in
+ false
+ false
+
+
+
+
+ OBO_REL:inheres_in
+ Is defined anywhere? Not in the 'unknown' version of RO. 'OBO_REL:inheres_in' is narrower in the sense that it only relates ontological categories (concepts) that are a 'specifically_dependent_continuant' (snap:SpecificallyDependentContinuant) with ontological categories that are an 'independent_continuant' (snap:IndependentContinuant), and broader in the sense that it relates any borne subjects not just functions.
+
+
+
+
+ true
+ In very unusual cases.
+
+
+
+
+ OBO_REL:function_of
+ Is defined anywhere? Not in the 'unknown' version of RO. 'OBO_REL:function_of' only relates subjects that are a 'function' (snap:Function) with objects that are an 'independent_continuant' (snap:IndependentContinuant), so for example no processes. It does not define explicitly that the subject is a function of the object.
+
+
+
+
+
+
+
+
+
+ is identifier of
+ false
+ false
+ edam
+ false
+ relations
+ Subject A can either be a concept that is an 'Identifier', or an entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated) that is an 'Identifier' or is in the role of an 'Identifier'. Object B can be any concept or entity outside of an ontology. In EDAM, only 'is_identifier_of' is explicitly defined between EDAM concepts (only 'Identifier' 'is_identifier_of' 'Data'). The inverse, 'has_identifier', is not explicitly defined.
+ 'A is_identifier_of B' defines for the subject A, that it is an identifier of the object B.
+ OBO_REL:is_a
+ false
+
+
+
+
+
+
+
+
+
+
+ is input of
+ false
+ http://wsio.org/is_input_of
+ relations
+ true
+ false
+ OBO_REL:participates_in
+ OBO_REL:is_a
+ "http://purl.obolibrary.org/obo/OBI_0000295"
+ edam
+ Subject A can be any concept or entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated). Object B can either be a concept that is or has an 'Operation' function, or an entity outside of an ontology that has an 'Operation' function or is an 'Operation'. In EDAM, 'is_input_of' is not explicitly defined between EDAM concepts, only the inverse 'has_input'.
+ false
+ 'A is_input_of B' defines for the subject A, that it as a necessary or actual input or input argument of the object B.
+
+
+
+
+
+ 'OBO_REL:participates_in' is narrower in the sense that it only relates ontological categories (concepts) that are a 'continuant' (snap:Continuant) with ontological categories that are a 'process' (span:Process), and broader in the sense that it relates any participating subjects not just inputs or input arguments.
+ OBO_REL:participates_in
+
+
+
+
+ In very unusual cases.
+ true
+
+
+
+
+
+
+
+
+
+ is output of
+ OBO_REL:is_a
+ false
+ false
+ Subject A can be any concept or entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated). Object B can either be a concept that is or has an 'Operation' function, or an entity outside of an ontology that has an 'Operation' function or is an 'Operation'. In EDAM, 'is_output_of' is not explicitly defined between EDAM concepts, only the inverse 'has_output'.
+ edam
+ false
+ 'A is_output_of B' defines for the subject A, that it as a necessary or actual output or output argument of the object B.
+ OBO_REL:participates_in
+ http://wsio.org/is_output_of
+ true
+ relations
+ "http://purl.obolibrary.org/obo/OBI_0000312"
+
+
+
+
+
+ In very unusual cases.
+ true
+
+
+
+
+ OBO_REL:participates_in
+ 'OBO_REL:participates_in' is narrower in the sense that it only relates ontological categories (concepts) that are a 'continuant' (snap:Continuant) with ontological categories that are a 'process' (span:Process), and broader in the sense that it relates any participating subjects not just outputs or output arguments. It is also not clear whether an output (result) actually participates in the process that generates it.
+
+
+
+
+
+
+
+
+
+ is topic of
+ 'A is_topic_of B' defines for the subject A, that it is a topic of the object B (a topic A is the scope of B).
+ relations
+ OBO_REL:quality_of
+ false
+ true
+ false
+ Subject A can either be a concept that is a 'Topic', or in unexpected cases an entity outside of an ontology (or an ontology concept in a role of an entity being semantically annotated) that is a 'Topic' or is in the role of a 'Topic'. Object B can be any concept or entity outside of an ontology. In EDAM, 'is_topic_of' is not explicitly defined between EDAM concepts, only the inverse 'has_topic'.
+ http://www.loa-cnr.it/ontologies/DOLCE-Lite.owl#inherent-in
+ false
+ OBO_REL:is_a
+ edam
+
+
+
+
+
+
+
+
+
+
+
+
+ OBO_REL:quality_of
+ Is defined anywhere? Not in the 'unknown' version of RO. 'OBO_REL:quality_of' might be seen narrower in the sense that it only relates subjects that are a 'quality' (snap:Quality) with objects that are an 'independent_continuant' (snap:IndependentContinuant), and is broader in the sense that it relates any qualities of the object.
+
+
+
+
+ In very unusual cases.
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Resource type
+
+ beta12orEarlier
+ beta12orEarlier
+ A type of computational resource used in bioinformatics.
+ true
+
+
+
+
+
+
+
+
+ Data
+
+
+
+
+ Information, represented in an information artefact (data record) that is 'understandable' by dedicated computational tools that can use the data as input or produce it as output.
+ http://www.onto-med.de/ontologies/gfo.owl#Perpetuant
+ http://semanticscience.org/resource/SIO_000088
+ http://semanticscience.org/resource/SIO_000069
+ "http://purl.obolibrary.org/obo/IAO_0000030"
+ "http://purl.obolibrary.org/obo/IAO_0000027"
+ Data set
+ Data record
+ beta12orEarlier
+ http://wsio.org/data_002
+ http://purl.org/biotop/biotop.owl#DigitalEntity
+ http://www.ifomis.org/bfo/1.1/snap#Continuant
+ Datum
+
+
+
+
+ Data set
+ EDAM does not distinguish the multiplicity of data, such as one data item (datum) versus a collection of data (data set).
+
+
+
+
+ Datum
+ EDAM does not distinguish the multiplicity of data, such as one data item (datum) versus a collection of data (data set).
+
+
+
+
+ Data record
+ EDAM does not distinguish a data record (a tool-understandable information artefact) from data or datum (its content, the tool-understandable encoding of an information).
+
+
+
+
+
+
+
+
+
+ Tool
+
+ beta12orEarlier
+ A bioinformatics package or tool, e.g. a standalone application or web service.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+ Database
+
+ A digital data archive typically based around a relational model but sometimes using an object-oriented, tree or graph-based model.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+ Ontology
+
+
+
+
+
+
+
+ beta12orEarlier
+ Ontologies
+ An ontology of biological or bioinformatics concepts and relations, a controlled vocabulary, structured glossary etc.
+
+
+
+
+
+
+
+
+
+ Directory metadata
+
+ 1.5
+ A directory on disk from which files are read.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ MeSH vocabulary
+
+ beta12orEarlier
+ true
+ Controlled vocabulary from National Library of Medicine. The MeSH thesaurus is used to index articles in biomedical journals for the Medline/PubMED databases.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HGNC vocabulary
+
+ beta12orEarlier
+ beta12orEarlier
+ Controlled vocabulary for gene names (symbols) from HUGO Gene Nomenclature Committee.
+ true
+
+
+
+
+
+
+
+
+
+ UMLS vocabulary
+
+ Compendium of controlled vocabularies for the biomedical domain (Unified Medical Language System).
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Identifier
+
+
+
+
+
+
+
+
+
+ http://semanticscience.org/resource/SIO_000115
+ beta12orEarlier
+ ID
+ "http://purl.org/dc/elements/1.1/identifier"
+ http://wsio.org/data_005
+ A text token, number or something else which identifies an entity, but which may not be persistent (stable) or unique (the same identifier may identify multiple things).
+
+
+
+
+
+
+ Almost exact but limited to identifying resources.
+
+
+
+
+
+
+
+
+
+
+ Database entry
+
+ beta12orEarlier
+ beta12orEarlier
+ An entry (retrievable via URL) from a biological database.
+ true
+
+
+
+
+
+
+
+
+
+ Molecular mass
+
+ Mass of a molecule.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Molecular charge
+
+ Net charge of a molecule.
+ beta12orEarlier
+ PDBML:pdbx_formal_charge
+
+
+
+
+
+
+
+
+
+ Chemical formula
+
+ Chemical structure specification
+ A specification of a chemical structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ QSAR descriptor
+
+ A QSAR quantitative descriptor (name-value pair) of chemical structure.
+ QSAR descriptors have numeric values that quantify chemical information encoded in a symbolic representation of a molecule. They are used in quantitative structure activity relationship (QSAR) applications. Many subtypes of individual descriptors (not included in EDAM) cover various types of protein properties.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Raw sequence
+
+ beta12orEarlier
+ A raw molecular sequence (string of characters) which might include ambiguity, unknown positions and non-sequence characters.
+ Non-sequence characters may be used for example for gaps and translation stop.
+
+
+
+
+
+
+
+
+
+ Sequence record
+
+ http://purl.bioontology.org/ontology/MSH/D058977
+ beta12orEarlier
+ A molecular sequence and associated metadata.
+ SO:2000061
+
+
+
+
+
+
+
+
+
+ Sequence set
+
+ A collection of multiple molecular sequences and associated metadata that do not (typically) correspond to molecular sequence database records or entries and which (typically) are derived from some analytical method.
+ This concept may be used for arbitrary sequence sets and associated data arising from processing.
+ beta12orEarlier
+ SO:0001260
+
+
+
+
+
+
+
+
+
+ Sequence mask character
+
+ true
+ beta12orEarlier
+ 1.5
+ A character used to replace (mask) other characters in a molecular sequence.
+
+
+
+
+
+
+
+
+
+ Sequence mask type
+
+ A label (text token) describing the type of sequence masking to perform.
+ Sequence masking is where specific characters or positions in a molecular sequence are masked (replaced) with an another (mask character). The mask type indicates what is masked, for example regions that are not of interest or which are information-poor including acidic protein regions, basic protein regions, proline-rich regions, low compositional complexity regions, short-periodicity internal repeats, simple repeats and low complexity regions. Masked sequences are used in database search to eliminate statistically significant but biologically uninteresting hits.
+ beta12orEarlier
+ 1.5
+ true
+
+
+
+
+
+
+
+
+
+ DNA sense specification
+
+ DNA strand specification
+ beta12orEarlier
+ Strand
+ The strand of a DNA sequence (forward or reverse).
+ The forward or 'top' strand might specify a sequence is to be used as given, the reverse or 'bottom' strand specifying the reverse complement of the sequence is to be used.
+
+
+
+
+
+
+
+
+
+ Sequence length specification
+
+ true
+ A specification of sequence length(s).
+ beta12orEarlier
+ 1.5
+
+
+
+
+
+
+
+
+
+ Sequence metadata
+
+ beta12orEarlier
+ Basic or general information concerning molecular sequences.
+ This is used for such things as a report including the sequence identifier, type and length.
+ 1.5
+ true
+
+
+
+
+
+
+
+
+
+ Sequence feature source
+
+ This might be the name and version of a software tool, the name of a database, or 'curated' to indicate a manual annotation (made by a human).
+ How the annotation of a sequence feature (for example in EMBL or Swiss-Prot) was derived.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence search results
+
+ beta12orEarlier
+ Database hits (sequence)
+
+ Sequence database hits
+ Sequence search hits
+ The score list includes the alignment score, percentage of the query sequence matched, length of the database sequence entry in this alignment, identifier of the database sequence entry, excerpt of the database sequence entry description etc.
+ A report of sequence hits and associated data from searching a database of sequences (for example a BLAST search). This will typically include a list of scores (often with statistical evaluation) and a set of alignments for the hits.
+ Sequence database search results
+
+
+
+
+
+
+
+
+
+ Sequence signature matches
+
+ Sequence motif matches
+ Protein secondary database search results
+ beta12orEarlier
+ Report on the location of matches in one or more sequences to profiles, motifs (conserved or functional patterns) or other signatures.
+ Sequence profile matches
+ This ncluding reports of hits from a search of a protein secondary or domain database.
+ Search results (protein secondary database)
+
+
+
+
+
+
+
+
+
+ Sequence signature model
+
+ Data files used by motif or profile methods.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Sequence signature data
+
+
+
+
+
+
+
+ beta12orEarlier
+ This can include metadata about a motif or sequence profile such as its name, length, technical details about the profile construction, and so on.
+ Data concering concerning specific or conserved pattern in molecular sequences and the classifiers used for their identification, including sequence motifs, profiles or other diagnostic element.
+
+
+
+
+
+
+
+
+
+ Sequence alignment (words)
+
+ 1.5
+ beta12orEarlier
+ true
+ Sequence word alignment
+ Alignment of exact matches between subsequences (words) within two or more molecular sequences.
+
+
+
+
+
+
+
+
+
+ Dotplot
+
+ A dotplot of sequence similarities identified from word-matching or character comparison.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence alignment
+
+
+
+
+
+
+
+ http://en.wikipedia.org/wiki/Sequence_alignment
+ http://purl.bioontology.org/ontology/MSH/D016415
+ http://semanticscience.org/resource/SIO_010066
+ beta12orEarlier
+ Alignment of multiple molecular sequences.
+
+
+
+
+
+
+
+
+
+ Sequence alignment parameter
+
+ Some simple value controlling a sequence alignment (or similar 'match') operation.
+ true
+ 1.5
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence similarity score
+
+ A value representing molecular sequence similarity.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence alignment metadata
+
+ Report of general information on a sequence alignment, typically include a description, sequence identifiers and alignment score.
+ beta12orEarlier
+ true
+ 1.5
+
+
+
+
+
+
+
+
+
+ Sequence alignment report
+
+ Use this for any computer-generated reports on sequence alignments, and for general information (metadata) on a sequence alignment, such as a description, sequence identifiers and alignment score.
+ An informative report of molecular sequence alignment-derived data or metadata.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence profile alignment
+
+ beta12orEarlier
+ A profile-profile alignment (each profile typically representing a sequence alignment).
+
+
+
+
+
+
+
+
+
+ Sequence-profile alignment
+
+ beta12orEarlier
+ Alignment of one or more molecular sequence(s) to one or more sequence profile(s) (each profile typically representing a sequence alignment).
+ Data associated with the alignment might also be included, e.g. ranked list of best-scoring sequences and a graphical representation of scores.
+
+
+
+
+
+
+
+
+
+ Sequence distance matrix
+
+ beta12orEarlier
+ Moby:phylogenetic_distance_matrix
+ A matrix of estimated evolutionary distance between molecular sequences, such as is suitable for phylogenetic tree calculation.
+ Phylogenetic distance matrix
+ Methods might perform character compatibility analysis or identify patterns of similarity in an alignment or data matrix.
+
+
+
+
+
+
+
+
+
+ Phylogenetic character data
+
+ Basic character data from which a phylogenetic tree may be generated.
+ As defined, this concept would also include molecular sequences, microsatellites, polymorphisms (RAPDs, RFLPs, or AFLPs), restriction sites and fragments
+ http://www.evolutionaryontology.org/cdao.owl#Character
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree
+
+
+
+
+
+
+
+ Phylogeny
+ Moby:Tree
+ http://www.evolutionaryontology.org/cdao.owl#Tree
+ A phylogenetic tree is usually constructed from a set of sequences from which an alignment (or data matrix) is calculated. See also 'Phylogenetic tree image'.
+ http://purl.bioontology.org/ontology/MSH/D010802
+ Moby:phylogenetic_tree
+ The raw data (not just an image) from which a phylogenetic tree is directly generated or plotted, such as topology, lengths (in time or in expected amounts of variance) and a confidence interval for each length.
+ beta12orEarlier
+ Moby:myTree
+
+
+
+
+
+
+
+
+
+ Comparison matrix
+
+ beta12orEarlier
+ The comparison matrix might include matrix name, optional comment, height and width (or size) of matrix, an index row/column (of characters) and data rows/columns (of integers or floats).
+ Matrix of integer or floating point numbers for amino acid or nucleotide sequence comparison.
+ Substitution matrix
+
+
+
+
+
+
+
+
+
+ Protein topology
+
+ beta12orEarlier
+ beta12orEarlier
+ Predicted or actual protein topology represented as a string of protein secondary structure elements.
+ true
+ The location and size of the secondary structure elements and intervening loop regions is usually indicated.
+
+
+
+
+
+
+
+
+
+ Protein features report (secondary structure)
+
+ beta12orEarlier
+ 1.8
+ true
+ Secondary structure (predicted or real) of a protein.
+
+
+
+
+
+
+
+
+
+ Protein features report (super-secondary)
+
+ 1.8
+ Super-secondary structures include leucine zippers, coiled coils, Helix-Turn-Helix etc.
+ true
+ beta12orEarlier
+ Super-secondary structure of protein sequence(s).
+
+
+
+
+
+
+
+
+
+ Secondary structure alignment (protein)
+
+
+ Alignment of the (1D representations of) secondary structure of two or more proteins.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Secondary structure alignment metadata (protein)
+
+ An informative report on protein secondary structure alignment-derived data or metadata.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ RNA secondary structure
+
+
+
+
+
+
+
+ An informative report of secondary structure (predicted or real) of an RNA molecule.
+ This includes thermodynamically stable or evolutionarily conserved structures such as knots, pseudoknots etc.
+ Moby:RNAStructML
+ Secondary structure (RNA)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Secondary structure alignment (RNA)
+
+ Moby:RNAStructAlignmentML
+ Alignment of the (1D representations of) secondary structure of two or more RNA molecules.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Secondary structure alignment metadata (RNA)
+
+ true
+ beta12orEarlier
+ An informative report of RNA secondary structure alignment-derived data or metadata.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure
+
+
+
+
+
+
+
+ beta12orEarlier
+ Coordinate model
+ Structure data
+ The coordinate data may be predicted or real.
+ http://purl.bioontology.org/ontology/MSH/D015394
+ 3D coordinate and associated data for a macromolecular tertiary (3D) structure or part of a structure.
+
+
+
+
+
+
+
+
+
+ Tertiary structure record
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ An entry from a molecular tertiary (3D) structure database.
+
+
+
+
+
+
+
+
+
+ Structure database search results
+
+ 1.8
+ Results (hits) from searching a database of tertiary structure.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Structure alignment
+
+
+
+
+
+
+
+ Alignment (superimposition) of molecular tertiary (3D) structures.
+ A tertiary structure alignment will include the untransformed coordinates of one macromolecule, followed by the second (or subsequent) structure(s) with all the coordinates transformed (by rotation / translation) to give a superposition.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure alignment report
+
+ beta12orEarlier
+ This is a broad data type and is used a placeholder for other, more specific types.
+ An informative report of molecular tertiary structure alignment-derived data.
+
+
+
+
+
+
+
+
+
+ Structure similarity score
+
+ beta12orEarlier
+ A value representing molecular structure similarity, measured from structure alignment or some other type of structure comparison.
+
+
+
+
+
+
+
+
+
+ Structural profile
+
+
+
+
+
+
+
+ beta12orEarlier
+ 3D profile
+ Some type of structural (3D) profile or template (representing a structure or structure alignment).
+ Structural (3D) profile
+
+
+
+
+
+
+
+
+
+ Structural (3D) profile alignment
+
+ beta12orEarlier
+ Structural profile alignment
+ A 3D profile-3D profile alignment (each profile representing structures or a structure alignment).
+
+
+
+
+
+
+
+
+
+ Sequence-3D profile alignment
+
+ Sequence-structural profile alignment
+ 1.5
+ An alignment of a sequence to a 3D profile (representing structures or a structure alignment).
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Protein sequence-structure scoring matrix
+
+ beta12orEarlier
+ Matrix of values used for scoring sequence-structure compatibility.
+
+
+
+
+
+
+
+
+
+ Sequence-structure alignment
+
+ beta12orEarlier
+ An alignment of molecular sequence to structure (from threading sequence(s) through 3D structure or representation of structure(s)).
+
+
+
+
+
+
+
+
+
+ Amino acid annotation
+
+ An informative report about a specific amino acid.
+ 1.4
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Peptide annotation
+
+ 1.4
+ true
+ An informative report about a specific peptide.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein report
+
+ Gene product annotation
+ beta12orEarlier
+ An informative human-readable report about one or more specific protein molecules or protein structural domains, derived from analysis of primary (sequence or structural) data.
+
+
+
+
+
+
+
+
+
+ Protein property
+
+ Protein physicochemical property
+ A report of primarily non-positional data describing intrinsic physical, chemical or other properties of a protein molecule or model.
+ beta12orEarlier
+ Protein sequence statistics
+ Protein properties
+ The report may be based on analysis of nucleic acid sequence or structural data. This is a broad data type and is used a placeholder for other, more specific types.
+
+
+
+
+
+
+
+
+
+ Protein structural motifs and surfaces
+
+ true
+ 1.8
+ 3D structural motifs in a protein.
+ beta12orEarlier
+ Protein 3D motifs
+
+
+
+
+
+
+
+
+ Protein domain classification
+
+ true
+ Data concerning the classification of the sequences and/or structures of protein structural domain(s).
+ 1.5
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein features report (domains)
+
+ true
+ structural domains or 3D folds in a protein or polypeptide chain.
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein architecture report
+
+ 1.4
+ An informative report on architecture (spatial arrangement of secondary structure) of a protein structure.
+ Protein property (architecture)
+ Protein structure report (architecture)
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Protein folding report
+
+ beta12orEarlier
+ A report on an analysis or model of protein folding properties, folding pathways, residues or sites that are key to protein folding, nucleation or stabilization centers etc.
+ true
+ 1.8
+
+
+
+
+
+
+
+
+
+ Protein features (mutation)
+
+ This is a broad data type and is used a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+ Data on the effect of (typically point) mutation on protein folding, stability, structure and function.
+ true
+ beta12orEarlier
+ Protein property (mutation)
+ Protein structure report (mutation)
+ beta13
+ Protein report (mutation)
+
+
+
+
+
+
+
+
+
+ Protein interaction raw data
+
+ This is a broad data type and is used a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+ Protein-protein interaction data from for example yeast two-hybrid analysis, protein microarrays, immunoaffinity chromatography followed by mass spectrometry, phage display etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein interaction report
+
+
+
+
+
+
+
+ beta12orEarlier
+ Protein report (interaction)
+ Protein interaction record
+ An informative report on the interactions (predicted or known) of a protein, protein domain or part of a protein with some other molecule(s), which might be another protein, nucleic acid or some other ligand.
+
+
+
+
+
+
+
+
+
+ Protein family report
+
+
+
+
+
+
+
+ beta12orEarlier
+ An informative report on a specific protein family or other classification or group of protein sequences or structures.
+ Protein family annotation
+ Protein classification data
+
+
+
+
+
+
+
+
+
+ Vmax
+
+ beta12orEarlier
+ The maximum initial velocity or rate of a reaction. It is the limiting velocity as substrate concentrations get very large.
+
+
+
+
+
+
+
+
+
+ Km
+
+ Km is the concentration (usually in Molar units) of substrate that leads to half-maximal velocity of an enzyme-catalysed reaction.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleotide base annotation
+
+ beta12orEarlier
+ true
+ An informative report about a specific nucleotide base.
+ 1.4
+
+
+
+
+
+
+
+
+
+ Nucleic acid property
+
+ A report of primarily non-positional data describing intrinsic physical, chemical or other properties of a nucleic acid molecule.
+ The report may be based on analysis of nucleic acid sequence or structural data. This is a broad data type and is used a placeholder for other, more specific types.
+ Nucleic acid physicochemical property
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Codon usage data
+
+
+
+
+
+
+
+ beta12orEarlier
+ Data derived from analysis of codon usage (typically a codon usage table) of DNA sequences.
+ This is a broad data type and is used a placeholder for other, more specific types.
+
+
+
+
+
+
+
+
+
+ Gene report
+
+ Gene structure (repot)
+ A report on predicted or actual gene structure, regions which make an RNA product and features such as promoters, coding regions, splice sites etc.
+ Gene and transcript structure (report)
+ Gene features report
+ Nucleic acid features (gene and transcript structure)
+ Moby:gene
+ This includes any report on a particular locus or gene. This might include the gene name, description, summary and so on. It can include details about the function of a gene, such as its encoded protein or a functional classification of the gene sequence along according to the encoded protein(s).
+ Gene annotation
+ beta12orEarlier
+ Moby_namespace:Human_Readable_Description
+ Gene function (report)
+ Moby:GeneInfo
+
+
+
+
+
+
+
+
+
+ Gene classification
+
+ beta12orEarlier
+ true
+ A report on the classification of nucleic acid / gene sequences according to the functional classification of their gene products.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ DNA variation
+
+ stable, naturally occuring mutations in a nucleotide sequence including alleles, naturally occurring mutations such as single base nucleotide substitutions, deletions and insertions, RFLPs and other polymorphisms.
+ true
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Chromosome report
+
+
+
+
+
+
+
+ beta12orEarlier
+ An informative report on a specific chromosome.
+ This includes basic information. e.g. chromosome number, length, karyotype features, chromosome sequence etc.
+
+
+
+
+
+
+
+
+
+ Genotype/phenotype report
+
+ An informative report on the set of genes (or allelic forms) present in an individual, organism or cell and associated with a specific physical characteristic, or a report concerning an organisms traits and phenotypes.
+ Genotype/phenotype annotation
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (primers)
+
+ true
+ 1.8
+ beta12orEarlier
+ PCR primers and hybridization oligos in a nucleic acid sequence.
+
+
+
+
+
+
+
+
+
+ PCR experiment report
+
+ true
+ beta12orEarlier
+ PCR experiments, e.g. quantitative real-time PCR.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Sequence trace
+
+
+ Fluorescence trace data generated by an automated DNA sequencer, which can be interprted as a molecular sequence (reads), given associated sequencing metadata such as base-call quality scores.
+ This is the raw data produced by a DNA sequencing machine.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence assembly
+
+ beta12orEarlier
+ An assembly of fragments of a (typically genomic) DNA sequence.
+ http://en.wikipedia.org/wiki/Sequence_assembly
+ SO:0001248
+ Typically, an assembly is a collection of contigs (for example ESTs and genomic DNA fragments) that are ordered, aligned and merged. Annotation of the assembled sequence might be included.
+ SO:0000353
+
+
+
+
+ SO:0001248
+ Perhaps surprisingly, the definition of 'SO:assembly' is narrower than the 'SO:sequence_assembly'.
+
+
+
+
+
+
+
+
+
+ Radiation Hybrid (RH) scores
+
+ beta12orEarlier
+ Radiation Hybrid (RH) scores are used in Radiation Hybrid mapping.
+ Radiation hybrid scores (RH) scores for one or more markers.
+
+
+
+
+
+
+
+
+
+ Genetic linkage report
+
+ beta12orEarlier
+ Gene annotation (linkage)
+ Linkage disequilibrium (report)
+ An informative report on the linkage of alleles.
+ This includes linkage disequilibrium; the non-random association of alleles or polymorphisms at two or more loci (not necessarily on the same chromosome).
+
+
+
+
+
+
+
+
+
+ Gene expression profile
+
+ Data quantifying the level of expression of (typically) multiple genes, derived for example from microarray experiments.
+ beta12orEarlier
+ Gene expression pattern
+
+
+
+
+
+
+
+
+
+ Microarray experiment report
+
+ true
+ microarray experiments including conditions, protocol, sample:data relationships etc.
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Oligonucleotide probe data
+
+ beta12orEarlier
+ beta13
+ true
+ Data on oligonucleotide probes (typically for use with DNA microarrays).
+
+
+
+
+
+
+
+
+
+ SAGE experimental data
+
+ beta12orEarlier
+ true
+ Output from a serial analysis of gene expression (SAGE) experiment.
+ Serial analysis of gene expression (SAGE) experimental data
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ MPSS experimental data
+
+ beta12orEarlier
+ Massively parallel signature sequencing (MPSS) data.
+ beta12orEarlier
+ Massively parallel signature sequencing (MPSS) experimental data
+ true
+
+
+
+
+
+
+
+
+
+ SBS experimental data
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Sequencing by synthesis (SBS) experimental data
+ Sequencing by synthesis (SBS) data.
+
+
+
+
+
+
+
+
+
+ Sequence tag profile (with gene assignment)
+
+ beta12orEarlier
+ Tag to gene assignments (tag mapping) of SAGE, MPSS and SBS data. Typically this is the sequencing-based expression profile annotated with gene identifiers.
+
+
+
+
+
+
+
+
+
+ Protein X-ray crystallographic data
+
+ X-ray crystallography data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein NMR data
+
+ Protein nuclear magnetic resonance (NMR) raw data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein circular dichroism (CD) spectroscopic data
+
+ beta12orEarlier
+ Protein secondary structure from protein coordinate or circular dichroism (CD) spectroscopic data.
+
+
+
+
+
+
+
+
+
+ Electron microscopy volume map
+
+
+
+
+
+
+
+ beta12orEarlier
+ Volume map data from electron microscopy.
+ EM volume map
+
+
+
+
+
+
+
+
+
+ Electron microscopy model
+
+
+
+
+
+
+
+ beta12orEarlier
+ Annotation on a structural 3D model (volume map) from electron microscopy.
+ This might include the location in the model of the known features of a particular macromolecule.
+
+
+
+
+
+
+
+
+
+ 2D PAGE image
+
+
+
+
+
+
+
+ beta12orEarlier
+ Two-dimensional gel electrophoresis image
+
+
+
+
+
+
+
+
+
+ Mass spectrometry spectra
+
+
+
+
+
+
+
+ beta12orEarlier
+ Spectra from mass spectrometry.
+
+
+
+
+
+
+
+
+
+ Peptide mass fingerprint
+
+
+
+
+
+
+
+
+ Peak list
+ Protein fingerprint
+ A set of peptide masses (peptide mass fingerprint) from mass spectrometry.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Peptide identification
+
+
+
+
+
+
+
+ Protein or peptide identifications with evidence supporting the identifications, typically from comparing a peptide mass fingerprint (from mass spectrometry) to a sequence database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Pathway or network annotation
+
+ beta12orEarlier
+ true
+ An informative report about a specific biological pathway or network, typically including a map (diagram) of the pathway.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Biological pathway map
+
+ beta12orEarlier
+ true
+ A map (typically a diagram) of a biological pathway.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Data resource definition
+
+ beta12orEarlier
+ true
+ 1.5
+ A definition of a data resource serving one or more types of data, including metadata and links to the resource or data proper.
+
+
+
+
+
+
+
+
+
+ Workflow metadata
+
+ Basic information, annotation or documentation concerning a workflow (but not the workflow itself).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Mathematical model
+
+
+
+
+
+
+
+ Biological model
+ beta12orEarlier
+ A biological model represented in mathematical terms.
+
+
+
+
+
+
+
+
+
+ Statistical estimate score
+
+ beta12orEarlier
+ A value representing estimated statistical significance of some observed data; typically sequence database hits.
+
+
+
+
+
+
+
+
+
+ EMBOSS database resource definition
+
+ beta12orEarlier
+ Resource definition for an EMBOSS database.
+ true
+ 1.5
+
+
+
+
+
+
+
+
+
+ Version information
+
+ "http://purl.obolibrary.org/obo/IAO_0000129"
+ 1.5
+ Development status / maturity may be part of the version information, for example in case of tools, standards, or some data records.
+ http://www.ebi.ac.uk/swo/maturity/SWO_9000061
+ beta12orEarlier
+ Information on a version of software or data, for example name, version number and release date.
+ http://semanticscience.org/resource/SIO_000653
+ true
+ http://usefulinc.com/ns/doap#Version
+
+
+
+
+
+
+
+
+
+ Database cross-mapping
+
+ beta12orEarlier
+ A mapping of the accession numbers (or other database identifier) of entries between (typically) two biological or biomedical databases.
+ The cross-mapping is typically a table where each row is an accession number and each column is a database being cross-referenced. The cells give the accession number or identifier of the corresponding entry in a database. If a cell in the table is not filled then no mapping could be found for the database. Additional information might be given on version, date etc.
+
+
+
+
+
+
+
+
+
+ Data index
+
+
+
+
+
+
+
+ An index of data of biological relevance.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Data index report
+
+
+
+
+
+
+
+ A report of an analysis of an index of biological data.
+ Database index annotation
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Database metadata
+
+ Basic information on bioinformatics database(s) or other data sources such as name, type, description, URL etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Tool metadata
+
+ beta12orEarlier
+ Basic information about one or more bioinformatics applications or packages, such as name, type, description, or other documentation.
+
+
+
+
+
+
+
+
+
+ Job metadata
+
+ beta12orEarlier
+ true
+ 1.5
+ Moby:PDGJOB
+ Textual metadata on a submitted or completed job.
+
+
+
+
+
+
+
+
+
+ User metadata
+
+ beta12orEarlier
+ Textual metadata on a software author or end-user, for example a person or other software.
+
+
+
+
+
+
+
+
+
+ Small molecule report
+
+
+
+
+
+
+
+ Small molecule annotation
+ Small molecule report
+ Chemical structure report
+ An informative report on a specific chemical compound.
+ beta12orEarlier
+ Chemical compound annotation
+
+
+
+
+
+
+
+
+
+ Cell line report
+
+ Organism strain data
+ Cell line annotation
+ Report on a particular strain of organism cell line including plants, virus, fungi and bacteria. The data typically includes strain number, organism type, growth conditions, source and so on.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Scent annotation
+
+ beta12orEarlier
+ An informative report about a specific scent.
+ 1.4
+ true
+
+
+
+
+
+
+
+
+
+ Ontology term
+
+ Ontology class name
+ beta12orEarlier
+ A term (name) from an ontology.
+ Ontology terms
+
+
+
+
+
+
+
+
+
+ Ontology concept data
+
+ beta12orEarlier
+ Ontology class metadata
+ Ontology term metadata
+ Data concerning or derived from a concept from a biological ontology.
+
+
+
+
+
+
+
+
+
+ Keyword
+
+ Phrases
+ Keyword(s) or phrase(s) used (typically) for text-searching purposes.
+ Boolean operators (AND, OR and NOT) and wildcard characters may be allowed.
+ Moby:QueryString
+ beta12orEarlier
+ Moby:BooleanQueryString
+ Moby:Wildcard_Query
+ Moby:Global_Keyword
+ Terms
+ Text
+
+
+
+
+
+
+
+
+
+ Citation
+
+ Bibliographic data that uniquely identifies a scientific article, book or other published material.
+ A bibliographic reference might include information such as authors, title, journal name, date and (possibly) a link to the abstract or full-text of the article if available.
+ Moby:GCP_SimpleCitation
+ Reference
+ Bibliographic reference
+ Moby:Publication
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Article
+
+
+
+
+
+
+
+ A document of scientific text, typically a full text article from a scientific journal.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Text mining report
+
+ An abstract of the results of text mining.
+ beta12orEarlier
+ Text mining output
+ A text mining abstract will typically include an annotated a list of words or sentences extracted from one or more scientific articles.
+
+
+
+
+
+
+
+
+
+ Entity identifier
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ An identifier of a biological entity or phenomenon.
+
+
+
+
+
+
+
+
+
+ Data resource identifier
+
+ true
+ An identifier of a data resource.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Identifier (typed)
+
+ beta12orEarlier
+ This concept exists only to assist EDAM maintenance and navigation in graphical browsers. It does not add semantic information. This branch provides an alternative organisation of the concepts nested under 'Accession' and 'Name'. All concepts under here are already included under 'Accession' or 'Name'.
+ An identifier that identifies a particular type of data.
+
+
+
+
+
+
+
+
+
+
+ Tool identifier
+
+ An identifier of a bioinformatics tool, e.g. an application or web service.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Discrete entity identifier
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Name or other identifier of a discrete entity (any biological thing with a distinct, discrete physical existence).
+
+
+
+
+
+
+
+
+
+ Entity feature identifier
+
+ true
+ beta12orEarlier
+ Name or other identifier of an entity feature (a physical part or region of a discrete biological entity, or a feature that can be mapped to such a thing).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Entity collection identifier
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Name or other identifier of a collection of discrete biological entities.
+
+
+
+
+
+
+
+
+
+ Phenomenon identifier
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Name or other identifier of a physical, observable biological occurrence or event.
+
+
+
+
+
+
+
+
+
+ Molecule identifier
+
+ Name or other identifier of a molecule.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Atom ID
+
+ Atom identifier
+ Identifier (e.g. character symbol) of a specific atom.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Molecule name
+
+
+ Name of a specific molecule.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Molecule type
+
+ For example, 'Protein', 'DNA', 'RNA' etc.
+ true
+ 1.5
+ beta12orEarlier
+ A label (text token) describing the type of a molecule.
+ Protein|DNA|RNA
+
+
+
+
+
+
+
+
+
+ Chemical identifier
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Unique identifier of a chemical compound.
+
+
+
+
+
+
+
+
+
+ Chromosome name
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Name of a chromosome.
+
+
+
+
+
+
+
+
+
+
+ Peptide identifier
+
+ Identifier of a peptide chain.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Protein identifier
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of a protein.
+
+
+
+
+
+
+
+
+
+
+ Compound name
+
+
+ Chemical name
+ Unique name of a chemical compound.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Chemical registry number
+
+ beta12orEarlier
+ Unique registry number of a chemical compound.
+
+
+
+
+
+
+
+
+
+
+ Ligand identifier
+
+ true
+ beta12orEarlier
+ Code word for a ligand, for example from a PDB file.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Drug identifier
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of a drug.
+
+
+
+
+
+
+
+
+
+
+ Amino acid identifier
+
+
+
+
+
+
+
+ Identifier of an amino acid.
+ beta12orEarlier
+ Residue identifier
+
+
+
+
+
+
+
+
+
+
+ Nucleotide identifier
+
+ beta12orEarlier
+ Name or other identifier of a nucleotide.
+
+
+
+
+
+
+
+
+
+
+ Monosaccharide identifier
+
+ beta12orEarlier
+ Identifier of a monosaccharide.
+
+
+
+
+
+
+
+
+
+
+ Chemical name (ChEBI)
+
+ ChEBI chemical name
+ Unique name from Chemical Entities of Biological Interest (ChEBI) of a chemical compound.
+ beta12orEarlier
+ This is the recommended chemical name for use for example in database annotation.
+
+
+
+
+
+
+
+
+
+
+ Chemical name (IUPAC)
+
+ IUPAC recommended name of a chemical compound.
+ IUPAC chemical name
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Chemical name (INN)
+
+ INN chemical name
+ beta12orEarlier
+ International Non-proprietary Name (INN or 'generic name') of a chemical compound, assigned by the World Health Organization (WHO).
+
+
+
+
+
+
+
+
+
+
+ Chemical name (brand)
+
+ Brand name of a chemical compound.
+ Brand chemical name
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Chemical name (synonymous)
+
+ beta12orEarlier
+ Synonymous chemical name
+ Synonymous name of a chemical compound.
+
+
+
+
+
+
+
+
+
+
+ Chemical registry number (CAS)
+
+ CAS chemical registry number
+ CAS registry number of a chemical compound.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Chemical registry number (Beilstein)
+
+ Beilstein chemical registry number
+ beta12orEarlier
+ Beilstein registry number of a chemical compound.
+
+
+
+
+
+
+
+
+
+
+ Chemical registry number (Gmelin)
+
+ Gmelin chemical registry number
+ beta12orEarlier
+ Gmelin registry number of a chemical compound.
+
+
+
+
+
+
+
+
+
+
+ HET group name
+
+ 3-letter code word for a ligand (HET group) from a PDB file, for example ATP.
+ Short ligand name
+ Component identifier code
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Amino acid name
+
+ String of one or more ASCII characters representing an amino acid.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Nucleotide code
+
+
+ beta12orEarlier
+ String of one or more ASCII characters representing a nucleotide.
+
+
+
+
+
+
+
+
+
+
+ Polypeptide chain ID
+
+
+
+
+
+
+
+ beta12orEarlier
+ WHATIF: chain
+ Chain identifier
+ Identifier of a polypeptide chain from a protein.
+ PDBML:pdbx_PDB_strand_id
+ Protein chain identifier
+ PDB strand id
+ PDB chain identifier
+ This is typically a character (for the chain) appended to a PDB identifier, e.g. 1cukA
+ Polypeptide chain identifier
+
+
+
+
+
+
+
+
+
+
+ Protein name
+
+
+ Name of a protein.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Enzyme identifier
+
+ beta12orEarlier
+ Name or other identifier of an enzyme or record from a database of enzymes.
+
+
+
+
+
+
+
+
+
+
+ EC number
+
+ [0-9]+\.-\.-\.-|[0-9]+\.[0-9]+\.-\.-|[0-9]+\.[0-9]+\.[0-9]+\.-|[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+
+ EC code
+ Moby:EC_Number
+ An Enzyme Commission (EC) number of an enzyme.
+ EC
+ Moby:Annotated_EC_Number
+ beta12orEarlier
+ Enzyme Commission number
+
+
+
+
+
+
+
+
+
+
+ Enzyme name
+
+
+ Name of an enzyme.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Restriction enzyme name
+
+ Name of a restriction enzyme.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence position specification
+
+ 1.5
+ A specification (partial or complete) of one or more positions or regions of a molecular sequence or map.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Sequence feature ID
+
+
+ A unique identifier of molecular sequence feature, for example an ID of a feature that is unique within the scope of the GFF file.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence position
+
+ WHATIF: number
+ WHATIF: PDBx_atom_site
+ beta12orEarlier
+ PDBML:_atom_site.id
+ SO:0000735
+ A position of one or more points (base or residue) in a sequence, or part of such a specification.
+
+
+
+
+
+
+
+
+
+ Sequence range
+
+ beta12orEarlier
+ Specification of range(s) of sequence positions.
+
+
+
+
+
+
+
+
+
+ Nucleic acid feature identifier
+
+ beta12orEarlier
+ beta12orEarlier
+ Name or other identifier of a nucleic acid feature.
+ true
+
+
+
+
+
+
+
+
+
+ Protein feature identifier
+
+ Name or other identifier of a protein feature.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence feature key
+
+ Sequence feature method
+ The type of a sequence feature, typically a term or accession from the Sequence Ontology, for example an EMBL or Swiss-Prot sequence feature key.
+ Sequence feature type
+ beta12orEarlier
+ A feature key indicates the biological nature of the feature or information about changes to or versions of the sequence.
+
+
+
+
+
+
+
+
+
+ Sequence feature qualifier
+
+ beta12orEarlier
+ Typically one of the EMBL or Swiss-Prot feature qualifiers.
+ Feature qualifiers hold information about a feature beyond that provided by the feature key and location.
+
+
+
+
+
+
+
+
+
+ Sequence feature label
+
+ Sequence feature name
+ Typically an EMBL or Swiss-Prot feature label.
+ A feature label identifies a feature of a sequence database entry. When used with the database name and the entry's primary accession number, it is a unique identifier of that feature.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ EMBOSS Uniform Feature Object
+
+
+ beta12orEarlier
+ UFO
+ The name of a sequence feature-containing entity adhering to the standard feature naming scheme used by all EMBOSS applications.
+
+
+
+
+
+
+
+
+
+ Codon name
+
+ beta12orEarlier
+ beta12orEarlier
+ String of one or more ASCII characters representing a codon.
+ true
+
+
+
+
+
+
+
+
+
+ Gene identifier
+
+
+
+
+
+
+
+ Moby:GeneAccessionList
+ An identifier of a gene, such as a name/symbol or a unique identifier of a gene in a database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene symbol
+
+ Moby_namespace:Global_GeneSymbol
+ beta12orEarlier
+ Moby_namespace:Global_GeneCommonName
+ The short name of a gene; a single word that does not contain white space characters. It is typically derived from the gene name.
+
+
+
+
+
+
+
+
+
+
+ Gene ID (NCBI)
+
+
+ NCBI geneid
+ Gene identifier (NCBI)
+ http://www.geneontology.org/doc/GO.xrf_abbs:NCBI_Gene
+ Entrez gene ID
+ Gene identifier (Entrez)
+ http://www.geneontology.org/doc/GO.xrf_abbs:LocusID
+ An NCBI unique identifier of a gene.
+ NCBI gene ID
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene identifier (NCBI RefSeq)
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ An NCBI RefSeq unique identifier of a gene.
+
+
+
+
+
+
+
+
+
+ Gene identifier (NCBI UniGene)
+
+ beta12orEarlier
+ An NCBI UniGene unique identifier of a gene.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Gene identifier (Entrez)
+
+ An Entrez unique identifier of a gene.
+ beta12orEarlier
+ true
+ [0-9]+
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene ID (CGD)
+
+ CGD ID
+ Identifier of a gene or feature from the CGD database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene ID (DictyBase)
+
+ beta12orEarlier
+ Identifier of a gene from DictyBase.
+
+
+
+
+
+
+
+
+
+
+ Ensembl gene ID
+
+
+ beta12orEarlier
+ Gene ID (Ensembl)
+ Unique identifier for a gene (or other feature) from the Ensembl database.
+
+
+
+
+
+
+
+
+
+
+ Gene ID (SGD)
+
+
+ Identifier of an entry from the SGD database.
+ S[0-9]+
+ SGD identifier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene ID (GeneDB)
+
+ Moby_namespace:GeneDB
+ GeneDB identifier
+ beta12orEarlier
+ [a-zA-Z_0-9\.-]*
+ Identifier of a gene from the GeneDB database.
+
+
+
+
+
+
+
+
+
+
+ TIGR identifier
+
+
+ beta12orEarlier
+ Identifier of an entry from the TIGR database.
+
+
+
+
+
+
+
+
+
+
+ TAIR accession (gene)
+
+
+ Gene:[0-9]{7}
+ beta12orEarlier
+ Identifier of a gene from the TAIR database.
+
+
+
+
+
+
+
+
+
+
+ Protein domain ID
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of a protein structural domain.
+ This is typically a character or string concatenated with a PDB identifier and a chain identifier.
+
+
+
+
+
+
+
+
+
+
+ SCOP domain identifier
+
+ Identifier of a protein domain (or other node) from the SCOP database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ CATH domain ID
+
+ 1nr3A00
+ beta12orEarlier
+ CATH domain identifier
+ Identifier of a protein domain from CATH.
+
+
+
+
+
+
+
+
+
+
+ SCOP concise classification string (sccs)
+
+ A SCOP concise classification string (sccs) is a compact representation of a SCOP domain classification.
+ beta12orEarlier
+ An sccs includes the class (alphabetical), fold, superfamily and family (all numerical) to which a given domain belongs.
+
+
+
+
+
+
+
+
+
+
+ SCOP sunid
+
+ Unique identifier (number) of an entry in the SCOP hierarchy, for example 33229.
+ beta12orEarlier
+ A sunid uniquely identifies an entry in the SCOP hierarchy, including leaves (the SCOP domains) and higher level nodes including entries corresponding to the protein level.
+ sunid
+ SCOP unique identifier
+ 33229
+
+
+
+
+
+
+
+
+
+
+ CATH node ID
+
+ 3.30.1190.10.1.1.1.1.1
+ CATH code
+ A code number identifying a node from the CATH database.
+ CATH node identifier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Kingdom name
+
+ The name of a biological kingdom (Bacteria, Archaea, or Eukaryotes).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Species name
+
+ The name of a species (typically a taxonomic group) of organism.
+ Organism species
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Strain name
+
+
+ beta12orEarlier
+ The name of a strain of an organism variant, typically a plant, virus or bacterium.
+
+
+
+
+
+
+
+
+
+
+ URI
+
+ A string of characters that name or otherwise identify a resource on the Internet.
+ URIs
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Database ID
+
+
+
+
+
+
+
+ An identifier of a biological or bioinformatics database.
+ Database identifier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Directory name
+
+ beta12orEarlier
+ The name of a directory.
+
+
+
+
+
+
+
+
+
+
+ File name
+
+ The name (or part of a name) of a file (of any type).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Ontology name
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Name of an ontology of biological or bioinformatics concepts and relations.
+
+
+
+
+
+
+
+
+
+
+ URL
+
+ A Uniform Resource Locator (URL).
+ Moby:URL
+ Moby:Link
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ URN
+
+ beta12orEarlier
+ A Uniform Resource Name (URN).
+
+
+
+
+
+
+
+
+
+ LSID
+
+ beta12orEarlier
+ LSIDs provide a standard way to locate and describe data. An LSID is represented as a Uniform Resource Name (URN) with the following format: URN:LSID:<Authority>:<Namespace>:<ObjectID>[:<Version>]
+ Life Science Identifier
+ A Life Science Identifier (LSID) - a unique identifier of some data.
+
+
+
+
+
+
+
+
+
+ Database name
+
+
+ The name of a biological or bioinformatics database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence database name
+
+ The name of a molecular sequence database.
+ true
+ beta13
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Enumerated file name
+
+ beta12orEarlier
+ The name of a file (of any type) with restricted possible values.
+
+
+
+
+
+
+
+
+
+
+ File name extension
+
+ The extension of a file name.
+ A file extension is the characters appearing after the final '.' in the file name.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ File base name
+
+ beta12orEarlier
+ The base name of a file.
+ A file base name is the file name stripped of its directory specification and extension.
+
+
+
+
+
+
+
+
+
+
+ QSAR descriptor name
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Name of a QSAR descriptor.
+
+
+
+
+
+
+
+
+
+
+ Database entry identifier
+
+ true
+ This concept is required for completeness. It should never have child concepts.
+ beta12orEarlier
+ An identifier of an entry from a database where the same type of identifier is used for objects (data) of different semantic type.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence identifier
+
+
+
+
+
+
+
+ An identifier of molecular sequence(s) or entries from a molecular sequence database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence set ID
+
+
+
+
+
+
+
+
+ An identifier of a set of molecular sequence(s).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence signature identifier
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Identifier of a sequence signature (motif or profile) for example from a database of sequence patterns.
+
+
+
+
+
+
+
+
+
+
+ Sequence alignment ID
+
+
+
+
+
+
+
+
+ Identifier of a molecular sequence alignment, for example a record from an alignment database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Phylogenetic distance matrix identifier
+
+ beta12orEarlier
+ Identifier of a phylogenetic distance matrix.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree ID
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of a phylogenetic tree for example from a phylogenetic tree database.
+
+
+
+
+
+
+
+
+
+
+ Comparison matrix identifier
+
+
+
+
+
+
+
+ An identifier of a comparison matrix.
+ Substitution matrix identifier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Structure ID
+
+
+ beta12orEarlier
+ A unique and persistent identifier of a molecular tertiary structure, typically an entry from a structure database.
+
+
+
+
+
+
+
+
+
+
+ Structural (3D) profile ID
+
+
+
+
+
+
+
+
+ Structural profile identifier
+ Identifier or name of a structural (3D) profile or template (representing a structure or structure alignment).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Structure alignment ID
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of an entry from a database of tertiary structure alignments.
+
+
+
+
+
+
+
+
+
+
+ Amino acid index ID
+
+
+
+
+
+
+
+
+ Identifier of an index of amino acid physicochemical and biochemical property data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Protein interaction ID
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Molecular interaction ID
+ Identifier of a report of protein interactions from a protein interaction database (typically).
+
+
+
+
+
+
+
+
+
+
+ Protein family identifier
+
+
+
+
+
+
+
+ Protein secondary database record identifier
+ Identifier of a protein family.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Codon usage table name
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Unique name of a codon usage table.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Transcription factor identifier
+
+
+ Identifier of a transcription factor (or a TF binding site).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Experiment annotation ID
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of an entry from a database of microarray data.
+
+
+
+
+
+
+
+
+
+
+ Electron microscopy model ID
+
+
+
+
+
+
+
+
+ Identifier of an entry from a database of electron microscopy data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene expression report ID
+
+
+
+
+
+
+
+
+ Accession of a report of gene expression (e.g. a gene expression profile) from a database.
+ beta12orEarlier
+ Gene expression profile identifier
+
+
+
+
+
+
+
+
+
+
+ Genotype and phenotype annotation ID
+
+
+
+
+
+
+
+
+ Identifier of an entry from a database of genotypes and phenotypes.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Pathway or network identifier
+
+
+
+
+
+
+
+ Identifier of an entry from a database of biological pathways or networks.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Workflow ID
+
+
+ beta12orEarlier
+ Identifier of a biological or biomedical workflow, typically from a database of workflows.
+
+
+
+
+
+
+
+
+
+
+ Data resource definition ID
+
+ beta12orEarlier
+ Identifier of a data type definition from some provider.
+ Data resource definition identifier
+
+
+
+
+
+
+
+
+
+
+ Biological model ID
+
+
+
+
+
+
+
+ Biological model identifier
+ beta12orEarlier
+ Identifier of a mathematical model, typically an entry from a database.
+
+
+
+
+
+
+
+
+
+
+ Compound identifier
+
+
+
+
+
+
+
+ beta12orEarlier
+ Chemical compound identifier
+ Identifier of an entry from a database of chemicals.
+ Small molecule identifier
+
+
+
+
+
+
+
+
+
+
+ Ontology concept ID
+
+
+ A unique (typically numerical) identifier of a concept in an ontology of biological or bioinformatics concepts and relations.
+ beta12orEarlier
+ Ontology concept ID
+
+
+
+
+
+
+
+
+
+
+ Article ID
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Unique identifier of a scientific article.
+ Article identifier
+
+
+
+
+
+
+
+
+
+
+ FlyBase ID
+
+
+ Identifier of an object from the FlyBase database.
+ FB[a-zA-Z_0-9]{2}[0-9]{7}
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ WormBase name
+
+
+ Name of an object from the WormBase database, usually a human-readable name.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ WormBase class
+
+ beta12orEarlier
+ Class of an object from the WormBase database.
+ A WormBase class describes the type of object such as 'sequence' or 'protein'.
+
+
+
+
+
+
+
+
+
+
+ Sequence accession
+
+
+ beta12orEarlier
+ A persistent, unique identifier of a molecular sequence database entry.
+ Sequence accession number
+
+
+
+
+
+
+
+
+
+
+ Sequence type
+
+ 1.5
+ Sequence type might reflect the molecule (protein, nucleic acid etc) or the sequence itself (gapped, ambiguous etc).
+ A label (text token) describing a type of molecular sequence.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ EMBOSS Uniform Sequence Address
+
+
+ EMBOSS USA
+ beta12orEarlier
+ The name of a sequence-based entity adhering to the standard sequence naming scheme used by all EMBOSS applications.
+
+
+
+
+
+
+
+
+
+
+ Sequence accession (protein)
+
+
+
+
+
+
+
+ Accession number of a protein sequence database entry.
+ Protein sequence accession number
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence accession (nucleic acid)
+
+
+
+
+
+
+
+ Accession number of a nucleotide sequence database entry.
+ beta12orEarlier
+ Nucleotide sequence accession number
+
+
+
+
+
+
+
+
+
+
+ RefSeq accession
+
+ Accession number of a RefSeq database entry.
+ beta12orEarlier
+ RefSeq ID
+ (NC|AC|NG|NT|NW|NZ|NM|NR|XM|XR|NP|AP|XP|YP|ZP)_[0-9]+
+
+
+
+
+
+
+
+
+
+
+ UniProt accession (extended)
+
+ true
+ Accession number of a UniProt (protein sequence) database entry. May contain version or isoform number.
+ [A-NR-Z][0-9][A-Z][A-Z0-9][A-Z0-9][0-9]|[OPQ][0-9][A-Z0-9][A-Z0-9][A-Z0-9][0-9]|[A-NR-Z][0-9][A-Z][A-Z0-9][A-Z0-9][0-9].[0-9]+|[OPQ][0-9][A-Z0-9][A-Z0-9][A-Z0-9][0-9].[0-9]+|[A-NR-Z][0-9][A-Z][A-Z0-9][A-Z0-9][0-9]-[0-9]+|[OPQ][0-9][A-Z0-9][A-Z0-9][A-Z0-9][0-9]-[0-9]+
+ beta12orEarlier
+ Q7M1G0|P43353-2|P01012.107
+ 1.0
+
+
+
+
+
+
+
+
+
+ PIR identifier
+
+
+
+
+
+
+
+ An identifier of a PIR sequence database entry.
+ beta12orEarlier
+ PIR ID
+ PIR accession number
+
+
+
+
+
+
+
+
+
+
+ TREMBL accession
+
+ beta12orEarlier
+ Identifier of a TREMBL sequence database entry.
+ true
+ 1.2
+
+
+
+
+
+
+
+
+
+ Gramene primary identifier
+
+ beta12orEarlier
+ Gramene primary ID
+ Primary identifier of a Gramene database entry.
+
+
+
+
+
+
+
+
+
+
+ EMBL/GenBank/DDBJ ID
+
+ Identifier of a (nucleic acid) entry from the EMBL/GenBank/DDBJ databases.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence cluster ID (UniGene)
+
+ UniGene identifier
+ UniGene cluster id
+ UniGene ID
+ UniGene cluster ID
+ beta12orEarlier
+ A unique identifier of an entry (gene cluster) from the NCBI UniGene database.
+
+
+
+
+
+
+
+
+
+
+ dbEST accession
+
+
+ dbEST ID
+ Identifier of a dbEST database entry.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ dbSNP ID
+
+ beta12orEarlier
+ dbSNP identifier
+ Identifier of a dbSNP database entry.
+
+
+
+
+
+
+
+
+
+
+ EMBOSS sequence type
+
+ beta12orEarlier
+ true
+ See the EMBOSS documentation (http://emboss.sourceforge.net/) for a definition of what this includes.
+ beta12orEarlier
+ The EMBOSS type of a molecular sequence.
+
+
+
+
+
+
+
+
+
+ EMBOSS listfile
+
+ 1.5
+ List of EMBOSS Uniform Sequence Addresses (EMBOSS listfile).
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence cluster ID
+
+
+
+
+
+
+
+ An identifier of a cluster of molecular sequence(s).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence cluster ID (COG)
+
+ COG ID
+ beta12orEarlier
+ Unique identifier of an entry from the COG database.
+
+
+
+
+
+
+
+
+
+
+ Sequence motif identifier
+
+
+
+
+
+
+
+ Identifier of a sequence motif, for example an entry from a motif database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence profile ID
+
+
+
+
+
+
+
+
+ Identifier of a sequence profile.
+ beta12orEarlier
+ A sequence profile typically represents a sequence alignment.
+
+
+
+
+
+
+
+
+
+
+ ELM ID
+
+ Identifier of an entry from the ELMdb database of protein functional sites.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Prosite accession number
+
+ beta12orEarlier
+ Accession number of an entry from the Prosite database.
+ PS[0-9]{5}
+ Prosite ID
+
+
+
+
+
+
+
+
+
+
+ HMMER hidden Markov model ID
+
+
+
+
+
+
+
+ Unique identifier or name of a HMMER hidden Markov model.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ JASPAR profile ID
+
+ beta12orEarlier
+ Unique identifier or name of a profile from the JASPAR database.
+
+
+
+
+
+
+
+
+
+
+ Sequence alignment type
+
+ beta12orEarlier
+ 1.5
+ true
+ Possible values include for example the EMBOSS alignment types, BLAST alignment types and so on.
+ A label (text token) describing the type of a sequence alignment.
+
+
+
+
+
+
+
+
+
+ BLAST sequence alignment type
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ The type of a BLAST sequence alignment.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree type
+
+ For example 'nj', 'upgmp' etc.
+ beta12orEarlier
+ true
+ A label (text token) describing the type of a phylogenetic tree.
+ 1.5
+ nj|upgmp
+
+
+
+
+
+
+
+
+
+ TreeBASE study accession number
+
+ Accession number of an entry from the TreeBASE database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ TreeFam accession number
+
+ beta12orEarlier
+ Accession number of an entry from the TreeFam database.
+
+
+
+
+
+
+
+
+
+
+ Comparison matrix type
+
+ 1.5
+ true
+ beta12orEarlier
+ blosum|pam|gonnet|id
+ A label (text token) describing the type of a comparison matrix.
+ Substitution matrix type
+ For example 'blosum', 'pam', 'gonnet', 'id' etc. Comparison matrix type may be required where a series of matrices of a certain type are used.
+
+
+
+
+
+
+
+
+
+ Comparison matrix name
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Substitution matrix name
+ See for example http://www.ebi.ac.uk/Tools/webservices/help/matrix.
+ Unique name or identifier of a comparison matrix.
+
+
+
+
+
+
+
+
+
+
+ PDB ID
+
+ An identifier of an entry from the PDB database.
+ [a-zA-Z_0-9]{4}
+ PDBID
+ PDB identifier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ AAindex ID
+
+ beta12orEarlier
+ Identifier of an entry from the AAindex database.
+
+
+
+
+
+
+
+
+
+
+ BIND accession number
+
+ Accession number of an entry from the BIND database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ IntAct accession number
+
+ EBI\-[0-9]+
+ beta12orEarlier
+ Accession number of an entry from the IntAct database.
+
+
+
+
+
+
+
+
+
+
+ Protein family name
+
+
+ beta12orEarlier
+ Name of a protein family.
+
+
+
+
+
+
+
+
+
+
+ InterPro entry name
+
+
+
+
+
+
+
+ beta12orEarlier
+ Name of an InterPro entry, usually indicating the type of protein matches for that entry.
+
+
+
+
+
+
+
+
+
+
+ InterPro accession
+
+
+
+
+
+
+
+ Primary accession number of an InterPro entry.
+ InterPro primary accession
+ Every InterPro entry has a unique accession number to provide a persistent citation of database records.
+ beta12orEarlier
+ InterPro primary accession number
+ IPR015590
+ IPR[0-9]{6}
+
+
+
+
+
+
+
+
+
+
+ InterPro secondary accession
+
+
+
+
+
+
+
+ Secondary accession number of an InterPro entry.
+ beta12orEarlier
+ InterPro secondary accession number
+
+
+
+
+
+
+
+
+
+
+ Gene3D ID
+
+ beta12orEarlier
+ Unique identifier of an entry from the Gene3D database.
+
+
+
+
+
+
+
+
+
+
+ PIRSF ID
+
+ PIRSF[0-9]{6}
+ beta12orEarlier
+ Unique identifier of an entry from the PIRSF database.
+
+
+
+
+
+
+
+
+
+
+ PRINTS code
+
+ beta12orEarlier
+ PR[0-9]{5}
+ The unique identifier of an entry in the PRINTS database.
+
+
+
+
+
+
+
+
+
+
+ Pfam accession number
+
+ PF[0-9]{5}
+ Accession number of a Pfam entry.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ SMART accession number
+
+ Accession number of an entry from the SMART database.
+ beta12orEarlier
+ SM[0-9]{5}
+
+
+
+
+
+
+
+
+
+
+ Superfamily hidden Markov model number
+
+ Unique identifier (number) of a hidden Markov model from the Superfamily database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ TIGRFam ID
+
+ TIGRFam accession number
+ Accession number of an entry (family) from the TIGRFam database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ ProDom accession number
+
+ A ProDom domain family accession number.
+ PD[0-9]+
+ beta12orEarlier
+ ProDom is a protein domain family database.
+
+
+
+
+
+
+
+
+
+
+ TRANSFAC accession number
+
+ beta12orEarlier
+ Identifier of an entry from the TRANSFAC database.
+
+
+
+
+
+
+
+
+
+
+ ArrayExpress accession number
+
+ Accession number of an entry from the ArrayExpress database.
+ beta12orEarlier
+ [AEP]-[a-zA-Z_0-9]{4}-[0-9]+
+ ArrayExpress experiment ID
+
+
+
+
+
+
+
+
+
+
+ PRIDE experiment accession number
+
+ [0-9]+
+ beta12orEarlier
+ PRIDE experiment accession number.
+
+
+
+
+
+
+
+
+
+
+ EMDB ID
+
+ beta12orEarlier
+ Identifier of an entry from the EMDB electron microscopy database.
+
+
+
+
+
+
+
+
+
+
+ GEO accession number
+
+ Accession number of an entry from the GEO database.
+ GDS[0-9]+
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ GermOnline ID
+
+ beta12orEarlier
+ Identifier of an entry from the GermOnline database.
+
+
+
+
+
+
+
+
+
+
+ EMAGE ID
+
+ Identifier of an entry from the EMAGE database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Disease ID
+
+
+
+
+
+
+
+
+ Identifier of an entry from a database of disease.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ HGVbase ID
+
+ Identifier of an entry from the HGVbase database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ HIVDB identifier
+
+ true
+ beta12orEarlier
+ Identifier of an entry from the HIVDB database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ OMIM ID
+
+ beta12orEarlier
+ [*#+%^]?[0-9]{6}
+ Identifier of an entry from the OMIM database.
+
+
+
+
+
+
+
+
+
+
+ KEGG object identifier
+
+
+ beta12orEarlier
+ Unique identifier of an object from one of the KEGG databases (excluding the GENES division).
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (reactome)
+
+ Identifier of an entry from the Reactome database.
+ Reactome ID
+ beta12orEarlier
+ REACT_[0-9]+(\.[0-9]+)?
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (aMAZE)
+
+ beta12orEarlier
+ aMAZE ID
+ true
+ beta12orEarlier
+ Identifier of an entry from the aMAZE database.
+
+
+
+
+
+
+
+
+
+ Pathway ID (BioCyc)
+
+
+ BioCyc pathway ID
+ beta12orEarlier
+ Identifier of a pathway from the BioCyc biological pathways database.
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (INOH)
+
+ beta12orEarlier
+ INOH identifier
+ Identifier of an entry from the INOH database.
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (PATIKA)
+
+ Identifier of an entry from the PATIKA database.
+ PATIKA ID
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (CPDB)
+
+ This concept refers to identifiers used by the databases collated in CPDB; CPDB identifiers are not independently defined.
+ CPDB ID
+ Identifier of an entry from the CPDB (ConsensusPathDB) biological pathways database, which is an identifier from an external database integrated into CPDB.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (Panther)
+
+ Identifier of a biological pathway from the Panther Pathways database.
+ beta12orEarlier
+ PTHR[0-9]{5}
+ Panther Pathways ID
+
+
+
+
+
+
+
+
+
+
+ MIRIAM identifier
+
+
+
+
+
+
+
+ Unique identifier of a MIRIAM data resource.
+ MIR:00100005
+ MIR:[0-9]{8}
+ beta12orEarlier
+ This is the identifier used internally by MIRIAM for a data type.
+
+
+
+
+
+
+
+
+
+
+ MIRIAM data type name
+
+
+
+
+
+
+
+ beta12orEarlier
+ The name of a data type from the MIRIAM database.
+
+
+
+
+
+
+
+
+
+
+ MIRIAM URI
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ The URI (URL or URN) of a data entity from the MIRIAM database.
+ identifiers.org synonym
+ urn:miriam:pubmed:16333295|urn:miriam:obo.go:GO%3A0045202
+ A MIRIAM URI consists of the URI of the MIRIAM data type (PubMed, UniProt etc) followed by the identifier of an element of that data type, for example PMID for a publication or an accession number for a GO term.
+
+
+
+
+
+
+
+
+
+
+ MIRIAM data type primary name
+
+ beta12orEarlier
+ The primary name of a MIRIAM data type is taken from a controlled vocabulary.
+ UniProt|Enzyme Nomenclature
+ The primary name of a data type from the MIRIAM database.
+
+
+
+
+
+ A protein entity has the MIRIAM data type 'UniProt', and an enzyme has the MIRIAM data type 'Enzyme Nomenclature'.
+ UniProt|Enzyme Nomenclature
+
+
+
+
+
+
+
+
+
+ MIRIAM data type synonymous name
+
+ A synonymous name of a data type from the MIRIAM database.
+ A synonymous name for a MIRIAM data type taken from a controlled vocabulary.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Taverna workflow ID
+
+ beta12orEarlier
+ Unique identifier of a Taverna workflow.
+
+
+
+
+
+
+
+
+
+
+ Biological model name
+
+
+ beta12orEarlier
+ Name of a biological (mathematical) model.
+
+
+
+
+
+
+
+
+
+
+ BioModel ID
+
+ Unique identifier of an entry from the BioModel database.
+ beta12orEarlier
+ (BIOMD|MODEL)[0-9]{10}
+
+
+
+
+
+
+
+
+
+
+ PubChem CID
+
+
+ [0-9]+
+ PubChem compound accession identifier
+ Chemical structure specified in PubChem Compound Identification (CID), a non-zero integer identifier for a unique chemical structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ ChemSpider ID
+
+ Identifier of an entry from the ChemSpider database.
+ beta12orEarlier
+ [0-9]+
+
+
+
+
+
+
+
+
+
+
+ ChEBI ID
+
+ Identifier of an entry from the ChEBI database.
+ ChEBI IDs
+ ChEBI identifier
+ CHEBI:[0-9]+
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ BioPax concept ID
+
+ beta12orEarlier
+ An identifier of a concept from the BioPax ontology.
+
+
+
+
+
+
+
+
+
+
+ GO concept ID
+
+ GO concept identifier
+ [0-9]{7}|GO:[0-9]{7}
+ beta12orEarlier
+ An identifier of a concept from The Gene Ontology.
+
+
+
+
+
+
+
+
+
+
+ MeSH concept ID
+
+ beta12orEarlier
+ An identifier of a concept from the MeSH vocabulary.
+
+
+
+
+
+
+
+
+
+
+ HGNC concept ID
+
+ beta12orEarlier
+ An identifier of a concept from the HGNC controlled vocabulary.
+
+
+
+
+
+
+
+
+
+
+ NCBI taxonomy ID
+
+
+ NCBI taxonomy identifier
+ [1-9][0-9]{0,8}
+ NCBI tax ID
+ A stable unique identifier for each taxon (for a species, a family, an order, or any other group in the NCBI taxonomy database).
+ 9662|3483|182682
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Plant Ontology concept ID
+
+ An identifier of a concept from the Plant Ontology (PO).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ UMLS concept ID
+
+ An identifier of a concept from the UMLS vocabulary.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ FMA concept ID
+
+ An identifier of a concept from Foundational Model of Anatomy.
+ FMA:[0-9]+
+ Classifies anatomical entities according to their shared characteristics (genus) and distinguishing characteristics (differentia). Specifies the part-whole and spatial relationships of the entities, morphological transformation of the entities during prenatal development and the postnatal life cycle and principles, rules and definitions according to which classes and relationships in the other three components of FMA are represented.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ EMAP concept ID
+
+ beta12orEarlier
+ An identifier of a concept from the EMAP mouse ontology.
+
+
+
+
+
+
+
+
+
+
+ ChEBI concept ID
+
+ beta12orEarlier
+ An identifier of a concept from the ChEBI ontology.
+
+
+
+
+
+
+
+
+
+
+ MGED concept ID
+
+ beta12orEarlier
+ An identifier of a concept from the MGED ontology.
+
+
+
+
+
+
+
+
+
+
+ myGrid concept ID
+
+ beta12orEarlier
+ The ontology is provided as two components, the service ontology and the domain ontology. The domain ontology provides concepts for core bioinformatics data types and their relations. The service ontology describes the physical and operational features of web services.
+ An identifier of a concept from the myGrid ontology.
+
+
+
+
+
+
+
+
+
+
+ PubMed ID
+
+ PMID
+ [1-9][0-9]{0,8}
+ PubMed unique identifier of an article.
+ beta12orEarlier
+ 4963447
+
+
+
+
+
+
+
+
+
+
+ DOI
+
+ beta12orEarlier
+ (doi\:)?[0-9]{2}\.[0-9]{4}/.*
+ Digital Object Identifier
+ Digital Object Identifier (DOI) of a published article.
+
+
+
+
+
+
+
+
+
+
+ Medline UI
+
+ beta12orEarlier
+ Medline UI (unique identifier) of an article.
+ The use of Medline UI has been replaced by the PubMed unique identifier.
+ Medline unique identifier
+
+
+
+
+
+
+
+
+
+
+ Tool name
+
+ The name of a computer package, application, method or function.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Tool name (signature)
+
+ beta12orEarlier
+ The unique name of a signature (sequence classifier) method.
+ Signature methods from http://www.ebi.ac.uk/Tools/InterProScan/help.html#results include BlastProDom, FPrintScan, HMMPIR, HMMPfam, HMMSmart, HMMTigr, ProfileScan, ScanRegExp, SuperFamily and HAMAP.
+
+
+
+
+
+
+
+
+
+
+ Tool name (BLAST)
+
+ This include 'blastn', 'blastp', 'blastx', 'tblastn' and 'tblastx'.
+ The name of a BLAST tool.
+ beta12orEarlier
+ BLAST name
+
+
+
+
+
+
+
+
+
+
+ Tool name (FASTA)
+
+ beta12orEarlier
+ The name of a FASTA tool.
+ This includes 'fasta3', 'fastx3', 'fasty3', 'fastf3', 'fasts3' and 'ssearch'.
+
+
+
+
+
+
+
+
+
+
+ Tool name (EMBOSS)
+
+ The name of an EMBOSS application.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Tool name (EMBASSY package)
+
+ The name of an EMBASSY package.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ QSAR descriptor (constitutional)
+
+ A QSAR constitutional descriptor.
+ beta12orEarlier
+ QSAR constitutional descriptor
+
+
+
+
+
+
+
+
+
+ QSAR descriptor (electronic)
+
+ beta12orEarlier
+ A QSAR electronic descriptor.
+ QSAR electronic descriptor
+
+
+
+
+
+
+
+
+
+ QSAR descriptor (geometrical)
+
+ QSAR geometrical descriptor
+ A QSAR geometrical descriptor.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ QSAR descriptor (topological)
+
+ beta12orEarlier
+ QSAR topological descriptor
+ A QSAR topological descriptor.
+
+
+
+
+
+
+
+
+
+ QSAR descriptor (molecular)
+
+ A QSAR molecular descriptor.
+ QSAR molecular descriptor
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence set (protein)
+
+ Any collection of multiple protein sequences and associated metadata that do not (typically) correspond to common sequence database records or database entries.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence set (nucleic acid)
+
+ beta12orEarlier
+ Any collection of multiple nucleotide sequences and associated metadata that do not (typically) correspond to common sequence database records or database entries.
+
+
+
+
+
+
+
+
+
+ Sequence cluster
+
+
+
+
+
+
+
+
+
+
+
+
+
+ A set of sequences that have been clustered or otherwise classified as belonging to a group including (typically) sequence cluster information.
+ The cluster might include sequences identifiers, short descriptions, alignment and summary information.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Psiblast checkpoint file
+
+ beta12orEarlier
+ A Psiblast checkpoint file uses ASN.1 Binary Format and usually has the extension '.asn'.
+ beta12orEarlier
+ true
+ A file of intermediate results from a PSIBLAST search that is used for priming the search in the next PSIBLAST iteration.
+
+
+
+
+
+
+
+
+
+ HMMER synthetic sequences set
+
+ Sequences generated by HMMER package in FASTA-style format.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Proteolytic digest
+
+
+
+
+
+
+
+ beta12orEarlier
+ A protein sequence cleaved into peptide fragments (by enzymatic or chemical cleavage) with fragment masses.
+
+
+
+
+
+
+
+
+
+ Restriction digest
+
+ Restriction digest fragments from digesting a nucleotide sequence with restriction sites using a restriction endonuclease.
+ SO:0000412
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PCR primers
+
+ beta12orEarlier
+ Oligonucleotide primer(s) for PCR and DNA amplification, for example a minimal primer set.
+
+
+
+
+
+
+
+
+
+ vectorstrip cloning vector definition file
+
+ beta12orEarlier
+ true
+ File of sequence vectors used by EMBOSS vectorstrip application, or any file in same format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Primer3 internal oligo mishybridizing library
+
+ true
+ beta12orEarlier
+ A library of nucleotide sequences to avoid during hybridization events. Hybridization of the internal oligo to sequences in this library is avoided, rather than priming from them. The file is in a restricted FASTA format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Primer3 mispriming library file
+
+ true
+ A nucleotide sequence library of sequences to avoid during amplification (for example repetitive sequences, or possibly the sequences of genes in a gene family that should not be amplified). The file must be in a restricted FASTA format.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ primersearch primer pairs sequence record
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ File of one or more pairs of primer sequences, as used by EMBOSS primersearch application.
+
+
+
+
+
+
+
+
+
+ Sequence cluster (protein)
+
+
+ Protein sequence cluster
+ The sequences are typically related, for example a family of sequences.
+ beta12orEarlier
+ A cluster of protein sequences.
+
+
+
+
+
+
+
+
+
+ Sequence cluster (nucleic acid)
+
+
+ A cluster of nucleotide sequences.
+ Nucleotide sequence cluster
+ beta12orEarlier
+ The sequences are typically related, for example a family of sequences.
+
+
+
+
+
+
+
+
+
+ Sequence length
+
+ beta12orEarlier
+ The size (length) of a sequence, subsequence or region in a sequence, or range(s) of lengths.
+
+
+
+
+
+
+
+
+
+ Word size
+
+ Word size is used for example in word-based sequence database search methods.
+ Word length
+ 1.5
+ Size of a sequence word.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Window size
+
+ 1.5
+ true
+ A window is a region of fixed size but not fixed position over a molecular sequence. It is typically moved (computationally) over a sequence during scoring.
+ beta12orEarlier
+ Size of a sequence window.
+
+
+
+
+
+
+
+
+
+ Sequence length range
+
+ true
+ Specification of range(s) of length of sequences.
+ beta12orEarlier
+ 1.5
+
+
+
+
+
+
+
+
+
+ Sequence information report
+
+ Report on basic information about a molecular sequence such as name, accession number, type (nucleic or protein), length, description etc.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Sequence property
+
+ beta12orEarlier
+ An informative report about non-positional sequence features, typically a report on general molecular sequence properties derived from sequence analysis.
+ Sequence properties report
+
+
+
+
+
+
+
+
+
+ Sequence features
+
+ Sequence features report
+ beta12orEarlier
+ http://purl.bioontology.org/ontology/MSH/D058977
+ SO:0000110
+ This includes annotation of positional sequence features, organized into a standard feature table, or any other report of sequence features. General feature reports are a source of sequence feature table information although internal conversion would be required.
+ General sequence features
+ Annotation of positional features of molecular sequence(s), i.e. that can be mapped to position(s) in the sequence.
+ Features
+ Feature record
+
+
+
+
+
+
+
+
+
+ Sequence features (comparative)
+
+ Comparative data on sequence features such as statistics, intersections (and data on intersections), differences etc.
+ beta13
+ This is a broad data type and is used a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence property (protein)
+
+ true
+ A report of general sequence properties derived from protein sequence data.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence property (nucleic acid)
+
+ A report of general sequence properties derived from nucleotide sequence data.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence complexity report
+
+ A report on sequence complexity, for example low-complexity or repeat regions in sequences.
+ beta12orEarlier
+ Sequence property (complexity)
+
+
+
+
+
+
+
+
+
+ Sequence ambiguity report
+
+ A report on ambiguity in molecular sequence(s).
+ Sequence property (ambiguity)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence composition report
+
+ beta12orEarlier
+ A report (typically a table) on character or word composition / frequency of a molecular sequence(s).
+ Sequence property (composition)
+
+
+
+
+
+
+
+
+
+ Peptide molecular weight hits
+
+ A report on peptide fragments of certain molecular weight(s) in one or more protein sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Base position variability plot
+
+ beta12orEarlier
+ A plot of third base position variability in a nucleotide sequence.
+
+
+
+
+
+
+
+
+
+ Sequence composition table
+
+ A table of character or word composition / frequency of a molecular sequence.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Base frequencies table
+
+
+ beta12orEarlier
+ A table of base frequencies of a nucleotide sequence.
+
+
+
+
+
+
+
+
+
+ Base word frequencies table
+
+
+ A table of word composition of a nucleotide sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Amino acid frequencies table
+
+
+ Sequence composition (amino acid frequencies)
+ A table of amino acid frequencies of a protein sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Amino acid word frequencies table
+
+
+ A table of amino acid word composition of a protein sequence.
+ Sequence composition (amino acid words)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ DAS sequence feature annotation
+
+ beta12orEarlier
+ Annotation of a molecular sequence in DAS format.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Feature table
+
+ Sequence feature table
+ beta12orEarlier
+ Annotation of positional sequence features, organized into a standard feature table.
+
+
+
+
+
+
+
+
+
+ Map
+
+
+
+
+
+
+
+ DNA map
+ beta12orEarlier
+ A map of (typically one) DNA sequence annotated with positional or non-positional features.
+
+
+
+
+
+
+
+
+
+ Nucleic acid features
+
+
+ An informative report on intrinsic positional features of a nucleotide sequence.
+ beta12orEarlier
+ Genome features
+ This includes nucleotide sequence feature annotation in any known sequence feature table format and any other report of nucleic acid features.
+ Genomic features
+ Nucleic acid feature table
+ Feature table (nucleic acid)
+
+
+
+
+
+
+
+
+
+ Protein features
+
+
+ An informative report on intrinsic positional features of a protein sequence.
+ beta12orEarlier
+ This includes protein sequence feature annotation in any known sequence feature table format and any other report of protein features.
+ Feature table (protein)
+ Protein feature table
+
+
+
+
+
+
+
+
+
+ Genetic map
+
+ A map showing the relative positions of genetic markers in a nucleic acid sequence, based on estimation of non-physical distance such as recombination frequencies.
+ beta12orEarlier
+ A genetic (linkage) map indicates the proximity of two genes on a chromosome, whether two genes are linked and the frequency they are transmitted together to an offspring. They are limited to genetic markers of traits observable only in whole organisms.
+ Linkage map
+ Moby:GeneticMap
+
+
+
+
+
+
+
+
+
+ Sequence map
+
+ A sequence map typically includes annotation on significant subsequences such as contigs, haplotypes and genes. The contigs shown will (typically) be a set of small overlapping clones representing a complete chromosomal segment.
+ beta12orEarlier
+ A map of genetic markers in a contiguous, assembled genomic sequence, with the sizes and separation of markers measured in base pairs.
+
+
+
+
+
+
+
+
+
+ Physical map
+
+ A map of DNA (linear or circular) annotated with physical features or landmarks such as restriction sites, cloned DNA fragments, genes or genetic markers, along with the physical distances between them.
+ Distance in a physical map is measured in base pairs. A physical map might be ordered relative to a reference map (typically a genetic map) in the process of genome sequencing.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence signature map
+
+ true
+ Image of a sequence with matches to signatures, motifs or profiles.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Cytogenetic map
+
+ beta12orEarlier
+ A map showing banding patterns derived from direct observation of a stained chromosome.
+ Cytologic map
+ Chromosome map
+ Cytogenic map
+ This is the lowest-resolution physical map and can provide only rough estimates of physical (base pair) distances. Like a genetic map, they are limited to genetic markers of traits observable only in whole organisms.
+
+
+
+
+
+
+
+
+
+ DNA transduction map
+
+ beta12orEarlier
+ A gene map showing distances between loci based on relative cotransduction frequencies.
+
+
+
+
+
+
+
+
+
+ Gene map
+
+ Sequence map of a single gene annotated with genetic features such as introns, exons, untranslated regions, polyA signals, promoters, enhancers and (possibly) mutations defining alleles of a gene.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Plasmid map
+
+ Sequence map of a plasmid (circular DNA).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Genome map
+
+ beta12orEarlier
+ Sequence map of a whole genome.
+
+
+
+
+
+
+
+
+
+ Restriction map
+
+
+ Image of the restriction enzyme cleavage sites (restriction sites) in a nucleic acid sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ InterPro compact match image
+
+ beta12orEarlier
+ Image showing matches between protein sequence(s) and InterPro Entries.
+ The sequence(s) might be screened against InterPro, or be the sequences from the InterPro entry itself. Each protein is represented as a scaled horizontal line with colored bars indicating the position of the matches.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ InterPro detailed match image
+
+ beta12orEarlier
+ beta12orEarlier
+ Image showing detailed information on matches between protein sequence(s) and InterPro Entries.
+ The sequence(s) might be screened against InterPro, or be the sequences from the InterPro entry itself.
+ true
+
+
+
+
+
+
+
+
+
+ InterPro architecture image
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ The sequence(s) might be screened against InterPro, or be the sequences from the InterPro entry itself. Domain architecture is shown as a series of non-overlapping domains in the protein.
+ Image showing the architecture of InterPro domains in a protein sequence.
+
+
+
+
+
+
+
+
+
+ SMART protein schematic
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ SMART protein schematic in PNG format.
+
+
+
+
+
+
+
+
+
+ GlobPlot domain image
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Images based on GlobPlot prediction of intrinsic disordered regions and globular domains in protein sequences.
+
+
+
+
+
+
+
+
+
+ Sequence motif matches
+
+ beta12orEarlier
+ Report on the location of matches to profiles, motifs (conserved or functional patterns) or other signatures in one or more sequences.
+ 1.8
+ true
+
+
+
+
+
+
+
+
+
+ Sequence features (repeats)
+
+ beta12orEarlier
+ true
+ 1.5
+ Repeat sequence map
+ The report might include derived data map such as classification, annotation, organization, periodicity etc.
+ Location of short repetitive subsequences (repeat sequences) in (typically nucleotide) sequences.
+
+
+
+
+
+
+
+
+
+
+ Gene and transcript structure (report)
+
+ 1.5
+ beta12orEarlier
+ A report on predicted or actual gene structure, regions which make an RNA product and features such as promoters, coding regions, splice sites etc.
+ true
+
+
+
+
+
+
+
+
+
+ Mobile genetic elements
+
+ true
+ beta12orEarlier
+ regions of a nucleic acid sequence containing mobile genetic elements.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (PolyA signal or site)
+
+ true
+ regions or sites in a eukaryotic and eukaryotic viral RNA sequence which directs endonuclease cleavage or polyadenylation of an RNA transcript.
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid features (quadruplexes)
+
+ true
+ 1.5
+ A report on quadruplex-forming motifs in a nucleotide sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (CpG island and isochore)
+
+ 1.8
+ CpG rich regions (isochores) in a nucleotide sequence.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (restriction sites)
+
+ beta12orEarlier
+ true
+ 1.8
+ restriction enzyme recognition sites (restriction sites) in a nucleic acid sequence.
+
+
+
+
+
+
+
+
+
+ Nucleosome exclusion sequences
+
+ beta12orEarlier
+ true
+ Report on nucleosome formation potential or exclusion sequence(s).
+ 1.8
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (splice sites)
+
+ splice sites in a nucleotide sequence or alternative RNA splicing events.
+ beta12orEarlier
+ true
+ 1.8
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (matrix/scaffold attachment sites)
+
+ 1.8
+ matrix/scaffold attachment regions (MARs/SARs) in a DNA sequence.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene features (exonic splicing enhancer)
+
+ beta12orEarlier
+ beta13
+ true
+ A report on exonic splicing enhancers (ESE) in an exon.
+
+
+
+
+
+
+
+
+
+ Nucleic acid features (microRNA)
+
+ true
+ beta12orEarlier
+ A report on microRNA sequence (miRNA) or precursor, microRNA targets, miRNA binding sites in an RNA sequence etc.
+ 1.5
+
+
+
+
+
+
+
+
+
+ Gene features report (operon)
+
+ true
+ operons (operators, promoters and genes) from a bacterial genome.
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (promoters)
+
+ 1.8
+ whole promoters or promoter elements (transcription start sites, RNA polymerase binding site, transcription factor binding sites, promoter enhancers etc) in a DNA sequence.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Coding region
+
+ beta12orEarlier
+ protein-coding regions including coding sequences (CDS), exons, translation initiation sites and open reading frames.
+ 1.8
+ true
+
+
+
+
+
+
+
+
+
+ Gene features (SECIS element)
+
+ beta12orEarlier
+ beta13
+ A report on selenocysteine insertion sequence (SECIS) element in a DNA sequence.
+ true
+
+
+
+
+
+
+
+
+
+ Transcription factor binding sites
+
+ transcription factor binding sites (TFBS) in a DNA sequence.
+ beta12orEarlier
+ true
+ 1.8
+
+
+
+
+
+
+
+
+
+ Protein features (sites)
+
+ true
+ beta12orEarlier
+ Use this concept for collections of specific sites which are not necessarily contiguous, rather than contiguous stretches of amino acids.
+ beta12orEarlier
+ A report on predicted or known key residue positions (sites) in a protein sequence, such as binding or functional sites.
+
+
+
+
+
+
+
+
+
+ Protein features report (signal peptides)
+
+ true
+ signal peptides or signal peptide cleavage sites in protein sequences.
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein features report (cleavage sites)
+
+ true
+ 1.8
+ cleavage sites (for a proteolytic enzyme or agent) in a protein sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein features (post-translation modifications)
+
+ true
+ beta12orEarlier
+ post-translation modifications in a protein sequence, typically describing the specific sites involved.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Protein features report (active sites)
+
+ 1.8
+ true
+ beta12orEarlier
+ catalytic residues (active site) of an enzyme.
+
+
+
+
+
+
+
+
+
+ Protein features report (binding sites)
+
+ beta12orEarlier
+ ligand-binding (non-catalytic) residues of a protein, such as sites that bind metal, prosthetic groups or lipids.
+ true
+ 1.8
+
+
+
+
+
+
+
+
+
+ Protein features (epitopes)
+
+ A report on antigenic determinant sites (epitopes) in proteins, from sequence and / or structural data.
+ beta13
+ beta12orEarlier
+ Epitope mapping is commonly done during vaccine design.
+ true
+
+
+
+
+
+
+
+
+
+ Protein features report (nucleic acid binding sites)
+
+ true
+ beta12orEarlier
+ 1.8
+ RNA and DNA-binding proteins and binding sites in protein sequences.
+
+
+
+
+
+
+
+
+
+ MHC Class I epitopes report
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ A report on epitopes that bind to MHC class I molecules.
+
+
+
+
+
+
+
+
+
+ MHC Class II epitopes report
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ A report on predicted epitopes that bind to MHC class II molecules.
+
+
+
+
+
+
+
+
+
+ Protein features (PEST sites)
+
+ beta12orEarlier
+ A report or plot of PEST sites in a protein sequence.
+ true
+ beta13
+ 'PEST' motifs target proteins for proteolytic degradation and reduce the half-lives of proteins dramatically.
+
+
+
+
+
+
+
+
+
+ Sequence database hits scores list
+
+ Scores from a sequence database search (for example a BLAST search).
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence database hits alignments list
+
+ beta12orEarlier
+ Alignments from a sequence database search (for example a BLAST search).
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Sequence database hits evaluation data
+
+ beta12orEarlier
+ A report on the evaluation of the significance of sequence similarity scores from a sequence database search (for example a BLAST search).
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ MEME motif alphabet
+
+ Alphabet for the motifs (patterns) that MEME will search for.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ MEME background frequencies file
+
+ MEME background frequencies file.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ MEME motifs directive file
+
+ beta12orEarlier
+ true
+ File of directives for ordering and spacing of MEME motifs.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Dirichlet distribution
+
+ Dirichlet distribution used by hidden Markov model analysis programs.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HMM emission and transition counts
+
+ Emission and transition counts of a hidden Markov model, generated once HMM has been determined, for example after residues/gaps have been assigned to match, delete and insert states.
+ true
+ 1.4
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Regular expression
+
+ Regular expression pattern.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence motif
+
+
+
+
+
+
+
+ beta12orEarlier
+ Any specific or conserved pattern (typically expressed as a regular expression) in a molecular sequence.
+
+
+
+
+
+
+
+
+
+ Sequence profile
+
+
+
+
+
+
+
+ Some type of statistical model representing a (typically multiple) sequence alignment.
+ http://semanticscience.org/resource/SIO_010531
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein signature
+
+ An informative report about a specific or conserved protein sequence pattern.
+ InterPro entry
+ Protein repeat signature
+ Protein region signature
+ Protein site signature
+ beta12orEarlier
+ Protein family signature
+ Protein domain signature
+
+
+
+
+
+
+
+
+
+ Prosite nucleotide pattern
+
+ A nucleotide regular expression pattern from the Prosite database.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Prosite protein pattern
+
+ A protein regular expression pattern from the Prosite database.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Position frequency matrix
+
+ beta12orEarlier
+ PFM
+ A profile (typically representing a sequence alignment) that is a simple matrix of nucleotide (or amino acid) counts per position.
+
+
+
+
+
+
+
+
+
+ Position weight matrix
+
+ PWM
+ beta12orEarlier
+ A profile (typically representing a sequence alignment) that is weighted matrix of nucleotide (or amino acid) counts per position.
+ Contributions of individual sequences to the matrix might be uneven (weighted).
+
+
+
+
+
+
+
+
+
+ Information content matrix
+
+ beta12orEarlier
+ ICM
+ A profile (typically representing a sequence alignment) derived from a matrix of nucleotide (or amino acid) counts per position that reflects information content at each position.
+
+
+
+
+
+
+
+
+
+ Hidden Markov model
+
+ HMM
+ beta12orEarlier
+ A hidden Markov model representation of a set or alignment of sequences.
+
+
+
+
+
+
+
+
+
+ Fingerprint
+
+ beta12orEarlier
+ One or more fingerprints (sequence classifiers) as used in the PRINTS database.
+
+
+
+
+
+
+
+
+
+ Domainatrix signature
+
+ A protein signature of the type used in the EMBASSY Signature package.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HMMER NULL hidden Markov model
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ NULL hidden Markov model representation used by the HMMER package.
+
+
+
+
+
+
+
+
+
+ Protein family signature
+
+ Protein family signatures cover all domains in the matching proteins and span >80% of the protein length and with no adjacent protein domain signatures or protein region signatures.
+ beta12orEarlier
+ true
+ 1.5
+ A protein family signature (sequence classifier) from the InterPro database.
+
+
+
+
+
+
+
+
+
+ Protein domain signature
+
+ beta12orEarlier
+ 1.5
+ true
+ A protein domain signature (sequence classifier) from the InterPro database.
+ Protein domain signatures identify structural or functional domains or other units with defined boundaries.
+
+
+
+
+
+
+
+
+
+ Protein region signature
+
+ A protein region signature (sequence classifier) from the InterPro database.
+ true
+ beta12orEarlier
+ 1.5
+ A protein region signature defines a region which cannot be described as a protein family or domain signature.
+
+
+
+
+
+
+
+
+
+ Protein repeat signature
+
+ true
+ 1.5
+ A protein repeat signature is a repeated protein motif, that is not in single copy expected to independently fold into a globular domain.
+ beta12orEarlier
+ A protein repeat signature (sequence classifier) from the InterPro database.
+
+
+
+
+
+
+
+
+
+ Protein site signature
+
+ A protein site signature is a classifier for a specific site in a protein.
+ beta12orEarlier
+ A protein site signature (sequence classifier) from the InterPro database.
+ true
+ 1.5
+
+
+
+
+
+
+
+
+
+ Protein conserved site signature
+
+ 1.4
+ true
+ A protein conserved site signature is any short sequence pattern that may contain one or more unique residues and is cannot be described as a active site, binding site or post-translational modification.
+ A protein conserved site signature (sequence classifier) from the InterPro database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein active site signature
+
+ A protein active site signature (sequence classifier) from the InterPro database.
+ A protein active site signature corresponds to an enzyme catalytic pocket. An active site typically includes non-contiguous residues, therefore multiple signatures may be required to describe an active site. ; residues involved in enzymatic reactions for which mutational data is typically available.
+ true
+ 1.4
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein binding site signature
+
+ 1.4
+ A protein binding site signature (sequence classifier) from the InterPro database.
+ true
+ A protein binding site signature corresponds to a site that reversibly binds chemical compounds, which are not themselves substrates of the enzymatic reaction. This includes enzyme cofactors and residues involved in electron transport or protein structure modification.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein post-translational modification signature
+
+ A protein post-translational modification signature (sequence classifier) from the InterPro database.
+ A protein post-translational modification signature corresponds to sites that undergo modification of the primary structure, typically to activate or de-activate a function. For example, methylation, sumoylation, glycosylation etc. The modification might be permanent or reversible.
+ 1.4
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Sequence alignment (pair)
+
+ http://semanticscience.org/resource/SIO_010068
+ beta12orEarlier
+ Alignment of exactly two molecular sequences.
+
+
+
+
+
+
+
+
+
+ Sequence alignment (multiple)
+
+ beta12orEarlier
+ beta12orEarlier
+ Alignment of more than two molecular sequences.
+ true
+
+
+
+
+
+
+
+
+
+ Sequence alignment (nucleic acid)
+
+ beta12orEarlier
+ Alignment of multiple nucleotide sequences.
+
+
+
+
+
+
+
+
+
+ Sequence alignment (protein)
+
+
+ Alignment of multiple protein sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence alignment (hybrid)
+
+ Alignment of multiple molecular sequences of different types.
+ Hybrid sequence alignments include for example genomic DNA to EST, cDNA or mRNA.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence alignment (nucleic acid pair)
+
+
+ beta12orEarlier
+ Alignment of exactly two nucleotide sequences.
+
+
+
+
+
+
+
+
+
+ Sequence alignment (protein pair)
+
+
+ Alignment of exactly two protein sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Hybrid sequence alignment (pair)
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Alignment of exactly two molecular sequences of different types.
+
+
+
+
+
+
+
+
+
+ Multiple nucleotide sequence alignment
+
+ beta12orEarlier
+ Alignment of more than two nucleotide sequences.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Multiple protein sequence alignment
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Alignment of more than two protein sequences.
+
+
+
+
+
+
+
+
+
+ Alignment score or penalty
+
+ beta12orEarlier
+ A simple floating point number defining the penalty for opening or extending a gap in an alignment.
+
+
+
+
+
+
+
+
+
+ Score end gaps control
+
+ beta12orEarlier
+ beta12orEarlier
+ Whether end gaps are scored or not.
+ true
+
+
+
+
+
+
+
+
+
+ Aligned sequence order
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Controls the order of sequences in an output sequence alignment.
+
+
+
+
+
+
+
+
+
+ Gap opening penalty
+
+ A penalty for opening a gap in an alignment.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gap extension penalty
+
+ A penalty for extending a gap in an alignment.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gap separation penalty
+
+ beta12orEarlier
+ A penalty for gaps that are close together in an alignment.
+
+
+
+
+
+
+
+
+
+ Terminal gap penalty
+
+ beta12orEarlier
+ A penalty for gaps at the termini of an alignment, either from the N/C terminal of protein or 5'/3' terminal of nucleotide sequences.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Match reward score
+
+ beta12orEarlier
+ The score for a 'match' used in various sequence database search applications with simple scoring schemes.
+
+
+
+
+
+
+
+
+
+ Mismatch penalty score
+
+ beta12orEarlier
+ The score (penalty) for a 'mismatch' used in various alignment and sequence database search applications with simple scoring schemes.
+
+
+
+
+
+
+
+
+
+ Drop off score
+
+ This is the threshold drop in score at which extension of word alignment is halted.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gap opening penalty (integer)
+
+ beta12orEarlier
+ true
+ A simple floating point number defining the penalty for opening a gap in an alignment.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gap opening penalty (float)
+
+ beta12orEarlier
+ beta12orEarlier
+ A simple floating point number defining the penalty for opening a gap in an alignment.
+ true
+
+
+
+
+
+
+
+
+
+ Gap extension penalty (integer)
+
+ true
+ A simple floating point number defining the penalty for extending a gap in an alignment.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gap extension penalty (float)
+
+ beta12orEarlier
+ true
+ A simple floating point number defining the penalty for extending a gap in an alignment.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gap separation penalty (integer)
+
+ A simple floating point number defining the penalty for gaps that are close together in an alignment.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gap separation penalty (float)
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ A simple floating point number defining the penalty for gaps that are close together in an alignment.
+
+
+
+
+
+
+
+
+
+ Terminal gap opening penalty
+
+ beta12orEarlier
+ A number defining the penalty for opening gaps at the termini of an alignment, either from the N/C terminal of protein or 5'/3' terminal of nucleotide sequences.
+
+
+
+
+
+
+
+
+
+ Terminal gap extension penalty
+
+ A number defining the penalty for extending gaps at the termini of an alignment, either from the N/C terminal of protein or 5'/3' terminal of nucleotide sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence identity
+
+ Sequence identity is the number (%) of matches (identical characters) in positions from an alignment of two molecular sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence similarity
+
+ beta12orEarlier
+ Sequence similarity is the similarity (expressed as a percentage) of two molecular sequences calculated from their alignment, a scoring matrix for scoring characters substitutions and penalties for gap insertion and extension.
+ Data Type is float probably.
+
+
+
+
+
+
+
+
+
+ Sequence alignment metadata (quality report)
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Data on molecular sequence alignment quality (estimated accuracy).
+
+
+
+
+
+
+
+
+
+ Sequence alignment report (site conservation)
+
+ beta12orEarlier
+ Data on character conservation in a molecular sequence alignment.
+ 1.4
+ This is a broad data type and is used a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation. Use this concept for calculated substitution rates, relative site variability, data on sites with biased properties, highly conserved or very poorly conserved sites, regions, blocks etc.
+ true
+
+
+
+
+
+
+
+
+
+ Sequence alignment report (site correlation)
+
+ 1.4
+ beta12orEarlier
+ Data on correlations between sites in a molecular sequence alignment, typically to identify possible covarying positions and predict contacts or structural constraints in protein structures.
+ true
+
+
+
+
+
+
+
+
+
+ Sequence-profile alignment (Domainatrix signature)
+
+ beta12orEarlier
+ Alignment of molecular sequences to a Domainatrix signature (representing a sequence alignment).
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Sequence-profile alignment (HMM)
+
+ beta12orEarlier
+ 1.5
+ true
+ Alignment of molecular sequence(s) to a hidden Markov model(s).
+
+
+
+
+
+
+
+
+
+ Sequence-profile alignment (fingerprint)
+
+ Alignment of molecular sequences to a protein fingerprint from the PRINTS database.
+ 1.5
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Phylogenetic continuous quantitative data
+
+ beta12orEarlier
+ Phylogenetic continuous quantitative characters
+ Quantitative traits
+ Continuous quantitative data that may be read during phylogenetic tree calculation.
+
+
+
+
+
+
+
+
+
+ Phylogenetic discrete data
+
+ Discrete characters
+ Character data with discrete states that may be read during phylogenetic tree calculation.
+ Phylogenetic discrete states
+ beta12orEarlier
+ Discretely coded characters
+
+
+
+
+
+
+
+
+
+ Phylogenetic character cliques
+
+ One or more cliques of mutually compatible characters that are generated, for example from analysis of discrete character data, and are used to generate a phylogeny.
+ Phylogenetic report (cliques)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic invariants
+
+
+
+
+
+
+
+ Phylogenetic invariants data for testing alternative tree topologies.
+ beta12orEarlier
+ Phylogenetic report (invariants)
+
+
+
+
+
+
+
+
+
+ Phylogenetic report
+
+ beta12orEarlier
+ A report of data concerning or derived from a phylogenetic tree, or from comparing two or more phylogenetic trees.
+ Phylogenetic tree report
+ 1.5
+ Phylogenetic report
+ Phylogenetic tree-derived report
+ This is a broad data type and is used for example for reports on confidence, shape or stratigraphic (age) data derived from phylogenetic tree analysis.
+ true
+
+
+
+
+
+
+
+
+
+ DNA substitution model
+
+ Substitution model
+ Phylogenetic tree report (DNA substitution model)
+ Sequence alignment report (DNA substitution model)
+ beta12orEarlier
+ A model of DNA substitution that explains a DNA sequence alignment, derived from phylogenetic tree analysis.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree report (tree shape)
+
+ beta12orEarlier
+ true
+ 1.4
+ Data about the shape of a phylogenetic tree.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree report (tree evaluation)
+
+ beta12orEarlier
+ true
+ 1.4
+ Data on the confidence of a phylogenetic tree.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree distances
+
+ beta12orEarlier
+ Phylogenetic tree report (tree distances)
+ Distances, such as Branch Score distance, between two or more phylogenetic trees.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree report (tree stratigraphic)
+
+ beta12orEarlier
+ 1.4
+ true
+ Molecular clock and stratigraphic (age) data derived from phylogenetic tree analysis.
+
+
+
+
+
+
+
+
+
+ Phylogenetic character contrasts
+
+ Phylogenetic report (character contrasts)
+ Independent contrasts for characters used in a phylogenetic tree, or covariances, regressions and correlations between characters for those contrasts.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Comparison matrix (integers)
+
+ beta12orEarlier
+ Substitution matrix (integers)
+ beta12orEarlier
+ Matrix of integer numbers for sequence comparison.
+ true
+
+
+
+
+
+
+
+
+
+ Comparison matrix (floats)
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Matrix of floating point numbers for sequence comparison.
+ Substitution matrix (floats)
+
+
+
+
+
+
+
+
+
+ Comparison matrix (nucleotide)
+
+ Matrix of integer or floating point numbers for nucleotide comparison.
+ beta12orEarlier
+ Nucleotide substitution matrix
+
+
+
+
+
+
+
+
+
+ Comparison matrix (amino acid)
+
+
+ Amino acid comparison matrix
+ beta12orEarlier
+ Matrix of integer or floating point numbers for amino acid comparison.
+ Amino acid substitution matrix
+
+
+
+
+
+
+
+
+
+ Nucleotide comparison matrix (integers)
+
+ Nucleotide substitution matrix (integers)
+ beta12orEarlier
+ Matrix of integer numbers for nucleotide comparison.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleotide comparison matrix (floats)
+
+ beta12orEarlier
+ true
+ Matrix of floating point numbers for nucleotide comparison.
+ beta12orEarlier
+ Nucleotide substitution matrix (floats)
+
+
+
+
+
+
+
+
+
+ Amino acid comparison matrix (integers)
+
+ beta12orEarlier
+ Matrix of integer numbers for amino acid comparison.
+ Amino acid substitution matrix (integers)
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Amino acid comparison matrix (floats)
+
+ beta12orEarlier
+ Amino acid substitution matrix (floats)
+ beta12orEarlier
+ true
+ Matrix of floating point numbers for amino acid comparison.
+
+
+
+
+
+
+
+
+
+ Protein features report (membrane regions)
+
+ true
+ beta12orEarlier
+ 1.8
+ trans- or intra-membrane regions of a protein, typically describing physicochemical properties of the secondary structure elements.
+
+
+
+
+
+
+
+
+
+ Nucleic acid structure
+
+
+
+
+
+
+
+ 3D coordinate and associated data for a nucleic acid tertiary (3D) structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein structure
+
+
+
+
+
+
+
+ Protein structures
+ 3D coordinate and associated data for a protein tertiary (3D) structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein-ligand complex
+
+ The structure of a protein in complex with a ligand, typically a small molecule such as an enzyme substrate or cofactor, but possibly another macromolecule.
+ beta12orEarlier
+ This includes interactions of proteins with atoms, ions and small molecules or macromolecules such as nucleic acids or other polypeptides. For stable inter-polypeptide interactions use 'Protein complex' instead.
+
+
+
+
+
+
+
+
+
+ Carbohydrate structure
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ 3D coordinate and associated data for a carbohydrate (3D) structure.
+
+
+
+
+
+
+
+
+
+ Small molecule structure
+
+
+
+
+
+
+
+ 3D coordinate and associated data for the (3D) structure of a small molecule, such as any common chemical compound.
+ CHEBI:23367
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ DNA structure
+
+ beta12orEarlier
+ 3D coordinate and associated data for a DNA tertiary (3D) structure.
+
+
+
+
+
+
+
+
+
+ RNA structure
+
+
+
+
+
+
+
+ beta12orEarlier
+ 3D coordinate and associated data for an RNA tertiary (3D) structure.
+
+
+
+
+
+
+
+
+
+ tRNA structure
+
+ 3D coordinate and associated data for a tRNA tertiary (3D) structure, including tmRNA, snoRNAs etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein chain
+
+ beta12orEarlier
+ 3D coordinate and associated data for the tertiary (3D) structure of a polypeptide chain.
+
+
+
+
+
+
+
+
+
+ Protein domain
+
+
+
+
+
+
+
+ 3D coordinate and associated data for the tertiary (3D) structure of a protein domain.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein structure (all atoms)
+
+ beta12orEarlier
+ 1.5
+ true
+ 3D coordinate and associated data for a protein tertiary (3D) structure (all atoms).
+
+
+
+
+
+
+
+
+
+ C-alpha trace
+
+ 3D coordinate and associated data for a protein tertiary (3D) structure (typically C-alpha atoms only).
+ C-beta atoms from amino acid side-chains may be included.
+ Protein structure (C-alpha atoms)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein chain (all atoms)
+
+ 3D coordinate and associated data for a polypeptide chain tertiary (3D) structure (all atoms).
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Protein chain (C-alpha atoms)
+
+ true
+ 3D coordinate and associated data for a polypeptide chain tertiary (3D) structure (typically C-alpha atoms only).
+ beta12orEarlier
+ beta12orEarlier
+ C-beta atoms from amino acid side-chains may be included.
+
+
+
+
+
+
+
+
+
+ Protein domain (all atoms)
+
+ 3D coordinate and associated data for a protein domain tertiary (3D) structure (all atoms).
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein domain (C-alpha atoms)
+
+ C-beta atoms from amino acid side-chains may be included.
+ true
+ 3D coordinate and associated data for a protein domain tertiary (3D) structure (typically C-alpha atoms only).
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure alignment (pair)
+
+ Alignment (superimposition) of exactly two molecular tertiary (3D) structures.
+ beta12orEarlier
+ Pair structure alignment
+
+
+
+
+
+
+
+
+
+ Structure alignment (multiple)
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Alignment (superimposition) of more than two molecular tertiary (3D) structures.
+
+
+
+
+
+
+
+
+
+ Structure alignment (protein)
+
+
+ Protein structure alignment
+ beta12orEarlier
+ Alignment (superimposition) of protein tertiary (3D) structures.
+
+
+
+
+
+
+
+
+
+ Structure alignment (nucleic acid)
+
+ beta12orEarlier
+ Alignment (superimposition) of nucleic acid tertiary (3D) structures.
+ Nucleic acid structure alignment
+
+
+
+
+
+
+
+
+
+ Structure alignment (protein pair)
+
+
+ Protein pair structural alignment
+ beta12orEarlier
+ Alignment (superimposition) of exactly two protein tertiary (3D) structures.
+
+
+
+
+
+
+
+
+
+ Multiple protein tertiary structure alignment
+
+ Alignment (superimposition) of more than two protein tertiary (3D) structures.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure alignment (protein all atoms)
+
+ 1.5
+ Alignment (superimposition) of protein tertiary (3D) structures (all atoms considered).
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Structure alignment (protein C-alpha atoms)
+
+ Alignment (superimposition) of protein tertiary (3D) structures (typically C-alpha atoms only considered).
+ C-beta atoms from amino acid side-chains may be considered.
+ 1.5
+ C-alpha trace
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Pairwise protein tertiary structure alignment (all atoms)
+
+ Alignment (superimposition) of exactly two protein tertiary (3D) structures (all atoms considered).
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Pairwise protein tertiary structure alignment (C-alpha atoms)
+
+ C-beta atoms from amino acid side-chains may be included.
+ true
+ beta12orEarlier
+ Alignment (superimposition) of exactly two protein tertiary (3D) structures (typically C-alpha atoms only considered).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Multiple protein tertiary structure alignment (all atoms)
+
+ beta12orEarlier
+ true
+ Alignment (superimposition) of exactly two protein tertiary (3D) structures (all atoms considered).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Multiple protein tertiary structure alignment (C-alpha atoms)
+
+ beta12orEarlier
+ Alignment (superimposition) of exactly two protein tertiary (3D) structures (typically C-alpha atoms only considered).
+ true
+ beta12orEarlier
+ C-beta atoms from amino acid side-chains may be included.
+
+
+
+
+
+
+
+
+
+ Structure alignment (nucleic acid pair)
+
+
+ beta12orEarlier
+ Nucleic acid pair structure alignment
+ Alignment (superimposition) of exactly two nucleic acid tertiary (3D) structures.
+
+
+
+
+
+
+
+
+
+ Multiple nucleic acid tertiary structure alignment
+
+ beta12orEarlier
+ Alignment (superimposition) of more than two nucleic acid tertiary (3D) structures.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure alignment (RNA)
+
+ RNA structure alignment
+ Alignment (superimposition) of RNA tertiary (3D) structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structural transformation matrix
+
+ Matrix to transform (rotate/translate) 3D coordinates, typically the transformation necessary to superimpose two molecular structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ DaliLite hit table
+
+ DaliLite hit table of protein chain tertiary structure alignment data.
+ The significant and top-scoring hits for regions of the compared structures is shown. Data such as Z-Scores, number of aligned residues, root-mean-square deviation (RMSD) of atoms and sequence identity are given.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Molecular similarity score
+
+ beta12orEarlier
+ A score reflecting structural similarities of two molecules.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Root-mean-square deviation
+
+ RMSD
+ beta12orEarlier
+ Root-mean-square deviation (RMSD) is calculated to measure the average distance between superimposed macromolecular coordinates.
+
+
+
+
+
+
+
+
+
+ Tanimoto similarity score
+
+ beta12orEarlier
+ A measure of the similarity between two ligand fingerprints.
+ A ligand fingerprint is derived from ligand structural data from a Protein DataBank file. It reflects the elements or groups present or absent, covalent bonds and bond orders and the bonded environment in terms of SATIS codes and BLEEP atom types.
+
+
+
+
+
+
+
+
+
+ 3D-1D scoring matrix
+
+ A matrix of 3D-1D scores reflecting the probability of amino acids to occur in different tertiary structural environments.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Amino acid index
+
+
+ beta12orEarlier
+ A table of 20 numerical values which quantify a property (e.g. physicochemical or biochemical) of the common amino acids.
+
+
+
+
+
+
+
+
+
+ Amino acid index (chemical classes)
+
+ Chemical classes (amino acids)
+ Chemical classification (small, aliphatic, aromatic, polar, charged etc) of amino acids.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Amino acid pair-wise contact potentials
+
+ Contact potentials (amino acid pair-wise)
+ Statistical protein contact potentials.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Amino acid index (molecular weight)
+
+ Molecular weights of amino acids.
+ Molecular weight (amino acids)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Amino acid index (hydropathy)
+
+ Hydrophobic, hydrophilic or charge properties of amino acids.
+ beta12orEarlier
+ Hydropathy (amino acids)
+
+
+
+
+
+
+
+
+
+ Amino acid index (White-Wimley data)
+
+ beta12orEarlier
+ White-Wimley data (amino acids)
+ Experimental free energy values for the water-interface and water-octanol transitions for the amino acids.
+
+
+
+
+
+
+
+
+
+ Amino acid index (van der Waals radii)
+
+ van der Waals radii (amino acids)
+ Van der Waals radii of atoms for different amino acid residues.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Enzyme report
+
+ true
+ 1.5
+ Protein report (enzyme)
+ beta12orEarlier
+ Enzyme report
+ An informative report on a specific enzyme.
+
+
+
+
+
+
+
+
+
+ Restriction enzyme report
+
+ An informative report on a specific restriction enzyme such as enzyme reference data.
+ Restriction enzyme pattern data
+ beta12orEarlier
+ 1.5
+ This might include name of enzyme, organism, isoschizomers, methylation, source, suppliers, literature references, or data on restriction enzyme patterns such as name of enzyme, recognition site, length of pattern, number of cuts made by enzyme, details of blunt or sticky end cut etc.
+ Protein report (restriction enzyme)
+ Restriction enzyme report
+ true
+
+
+
+
+
+
+
+
+
+ Peptide molecular weights
+
+ beta12orEarlier
+ List of molecular weight(s) of one or more proteins or peptides, for example cut by proteolytic enzymes or reagents.
+ The report might include associated data such as frequency of peptide fragment molecular weights.
+
+
+
+
+
+
+
+
+
+ Peptide hydrophobic moment
+
+ beta12orEarlier
+ Report on the hydrophobic moment of a polypeptide sequence.
+ Hydrophobic moment is a peptides hydrophobicity measured for different angles of rotation.
+
+
+
+
+
+
+
+
+
+ Protein aliphatic index
+
+ The aliphatic index of a protein.
+ beta12orEarlier
+ The aliphatic index is the relative protein volume occupied by aliphatic side chains.
+
+
+
+
+
+
+
+
+
+ Protein sequence hydropathy plot
+
+ Hydrophobic moment is a peptides hydrophobicity measured for different angles of rotation.
+ A protein sequence with annotation on hydrophobic or hydrophilic / charged regions, hydrophobicity plot etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein charge plot
+
+ beta12orEarlier
+ A plot of the mean charge of the amino acids within a window of specified length as the window is moved along a protein sequence.
+
+
+
+
+
+
+
+
+
+ Protein solubility
+
+ beta12orEarlier
+ The solubility or atomic solvation energy of a protein sequence or structure.
+ Protein solubility data
+
+
+
+
+
+
+
+
+
+ Protein crystallizability
+
+ beta12orEarlier
+ Protein crystallizability data
+ Data on the crystallizability of a protein sequence.
+
+
+
+
+
+
+
+
+
+ Protein globularity
+
+ Protein globularity data
+ beta12orEarlier
+ Data on the stability, intrinsic disorder or globularity of a protein sequence.
+
+
+
+
+
+
+
+
+
+ Protein titration curve
+
+
+ The titration curve of a protein.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein isoelectric point
+
+ beta12orEarlier
+ The isoelectric point of one proteins.
+
+
+
+
+
+
+
+
+
+ Protein pKa value
+
+ The pKa value of a protein.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein hydrogen exchange rate
+
+ beta12orEarlier
+ The hydrogen exchange rate of a protein.
+
+
+
+
+
+
+
+
+
+ Protein extinction coefficient
+
+ The extinction coefficient of a protein.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein optical density
+
+ The optical density of a protein.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein subcellular localization
+
+ Protein report (subcellular localization)
+ An informative report on protein subcellular localization (nuclear, cytoplasmic, mitochondrial, chloroplast, plastid, membrane etc) or destination (exported / extracellular proteins).
+ beta12orEarlier
+ true
+ beta13
+
+
+
+
+
+
+
+
+
+ Peptide immunogenicity data
+
+ An report on allergenicity / immunogenicity of peptides and proteins.
+ Peptide immunogenicity report
+ beta12orEarlier
+ Peptide immunogenicity
+ This includes data on peptide ligands that elicit an immune response (immunogens), allergic cross-reactivity, predicted antigenicity (Hopp and Woods plot) etc. These data are useful in the development of peptide-specific antibodies or multi-epitope vaccines. Methods might use sequence data (for example motifs) and / or structural data.
+
+
+
+
+
+
+
+
+
+ MHC peptide immunogenicity report
+
+ A report on the immunogenicity of MHC class I or class II binding peptides.
+ beta13
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein structure report
+
+
+ Protein structural property
+ Protein structure-derived report
+ This includes for example reports on the surface properties (shape, hydropathy, electrostatic patches etc) of a protein structure, protein flexibility or motion, and protein architecture (spatial arrangement of secondary structure).
+ Protein property (structural)
+ Annotation on or structural information derived from one or more specific protein 3D structure(s) or structural domains.
+ beta12orEarlier
+ Protein report (structure)
+ Protein structure report (domain)
+
+
+
+
+
+
+
+
+
+ Protein structural quality report
+
+ Report on the quality of a protein three-dimensional model.
+ Protein structure report (quality evaluation)
+ Protein structure validation report
+ Protein property (structural quality)
+ Model validation might involve checks for atomic packing, steric clashes, agreement with electron density maps etc.
+ Protein report (structural quality)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein residue interactions
+
+
+
+
+
+
+
+ Residue interaction data
+ Data on inter-atomic or inter-residue contacts, distances and interactions in protein structure(s) or on the interactions of protein atoms or residues with non-protein groups.
+ beta12orEarlier
+ Atom interaction data
+
+
+
+
+
+
+
+
+
+ Protein flexibility or motion report
+
+ This is a broad data type and is used a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+ Protein property (flexibility or motion)
+ Informative report on flexibility or motion of a protein structure.
+ Protein flexibility or motion
+ beta12orEarlier
+ true
+ 1.4
+ Protein structure report (flexibility or motion)
+
+
+
+
+
+
+
+
+
+ Protein solvent accessibility
+
 + This is a broad data type and is used as a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation. This concept covers definitions of the protein surface, interior and interfaces, accessible and buried residues, surface accessible pockets, interior inaccessible cavities etc.
+ beta12orEarlier
+ Data on the solvent accessible or buried surface area of a protein structure.
+
+
+
+
+
+
+
+
+
+ Protein surface report
+
 + This is a broad data type and is used as a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+ Protein structure report (surface)
+ 1.4
+ Data on the surface properties (shape, hydropathy, electrostatic patches etc) of a protein structure.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Ramachandran plot
+
+ beta12orEarlier
+ Phi/psi angle data or a Ramachandran plot of a protein structure.
+
+
+
+
+
+
+
+
+
+ Protein dipole moment
+
+ Data on the net charge distribution (dipole moment) of a protein structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein distance matrix
+
+ beta12orEarlier
+ A matrix of distances between amino acid residues (for example the C-alpha atoms) in a protein structure.
+
+
+
+
+
+
+
+
+
+ Protein contact map
+
+ An amino acid residue contact map for a protein structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein residue 3D cluster
+
+ beta12orEarlier
+ Report on clusters of contacting residues in protein structures such as a key structural residue network.
+
+
+
+
+
+
+
+
+
+ Protein hydrogen bonds
+
+ Patterns of hydrogen bonding in protein structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein non-canonical interactions
+
+ Protein non-canonical interactions report
+ true
+ Non-canonical atomic interactions in protein structures.
+ 1.4
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ CATH node
+
+ Information on a node from the CATH database.
+ The report (for example http://www.cathdb.info/cathnode/1.10.10.10) includes CATH code (of the node and upper levels in the hierarchy), classification text (of appropriate levels in hierarchy), list of child nodes, representative domain and other relevant data and links.
+ 1.5
+ beta12orEarlier
+ true
+ CATH classification node report
+
+
+
+
+
+
+
+
+
+ SCOP node
+
+ true
+ SCOP classification node
+ Information on a node from the SCOP database.
+ 1.5
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ EMBASSY domain classification
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ An EMBASSY domain classification file (DCF) of classification and other data for domains from SCOP or CATH, in EMBL-like format.
+
+
+
+
+
+
+
+
+ CATH class
+
+ beta12orEarlier
+ 1.5
+ Information on a protein 'class' node from the CATH database.
+ true
+
+
+
+
+
+
+
+
+
+ CATH architecture
+
+ beta12orEarlier
+ 1.5
+ Information on a protein 'architecture' node from the CATH database.
+ true
+
+
+
+
+
+
+
+
+
+ CATH topology
+
+ true
+ 1.5
+ Information on a protein 'topology' node from the CATH database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ CATH homologous superfamily
+
+ 1.5
+ true
+ beta12orEarlier
+ Information on a protein 'homologous superfamily' node from the CATH database.
+
+
+
+
+
+
+
+
+
+ CATH structurally similar group
+
+ 1.5
+ true
+ beta12orEarlier
+ Information on a protein 'structurally similar group' node from the CATH database.
+
+
+
+
+
+
+
+
+
+ CATH functional category
+
+ Information on a protein 'functional category' node from the CATH database.
+ true
+ 1.5
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein fold recognition report
+
+ Methods use some type of mapping between sequence and fold, for example secondary structure prediction and alignment, profile comparison, sequence properties, homologous sequence search, kernel machines etc. Domains and folds might be taken from SCOP or CATH.
+ beta12orEarlier
+ A report on known protein structural domains or folds that are recognized (identified) in protein sequence(s).
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein-protein interaction report
+
+ protein-protein interaction(s), including interactions between protein domains.
+ beta12orEarlier
+ true
+ 1.8
+
+
+
+
+
+
+
+
+
+ Protein-ligand interaction report
+
+ beta12orEarlier
+ An informative report on protein-ligand (small molecule) interaction(s).
+
+
+
+
+
+
+
+
+
+ Protein-nucleic acid interactions report
+
+ true
+ protein-DNA/RNA interaction(s).
+ beta12orEarlier
+ 1.8
+
+
+
+
+
+
+
+
+
+ Nucleic acid melting profile
+
+ Nucleic acid stability profile
 + A melting (stability) profile calculated from the free energy required to unwind and separate the nucleic acid strands, plotted for sliding windows over a sequence.
+ Data on the dissociation characteristics of a double-stranded nucleic acid molecule (DNA or a DNA/RNA hybrid) during heating.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid enthalpy
+
+ beta12orEarlier
+ Enthalpy of hybridized or double stranded nucleic acid (DNA or RNA/DNA).
+
+
+
+
+
+
+
+
+
+ Nucleic acid entropy
+
+ Entropy of hybridized or double stranded nucleic acid (DNA or RNA/DNA).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid melting temperature
+
+ Melting temperature of hybridized or double stranded nucleic acid (DNA or RNA/DNA).
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Nucleic acid stitch profile
+
+ beta12orEarlier
+ Stitch profile of hybridized or double stranded nucleic acid (DNA or RNA/DNA).
+ A stitch profile diagram shows partly melted DNA conformations (with probabilities) at a range of temperatures. For example, a stitch profile might show possible loop openings with their location, size, probability and fluctuations at a given temperature.
+
+
+
+
+
+
+
+
+
+ DNA base pair stacking energies data
+
+ DNA base pair stacking energies data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ DNA base pair twist angle data
+
+ beta12orEarlier
+ DNA base pair twist angle data.
+
+
+
+
+
+
+
+
+
+ DNA base trimer roll angles data
+
+ beta12orEarlier
+ DNA base trimer roll angles data.
+
+
+
+
+
+
+
+
+
+ Vienna RNA parameters
+
+ RNA parameters used by the Vienna package.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Vienna RNA structure constraints
+
+ true
+ Structure constraints used by the Vienna package.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Vienna RNA concentration data
+
+ RNA concentration data used by the Vienna package.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Vienna RNA calculated energy
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ RNA calculated energy data generated by the Vienna package.
+
+
+
+
+
+
+
+
+
+ Base pairing probability matrix dotplot
+
+
+ beta12orEarlier
+ Such as generated by the Vienna package.
+ Dotplot of RNA base pairing probability matrix.
+
+
+
+
+
+
+
+
+
+ Nucleic acid folding report
+
+ Nucleic acid report (folding)
+ beta12orEarlier
+ Nucleic acid report (folding model)
 + RNA secondary structure folding probabilities
+ A report on an analysis of RNA/DNA folding, minimum folding energies for DNA or RNA sequences, energy landscape of RNA mutants etc.
 + This is a broad data type and is used as a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+ RNA secondary structure folding classification
+
+
+
+
+
+
+
+
+
+ Codon usage table
+
+
+
+
+
+
+
+ Table of codon usage data calculated from one or more nucleic acid sequences.
+ A codon usage table might include the codon usage table name, optional comments and a table with columns for codons and corresponding codon usage data. A genetic code can be extracted from or represented by a codon usage table.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Genetic code
+
+ beta12orEarlier
+ A genetic code for an organism.
+ A genetic code need not include detailed codon usage information.
+
+
+
+
+
+
+
+
+
+ Codon adaptation index
+
+ true
+ A simple measure of synonymous codon usage bias often used to predict gene expression levels.
+ CAI
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Codon usage bias plot
+
+ Synonymous codon usage statistic plot
+ beta12orEarlier
+ A plot of the synonymous codon usage calculated for windows over a nucleotide sequence.
+
+
+
+
+
+
+
+
+
+ Nc statistic
+
+ true
+ beta12orEarlier
+ The effective number of codons used in a gene sequence. This reflects how far codon usage of a gene departs from equal usage of synonymous codons.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Codon usage fraction difference
+
+ The differences in codon usage fractions between two codon usage tables.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Pharmacogenomic test report
+
+ beta12orEarlier
+ The report might correlate gene expression or single-nucleotide polymorphisms with drug efficacy or toxicity.
+ Data on the influence of genotype on drug response.
+
+
+
+
+
+
+
+
+
+ Disease report
+
+
+
+
+
+
+
+ An informative report on a specific disease.
+ For example, an informative report on a specific tumor including nature and origin of the sample, anatomic site, organ or tissue, tumor type, including morphology and/or histologic type, and so on.
+ beta12orEarlier
+ Disease report
+
+
+
+
+
+
+
+
+
+ Linkage disequilibrium (report)
+
+ true
+ A report on linkage disequilibrium; the non-random association of alleles or polymorphisms at two or more loci (not necessarily on the same chromosome).
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Heat map
+
+
+ A graphical 2D tabular representation of gene expression data, typically derived from a DNA microarray experiment.
+ beta12orEarlier
 + A heat map is a table where rows and columns correspond to different genes and contexts (for example, cells or samples) and the cell color represents the level of expression of a gene in that context.
+
+
+
+
+
+
+
+
+
+ Affymetrix probe sets library file
+
+ true
+ Affymetrix library file of information about which probes belong to which probe set.
+ CDF file
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Affymetrix probe sets information library file
+
+ true
+ Affymetrix library file of information about the probe sets such as the gene name with which the probe set is associated.
+ GIN file
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Molecular weights standard fingerprint
+
+ beta12orEarlier
+ Standard protonated molecular masses from trypsin (modified porcine trypsin, Promega) and keratin peptides, used in EMBOSS.
+
+
+
+
+
+
+
+
+
+ Metabolic pathway report
+
+ This includes carbohydrate, energy, lipid, nucleotide, amino acid, glycan, PK/NRP, cofactor/vitamin, secondary metabolite, xenobiotics etc.
+ beta12orEarlier
+ A report typically including a map (diagram) of a metabolic pathway.
+ 1.8
+ true
+
+
+
+
+
+
+
+
+
+ Genetic information processing pathway report
+
+ beta12orEarlier
+ 1.8
+ true
+ genetic information processing pathways.
+
+
+
+
+
+
+
+
+
+ Environmental information processing pathway report
+
+ true
+ environmental information processing pathways.
+ beta12orEarlier
+ 1.8
+
+
+
+
+
+
+
+
+
+ Signal transduction pathway report
+
+ A report typically including a map (diagram) of a signal transduction pathway.
+ 1.8
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Cellular process pathways report
+
+ 1.8
 + Topic concerning cellular process pathways.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Disease pathway or network report
+
+ true
+ beta12orEarlier
+ disease pathways, typically of human disease.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Drug structure relationship map
+
+ A report typically including a map (diagram) of drug structure relationships.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein interaction networks
+
+ 1.8
+ networks of protein interactions.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ MIRIAM datatype
+
+ A MIRIAM entry describes a MIRIAM data type including the official name, synonyms, root URI, identifier pattern (regular expression applied to a unique identifier of the data type) and documentation. Each data type can be associated with several resources. Each resource is a physical location of a service (typically a database) providing information on the elements of a data type. Several resources may exist for each data type, provided the same (mirrors) or different information. MIRIAM provides a stable and persistent reference to its data types.
+ An entry (data type) from the Minimal Information Requested in the Annotation of Biochemical Models (MIRIAM) database of data resources.
+ beta12orEarlier
+ true
+ 1.5
+
+
+
+
+
+
+
+
+
+ E-value
+
+ An expectation value (E-Value) is the expected number of observations which are at least as extreme as observations expected to occur by random chance. The E-value describes the number of hits with a given score or better that are expected to occur at random when searching a database of a particular size. It decreases exponentially with the score (S) of a hit. A low E value indicates a more significant score.
+ beta12orEarlier
+ A simple floating point number defining the lower or upper limit of an expectation value (E-value).
+ Expectation value
+
+
+
+
+
+
+
+
+
+ Z-value
+
+ beta12orEarlier
+ The z-value is the number of standard deviations a data value is above or below a mean value.
+ A z-value might be specified as a threshold for reporting hits from database searches.
+
+
+
+
+
+
+
+
+
+ P-value
+
+ beta12orEarlier
 + A p-value might be specified as a threshold for reporting hits from database searches.
+ The P-value is the probability of obtaining by random chance a result that is at least as extreme as an observed result, assuming a NULL hypothesis is true.
+
+
+
+
+
+
+
+
+
+ Database version information
+
+ true
+ Ontology version information
+ 1.5
+ Information on a database (or ontology) version, for example name, version number and release date.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Tool version information
+
+ beta12orEarlier
+ Information on an application version, for example name, version number and release date.
+ true
+ 1.5
+
+
+
+
+
+
+
+
+
+ CATH version information
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Information on a version of the CATH database.
+
+
+
+
+
+
+
+
+
+ Swiss-Prot to PDB mapping
+
+ Cross-mapping of Swiss-Prot codes to PDB identifiers.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence database cross-references
+
+ Cross-references from a sequence record to other databases.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Job status
+
+ Metadata on the status of a submitted job.
+ beta12orEarlier
+ 1.5
+ true
 + Values for EBI services are 'DONE' (job has finished and the results can then be retrieved), 'ERROR' (the job failed or no results were found), 'NOT_FOUND' (the job id is no longer available; job results might be deleted), 'PENDING' (the job is in a queue waiting processing), 'RUNNING' (the job is currently being processed).
+
+
+
+
+
+
+
+
+
+ Job ID
+
+ 1.0
+ The (typically numeric) unique identifier of a submitted job.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+ Job type
+
+ 1.5
+ true
+ beta12orEarlier
+ A label (text token) describing the type of job, for example interactive or non-interactive.
+
+
+
+
+
+
+
+
+
+ Tool log
+
+ 1.5
+ A report of tool-specific metadata on some analysis or process performed, for example a log of diagnostic or error messages.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ DaliLite log file
+
+ true
+ beta12orEarlier
+ DaliLite log file describing all the steps taken by a DaliLite alignment of two protein structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ STRIDE log file
+
+ STRIDE log file.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ NACCESS log file
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ NACCESS log file.
+
+
+
+
+
+
+
+
+
+
+ EMBOSS wordfinder log file
+
+ EMBOSS wordfinder log file.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ EMBOSS domainatrix log file
+
+ beta12orEarlier
+ EMBOSS (EMBASSY) domainatrix application log file.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ EMBOSS sites log file
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ EMBOSS (EMBASSY) sites application log file.
+
+
+
+
+
+
+
+
+
+ EMBOSS supermatcher error file
+
+ EMBOSS (EMBASSY) supermatcher error file.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ EMBOSS megamerger log file
+
+ beta12orEarlier
+ beta12orEarlier
+ EMBOSS megamerger log file.
+ true
+
+
+
+
+
+
+
+
+
+ EMBOSS whichdb log file
+
+ beta12orEarlier
+ true
+ EMBOSS megamerger log file.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ EMBOSS vectorstrip log file
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ EMBOSS vectorstrip log file.
+
+
+
+
+
+
+
+
+
+ Username
+
+ A username on a computer system.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Password
+
+ beta12orEarlier
+ A password on a computer system.
+
+
+
+
+
+
+
+
+
+
+ Email address
+
+ beta12orEarlier
+ Moby:Email
+ A valid email address of an end-user.
+ Moby:EmailAddress
+
+
+
+
+
+
+
+
+
+
+ Person name
+
+ beta12orEarlier
+ The name of a person.
+
+
+
+
+
+
+
+
+
+
+ Number of iterations
+
+ 1.5
+ Number of iterations of an algorithm.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Number of output entities
+
+ Number of entities (for example database hits, sequences, alignments etc) to write to an output file.
+ 1.5
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Hit sort order
+
+ Controls the order of hits (reported matches) in an output file from a database search.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+
+ Drug report
+
+
+
+
+
+
+
+ An informative report on a specific drug.
+ beta12orEarlier
+ Drug annotation
+
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree image
+
+ beta12orEarlier
+ An image (for viewing or printing) of a phylogenetic tree including (typically) a plot of rooted or unrooted phylogenies, cladograms, circular trees or phenograms and associated information.
+ See also 'Phylogenetic tree'
+
+
+
+
+
+
+
+
+
+ RNA secondary structure image
+
+ beta12orEarlier
+ Image of RNA secondary structure, knots, pseudoknots etc.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure image
+
+ Image of protein secondary structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure image
+
+ beta12orEarlier
+ Image of one or more molecular tertiary (3D) structures.
+
+
+
+
+
+
+
+
+
+ Sequence alignment image
+
+ beta12orEarlier
+ Image of two or more aligned molecular sequences possibly annotated with alignment features.
+
+
+
+
+
+
+
+
+
+ Chemical structure image
+
+ An image of the structure of a small chemical compound.
+ The molecular identifier and formula are typically included.
+ Small molecule structure image
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Fate map
+
+
+
+
+
+
+
+
+ beta12orEarlier
 + A fate map is a plan of an early stage of an embryo such as a blastula, showing areas that are of significance to development.
+
+
+
+
+
+
+
+
+
+ Microarray spots image
+
+
+ beta12orEarlier
+ An image of spots from a microarray experiment.
+
+
+
+
+
+
+
+
+
+ BioPax term
+
+ beta12orEarlier
+ A term from the BioPax ontology.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ GO
+
+ beta12orEarlier
+ Gene Ontology term
+ Moby:Annotated_GO_Term
+ Moby:Annotated_GO_Term_With_Probability
+ true
+ A term definition from The Gene Ontology (GO).
+ beta12orEarlier
+ Moby:GO_Term
+ Moby:GOTerm
+
+
+
+
+
+
+
+
+
+ MeSH
+
+ true
+ A term from the MeSH vocabulary.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HGNC
+
+ beta12orEarlier
+ true
+ A term from the HGNC controlled vocabulary.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ NCBI taxonomy vocabulary
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ A term from the NCBI taxonomy vocabulary.
+
+
+
+
+
+
+
+
+
+ Plant ontology term
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ A term from the Plant Ontology (PO).
+
+
+
+
+
+
+
+
+
+ UMLS
+
+ beta12orEarlier
+ beta12orEarlier
+ A term from the UMLS vocabulary.
+ true
+
+
+
+
+
+
+
+
+
+ FMA
+
+ beta12orEarlier
+ Classifies anatomical entities according to their shared characteristics (genus) and distinguishing characteristics (differentia). Specifies the part-whole and spatial relationships of the entities, morphological transformation of the entities during prenatal development and the postnatal life cycle and principles, rules and definitions according to which classes and relationships in the other three components of FMA are represented.
+ beta12orEarlier
+ A term from Foundational Model of Anatomy.
+ true
+
+
+
+
+
+
+
+
+
+ EMAP
+
+ A term from the EMAP mouse ontology.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ ChEBI
+
+ beta12orEarlier
+ A term from the ChEBI ontology.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ MGED
+
+ beta12orEarlier
+ true
+ A term from the MGED ontology.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ myGrid
+
 + The ontology is provided as two components, the service ontology and the domain ontology. The domain ontology provides concepts for core bioinformatics data types and their relations. The service ontology describes the physical and operational features of web services.
+ beta12orEarlier
+ true
+ A term from the myGrid ontology.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GO (biological process)
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Data Type is an enumerated string.
+ A term definition for a biological process from the Gene Ontology (GO).
+
+
+
+
+
+
+
+
+
+ GO (molecular function)
+
+ A term definition for a molecular function from the Gene Ontology (GO).
+ beta12orEarlier
+ Data Type is an enumerated string.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GO (cellular component)
+
+ beta12orEarlier
+ true
+ A term definition for a cellular component from the Gene Ontology (GO).
+ beta12orEarlier
+ Data Type is an enumerated string.
+
+
+
+
+
+
+
+
+
+ Ontology relation type
+
+ 1.5
+ beta12orEarlier
+ true
+ A relation type defined in an ontology.
+
+
+
+
+
+
+
+
+
+ Ontology concept definition
+
+ beta12orEarlier
+ Ontology class definition
+ The definition of a concept from an ontology.
+
+
+
+
+
+
+
+
+
+ Ontology concept comment
+
+ beta12orEarlier
+ 1.4
+ true
+ A comment on a concept from an ontology.
+
+
+
+
+
+
+
+
+
+ Ontology concept reference
+
+ beta12orEarlier
+ true
+ Reference for a concept from an ontology.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ doc2loc document information
+
+ beta12orEarlier
+ true
+ The doc2loc output includes the url, format, type and availability code of a document for every service provider.
+ beta12orEarlier
+ Information on a published article provided by the doc2loc program.
+
+
+
+
+
+
+
+
+
+ PDB residue number
+
+ WHATIF: pdb_number
+ PDBML:PDB_residue_no
+ beta12orEarlier
+ A residue identifier (a string) from a PDB file.
+
+
+
+
+
+
+
+
+
+ Atomic coordinate
+
+ Cartesian coordinate of an atom (in a molecular structure).
+ beta12orEarlier
+ Cartesian coordinate
+
+
+
+
+
+
+
+
+
+ Atomic x coordinate
+
+ WHATIF: PDBx_Cartn_x
+ Cartesian x coordinate
+ beta12orEarlier
+ PDBML:_atom_site.Cartn_x in PDBML
+ Cartesian x coordinate of an atom (in a molecular structure).
+
+
+
+
+
+
+
+
+
+ Atomic y coordinate
+
+ WHATIF: PDBx_Cartn_y
+ Cartesian y coordinate
+ beta12orEarlier
+ PDBML:_atom_site.Cartn_y in PDBML
+ Cartesian y coordinate of an atom (in a molecular structure).
+
+
+
+
+
+
+
+
+
+ Atomic z coordinate
+
+ PDBML:_atom_site.Cartn_z
+ WHATIF: PDBx_Cartn_z
+ Cartesian z coordinate of an atom (in a molecular structure).
+ beta12orEarlier
+ Cartesian z coordinate
+
+
+
+
+
+
+
+
+
+ PDB atom name
+
+ WHATIF: PDBx_type_symbol
+ beta12orEarlier
+ WHATIF: PDBx_auth_atom_id
+ WHATIF: alternate_atom
+ PDBML:pdbx_PDB_atom_name
+ WHATIF: atom_type
+ Identifier (a string) of a specific atom from a PDB file for a molecular structure.
+
+
+
+
+
+
+
+
+
+
+ Protein atom
+
+ Atom data
+ CHEBI:33250
 + This is a broad data type and is used as a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+ Data on a single atom from a protein structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein residue
+
+ beta12orEarlier
+ Data on a single amino acid residue position in a protein structure.
 + This is a broad data type and is used as a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+ Residue
+
+
+
+
+
+
+
+
+
+ Atom name
+
+
+ Name of an atom.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ PDB residue name
+
+ Three-letter amino acid residue names as used in PDB files.
+ WHATIF: type
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ PDB model number
+
+ Identifier of a model structure from a PDB file.
+ beta12orEarlier
+ PDBML:pdbx_PDB_model_num
+ Model number
+ WHATIF: model_number
+
+
+
+
+
+
+
+
+
+
+ CATH domain report
+
+ beta12orEarlier
+ true
+ beta13
+ The report (for example http://www.cathdb.info/domain/1cukA01) includes CATH codes for levels in the hierarchy for the domain, level descriptions and relevant data and links.
+ Summary of domain classification information for a CATH domain.
+
+
+
+
+
+
+
+
+
+ CATH representative domain sequences (ATOM)
+
+ beta12orEarlier
+ beta12orEarlier
+ FASTA sequence database (based on ATOM records in PDB) for CATH domains (clustered at different levels of sequence identity).
+ true
+
+
+
+
+
+
+
+
+
+ CATH representative domain sequences (COMBS)
+
+ true
+ FASTA sequence database (based on COMBS sequence data) for CATH domains (clustered at different levels of sequence identity).
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ CATH domain sequences (ATOM)
+
+ true
+ FASTA sequence database for all CATH domains (based on PDB ATOM records).
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ CATH domain sequences (COMBS)
+
+ FASTA sequence database for all CATH domains (based on COMBS sequence data).
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence version
+
+ beta12orEarlier
 + Information on a molecular sequence version.
+ Sequence version information
+
+
+
+
+
+
+
+
+
+ Score
+
+ A numerical value, that is some type of scored value arising for example from a prediction method.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein report (function)
+
+ true
+ For properties that can be mapped to a sequence, use 'Sequence report' instead.
+ beta13
+ Report on general functional properties of specific protein(s).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene name (ASPGD)
+
+ 1.3
+ beta12orEarlier
+ true
+ Name of a gene from Aspergillus Genome Database.
+ http://www.geneontology.org/doc/GO.xrf_abbs:ASPGD_LOCUS
+
+
+
+
+
+
+
+
+
+ Gene name (CGD)
+
+ Name of a gene from Candida Genome Database.
+ true
+ http://www.geneontology.org/doc/GO.xrf_abbs:CGD_LOCUS
+ beta12orEarlier
+ 1.3
+
+
+
+
+
+
+
+
+
+ Gene name (dictyBase)
+
+ http://www.geneontology.org/doc/GO.xrf_abbs:dictyBase
+ beta12orEarlier
+ 1.3
+ true
+ Name of a gene from dictyBase database.
+
+
+
+
+
+
+
+
+
+ Gene name (EcoGene primary)
+
+ http://www.geneontology.org/doc/GO.xrf_abbs:ECOGENE_G
+ Primary name of a gene from EcoGene Database.
+ EcoGene primary gene name
+ 1.3
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene name (MaizeGDB)
+
+ http://www.geneontology.org/doc/GO.xrf_abbs:MaizeGDB_Locus
+ 1.3
+ Name of a gene from MaizeGDB (maize genes) database.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene name (SGD)
+
+ true
+ 1.3
+ beta12orEarlier
+ http://www.geneontology.org/doc/GO.xrf_abbs:SGD_LOCUS
+ Name of a gene from Saccharomyces Genome Database.
+
+
+
+
+
+
+
+
+
+ Gene name (TGD)
+
+ beta12orEarlier
+ 1.3
+ Name of a gene from Tetrahymena Genome Database.
+ true
+ http://www.geneontology.org/doc/GO.xrf_abbs:TGD_LOCUS
+
+
+
+
+
+
+
+
+
+ Gene name (CGSC)
+
+ beta12orEarlier
+ 1.3
+ true
+ http://www.geneontology.org/doc/GO.xrf_abbs: CGSC
+ Symbol of a gene from E.coli Genetic Stock Center.
+
+
+
+
+
+
+
+
+
+ Gene name (HGNC)
+
+ beta12orEarlier
+ HUGO symbol
+ 1.3
+ true
+ HGNC symbol
+ Official gene name
+ HUGO gene name
+ http://www.geneontology.org/doc/GO.xrf_abbs: HGNC_gene
+ HGNC gene name
+ HUGO gene symbol
+ HGNC:[0-9]{1,5}
+ Gene name (HUGO)
+ HGNC gene symbol
+ Symbol of a gene approved by the HUGO Gene Nomenclature Committee.
+
+
+
+
+
+
+
+
+
+ Gene name (MGD)
+
+ MGI:[0-9]+
+ Symbol of a gene from the Mouse Genome Database.
+ http://www.geneontology.org/doc/GO.xrf_abbs: MGD
+ 1.3
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene name (Bacillus subtilis)
+
+ http://www.geneontology.org/doc/GO.xrf_abbs: SUBTILISTG
+ Symbol of a gene from Bacillus subtilis Genome Sequence Project.
+ beta12orEarlier
+ 1.3
+ true
+
+
+
+
+
+
+
+
+
+ Gene ID (PlasmoDB)
+
+ Identifier of a gene from PlasmoDB Plasmodium Genome Resource.
+ beta12orEarlier
+ http://www.geneontology.org/doc/GO.xrf_abbs: ApiDB_PlasmoDB
+
+
+
+
+
+
+
+
+
+
+ Gene ID (EcoGene)
+
+ Identifier of a gene from EcoGene Database.
+ EcoGene Accession
+ EcoGene ID
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene ID (FlyBase)
+
+ beta12orEarlier
+ Gene identifier from FlyBase database.
+ http://www.geneontology.org/doc/GO.xrf_abbs: FB
+ http://www.geneontology.org/doc/GO.xrf_abbs: FlyBase
+
+
+
+
+
+
+
+
+
+
+ Gene ID (GeneDB Glossina morsitans)
+
+ true
+ http://www.geneontology.org/doc/GO.xrf_abbs: GeneDB_Gmorsitans
+ beta13
+ Gene identifier from Glossina morsitans GeneDB database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene ID (GeneDB Leishmania major)
+
+ Gene identifier from Leishmania major GeneDB database.
+ true
+ http://www.geneontology.org/doc/GO.xrf_abbs: GeneDB_Lmajor
+ beta12orEarlier
+ beta13
+
+
+
+
+
+
+
+
+
+ Gene ID (GeneDB Plasmodium falciparum)
+
+ Gene identifier from Plasmodium falciparum GeneDB database.
+ true
+ http://www.geneontology.org/doc/GO.xrf_abbs: GeneDB_Pfalciparum
+ beta13
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene ID (GeneDB Schizosaccharomyces pombe)
+
+ http://www.geneontology.org/doc/GO.xrf_abbs: GeneDB_Spombe
+ beta12orEarlier
+ true
+ beta13
+ Gene identifier from Schizosaccharomyces pombe GeneDB database.
+
+
+
+
+
+
+
+
+
+ Gene ID (GeneDB Trypanosoma brucei)
+
+ Gene identifier from Trypanosoma brucei GeneDB database.
+ true
+ beta13
+ beta12orEarlier
+ http://www.geneontology.org/doc/GO.xrf_abbs: GeneDB_Tbrucei
+
+
+
+
+
+
+
+
+
+ Gene ID (Gramene)
+
+ http://www.geneontology.org/doc/GO.xrf_abbs: GR_gene
+ beta12orEarlier
+ http://www.geneontology.org/doc/GO.xrf_abbs: GR_GENE
+ Gene identifier from Gramene database.
+
+
+
+
+
+
+
+
+
+
+ Gene ID (Virginia microbial)
+
+ beta12orEarlier
+ http://www.geneontology.org/doc/GO.xrf_abbs: PAMGO_VMD
+ Gene identifier from Virginia Bioinformatics Institute microbial database.
+ http://www.geneontology.org/doc/GO.xrf_abbs: VMD
+
+
+
+
+
+
+
+
+
+
+ Gene ID (SGN)
+
+ http://www.geneontology.org/doc/GO.xrf_abbs: SGN
+ Gene identifier from Sol Genomics Network.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene ID (WormBase)
+
+
+ Gene identifier used by WormBase database.
+ WBGene[0-9]{8}
+ http://www.geneontology.org/doc/GO.xrf_abbs: WB
+ http://www.geneontology.org/doc/GO.xrf_abbs: WormBase
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene synonym
+
+ Gene name synonym
+ true
+ Any name (other than the recommended one) for a gene.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ ORF name
+
+
+ beta12orEarlier
+ The name of an open reading frame attributed by a sequencing project.
+
+
+
+
+
+
+
+
+
+
+ Sequence assembly component
+
+ A component of a larger sequence assembly.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Chromosome annotation (aberration)
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ A report on a chromosome aberration such as abnormalities in chromosome structure.
+
+
+
+
+
+
+
+
+
+ Clone ID
+
+ beta12orEarlier
+ An identifier of a clone (cloned molecular sequence) from a database.
+
+
+
+
+
+
+
+
+
+
+ PDB insertion code
+
+ beta12orEarlier
+ WHATIF: insertion_code
+ PDBML:pdbx_PDB_ins_code
+ An insertion code (part of the residue number) for an amino acid residue from a PDB file.
+
+
+
+
+
+
+
+
+
+ Atomic occupancy
+
+ WHATIF: PDBx_occupancy
+ The fraction of an atom type present at a site in a molecular structure.
+ beta12orEarlier
+ The sum of the occupancies of all the atom types at a site should not normally significantly exceed 1.0.
+
+
+
+
+
+
+
+
+
+ Isotropic B factor
+
+ Isotropic B factor (atomic displacement parameter) for an atom from a PDB file.
+ WHATIF: PDBx_B_iso_or_equiv
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Deletion map
+
+ A cytogenetic map is built from a set of mutant cell lines with sub-chromosomal deletions and a reference wild-type line ('genome deletion panel'). The panel is used to map markers onto the genome by comparing mutant to wild-type banding patterns. Markers are linked (occur in the same deleted region) if they share the same banding pattern (presence or absence) as the deletion panel.
+ beta12orEarlier
+ A cytogenetic map showing chromosome banding patterns in mutant cell lines relative to the wild type.
+ Deletion-based cytogenetic map
+
+
+
+
+
+
+
+
+
+ QTL map
+
+ A genetic map which shows the approximate location of quantitative trait loci (QTL) between two or more markers.
+ beta12orEarlier
+ Quantitative trait locus map
+
+
+
+
+
+
+
+
+
+ Haplotype map
+
+ beta12orEarlier
+ Moby:Haplotyping_Study_obj
+ A map of haplotypes in a genome or other sequence, describing common patterns of genetic variation.
+
+
+
+
+
+
+
+
+
+ Map set data
+
+ beta12orEarlier
+ Data describing a set of multiple genetic or physical maps, typically sharing a common set of features which are mapped.
+ Moby:GCP_CorrelatedLinkageMapSet
+ Moby:GCP_CorrelatedMapSet
+
+
+
+
+
+
+
+
+
+ Map feature
+
+ beta12orEarlier
+ true
+ A feature which may mapped (positioned) on a genetic or other type of map.
+ Moby:MapFeature
+ beta12orEarlier
+ Mappable features may be based on Gramene's notion of map features; see http://www.gramene.org/db/cmap/feature_type_info.
+
+
+
+
+
+
+
+
+
+
+
+ Map type
+
+ A designation of the type of map (genetic map, physical map, sequence map etc) or map set.
+ Map types may be based on Gramene's notion of a map type; see http://www.gramene.org/db/cmap/map_type_info.
+ 1.5
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein fold name
+
+ The name of a protein fold.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Taxon
+
+ Moby:PotentialTaxon
+ Taxonomy rank
+ beta12orEarlier
+ Taxonomic rank
+ For a complete list of taxonomic ranks see https://www.phenoscape.org/wiki/Taxonomic_Rank_Vocabulary.
+ The name of a group of organisms belonging to the same taxonomic rank.
+ Moby:BriefTaxonConcept
+
+
+
+
+
+
+
+
+
+
+ Organism identifier
+
+
+
+
+
+
+
+ beta12orEarlier
+ A unique identifier of a (group of) organisms.
+
+
+
+
+
+
+
+
+
+
+ Genus name
+
+ beta12orEarlier
+ The name of a genus of organism.
+
+
+
+
+
+
+
+
+
+
+ Taxonomic classification
+
+ Moby:TaxonName
+ Moby:GCP_Taxon
+ beta12orEarlier
+ The full name for a group of organisms, reflecting their biological classification and (usually) conforming to a standard nomenclature.
+ Moby:iANT_organism-xml
+ Taxonomic name
+ Name components correspond to levels in a taxonomic hierarchy (e.g. 'Genus', 'Species', etc.) Meta information such as a reference where the name was defined and a date might be included.
+ Taxonomic information
+ Moby:TaxonScientificName
+ Moby:TaxonTCS
+
+
+
+
+
+
+
+
+
+
+ iHOP organism ID
+
+ beta12orEarlier
+ Moby_namespace:iHOPorganism
+ A unique identifier for an organism used in the iHOP database.
+
+
+
+
+
+
+
+
+
+
+ Genbank common name
+
+ Common name for an organism as used in the GenBank database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ NCBI taxon
+
+ The name of a taxon from the NCBI taxonomy database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Synonym
+
+ beta12orEarlier
+ Alternative name
+ beta12orEarlier
+ true
+ An alternative for a word.
+
+
+
+
+
+
+
+
+
+ Misspelling
+
+ A common misspelling of a word.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Acronym
+
+ true
+ An abbreviation of a phrase or word.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Misnomer
+
+ A term which is likely to be misleading of its meaning.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Author ID
+
+ Information on the authors of a published work.
+ Moby:Author
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ DragonDB author identifier
+
+ An identifier representing an author in the DragonDB database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Annotated URI
+
+ beta12orEarlier
+ A URI along with annotation describing the data found at the address.
+ Moby:DescribedLink
+
+
+
+
+
+
+
+
+
+ UniProt keywords
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ A controlled vocabulary for words and phrases that can appear in the keywords field (KW line) of entries from the UniProt database.
+
+
+
+
+
+
+
+
+
+ Gene ID (GeneFarm)
+
+ Moby_namespace:GENEFARM_GeneID
+ Identifier of a gene from the GeneFarm database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Blattner number
+
+ beta12orEarlier
+ Moby_namespace:Blattner_number
+ The blattner identifier for a gene.
+
+
+
+
+
+
+
+
+
+
+ Gene ID (MIPS Maize)
+
+ MIPS genetic element identifier (Maize)
+ Identifier for genetic elements in MIPS Maize database.
+ beta12orEarlier
+ Moby_namespace:MIPS_GE_Maize
+ beta13
+ true
+
+
+
+
+
+
+
+
+
+ Gene ID (MIPS Medicago)
+
+ MIPS genetic element identifier (Medicago)
+ beta12orEarlier
+ beta13
+ true
+ Moby_namespace:MIPS_GE_Medicago
+ Identifier for genetic elements in MIPS Medicago database.
+
+
+
+
+
+
+
+
+
+ Gene name (DragonDB)
+
+ true
+ The name of an Antirrhinum Gene from the DragonDB database.
+ beta12orEarlier
+ Moby_namespace:DragonDB_Gene
+ 1.3
+
+
+
+
+
+
+
+
+
+ Gene name (Arabidopsis)
+
+ Moby_namespace:ArabidopsisGeneSymbol
+ true
+ A unique identifier for an Arabidopsis gene, which is an acronym or abbreviation of the gene name.
+ beta12orEarlier
+ 1.3
+
+
+
+
+
+
+
+
+
+ iHOP symbol
+
+
+
+ A unique identifier of a protein or gene used in the iHOP database.
+ Moby_namespace:iHOPsymbol
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene name (GeneFarm)
+
+ 1.3
+ true
+ Name of a gene from the GeneFarm database.
+ Moby_namespace:GENEFARM_GeneName
+ GeneFarm gene ID
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Locus ID
+
+
+
+
+
+
+
+
+ A unique name or other identifier of a genetic locus, typically conforming to a scheme that names loci (such as predicted genes) depending on their position in a molecular sequence, for example a completely sequenced genome or chromosome.
+ Locus name
+ beta12orEarlier
+ Locus identifier
+
+
+
+
+
+
+
+
+
+
+ Locus ID (AGI)
+
+ AT[1-5]G[0-9]{5}
+ AGI ID
+ Locus identifier for Arabidopsis Genome Initiative (TAIR, TIGR and MIPS databases)
+ http://www.geneontology.org/doc/GO.xrf_abbs:AGI_LocusCode
+ Arabidopsis gene loci number
+ AGI locus code
+ beta12orEarlier
+ AGI identifier
+
+
+
+
+
+
+
+
+
+
+ Locus ID (ASPGD)
+
+ beta12orEarlier
+ http://www.geneontology.org/doc/GO.xrf_abbs: ASPGD
+ http://www.geneontology.org/doc/GO.xrf_abbs: ASPGDID
+ Identifier for loci from ASPGD (Aspergillus Genome Database).
+
+
+
+
+
+
+
+
+
+
+ Locus ID (MGG)
+
+ Identifier for loci from Magnaporthe grisea Database at the Broad Institute.
+ http://www.geneontology.org/doc/GO.xrf_abbs: Broad_MGG
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Locus ID (CGD)
+
+ Identifier for loci from CGD (Candida Genome Database).
+ http://www.geneontology.org/doc/GO.xrf_abbs: CGDID
+ beta12orEarlier
+ CGDID
+ CGD locus identifier
+ http://www.geneontology.org/doc/GO.xrf_abbs: CGD
+
+
+
+
+
+
+
+
+
+
+ Locus ID (CMR)
+
+ http://www.geneontology.org/doc/GO.xrf_abbs: TIGR_CMR
+ Locus identifier for Comprehensive Microbial Resource at the J. Craig Venter Institute.
+ http://www.geneontology.org/doc/GO.xrf_abbs: JCVI_CMR
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ NCBI locus tag
+
+ beta12orEarlier
+ Moby_namespace:LocusID
+ Locus ID (NCBI)
+ http://www.geneontology.org/doc/GO.xrf_abbs: NCBI_locus_tag
+ Identifier for loci from NCBI database.
+
+
+
+
+
+
+
+
+
+
+ Locus ID (SGD)
+
+
+ Identifier for loci from SGD (Saccharomyces Genome Database).
+ http://www.geneontology.org/doc/GO.xrf_abbs: SGDID
+ beta12orEarlier
+ http://www.geneontology.org/doc/GO.xrf_abbs: SGD
+ SGDID
+
+
+
+
+
+
+
+
+
+
+ Locus ID (MMP)
+
+ Identifier of loci from Maize Mapping Project.
+ Moby_namespace:MMP_Locus
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Locus ID (DictyBase)
+
+ Moby_namespace:DDB_gene
+ Identifier of locus from DictyBase (Dictyostelium discoideum).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Locus ID (EntrezGene)
+
+ Identifier of a locus from EntrezGene database.
+ beta12orEarlier
+ Moby_namespace:EntrezGene_ID
+ Moby_namespace:EntrezGene_EntrezGeneID
+
+
+
+
+
+
+
+
+
+
+ Locus ID (MaizeGDB)
+
+ Identifier of locus from MaizeGDB (Maize genome database).
+ Moby_namespace:MaizeGDB_Locus
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Quantitative trait locus
+
+ QTL
+ A QTL sometimes but does not necessarily correspond to a gene.
+ true
+ beta12orEarlier
+ beta12orEarlier
+ A stretch of DNA that is closely linked to the genes underlying a quantitative trait (a phenotype that varies in degree and depends upon the interactions between multiple genes and their environment).
+ Moby:SO_QTL
+
+
+
+
+
+
+
+
+
+ Gene ID (KOME)
+
+ Identifier of a gene from the KOME database.
+ beta12orEarlier
+ Moby_namespace:GeneId
+
+
+
+
+
+
+
+
+
+
+ Locus ID (Tropgene)
+
+ Identifier of a locus from the Tropgene database.
+ Moby:Tropgene_locus
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Alignment
+
+ An alignment of molecular sequences, structures or profiles derived from them.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Atomic property
+
+ General atomic property
+ Data for an atom (in a molecular structure).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ UniProt keyword
+
+ beta12orEarlier
+ A word or phrase that can appear in the keywords field (KW line) of entries from the UniProt database.
+ Moby_namespace:SP_KW
+ http://www.geneontology.org/doc/GO.xrf_abbs: SP_KW
+
+
+
+
+
+
+
+
+
+ Ordered locus name
+
+ beta12orEarlier
+ true
+ A name for a genetic locus conforming to a scheme that names loci (such as predicted genes) depending on their position in a molecular sequence, for example a completely sequenced genome or chromosome.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence coordinates
+
+
+
+ Map position
+ Moby:Position
+ Locus
+ Sequence co-ordinates
+ A position in a map (for example a genetic map), either a single position (point) or a region / interval.
+ Moby:GenePosition
+ This includes positions in genomes based on a reference sequence. A position may be specified for any mappable object, i.e. anything that may have positional information such as a physical position in a chromosome. Data might include sequence region name, strand, coordinate system name, assembly name, start position and end position.
+ Moby:HitPosition
+ beta12orEarlier
+ Moby:MapPosition
+ Moby:Locus
+ Moby:GCP_MapInterval
+ Moby:GCP_MapPosition
+ Moby:GCP_MapPoint
+ PDBML:_atom_site.id
+
+
+
+
+
+
+
+
+
+ Amino acid property
+
+ Data concerning the intrinsic physical (e.g. structural) or chemical properties of one, more or all amino acids.
+ Amino acid data
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Annotation
+
+ beta12orEarlier
+ true
+ beta13
+ This is a broad data type and is used a placeholder for other, more specific types.
+ A human-readable collection of information which (typically) is generated or collated by hand and which describes a biological entity, phenomena or associated primary (e.g. sequence or structural) data, as distinct from the primary data itself and computer-generated reports derived from it.
+
+
+
+
+
+
+
+
+
+ Map data
+
+
+
+
+
+
+
+ Map attribute
+ beta12orEarlier
+ An attribute of a molecular map (genetic or physical), or data extracted from or derived from the analysis of such a map.
+
+
+
+
+
+
+
+
+
+ Vienna RNA structural data
+
+ true
+ Data used by the Vienna RNA analysis package.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence mask parameter
+
+ beta12orEarlier
+ 1.5
+ true
+ Data used to replace (mask) characters in a molecular sequence.
+
+
+
+
+
+
+
+
+
+ Enzyme kinetics data
+
+
+ Data concerning chemical reaction(s) catalysed by enzyme(s).
+ beta12orEarlier
+ This is a broad data type and is used a placeholder for other, more specific types.
+
+
+
+
+
+
+
+
+
+ Michaelis Menten plot
+
+ A plot giving an approximation of the kinetics of an enzyme-catalysed reaction, assuming simple kinetics (i.e. no intermediate or product inhibition, allostericity or cooperativity). It plots initial reaction rate to the substrate concentration (S) from which the maximum rate (vmax) is apparent.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Hanes Woolf plot
+
+ beta12orEarlier
+ A plot based on the Michaelis Menten equation of enzyme kinetics plotting the ratio of the initial substrate concentration (S) against the reaction velocity (v).
+
+
+
+
+
+
+
+
+
+ Experimental data
+
+ This is a broad data type and is used a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+ true
+ Raw data from or annotation on laboratory experiments.
+ beta12orEarlier
+ Experimental measurement data
+ beta13
+
+
+
+
+
+
+
+
+
+
+ Genome version information
+
+ beta12orEarlier
+ true
+ Information on a genome version.
+ 1.5
+
+
+
+
+
+
+
+
+
+ Evidence
+
+ Typically a statement about some data or results, including evidence or the source of a statement, which may include computational prediction, laboratory experiment, literature reference etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence record lite
+
+ beta12orEarlier
+ A molecular sequence and minimal metadata, typically an identifier of the sequence and/or a comment.
+ true
+ 1.8
+
+
+
+
+
+
+
+
+
+ Sequence
+
+
+
+
+
+
+
+ http://purl.bioontology.org/ontology/MSH/D008969
+ Sequences
+ http://purl.org/biotop/biotop.owl#BioMolecularSequenceInformation
+ This concept is a placeholder of concepts for primary sequence data including raw sequences and sequence records. It should not normally be used for derivatives such as sequence alignments, motifs or profiles.
+ beta12orEarlier
+ One or more molecular sequences, possibly with associated annotation.
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence record (lite)
+
+ beta12orEarlier
+ 1.8
+ true
+ A nucleic acid sequence and minimal metadata, typically an identifier of the sequence and/or a comment.
+
+
+
+
+
+
+
+
+
+ Protein sequence record (lite)
+
+ 1.8
+ Sequence record lite (protein)
+ beta12orEarlier
+ A protein sequence and minimal metadata, typically an identifier of the sequence and/or a comment.
+ true
+
+
+
+
+
+
+
+
+
+ Report
+
+ You can use this term by default for any textual report, in case you can't find another, more specific term. Reports may be generated automatically or collated by hand and can include metadata on the origin, source, history, ownership or location of some thing.
+ http://semanticscience.org/resource/SIO_000148
+ Document
+ A human-readable collection of information including annotation on a biological entity or phenomena, computer-generated reports of analysis of primary data (e.g. sequence or structural), and metadata (data about primary data) or any other free (essentially unformatted) text, as distinct from the primary data itself.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Molecular property (general)
+
+ General molecular property
+ General data for a molecule.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structural data
+
+ This is a broad data type and is used a placeholder for other, more specific types.
+ beta12orEarlier
+ true
+ Data concerning molecular structural data.
+ beta13
+
+
+
+
+
+
+
+
+
+
+ Sequence motif (nucleic acid)
+
+ Nucleic acid sequence motif
+ DNA sequence motif
+ A nucleotide sequence motif.
+ beta12orEarlier
+ RNA sequence motif
+
+
+
+
+
+
+
+
+
+ Sequence motif (protein)
+
+ beta12orEarlier
+ An amino acid sequence motif.
+ Protein sequence motif
+
+
+
+
+
+
+
+
+
+ Search parameter
+
+ beta12orEarlier
+ 1.5
+ true
+ Some simple value controlling a search operation, typically a search of a database.
+
+
+
+
+
+
+
+
+
+ Database search results
+
+ beta12orEarlier
+ A report of hits from searching a database of some type.
+ Search results
+ Database hits
+
+
+
+
+
+
+
+
+
+ Secondary structure
+
+ 1.5
+ true
+ beta12orEarlier
+ The secondary structure assignment (predicted or real) of a nucleic acid or protein.
+
+
+
+
+
+
+
+
+
+ Matrix
+
+ beta12orEarlier
+ Array
+ This is a broad data type and is used a placeholder for other, more specific types.
+ An array of numerical values.
+
+
+
+
+
+
+
+
+
+ Alignment data
+
+ beta12orEarlier
+ 1.8
+ true
+ Data concerning, extracted from, or derived from the analysis of molecular alignment of some type.
+ This is a broad data type and is used a placeholder for other, more specific types.
+ Alignment report
+
+
+
+
+
+
+
+
+
+ Nucleic acid report
+
+ An informative human-readable report about one or more specific nucleic acid molecules, derived from analysis of primary (sequence or structural) data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure report
+
+ An informative report on general information, properties or features of one or more molecular tertiary (3D) structures.
+ beta12orEarlier
+ Structure-derived report
+
+
+
+
+
+
+
+
+
+ Nucleic acid structure data
+
+ Nucleic acid property (structural)
+ This includes reports on the stiffness, curvature, twist/roll data or other conformational parameters or properties.
+ Nucleic acid structural property
+ beta12orEarlier
+ A report on nucleic acid structure-derived data, describing structural properties of a DNA molecule, or any other annotation or information about specific nucleic acid 3D structure(s).
+
+
+
+
+
+
+
+
+
+ Molecular property
+
+ beta12orEarlier
+ SO:0000400
+ A report on the physical (e.g. structural) or chemical properties of molecules, or parts of a molecule.
+ Physicochemical property
+
+
+
+
+
+
+
+
+
+ DNA base structural data
+
+ Structural data for DNA base pairs or runs of bases, such as energy or angle data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Database entry version information
+
+ true
+ beta12orEarlier
+ 1.5
+ Information on a database (or ontology) entry version, such as name (or other identifier) or parent database, unique identifier of entry, data, author and so on.
+
+
+
+
+
+
+
+
+
+ Accession
+
+ beta12orEarlier
+ http://semanticscience.org/resource/SIO_000731
+ A persistent (stable) and unique identifier, typically identifying an object (entry) from a database.
+ http://semanticscience.org/resource/SIO_000675
+
+
+
+
+
+
+
+
+
+
+ SNP
+
+ single nucleotide polymorphism (SNP) in a DNA sequence.
+ true
+ beta12orEarlier
+ 1.8
+
+
+
+
+
+
+
+
+
+ Data reference
+
+ A list of database accessions or identifiers are usually included.
+ Reference to a dataset (or a cross-reference between two datasets), typically one or more entries in a biological database or ontology.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Job identifier
+
+ http://wsio.org/data_009
+ An identifier of a submitted job.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Name
+
+ http://semanticscience.org/resource/SIO_000116
+ http://usefulinc.com/ns/doap#name
+ "http://www.w3.org/2000/01/rdf-schema#label
+ beta12orEarlier
+ A name of a thing, which need not necessarily uniquely identify it.
+ Symbolic name
+
+
+
+
+
+
+ Closely related, but focusing on labeling and human readability but not on identification.
+
+
+
+
+
+
+
+
+
+
+ Type
+
+ A label (text token) describing the type of a thing, typically an enumerated string (a string with one of a limited set of values).
+ http://purl.org/dc/elements/1.1/type
+ 1.5
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ User ID
+
+ An identifier of a software end-user (typically a person).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ KEGG organism code
+
+
+ A three-letter code used in the KEGG databases to uniquely identify organisms.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene name (KEGG GENES)
+
+ beta12orEarlier
+ KEGG GENES entry name
+ [a-zA-Z_0-9]+:[a-zA-Z_0-9\.-]*
+ Name of an entry (gene) from the KEGG GENES database.
+ Moby_namespace:GeneId
+ true
+ 1.3
+
+
+
+
+
+
+
+
+
+ BioCyc ID
+
+
+ Identifier of an object from one of the BioCyc databases.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Compound ID (BioCyc)
+
+
+ BioCyc compound identifier
+ Identifier of a compound from the BioCyc chemical compounds database.
+ BioCyc compound ID
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Reaction ID (BioCyc)
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of a biological reaction from the BioCyc reactions database.
+
+
+
+
+
+
+
+
+
+
+ Enzyme ID (BioCyc)
+
+
+ BioCyc enzyme ID
+ beta12orEarlier
+ Identifier of an enzyme from the BioCyc enzymes database.
+
+
+
+
+
+
+
+
+
+
+ Reaction ID
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of a biological reaction from a database.
+
+
+
+
+
+
+
+
+
+
+ Identifier (hybrid)
+
+ An identifier that is re-used for data objects of fundamentally different types (typically served from a single database).
+ beta12orEarlier
+ This branch provides an alternative organisation of the concepts nested under 'Accession' and 'Name'. All concepts under here are already included under 'Accession' or 'Name'.
+
+
+
+
+
+
+
+
+
+
+ Molecular property identifier
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of a molecular property.
+
+
+
+
+
+
+
+
+
+
+ Codon usage table ID
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Identifier of a codon usage table, for example a genetic code.
+ Codon usage table identifier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ FlyBase primary identifier
+
+ beta12orEarlier
+ Primary identifier of an object from the FlyBase database.
+
+
+
+
+
+
+
+
+
+
+ WormBase identifier
+
+ beta12orEarlier
+ Identifier of an object from the WormBase database.
+
+
+
+
+
+
+
+
+
+
+ WormBase wormpep ID
+
+
+ Protein identifier used by WormBase database.
+ CE[0-9]{5}
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Nucleic acid features (codon)
+
+ beta12orEarlier
+ true
+ An informative report on a trinucleotide sequence that encodes an amino acid including the triplet sequence, the encoded amino acid or whether it is a start or stop codon.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Map identifier
+
+
+
+
+
+
+
+ An identifier of a map of a molecular sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Person identifier
+
+ An identifier of a software end-user (typically a person).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Nucleic acid identifier
+
+
+
+
+
+
+
+ Name or other identifier of a nucleic acid molecule.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Translation frame specification
+
+ beta12orEarlier
+ Frame for translation of DNA (3 forward and 3 reverse frames relative to a chromosome).
+
+
+
+
+
+
+
+
+
+ Genetic code identifier
+
+
+
+
+
+
+
+ An identifier of a genetic code.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Genetic code name
+
+
+ Informal name for a genetic code, typically an organism name.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ File format name
+
+
+ Name of a file format such as HTML, PNG, PDF, EMBL, GenBank and so on.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence profile type
+
+ true
+ 1.5
+ A label (text token) describing a type of sequence profile such as frequency matrix, Gribskov profile, hidden Markov model etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Operating system name
+
+ beta12orEarlier
+ Name of a computer operating system such as Linux, PC or Mac.
+
+
+
+
+
+
+
+
+
+
+ Mutation type
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ A type of point or block mutation, including insertion, deletion, change, duplication and moves.
+
+
+
+
+
+
+
+
+
+ Logical operator
+
+ beta12orEarlier
+ A logical operator such as OR, AND, XOR, and NOT.
+
+
+
+
+
+
+
+
+
+
+ Results sort order
+
+ Possible options including sorting by score, rank, by increasing P-value (probability, i.e. most statistically significant hits given first) and so on.
+ beta12orEarlier
+ true
+ 1.5
+ A control of the order of data that is output, for example the order of sequences in an alignment.
+
+
+
+
+
+
+
+
+
+ Toggle
+
+ beta12orEarlier
+ A simple parameter that is a toggle (boolean value), typically a control for a modal tool.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence width
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ The width of an output sequence or alignment.
+
+
+
+
+
+
+
+
+
+ Gap penalty
+
+ beta12orEarlier
+ A penalty for introducing or extending a gap in an alignment.
+
+
+
+
+
+
+
+
+
+ Nucleic acid melting temperature
+
+ beta12orEarlier
+ A temperature concerning nucleic acid denaturation, typically the temperature at which the two strands of a hybridized or double stranded nucleic acid (DNA or RNA/DNA) molecule separate.
+ Melting temperature
+
+
+
+
+
+
+
+
+
+ Concentration
+
+ beta12orEarlier
+ The concentration of a chemical compound.
+
+
+
+
+
+
+
+
+
+ Window step size
+
+ 1.5
+ beta12orEarlier
+ true
+ Size of the incremental 'step' a sequence window is moved over a sequence.
+
+
+
+
+
+
+
+
+
+ EMBOSS graph
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ An image of a graph generated by the EMBOSS suite.
+
+
+
+
+
+
+
+
+
+ EMBOSS report
+
+ An application report generated by the EMBOSS suite.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Sequence offset
+
+ true
+ beta12orEarlier
+ 1.5
+ An offset for a single-point sequence position.
+
+
+
+
+
+
+
+
+
+ Threshold
+
+ 1.5
+ beta12orEarlier
+ true
+ A value that serves as a threshold for a tool (usually to control scoring or output).
+
+
+
+
+
+
+
+
+
+ Protein report (transcription factor)
+
+ beta13
+ true
+ This might include conformational or physicochemical properties, as well as sequence information for transcription factor(s) binding sites.
+ An informative report on a transcription factor protein.
+ Transcription factor binding site data
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Database category name
+
+ true
+ The name of a category of biological or bioinformatics database.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence profile name
+
+ beta12orEarlier
+ Name of a sequence profile.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Color
+
+ Specification of one or more colors.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Rendering parameter
+
+ true
+ beta12orEarlier
+ 1.5
+ A parameter that is used to control rendering (drawing) to a device or image.
+ Graphics parameter
+ Graphical parameter
+
+
+
+
+
+
+
+
+
+ Sequence name
+
+
+ Any arbitrary name of a molecular sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Date
+
+ 1.5
+ A temporal date.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Word composition
+
+ beta12orEarlier
+ Word composition data for a molecular sequence.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Fickett testcode plot
+
+ A plot of Fickett testcode statistic (identifying protein coding regions) in a nucleotide sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence similarity plot
+
+
+ Use this concept for calculated substitution rates, relative site variability, data on sites with biased properties, highly conserved or very poorly conserved sites, regions, blocks etc.
+ beta12orEarlier
+ Sequence conservation report
+ Sequence similarity plot
+ A plot of sequence similarities identified from word-matching or character comparison.
+
+
+
+
+
+
+
+
+
+ Helical wheel
+
+ beta12orEarlier
+ An image of peptide sequence sequence looking down the axis of the helix for highlighting amphipathicity and other properties.
+
+
+
+
+
+
+
+
+
+ Helical net
+
+ beta12orEarlier
+ Useful for highlighting amphipathicity and other properties.
+ An image of peptide sequence sequence in a simple 3,4,3,4 repeating pattern that emulates at a simple level the arrangement of residues around an alpha helix.
+
+
+
+
+
+
+
+
+
+ Protein sequence properties plot
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ A plot of general physicochemical properties of a protein sequence.
+
+
+
+
+
+
+
+
+
+ Protein ionization curve
+
+
+ beta12orEarlier
+ A plot of pK versus pH for a protein.
+
+
+
+
+
+
+
+
+
+ Sequence composition plot
+
+
+ beta12orEarlier
+ A plot of character or word composition / frequency of a molecular sequence.
+
+
+
+
+
+
+
+
+
+ Nucleic acid density plot
+
+
+ beta12orEarlier
+ Density plot (of base composition) for a nucleotide sequence.
+
+
+
+
+
+
+
+
+
+ Sequence trace image
+
+ Image of a sequence trace (nucleotide sequence versus probabilities of each of the 4 bases).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid features (siRNA)
+
+ true
+ 1.5
+ beta12orEarlier
+ A report on siRNA duplexes in mRNA.
+
+
+
+
+
+
+
+
+
+ Sequence set (stream)
+
+ beta12orEarlier
+ true
+ This concept may be used for sequence sets that are expected to be read and processed a single sequence at a time.
+ A collection of multiple molecular sequences and (typically) associated metadata that is intended for sequential processing.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ FlyBase secondary identifier
+
+ Secondary identifier of an object from the FlyBase database.
+ Secondary identifier are used to handle entries that were merged with or split from other entries in the database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Cardinality
+
+ The number of a certain thing.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Exactly 1
+
+ beta12orEarlier
+ beta12orEarlier
+ A single thing.
+ true
+
+
+
+
+
+
+
+
+ 1 or more
+
+ One or more things.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+ Exactly 2
+
+ Exactly two things.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+ 2 or more
+
+ Two or more things.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+ Sequence checksum
+
+ A fixed-size datum calculated (by using a hash function) for a molecular sequence, typically for purposes of error detection or indexing.
+ beta12orEarlier
+ Hash code
+ Hash sum
+ Hash
+ Hash value
+
+
+
+
+
+
+
+
+
+ Protein features report (chemical modifications)
+
+ 1.8
+ beta12orEarlier
+ chemical modification of a protein.
+ true
+
+
+
+
+
+
+
+
+
+ Error
+
+ beta12orEarlier
+ Data on an error generated by computer system or tool.
+ 1.5
+ true
+
+
+
+
+
+
+
+
+
+ Database entry metadata
+
+ beta12orEarlier
+ Basic information on any arbitrary database entry.
+
+
+
+
+
+
+
+
+
+ Gene cluster
+
+ beta13
+ true
+ beta12orEarlier
+ A cluster of similar genes.
+
+
+
+
+
+
+
+
+
+ Sequence record full
+
+ true
+ beta12orEarlier
+ A molecular sequence and comprehensive metadata (such as a feature table), typically corresponding to a full entry from a molecular sequence database.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Plasmid identifier
+
+ An identifier of a plasmid in a database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Mutation ID
+
+
+ beta12orEarlier
+ A unique identifier of a specific mutation catalogued in a database.
+
+
+
+
+
+
+
+
+
+
+ Mutation annotation (basic)
+
+ Information describing the mutation itself, the organ site, tissue and type of lesion where the mutation has been identified, description of the patient origin and life-style.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Mutation annotation (prevalence)
+
+ beta12orEarlier
+ true
+ An informative report on the prevalence of mutation(s), including data on samples and mutation prevalence (e.g. by tumour type)..
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Mutation annotation (prognostic)
+
+ beta12orEarlier
+ An informative report on mutation prognostic data, such as information on patient cohort, the study settings and the results of the study.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Mutation annotation (functional)
+
+ An informative report on the functional properties of mutant proteins including transcriptional activities, promotion of cell growth and tumorigenicity, dominant negative effects, capacity to induce apoptosis, cell-cycle arrest or checkpoints in human cells and so on.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Codon number
+
+ beta12orEarlier
+ The number of a codon, for instance, at which a mutation is located.
+
+
+
+
+
+
+
+
+
+ Tumor annotation
+
+ true
+ 1.4
+ An informative report on a specific tumor including nature and origin of the sample, anatomic site, organ or tissue, tumor type, including morphology and/or histologic type, and so on.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Server metadata
+
+ Basic information about a server on the web, such as an SRS server.
+ beta12orEarlier
+ 1.5
+ true
+
+
+
+
+
+
+
+
+
+ Database field name
+
+ The name of a field in a database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence cluster ID (SYSTERS)
+
+ SYSTERS cluster ID
+ Unique identifier of a sequence cluster from the SYSTERS database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Ontology metadata
+
+
+
+
+
+
+
+ beta12orEarlier
+ Data concerning a biological ontology.
+
+
+
+
+
+
+
+
+
+ Raw SCOP domain classification
+
+ true
+ beta12orEarlier
+ Raw SCOP domain classification data files.
+ beta13
+ These are the parsable data files provided by SCOP.
+
+
+
+
+
+
+
+
+
+ Raw CATH domain classification
+
+ Raw CATH domain classification data files.
+ These are the parsable data files provided by CATH.
+ true
+ beta13
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Heterogen annotation
+
+ 1.4
+ true
+ beta12orEarlier
+ An informative report on the types of small molecules or 'heterogens' (non-protein groups) that are represented in PDB files.
+
+
+
+
+
+
+
+
+
+ Phylogenetic property values
+
+ beta12orEarlier
+ Phylogenetic property values data.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence set (bootstrapped)
+
+ 1.5
+ beta12orEarlier
+ Bootstrapping is often performed in phylogenetic analysis.
+ true
+ A collection of sequences output from a bootstrapping (resampling) procedure.
+
+
+
+
+
+
+
+
+
+ Phylogenetic consensus tree
+
+ true
+ A consensus phylogenetic tree derived from comparison of multiple trees.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Schema
+
+ beta12orEarlier
+ true
+ A data schema for organising or transforming data of some type.
+ 1.5
+
+
+
+
+
+
+
+
+
+ DTD
+
+ A DTD (document type definition).
+ true
+ beta12orEarlier
+ 1.5
+
+
+
+
+
+
+
+
+
+ XML Schema
+
+ beta12orEarlier
+ XSD
+ An XML Schema.
+ true
+ 1.5
+
+
+
+
+
+
+
+
+
+ Relax-NG schema
+
+ beta12orEarlier
+ 1.5
+ A relax-NG schema.
+ true
+
+
+
+
+
+
+
+
+
+ XSLT stylesheet
+
+ 1.5
+ beta12orEarlier
+ An XSLT stylesheet.
+ true
+
+
+
+
+
+
+
+
+ Data resource definition name
+
+
+ beta12orEarlier
+ The name of a data type.
+
+
+
+
+
+
+
+
+
+
+ OBO file format name
+
+ Name of an OBO file format such as OBO-XML, plain and so on.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene ID (MIPS)
+
+ Identifier for genetic elements in MIPS database.
+ beta12orEarlier
+ MIPS genetic element identifier
+
+
+
+
+
+
+
+
+
+
+ Sequence identifier (protein)
+
+ An identifier of protein sequence(s) or protein sequence database entries.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Sequence identifier (nucleic acid)
+
+ An identifier of nucleotide sequence(s) or nucleotide sequence database entries.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ EMBL accession
+
+ EMBL ID
+ beta12orEarlier
+ EMBL accession number
+ EMBL identifier
+ An accession number of an entry from the EMBL sequence database.
+
+
+
+
+
+
+
+
+
+
+ UniProt ID
+
+
+
+
+
+
+
+ UniProtKB identifier
+ An identifier of a polypeptide in the UniProt database.
+ UniProtKB entry name
+ beta12orEarlier
+ UniProt identifier
+ UniProt entry name
+
+
+
+
+
+
+
+
+
+
+ GenBank accession
+
+ GenBank ID
+ GenBank identifier
+ Accession number of an entry from the GenBank sequence database.
+ beta12orEarlier
+ GenBank accession number
+
+
+
+
+
+
+
+
+
+
+ Gramene secondary identifier
+
+ beta12orEarlier
+ Gramene internal identifier
+ Gramene internal ID
+ Secondary (internal) identifier of a Gramene database entry.
+ Gramene secondary ID
+
+
+
+
+
+
+
+
+
+
+ Sequence variation ID
+
+
+ An identifier of an entry from a database of molecular sequence variation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene ID
+
+
+ Gene accession
+ beta12orEarlier
+ A unique (and typically persistent) identifier of a gene in a database, that is (typically) different to the gene name/symbol.
+ Gene code
+
+
+
+
+
+
+
+
+
+
+ Gene name (AceView)
+
+ AceView gene name
+ 1.3
+ true
+ Name of an entry (gene) from the AceView genes database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene ID (ECK)
+
+ ECK accession
+ beta12orEarlier
+ E. coli K-12 gene identifier
+ Identifier of an E. coli K-12 gene from EcoGene Database.
+ http://www.geneontology.org/doc/GO.xrf_abbs: ECK
+
+
+
+
+
+
+
+
+
+
+ Gene ID (HGNC)
+
+ HGNC ID
+ beta12orEarlier
+ Identifier for a gene approved by the HUGO Gene Nomenclature Committee.
+
+
+
+
+
+
+
+
+
+
+ Gene name
+
+
+ The name of a gene, (typically) assigned by a person and/or according to a naming scheme. It may contain white space characters and is typically more intuitive and readable than a gene symbol. It (typically) may be used to identify similar genes in different species and to derive a gene symbol.
+ Allele name
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene name (NCBI)
+
+ beta12orEarlier
+ 1.3
+ NCBI gene name
+ Name of an entry (gene) from the NCBI genes database.
+ true
+
+
+
+
+
+
+
+
+
+ SMILES string
+
+ A specification of a chemical structure in SMILES format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ STRING ID
+
+ Unique identifier of an entry from the STRING database of protein-protein interactions.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Virus annotation
+
+ An informative report on a specific virus.
+ true
+ 1.4
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Virus annotation (taxonomy)
+
+ An informative report on the taxonomy of a specific virus.
+ beta12orEarlier
+ true
+ 1.4
+
+
+
+
+
+
+
+
+
+ Reaction ID (SABIO-RK)
+
+ Identifier of a biological reaction from the SABIO-RK reactions database.
+ beta12orEarlier
+ [0-9]+
+
+
+
+
+
+
+
+
+
+
+ Carbohydrate report
+
+ Annotation on or information derived from one or more specific carbohydrate 3D structure(s).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GI number
+
+ beta12orEarlier
+ NCBI GI number
+ gi number
+ A series of digits that are assigned consecutively to each sequence record processed by NCBI. The GI number bears no resemblance to the Accession number of the sequence record.
+ Nucleotide sequence GI number is shown in the VERSION field of the database record. Protein sequence GI number is shown in the CDS/db_xref field of a nucleotide database record, and the VERSION field of a protein database record.
+
+
+
+
+
+
+
+
+
+
+ NCBI version
+
+ beta12orEarlier
+ NCBI accession.version
+ Nucleotide sequence version contains two letters followed by six digits, a dot, and a version number (or for older nucleotide sequence records, the format is one letter followed by five digits, a dot, and a version number). Protein sequence version contains three letters followed by five digits, a dot, and a version number.
+ An identifier assigned to sequence records processed by NCBI, made of the accession number of the database record followed by a dot and a version number.
+ accession.version
+
+
+
+
+
+
+
+
+
+
+ Cell line name
+
+ beta12orEarlier
+ The name of a cell line.
+
+
+
+
+
+
+
+
+
+
+ Cell line name (exact)
+
+ beta12orEarlier
+ The name of a cell line.
+
+
+
+
+
+
+
+
+
+
+ Cell line name (truncated)
+
+ The name of a cell line.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Cell line name (no punctuation)
+
+ The name of a cell line.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Cell line name (assonant)
+
+ The name of a cell line.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Enzyme ID
+
+
+ beta12orEarlier
+ A unique, persistent identifier of an enzyme.
+ Enzyme accession
+
+
+
+
+
+
+
+
+
+
+ REBASE enzyme number
+
+ Identifier of an enzyme from the REBASE enzymes database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ DrugBank ID
+
+ beta12orEarlier
+ DB[0-9]{5}
+ Unique identifier of a drug from the DrugBank database.
+
+
+
+
+
+
+
+
+
+
+ GI number (protein)
+
+ beta12orEarlier
+ protein gi number
+ A unique identifier assigned to NCBI protein sequence records.
+ Nucleotide sequence GI number is shown in the VERSION field of the database record. Protein sequence GI number is shown in the CDS/db_xref field of a nucleotide database record, and the VERSION field of a protein database record.
+ protein gi
+
+
+
+
+
+
+
+
+
+
+ Bit score
+
+ A score derived from the alignment of two sequences, which is then normalized with respect to the scoring system.
+ Bit scores are normalized with respect to the scoring system and therefore can be used to compare alignment scores from different searches.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Translation phase specification
+
+ beta12orEarlier
+ Phase for translation of DNA (0, 1 or 2) relative to a fragment of the coding sequence.
+ Phase
+
+
+
+
+
+
+
+
+
+ Resource metadata
+
+ Data concerning or describing some core computational resource, as distinct from primary data. This includes metadata on the origin, source, history, ownership or location of some thing.
+ This is a broad data type and is used a placeholder for other, more specific types.
+ Provenance metadata
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ontology identifier
+
+
+
+
+
+
+
+ beta12orEarlier
+ Any arbitrary identifier of an ontology.
+
+
+
+
+
+
+
+
+
+
+ Ontology concept name
+
+
+ The name of a concept in an ontology.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Genome build identifier
+
+ beta12orEarlier
+ An identifier of a build of a particular genome.
+
+
+
+
+
+
+
+
+
+
+ Pathway or network name
+
+ The name of a biological pathway or network.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (KEGG)
+
+
+ Identifier of a pathway from the KEGG pathway database.
+ beta12orEarlier
+ [a-zA-Z_0-9]{2,3}[0-9]{5}
+ KEGG pathway ID
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (NCI-Nature)
+
+ beta12orEarlier
+ [a-zA-Z_0-9]+
+ Identifier of a pathway from the NCI-Nature pathway database.
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (ConsensusPathDB)
+
+
+ beta12orEarlier
+ Identifier of a pathway from the ConsensusPathDB pathway database.
+
+
+
+
+
+
+
+
+
+
+ Sequence cluster ID (UniRef)
+
+ Unique identifier of an entry from the UniRef database.
+ UniRef cluster id
+ UniRef entry accession
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence cluster ID (UniRef100)
+
+ UniRef100 cluster id
+ beta12orEarlier
+ UniRef100 entry accession
+ Unique identifier of an entry from the UniRef100 database.
+
+
+
+
+
+
+
+
+
+
+ Sequence cluster ID (UniRef90)
+
+ UniRef90 entry accession
+ beta12orEarlier
+ UniRef90 cluster id
+ Unique identifier of an entry from the UniRef90 database.
+
+
+
+
+
+
+
+
+
+
+ Sequence cluster ID (UniRef50)
+
+ beta12orEarlier
+ UniRef50 cluster id
+ UniRef50 entry accession
+ Unique identifier of an entry from the UniRef50 database.
+
+
+
+
+
+
+
+
+
+
+ Ontology data
+
+
+
+
+
+
+
+ Data concerning or derived from an ontology.
+ Ontological data
+ beta12orEarlier
+ This is a broad data type and is used a placeholder for other, more specific types.
+
+
+
+
+
+
+
+
+
+ RNA family report
+
+ beta12orEarlier
+ An informative report on a specific RNA family or other group of classified RNA sequences.
+ RNA family annotation
+
+
+
+
+
+
+
+
+
+ RNA family identifier
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of an RNA family, typically an entry from a RNA sequence classification database.
+
+
+
+
+
+
+
+
+
+
+ RFAM accession
+
+
+ Stable accession number of an entry (RNA family) from the RFAM database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Protein signature type
+
+ beta12orEarlier
+ true
+ A label (text token) describing a type of protein family signature (sequence classifier) from the InterPro database.
+ 1.5
+
+
+
+
+
+
+
+
+
+ Domain-nucleic acid interaction report
+
+ 1.5
+ true
+ An informative report on protein domain-DNA/RNA interaction(s).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Domain-domain interactions
+
+ 1.8
+ An informative report on protein domain-protein domain interaction(s).
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Domain-domain interaction (indirect)
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Data on indirect protein domain-protein domain interaction(s).
+
+
+
+
+
+
+
+
+
+ Sequence accession (hybrid)
+
+
+
+
+
+
+
+ Accession number of a nucleotide or protein sequence database entry.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ 2D PAGE data
+
+ This is a broad data type and is used a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+ beta13
+ beta12orEarlier
+ true
+ Data concerning two-dimensional polygel electrophoresis.
+
+
+
+
+
+
+
+
+
+
+ 2D PAGE report
+
+ beta12orEarlier
+ two-dimensional gel electrophoresis experiments, gels or spots in a gel.
+ 1.8
+ true
+
+
+
+
+
+
+
+
+
+ Pathway or network accession
+
+
+ A persistent, unique identifier of a biological pathway or network (typically a database entry).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Secondary structure alignment
+
+ Alignment of the (1D representations of) secondary structure of two or more molecules.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ ASTD ID
+
+
+ beta12orEarlier
+ Identifier of an object from the ASTD database.
+
+
+
+
+
+
+
+
+
+
+ ASTD ID (exon)
+
+ beta12orEarlier
+ Identifier of an exon from the ASTD database.
+
+
+
+
+
+
+
+
+
+
+ ASTD ID (intron)
+
+ beta12orEarlier
+ Identifier of an intron from the ASTD database.
+
+
+
+
+
+
+
+
+
+
+ ASTD ID (polya)
+
+ Identifier of a polyA signal from the ASTD database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ ASTD ID (tss)
+
+ Identifier of a transcription start site from the ASTD database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ 2D PAGE spot report
+
+ 2D PAGE spot annotation
+ beta12orEarlier
+ An informative report on individual spot(s) from a two-dimensional (2D PAGE) gel.
+ 1.8
+ true
+
+
+
+
+
+
+
+
+
+ Spot ID
+
+
+ beta12orEarlier
+ Unique identifier of a spot from a two-dimensional (protein) gel.
+
+
+
+
+
+
+
+
+
+
+ Spot serial number
+
+ Unique identifier of a spot from a two-dimensional (protein) gel in the SWISS-2DPAGE database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Spot ID (HSC-2DPAGE)
+
+ Unique identifier of a spot from a two-dimensional (protein) gel from a HSC-2DPAGE database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Protein-motif interaction
+
+ beta13
+ true
+ Data on the interaction of a protein (or protein domain) with specific structural (3D) and/or sequence motifs.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Strain identifier
+
+ Identifier of a strain of an organism variant, typically a plant, virus or bacterium.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ CABRI accession
+
+
+ A unique identifier of an item from the CABRI database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Experiment report (genotyping)
+
+ true
+ Report of genotype experiment including case control, population, and family studies. These might use array based methods and re-sequencing methods.
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Genotype experiment ID
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of an entry from a database of genotype experiment metadata.
+
+
+
+
+
+
+
+
+
+
+ EGA accession
+
+ beta12orEarlier
+ Identifier of an entry from the EGA database.
+
+
+
+
+
+
+
+
+
+
+ IPI protein ID
+
+ Identifier of a protein entry catalogued in the International Protein Index (IPI) database.
+ IPI[0-9]{8}
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ RefSeq accession (protein)
+
+ RefSeq protein ID
+ Accession number of a protein from the RefSeq database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ EPD ID
+
+ beta12orEarlier
+ Identifier of an entry (promoter) from the EPD database.
+ EPD identifier
+
+
+
+
+
+
+
+
+
+
+ TAIR accession
+
+
+ beta12orEarlier
+ Identifier of an entry from the TAIR database.
+
+
+
+
+
+
+
+
+
+
+ TAIR accession (At gene)
+
+ beta12orEarlier
+ Identifier of an Arabidopsis thaliana gene from the TAIR database.
+
+
+
+
+
+
+
+
+
+
+ UniSTS accession
+
+ beta12orEarlier
+ Identifier of an entry from the UniSTS database.
+
+
+
+
+
+
+
+
+
+
+ UNITE accession
+
+ beta12orEarlier
+ Identifier of an entry from the UNITE database.
+
+
+
+
+
+
+
+
+
+
+ UTR accession
+
+ beta12orEarlier
+ Identifier of an entry from the UTR database.
+
+
+
+
+
+
+
+
+
+
+ UniParc accession
+
+ beta12orEarlier
+ UPI[A-F0-9]{10}
+ Accession number of a UniParc (protein sequence) database entry.
+ UniParc ID
+ UPI
+
+
+
+
+
+
+
+
+
+
+ mFLJ/mKIAA number
+
+ beta12orEarlier
+ Identifier of an entry from the Rouge or HUGE databases.
+
+
+
+
+
+
+
+
+
+
+ Fungi annotation
+
+ true
+ beta12orEarlier
+ 1.4
+ An informative report on a specific fungus.
+
+
+
+
+
+
+
+
+
+ Fungi annotation (anamorph)
+
+ beta12orEarlier
+ An informative report on a specific fungus anamorph.
+ 1.4
+ true
+
+
+
+
+
+
+
+
+
+ Gene features report (exon)
+
+ true
+ exons in a nucleotide sequences.
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl protein ID
+
+
+ Ensembl ID (protein)
+ beta12orEarlier
+ Protein ID (Ensembl)
+ Unique identifier for a protein from the Ensembl database.
+
+
+
+
+
+
+
+
+
+
+ Gene transcriptional features report
+
+ 1.8
+ beta12orEarlier
+ transcription of DNA into RNA including the regulation of transcription.
+ true
+
+
+
+
+
+
+
+
+
+ Toxin annotation
+
+ beta12orEarlier
+ An informative report on a specific toxin.
+ 1.4
+ true
+
+
+
+
+
+
+
+
+
+ Protein report (membrane protein)
+
+ beta12orEarlier
+ true
+ An informative report on a membrane protein.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein-drug interaction report
+
+
+
+ An informative report on tentative or known protein-drug interaction(s).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Map data
+
+ beta12orEarlier
+ This is a broad data type and is used a placeholder for other, more specific types.
+ true
+ beta13
+ Data concerning a map of molecular sequence(s).
+
+
+
+
+
+
+
+
+
+
+ Phylogenetic data
+
+ Data concerning phylogeny, typically of molecular sequences, including reports of information concerning or derived from a phylogenetic tree, or from comparing two or more phylogenetic trees.
+ Phylogenetic data
+ This is a broad data type and is used a placeholder for other, more specific types.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein data
+
+ This is a broad data type and is used a placeholder for other, more specific types.
+ beta13
+ Data concerning one or more protein molecules.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid data
+
+ true
+ Data concerning one or more nucleic acid molecules.
+ beta13
+ beta12orEarlier
+ This is a broad data type and is used a placeholder for other, more specific types.
+
+
+
+
+
+
+
+
+
+ Article data
+
+ beta12orEarlier
+ This is a broad data type and is used a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation. It includes concepts that are best described as scientific text or closely concerned with or derived from text.
+ Article report
+ Data concerning, extracted from, or derived from the analysis of a scientific text (or texts) such as a full text article from a scientific journal.
+
+
+
+
+
+
+
+
+
+
+ Parameter
+
+ http://semanticscience.org/resource/SIO_000144
+ Tool-specific parameter
+ beta12orEarlier
+ http://www.e-lico.eu/ontologies/dmo/DMOP/DMOP.owl#Parameter
+ Typically a simple numerical or string value that controls the operation of a tool.
+ Parameters
+ Tool parameter
+
+
+
+
+
+
+
+
+
+ Molecular data
+
+ Molecule-specific data
+ true
+ Data concerning a specific type of molecule.
+ beta13
+ beta12orEarlier
+ This is a broad data type and is used a placeholder for other, more specific types.
+
+
+
+
+
+
+
+
+
+ Molecule report
+
+ An informative report on a specific molecule.
+ beta12orEarlier
+ Molecular report
+ 1.5
+ true
+
+
+
+
+
+
+
+
+
+
+ Organism report
+
+ An informative report on a specific organism.
+ beta12orEarlier
+ Organism annotation
+
+
+
+
+
+
+
+
+
+ Experiment report
+
+ Experiment metadata
+ beta12orEarlier
+ Experiment annotation
+ Annotation on a wet lab experiment, such as experimental conditions.
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (mutation)
+
+ DNA mutation.
+ 1.8
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence attribute
+
+ An attribute of a molecular sequence, possibly in reference to some other sequence.
+ Sequence parameter
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence tag profile
+
+ SAGE, MPSS and SBS experiments are usually performed to study gene expression. The sequence tags are typically subsequently annotated (after a database search) with the mRNA (and therefore gene) the tag was extracted from.
+ beta12orEarlier
+ Sequencing-based expression profile
+ Output from a serial analysis of gene expression (SAGE), massively parallel signature sequencing (MPSS) or sequencing by synthesis (SBS) experiment. In all cases this is a list of short sequence tags and the number of times it is observed.
+
+
+
+
+
+
+
+
+
+ Mass spectrometry data
+
+ beta12orEarlier
+ Data concerning a mass spectrometry measurement.
+
+
+
+
+
+
+
+
+
+ Protein structure raw data
+
+ beta12orEarlier
+ Raw data from experimental methods for determining protein structure.
+ This is a broad data type and is used a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+
+
+
+
+
+
+
+
+
+ Mutation identifier
+
+ An identifier of a mutation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Alignment data
+
+ This is a broad data type and is used a placeholder for other, more specific types. This includes entities derived from sequences and structures such as motifs and profiles.
+ true
+ beta13
+ Data concerning an alignment of two or more molecular sequences, structures or derived data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Data index data
+
+ true
+ Data concerning an index of data.
+ beta12orEarlier
+ beta13
+ Database index
+ This is a broad data type and is used a placeholder for other, more specific types.
+
+
+
+
+
+
+
+
+
+ Amino acid name (single letter)
+
+ beta12orEarlier
+ Single letter amino acid identifier, e.g. G.
+
+
+
+
+
+
+
+
+
+
+ Amino acid name (three letter)
+
+ beta12orEarlier
+ Three letter amino acid identifier, e.g. GLY.
+
+
+
+
+
+
+
+
+
+
+ Amino acid name (full name)
+
+ beta12orEarlier
+ Full name of an amino acid, e.g. Glycine.
+
+
+
+
+
+
+
+
+
+
+ Toxin identifier
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of a toxin.
+
+
+
+
+
+
+
+
+
+
+ ArachnoServer ID
+
+ Unique identifier of a toxin from the ArachnoServer database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Expressed gene list
+
+ beta12orEarlier
+ true
+ 1.5
+ Gene annotation (expressed gene list)
+ A simple summary of expressed genes.
+
+
+
+
+
+
+
+
+
+ BindingDB Monomer ID
+
+ Unique identifier of a monomer from the BindingDB database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ GO concept name
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ The name of a concept from the GO ontology.
+
+
+
+
+
+
+
+
+
+ GO concept ID (biological process)
+
+ [0-9]{7}|GO:[0-9]{7}
+ beta12orEarlier
+ An identifier of a 'biological process' concept from the the Gene Ontology.
+
+
+
+
+
+
+
+
+
+
+ GO concept ID (molecular function)
+
+ beta12orEarlier
+ [0-9]{7}|GO:[0-9]{7}
+ An identifier of a 'molecular function' concept from the the Gene Ontology.
+
+
+
+
+
+
+
+
+
+
+ GO concept name (cellular component)
+
+ The name of a concept for a cellular component from the GO ontology.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Northern blot image
+
+ beta12orEarlier
+ An image arising from a Northern Blot experiment.
+
+
+
+
+
+
+
+
+
+ Blot ID
+
+
+ Unique identifier of a blot from a Northern Blot.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ BlotBase blot ID
+
+ beta12orEarlier
+ Unique identifier of a blot from a Northern Blot from the BlotBase database.
+
+
+
+
+
+
+
+
+
+
+ Hierarchy
+
+ beta12orEarlier
+ Raw data on a biological hierarchy, describing the hierarchy proper, hierarchy components and possibly associated annotation.
+ Hierarchy annotation
+
+
+
+
+
+
+
+
+
+ Hierarchy identifier
+
+ Identifier of an entry from a database of biological hierarchies.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Brite hierarchy ID
+
+ beta12orEarlier
+ Identifier of an entry from the Brite database of biological hierarchies.
+
+
+
+
+
+
+
+
+
+
+ Cancer type
+
+ true
+ A type (represented as a string) of cancer.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ BRENDA organism ID
+
+ A unique identifier for an organism used in the BRENDA database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ UniGene taxon
+
+ The name of a taxon using the controlled vocabulary of the UniGene database.
+ UniGene organism abbreviation
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ UTRdb taxon
+
+ beta12orEarlier
+ The name of a taxon using the controlled vocabulary of the UTRdb database.
+
+
+
+
+
+
+
+
+
+
+ Catalogue ID
+
+ beta12orEarlier
+ An identifier of a catalogue of biological resources.
+ Catalogue identifier
+
+
+
+
+
+
+
+
+
+
+ CABRI catalogue name
+
+
+ The name of a catalogue of biological resources from the CABRI database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Secondary structure alignment metadata
+
+ An informative report on protein secondary structure alignment-derived data or metadata.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Molecule interaction report
+
+ An informative report on the physical, chemical or other information concerning the interaction of two or more molecules (or parts of molecules).
+ beta12orEarlier
+ Molecular interaction report
+ Molecular interaction data
+
+
+
+
+
+
+
+
+ Pathway or network
+
+
+
+
+
+
+
+ Network
+ beta12orEarlier
+ Pathway
+ Primary data about a specific biological pathway or network (the nodes and connections within the pathway or network).
+
+
+
+
+
+
+
+
+
+ Small molecule data
+
+ true
+ This is a broad data type and is used a placeholder for other, more specific types.
+ beta12orEarlier
+ beta13
+ Data concerning one or more small molecules.
+
+
+
+
+
+
+
+
+
+ Genotype and phenotype data
+
+ beta12orEarlier
+ true
+ beta13
+ Data concerning a particular genotype, phenotype or a genotype / phenotype relation.
+
+
+
+
+
+
+
+
+
+ Gene expression data
+
+
+
+
+
+
+
+ beta12orEarlier
+ Image or hybridisation data for a microarray, typically a study of gene expression.
+ Microarray data
+ This is a broad data type and is used a placeholder for other, more specific types. See also http://edamontology.org/data_0931
+
+
+
+
+
+
+
+
+
+ Compound ID (KEGG)
+
+
+ C[0-9]+
+ Unique identifier of a chemical compound from the KEGG database.
+ beta12orEarlier
+ KEGG compound ID
+ KEGG compound identifier
+
+
+
+
+
+
+
+
+
+
+ RFAM name
+
+
+ Name (not necessarily stable) an entry (RNA family) from the RFAM database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Reaction ID (KEGG)
+
+
+ Identifier of a biological reaction from the KEGG reactions database.
+ R[0-9]+
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Drug ID (KEGG)
+
+
+ beta12orEarlier
+ Unique identifier of a drug from the KEGG Drug database.
+ D[0-9]+
+
+
+
+
+
+
+
+
+
+
+ Ensembl ID
+
+
+ beta12orEarlier
+ ENS[A-Z]*[FPTG][0-9]{11}
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl database.
+ Ensembl IDs
+
+
+
+
+
+
+
+
+
+
+ ICD identifier
+
+
+
+
+
+
+
+ An identifier of a disease from the International Classification of Diseases (ICD) database.
+ beta12orEarlier
+ [A-Z][0-9]+(\.[-[0-9]+])?
+
+
+
+
+
+
+
+
+
+
+ Sequence cluster ID (CluSTr)
+
+ Unique identifier of a sequence cluster from the CluSTr database.
+ [0-9A-Za-z]+:[0-9]+:[0-9]{1,5}(\.[0-9])?
+ CluSTr ID
+ beta12orEarlier
+ CluSTr cluster ID
+
+
+
+
+
+
+
+
+
+
+ KEGG Glycan ID
+
+
+ G[0-9]+
+ Unique identifier of a glycan ligand from the KEGG GLYCAN database (a subset of KEGG LIGAND).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ TCDB ID
+
+ beta12orEarlier
+ OBO file for regular expression.
+ TC number
+ [0-9]+\.[A-Z]\.[0-9]+\.[0-9]+\.[0-9]+
+ A unique identifier of a family from the transport classification database (TCDB) of membrane transport proteins.
+
+
+
+
+
+
+
+
+
+
+ MINT ID
+
+ MINT\-[0-9]{1,5}
+ Unique identifier of an entry from the MINT database of protein-protein interactions.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ DIP ID
+
+ Unique identifier of an entry from the DIP database of protein-protein interactions.
+ beta12orEarlier
+ DIP[\:\-][0-9]{3}[EN]
+
+
+
+
+
+
+
+
+
+
+ Signaling Gateway protein ID
+
+ beta12orEarlier
+ Unique identifier of a protein listed in the UCSD-Nature Signaling Gateway Molecule Pages database.
+ A[0-9]{6}
+
+
+
+
+
+
+
+
+
+
+ Protein modification ID
+
+
+ beta12orEarlier
+ Identifier of a protein modification catalogued in a database.
+
+
+
+
+
+
+
+
+
+
+ RESID ID
+
+ Identifier of a protein modification catalogued in the RESID database.
+ AA[0-9]{4}
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ RGD ID
+
+
+ [0-9]{4,7}
+ beta12orEarlier
+ Identifier of an entry from the RGD database.
+
+
+
+
+
+
+
+
+
+
+ TAIR accession (protein)
+
+
+
+
+
+
+
+
+ AASequence:[0-9]{10}
+ Identifier of a protein sequence from the TAIR database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Compound ID (HMDB)
+
+ HMDB[0-9]{5}
+ beta12orEarlier
+ HMDB ID
+ Identifier of a small molecule metabolite from the Human Metabolome Database (HMDB).
+
+
+
+
+
+
+
+
+
+
+ LIPID MAPS ID
+
+ beta12orEarlier
+ LM ID
+ Identifier of an entry from the LIPID MAPS database.
+ LM(FA|GL|GP|SP|ST|PR|SL|PK)[0-9]{4}([0-9a-zA-Z]{4})?
+
+
+
+
+
+
+
+
+
+
+ PeptideAtlas ID
+
+ Identifier of a peptide from the PeptideAtlas peptide databases.
+ PDBML:pdbx_PDB_strand_id
+ beta12orEarlier
+ PAp[0-9]{8}
+
+
+
+
+
+
+
+
+
+
+ Molecular interaction ID
+
+ Identifier of a report of molecular interactions from a database (typically).
+ true
+ beta12orEarlier
+ 1.7
+
+
+
+
+
+
+
+
+
+ BioGRID interaction ID
+
+ [0-9]+
+ beta12orEarlier
+ A unique identifier of an interaction from the BioGRID database.
+
+
+
+
+
+
+
+
+
+
+ Enzyme ID (MEROPS)
+
+ MEROPS ID
+ Unique identifier of a peptidase enzyme from the MEROPS database.
+ beta12orEarlier
+ S[0-9]{2}\.[0-9]{3}
+
+
+
+
+
+
+
+
+
+
+ Mobile genetic element ID
+
+
+ An identifier of a mobile genetic element.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ ACLAME ID
+
+ beta12orEarlier
+ mge:[0-9]+
+ An identifier of a mobile genetic element from the Aclame database.
+
+
+
+
+
+
+
+
+
+
+ SGD ID
+
+
+ PWY[a-zA-Z_0-9]{2}\-[0-9]{3}
+ beta12orEarlier
+ Identifier of an entry from the Saccharomyces genome database (SGD).
+
+
+
+
+
+
+
+
+
+
+ Book ID
+
+
+ beta12orEarlier
+ Unique identifier of a book.
+
+
+
+
+
+
+
+
+
+
+ ISBN
+
+ beta12orEarlier
+ (ISBN)?(-13|-10)?[:]?[ ]?([0-9]{2,3}[ -]?)?[0-9]{1,5}[ -]?[0-9]{1,7}[ -]?[0-9]{1,6}[ -]?([0-9]|X)
+ The International Standard Book Number (ISBN) is for identifying printed books.
+
+
+
+
+
+
+
+
+
+
+ Compound ID (3DMET)
+
+ B[0-9]{5}
+ 3DMET ID
+ beta12orEarlier
+ Identifier of a metabolite from the 3DMET database.
+
+
+
+
+
+
+
+
+
+
+ MatrixDB interaction ID
+
+ ([A-NR-Z][0-9][A-Z][A-Z0-9][A-Z0-9][0-9])_.*|([OPQ][0-9][A-Z0-9][A-Z0-9][A-Z0-9][0-9]_.*)|(GAG_.*)|(MULT_.*)|(PFRAG_.*)|(LIP_.*)|(CAT_.*)
+ A unique identifier of an interaction from the MatrixDB database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ cPath ID
+
+
+ [0-9]+
+ These identifiers are unique within the cPath database, however, they are not stable between releases.
+ beta12orEarlier
+ A unique identifier for pathways, reactions, complexes and small molecules from the cPath (Pathway Commons) database.
+
+
+
+
+
+
+
+
+
+
+ PubChem bioassay ID
+
+
+ Identifier of an assay from the PubChem database.
+ [0-9]+
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ PubChem ID
+
+
+ PubChem identifier
+ beta12orEarlier
+ Identifier of an entry from the PubChem database.
+
+
+
+
+
+
+
+
+
+
+ Reaction ID (MACie)
+
+ beta12orEarlier
+ M[0-9]{4}
+ MACie entry number
+ Identifier of an enzyme reaction mechanism from the MACie database.
+
+
+
+
+
+
+
+
+
+
+ Gene ID (miRBase)
+
+ beta12orEarlier
+ miRNA name
+ miRNA ID
+ Identifier for a gene from the miRBase database.
+ MI[0-9]{7}
+ miRNA identifier
+
+
+
+
+
+
+
+
+
+
+ Gene ID (ZFIN)
+
+ Identifier for a gene from the Zebrafish information network genome (ZFIN) database.
+ beta12orEarlier
+ ZDB\-GENE\-[0-9]+\-[0-9]+
+
+
+
+
+
+
+
+
+
+
+ Reaction ID (Rhea)
+
+ [0-9]{5}
+ Identifier of an enzyme-catalysed reaction from the Rhea database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (Unipathway)
+
+ UPA[0-9]{5}
+ upaid
+ beta12orEarlier
+ Identifier of a biological pathway from the Unipathway database.
+
+
+
+
+
+
+
+
+
+
+ Compound ID (ChEMBL)
+
+ Identifier of a small molecule from the ChEMBL database.
+ ChEMBL ID
+ beta12orEarlier
+ [0-9]+
+
+
+
+
+
+
+
+
+
+
+ LGICdb identifier
+
+ Unique identifier of an entry from the Ligand-gated ion channel (LGICdb) database.
+ beta12orEarlier
+ [a-zA-Z_0-9]+
+
+
+
+
+
+
+
+
+
+
+ Reaction kinetics ID (SABIO-RK)
+
+ Identifier of a biological reaction (kinetics entry) from the SABIO-RK reactions database.
+ [0-9]+
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ PharmGKB ID
+
+
+ beta12orEarlier
+ Identifier of an entry from the pharmacogenetics and pharmacogenomics knowledge base (PharmGKB).
+ PA[0-9]+
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (PharmGKB)
+
+
+ PA[0-9]+
+ Identifier of a pathway from the pharmacogenetics and pharmacogenomics knowledge base (PharmGKB).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Disease ID (PharmGKB)
+
+
+ Identifier of a disease from the pharmacogenetics and pharmacogenomics knowledge base (PharmGKB).
+ beta12orEarlier
+ PA[0-9]+
+
+
+
+
+
+
+
+
+
+
+ Drug ID (PharmGKB)
+
+
+ beta12orEarlier
+ Identifier of a drug from the pharmacogenetics and pharmacogenomics knowledge base (PharmGKB).
+ PA[0-9]+
+
+
+
+
+
+
+
+
+
+
+ Drug ID (TTD)
+
+ DAP[0-9]+
+ Identifier of a drug from the Therapeutic Target Database (TTD).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Target ID (TTD)
+
+ TTDS[0-9]+
+ Identifier of a target protein from the Therapeutic Target Database (TTD).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Cell type identifier
+
+ beta12orEarlier
+ Cell type ID
+ A unique identifier of a type or group of cells.
+
+
+
+
+
+
+
+
+
+
+ NeuronDB ID
+
+ [0-9]+
+ beta12orEarlier
+ A unique identifier of a neuron from the NeuronDB database.
+
+
+
+
+
+
+
+
+
+
+ NeuroMorpho ID
+
+ beta12orEarlier
+ A unique identifier of a neuron from the NeuroMorpho database.
+ [a-zA-Z_0-9]+
+
+
+
+
+
+
+
+
+
+
+ Compound ID (ChemIDplus)
+
+ Identifier of a chemical from the ChemIDplus database.
+ ChemIDplus ID
+ [0-9]+
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (SMPDB)
+
+ beta12orEarlier
+ Identifier of a pathway from the Small Molecule Pathway Database (SMPDB).
+ SMP[0-9]{5}
+
+
+
+
+
+
+
+
+
+
+ BioNumbers ID
+
+ Identifier of an entry from the BioNumbers database of key numbers and associated data in molecular biology.
+ [0-9]+
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ T3DB ID
+
+ beta12orEarlier
+ T3D[0-9]+
+ Unique identifier of a toxin from the Toxin and Toxin Target Database (T3DB) database.
+
+
+
+
+
+
+
+
+
+
+ Carbohydrate identifier
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of a carbohydrate.
+
+
+
+
+
+
+
+
+
+
+ GlycomeDB ID
+
+ Identifier of an entry from the GlycomeDB database.
+ beta12orEarlier
+ [0-9]+
+
+
+
+
+
+
+
+
+
+
+ LipidBank ID
+
+ beta12orEarlier
+ [a-zA-Z_0-9]+[0-9]+
+ Identifier of an entry from the LipidBank database.
+
+
+
+
+
+
+
+
+
+
+ CDD ID
+
+ beta12orEarlier
+ cd[0-9]{5}
+ Identifier of a conserved domain from the Conserved Domain Database.
+
+
+
+
+
+
+
+
+
+
+ MMDB ID
+
+ [0-9]{1,5}
+ beta12orEarlier
+ An identifier of an entry from the MMDB database.
+ MMDB accession
+
+
+
+
+
+
+
+
+
+
+ iRefIndex ID
+
+ Unique identifier of an entry from the iRefIndex database of protein-protein interactions.
+ beta12orEarlier
+ [0-9]+
+
+
+
+
+
+
+
+
+
+
+ ModelDB ID
+
+ Unique identifier of an entry from the ModelDB database.
+ [0-9]+
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (DQCS)
+
+ [0-9]+
+ Identifier of a signaling pathway from the Database of Quantitative Cellular Signaling (DQCS).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Ensembl ID (Homo sapiens)
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ ENS([EGTP])[0-9]{11}
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database (Homo sapiens division).
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Bos taurus')
+
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Bos taurus' division).
+ true
+ beta12orEarlier
+ ENSBTA([EGTP])[0-9]{11}
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Canis familiaris')
+
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Canis familiaris' division).
+ true
+ ENSCAF([EGTP])[0-9]{11}
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Cavia porcellus')
+
+ ENSCPO([EGTP])[0-9]{11}
+ true
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Cavia porcellus' division).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Ciona intestinalis')
+
+ true
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Ciona intestinalis' division).
+ beta12orEarlier
+ beta12orEarlier
+ ENSCIN([EGTP])[0-9]{11}
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Ciona savignyi')
+
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Ciona savignyi' division).
+ ENSCSAV([EGTP])[0-9]{11}
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Danio rerio')
+
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Danio rerio' division).
+ true
+ beta12orEarlier
+ beta12orEarlier
+ ENSDAR([EGTP])[0-9]{11}
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Dasypus novemcinctus')
+
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Dasypus novemcinctus' division).
+ beta12orEarlier
+ beta12orEarlier
+ ENSDNO([EGTP])[0-9]{11}
+ true
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Echinops telfairi')
+
+ ENSETE([EGTP])[0-9]{11}
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Echinops telfairi' division).
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Erinaceus europaeus')
+
+ true
+ ENSEEU([EGTP])[0-9]{11}
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Erinaceus europaeus' division).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Felis catus')
+
+ beta12orEarlier
+ true
+ ENSFCA([EGTP])[0-9]{11}
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Felis catus' division).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Gallus gallus')
+
+ ENSGAL([EGTP])[0-9]{11}
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Gallus gallus' division).
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Gasterosteus aculeatus')
+
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Gasterosteus aculeatus' division).
+ true
+ ENSGAC([EGTP])[0-9]{11}
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Homo sapiens')
+
+ ENSHUM([EGTP])[0-9]{11}
+ beta12orEarlier
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Homo sapiens' division).
+ true
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Loxodonta africana')
+
+ beta12orEarlier
+ true
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Loxodonta africana' division).
+ ENSLAF([EGTP])[0-9]{11}
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Macaca mulatta')
+
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Macaca mulatta' division).
+ beta12orEarlier
+ ENSMMU([EGTP])[0-9]{11}
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Monodelphis domestica')
+
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Monodelphis domestica' division).
+ true
+ ENSMOD([EGTP])[0-9]{11}
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Mus musculus')
+
+ ENSMUS([EGTP])[0-9]{11}
+ true
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Mus musculus' division).
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Myotis lucifugus')
+
+ beta12orEarlier
+ ENSMLU([EGTP])[0-9]{11}
+ true
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Myotis lucifugus' division).
+
+
+
+
+
+
+
+
+
+ Ensembl ID ("Ornithorhynchus anatinus")
+
+ beta12orEarlier
+ true
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Ornithorhynchus anatinus' division).
+ ENSOAN([EGTP])[0-9]{11}
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Oryctolagus cuniculus')
+
+ beta12orEarlier
+ ENSOCU([EGTP])[0-9]{11}
+ true
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Oryctolagus cuniculus' division).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Oryzias latipes')
+
+ ENSORL([EGTP])[0-9]{11}
+ true
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Oryzias latipes' division).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Otolemur garnettii')
+
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Otolemur garnettii' division).
+ true
+ beta12orEarlier
+ ENSSAR([EGTP])[0-9]{11}
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Pan troglodytes')
+
+ beta12orEarlier
+ beta12orEarlier
+ ENSPTR([EGTP])[0-9]{11}
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Pan troglodytes' division).
+ true
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Rattus norvegicus')
+
+ beta12orEarlier
+ true
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Rattus norvegicus' division).
+ ENSRNO([EGTP])[0-9]{11}
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Spermophilus tridecemlineatus')
+
+ true
+ beta12orEarlier
+ ENSSTO([EGTP])[0-9]{11}
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Spermophilus tridecemlineatus' division).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Takifugu rubripes')
+
+ beta12orEarlier
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Takifugu rubripes' division).
+ ENSFRU([EGTP])[0-9]{11}
+ true
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Tupaia belangeri')
+
+ beta12orEarlier
+ beta12orEarlier
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Tupaia belangeri' division).
+ true
+ ENSTBE([EGTP])[0-9]{11}
+
+
+
+
+
+
+
+
+
+ Ensembl ID ('Xenopus tropicalis')
+
+ Identifier of an entry (exon, gene, transcript or protein) from the Ensembl 'core' database ('Xenopus tropicalis' division).
+ beta12orEarlier
+ beta12orEarlier
+ true
+ ENSXET([EGTP])[0-9]{11}
+
+
+
+
+
+
+
+
+
+ CATH identifier
+
+ beta12orEarlier
+ Identifier of a protein domain (or other node) from the CATH database.
+
+
+
+
+
+
+
+
+
+
+ CATH node ID (family)
+
+ beta12orEarlier
+ A code number identifying a family from the CATH database.
+ 2.10.10.10
+
+
+
+
+
+
+
+
+
+
+ Enzyme ID (CAZy)
+
+ Identifier of an enzyme from the CAZy enzymes database.
+ beta12orEarlier
+ CAZy ID
+
+
+
+
+
+
+
+
+
+
+ Clone ID (IMAGE)
+
+ I.M.A.G.E. cloneID
+ IMAGE cloneID
+ A unique identifier assigned by the I.M.A.G.E. consortium to a clone (cloned molecular sequence).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ GO concept ID (cellular compartment)
+
+ An identifier of a 'cellular compartment' concept from the Gene Ontology.
+ [0-9]{7}|GO:[0-9]{7}
+ beta12orEarlier
+ GO concept identifier (cellular compartment)
+
+
+
+
+
+
+
+
+
+
+ Chromosome name (BioCyc)
+
+ Name of a chromosome as used in the BioCyc database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ CleanEx entry name
+
+ beta12orEarlier
+ An identifier of a gene expression profile from the CleanEx database.
+
+
+
+
+
+
+
+
+
+
+ CleanEx dataset code
+
+ beta12orEarlier
+ An identifier of (typically a list of) gene expression experiments catalogued in the CleanEx database.
+
+
+
+
+
+
+
+
+
+
+ Genome report
+
+ An informative report of general information concerning a genome as a whole.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein ID (CORUM)
+
+ beta12orEarlier
+ CORUM complex ID
+ Unique identifier for a protein complex from the CORUM database.
+
+
+
+
+
+
+
+
+
+
+ CDD PSSM-ID
+
+ beta12orEarlier
+ Unique identifier of a position-specific scoring matrix from the CDD database.
+
+
+
+
+
+
+
+
+
+
+ Protein ID (CuticleDB)
+
+ CuticleDB ID
+ beta12orEarlier
+ Unique identifier for a protein from the CuticleDB database.
+
+
+
+
+
+
+
+
+
+
+ DBD ID
+
+ Identifier of a predicted transcription factor from the DBD database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Oligonucleotide probe annotation
+
+
+
+
+
+
+
+ beta12orEarlier
+ General annotation on an oligonucleotide probe.
+
+
+
+
+
+
+
+
+
+ Oligonucleotide ID
+
+
+ Identifier of an oligonucleotide from a database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ dbProbe ID
+
+ Identifier of an oligonucleotide probe from the dbProbe database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Dinucleotide property
+
+ beta12orEarlier
+ Physicochemical property data for one or more dinucleotides.
+
+
+
+
+
+
+
+
+
+ DiProDB ID
+
+ beta12orEarlier
+ Identifier of a dinucleotide property from the DiProDB database.
+
+
+
+
+
+
+
+
+
+
+ Protein features report (disordered structure)
+
+ 1.8
+ true
+ beta12orEarlier
+ disordered structure in a protein.
+
+
+
+
+
+
+
+
+
+ Protein ID (DisProt)
+
+ DisProt ID
+ beta12orEarlier
+ Unique identifier for a protein from the DisProt database.
+
+
+
+
+
+
+
+
+
+
+ Embryo report
+
+ Annotation on an embryo or concerning embryological development.
+ true
+ Embryo annotation
+ beta12orEarlier
+ 1.5
+
+
+
+
+
+
+
+
+
+ Ensembl transcript ID
+
+
+ beta12orEarlier
+ Transcript ID (Ensembl)
+ Unique identifier for a gene transcript from the Ensembl database.
+
+
+
+
+
+
+
+
+
+
+ Inhibitor annotation
+
+ 1.4
+ beta12orEarlier
+ An informative report on one or more small molecules that are enzyme inhibitors.
+ true
+
+
+
+
+
+
+
+
+
+ Promoter ID
+
+
+ beta12orEarlier
+ An identifier of a promoter of a gene that is catalogued in a database.
+ Moby:GeneAccessionList
+
+
+
+
+
+
+
+
+
+
+ EST accession
+
+ Identifier of an EST sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ COGEME EST ID
+
+ beta12orEarlier
+ Identifier of an EST sequence from the COGEME database.
+
+
+
+
+
+
+
+
+
+
+ COGEME unisequence ID
+
+ Identifier of a unisequence from the COGEME database.
+ A unisequence is a single sequence assembled from ESTs.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Protein family ID (GeneFarm)
+
+ GeneFarm family ID
+ beta12orEarlier
+ Accession number of an entry (family) from the TIGRFam database.
+
+
+
+
+
+
+
+
+
+
+ Family name
+
+ beta12orEarlier
+ The name of a family of organisms.
+
+
+
+
+
+
+
+
+
+
+ Genus name (virus)
+
+ true
+ The name of a genus of viruses.
+ beta13
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Family name (virus)
+
+ beta13
+ The name of a family of viruses.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Database name (SwissRegulon)
+
+ true
+ beta13
+ The name of a SwissRegulon database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence feature ID (SwissRegulon)
+
+ beta12orEarlier
+ A feature identifier as used in the SwissRegulon database.
+ This can be name of a gene, the ID of a TFBS, or genomic coordinates in form "chr:start..end".
+
+
+
+
+
+
+
+
+
+
+ FIG ID
+
+ A FIG ID consists of four parts: a prefix, genome id, locus type and id number.
+ A unique identifier of gene in the NMPDR database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene ID (Xenbase)
+
+ A unique identifier of gene in the Xenbase database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene ID (Genolist)
+
+ beta12orEarlier
+ A unique identifier of gene in the Genolist database.
+
+
+
+
+
+
+
+
+
+
+ Gene name (Genolist)
+
+ beta12orEarlier
+ true
+ Genolist gene name
+ 1.3
+ Name of an entry (gene) from the Genolist genes database.
+
+
+
+
+
+
+
+
+
+ ABS ID
+
+ ABS identifier
+ beta12orEarlier
+ Identifier of an entry (promoter) from the ABS database.
+
+
+
+
+
+
+
+
+
+
+ AraC-XylS ID
+
+ Identifier of a transcription factor from the AraC-XylS database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene name (HUGO)
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Name of an entry (gene) from the HUGO database.
+
+
+
+
+
+
+
+
+
+ Locus ID (PseudoCAP)
+
+ beta12orEarlier
+ Identifier of a locus from the PseudoCAP database.
+
+
+
+
+
+
+
+
+
+
+ Locus ID (UTR)
+
+ beta12orEarlier
+ Identifier of a locus from the UTR database.
+
+
+
+
+
+
+
+
+
+
+ MonosaccharideDB ID
+
+ Unique identifier of a monosaccharide from the MonosaccharideDB database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Database name (CMD)
+
+ beta12orEarlier
+ true
+ The name of a subdivision of the Collagen Mutation Database (CMD) database.
+ beta13
+
+
+
+
+
+
+
+
+
+ Database name (Osteogenesis)
+
+ beta12orEarlier
+ true
+ beta13
+ The name of a subdivision of the Osteogenesis database.
+
+
+
+
+
+
+
+
+
+ Genome identifier
+
+ An identifier of a particular genome.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ GenomeReviews ID
+
+ beta12orEarlier
+ An identifier of a particular genome.
+
+
+
+
+
+
+
+
+
+
+ GlycoMap ID
+
+ [0-9]+
+ beta12orEarlier
+ Identifier of an entry from the GlycosciencesDB database.
+
+
+
+
+
+
+
+
+
+
+ Carbohydrate conformational map
+
+ beta12orEarlier
+ A conformational energy map of the glycosidic linkages in a carbohydrate molecule.
+
+
+
+
+
+
+
+
+
+ Gene features report (intron)
+
+ introns in a nucleotide sequences.
+ true
+ beta12orEarlier
+ 1.8
+
+
+
+
+
+
+
+
+
+ Transcription factor name
+
+
+ The name of a transcription factor.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ TCID
+
+ Identifier of a membrane transport proteins from the transport classification database (TCDB).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Pfam domain name
+
+ beta12orEarlier
+ Name of a domain from the Pfam database.
+ PF[0-9]{5}
+
+
+
+
+
+
+
+
+
+
+ Pfam clan ID
+
+ beta12orEarlier
+ CL[0-9]{4}
+ Accession number of a Pfam clan.
+
+
+
+
+
+
+
+
+
+
+ Gene ID (VectorBase)
+
+ VectorBase ID
+ beta12orEarlier
+ Identifier for a gene from the VectorBase database.
+
+
+
+
+
+
+
+
+
+
+ UTRSite ID
+
+ Identifier of an entry from the UTRSite database of regulatory motifs in eukaryotic UTRs.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence signature report
+
+
+
+
+
+
+
+ Sequence motif report
+ Sequence profile report
+ An informative report about a specific or conserved pattern in a molecular sequence, such as its context in genes or proteins, its role, origin or method of construction, etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Locus annotation
+
+ Locus report
+ true
+ beta12orEarlier
+ An informative report on a particular locus.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein name (UniProt)
+
+ Official name of a protein as used in the UniProt database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Term ID list
+
+ One or more terms from one or more controlled vocabularies which are annotations on an entity.
+ beta12orEarlier
+ true
+ The concepts are typically provided as a persistent identifier or some other link the source ontologies. Evidence of the validity of the annotation might be included.
+ 1.5
+
+
+
+
+
+
+
+
+
+ HAMAP ID
+
+ Name of a protein family from the HAMAP database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Identifier with metadata
+
+ Basic information concerning an identifier of data (typically including the identifier itself). For example, a gene symbol with information concerning its provenance.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene symbol annotation
+
+ true
+ beta12orEarlier
+ Annotation about a gene symbol.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Transcript ID
+
+
+
+
+
+
+
+
+ Identifier of a RNA transcript.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ HIT ID
+
+ Identifier of an RNA transcript from the H-InvDB database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ HIX ID
+
+ A unique identifier of gene cluster in the H-InvDB database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ HPA antibody id
+
+ beta12orEarlier
+ Identifier of an antibody from the HPA database.
+
+
+
+
+
+
+
+
+
+
+ IMGT/HLA ID
+
+ Identifier of a human major histocompatibility complex (HLA) or other protein from the IMGT/HLA database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene ID (JCVI)
+
+ A unique identifier of gene assigned by the J. Craig Venter Institute (JCVI).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Kinase name
+
+ beta12orEarlier
+ The name of a kinase protein.
+
+
+
+
+
+
+
+
+
+
+ ConsensusPathDB entity ID
+
+
+ Identifier of a physical entity from the ConsensusPathDB database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ ConsensusPathDB entity name
+
+
+ beta12orEarlier
+ Name of a physical entity from the ConsensusPathDB database.
+
+
+
+
+
+
+
+
+
+
+ CCAP strain number
+
+ The number of a strain of algae and protozoa from the CCAP database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Stock number
+
+
+ beta12orEarlier
+ An identifier of stock from a catalogue of biological resources.
+
+
+
+
+
+
+
+
+
+
+ Stock number (TAIR)
+
+ beta12orEarlier
+ A stock number from The Arabidopsis information resource (TAIR).
+
+
+
+
+
+
+
+
+
+
+ REDIdb ID
+
+ beta12orEarlier
+ Identifier of an entry from the RNA editing database (REDIdb).
+
+
+
+
+
+
+
+
+
+
+ SMART domain name
+
+ Name of a domain from the SMART database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Protein family ID (PANTHER)
+
+ beta12orEarlier
+ Panther family ID
+ Accession number of an entry (family) from the PANTHER database.
+
+
+
+
+
+
+
+
+
+
+ RNAVirusDB ID
+
+ beta12orEarlier
+ Could list (or reference) other taxa here from https://www.phenoscape.org/wiki/Taxonomic_Rank_Vocabulary.
+ A unique identifier for a virus from the RNAVirusDB database.
+
+
+
+
+
+
+
+
+
+
+ Virus ID
+
+
+ beta12orEarlier
+ An accession of annotation on a (group of) viruses (catalogued in a database).
+
+
+
+
+
+
+
+
+
+
+ NCBI Genome Project ID
+
+ An identifier of a genome project assigned by NCBI.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ NCBI genome accession
+
+ A unique identifier of a whole genome assigned by the NCBI.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence profile data
+
+ 1.8
+ Data concerning, extracted from, or derived from the analysis of a sequence profile, such as its name, length, technical details about the profile or it's construction, the biological role or annotation, and so on.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein ID (TopDB)
+
+ beta12orEarlier
+ TopDB ID
+ Unique identifier for a membrane protein from the TopDB database.
+
+
+
+
+
+
+
+
+
+
+ Gel ID
+
+ Gel identifier
+ Identifier of a two-dimensional (protein) gel.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Reference map name (SWISS-2DPAGE)
+
+
+ beta12orEarlier
+ Name of a reference map gel from the SWISS-2DPAGE database.
+
+
+
+
+
+
+
+
+
+
+ Protein ID (PeroxiBase)
+
+ PeroxiBase ID
+ beta12orEarlier
+ Unique identifier for a peroxidase protein from the PeroxiBase database.
+
+
+
+
+
+
+
+
+
+
+ SISYPHUS ID
+
+ beta12orEarlier
+ Identifier of an entry from the SISYPHUS database of tertiary structure alignments.
+
+
+
+
+
+
+
+
+
+
+ ORF ID
+
+
+ beta12orEarlier
+ Accession of an open reading frame (catalogued in a database).
+
+
+
+
+
+
+
+
+
+
+ ORF identifier
+
+ An identifier of an open reading frame.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Linucs ID
+
+ Identifier of an entry from the GlycosciencesDB database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Protein ID (LGICdb)
+
+ beta12orEarlier
+ LGICdb ID
+ Unique identifier for a ligand-gated ion channel protein from the LGICdb database.
+
+
+
+
+
+
+
+
+
+
+ MaizeDB ID
+
+ beta12orEarlier
+ Identifier of an EST sequence from the MaizeDB database.
+
+
+
+
+
+
+
+
+
+
+ Gene ID (MfunGD)
+
+ beta12orEarlier
+ A unique identifier of gene in the MfunGD database.
+
+
+
+
+
+
+
+
+
+
+ Orpha number
+
+
+
+
+
+
+
+ beta12orEarlier
+ An identifier of a disease from the Orpha database.
+
+
+
+
+
+
+
+
+
+
+ Protein ID (EcID)
+
+ beta12orEarlier
+ Unique identifier for a protein from the EcID database.
+
+
+
+
+
+
+
+
+
+
+ Clone ID (RefSeq)
+
+
+ A unique identifier of a cDNA molecule catalogued in the RefSeq database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Protein ID (ConoServer)
+
+ beta12orEarlier
+ Unique identifier for a cone snail toxin protein from the ConoServer database.
+
+
+
+
+
+
+
+
+
+
+ GeneSNP ID
+
+ Identifier of a GeneSNP database entry.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Lipid identifier
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Identifier of a lipid.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Databank
+
+ true
+ beta12orEarlier
+ A flat-file (textual) data archive.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+ Web portal
+
+ A web site providing data (web pages) on a common theme to a HTTP client.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+ Gene ID (VBASE2)
+
+ Identifier for a gene from the VBASE2 database.
+ beta12orEarlier
+ VBASE2 ID
+
+
+
+
+
+
+
+
+
+
+ DPVweb ID
+
+ DPVweb virus ID
+ beta12orEarlier
+ A unique identifier for a virus from the DPVweb database.
+
+
+
+
+
+
+
+
+
+
+ Pathway ID (BioSystems)
+
+ beta12orEarlier
+ Identifier of a pathway from the BioSystems pathway database.
+ [0-9]+
+
+
+
+
+
+
+
+
+
+
+ Experimental data (proteomics)
+
+ true
+ Data concerning a proteomics experiment.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Abstract
+
+ beta12orEarlier
+ An abstract of a scientific article.
+
+
+
+
+
+
+
+
+
+ Lipid structure
+
+ beta12orEarlier
+ 3D coordinate and associated data for a lipid structure.
+
+
+
+
+
+
+
+
+
+ Drug structure
+
+ beta12orEarlier
+ 3D coordinate and associated data for the (3D) structure of a drug.
+
+
+
+
+
+
+
+
+
+ Toxin structure
+
+ 3D coordinate and associated data for the (3D) structure of a toxin.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Position-specific scoring matrix
+
+
+ beta12orEarlier
+ PSSM
+ A simple matrix of numbers, where each value (or column of values) is derived from analysis of the corresponding position in a sequence alignment.
+
+
+
+
+
+
+
+
+
+ Distance matrix
+
+ A matrix of distances between molecular entities, where a value (distance) is (typically) derived from comparison of two entities and reflects their similarity.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structural distance matrix
+
+ Distances (values representing similarity) between a group of molecular structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Article metadata
+
+ true
+ beta12orEarlier
+ Bibliographic data concerning scientific article(s).
+ 1.5
+
+
+
+
+
+
+
+
+
+ Ontology concept
+
+ beta12orEarlier
+ This includes any fields from the concept definition such as concept name, definition, comments and so on.
+ A concept from a biological ontology.
+
+
+
+
+
+
+
+
+
+ Codon usage bias
+
+ A numerical measure of differences in the frequency of occurrence of synonymous codons in DNA sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Northern blot report
+
+ true
+ beta12orEarlier
+ 1.8
+ Northern Blot experiments.
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (VNTR)
+
+ 1.8
+ beta12orEarlier
+ true
+ variable number of tandem repeat (VNTR) polymorphism in a DNA sequence.
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (microsatellite)
+
+ true
+ microsatellite polymorphism in a DNA sequence.
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (RFLP)
+
+ beta12orEarlier
+ true
+ 1.8
+ restriction fragment length polymorphisms (RFLP) in a DNA sequence.
+
+
+
+
+
+
+
+
+
+ Radiation hybrid map
+
+ The radiation method can break very closely linked markers providing a more detailed map. Most genetic markers and subsequences may be located to a defined map position and with a more precise estimates of distance than a linkage map.
+ A map showing distance between genetic markers estimated by radiation-induced breaks in a chromosome.
+ beta12orEarlier
+ RH map
+
+
+
+
+
+
+
+
+
+ ID list
+
+ A simple list of data identifiers (such as database accessions), possibly with additional basic information on the addressed data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic gene frequencies data
+
+ beta12orEarlier
+ Gene frequencies data that may be read during phylogenetic tree calculation.
+
+
+
+
+
+
+
+
+
+ Sequence set (polymorphic)
+
+ beta13
+ beta12orEarlier
+ true
+ A set of sub-sequences displaying some type of polymorphism, typically indicating the sequence in which they occur, their position and other metadata.
+
+
+
+
+
+
+
+
+
+ DRCAT resource
+
+ 1.5
+ An entry (resource) from the DRCAT bioinformatics resource catalogue.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Protein complex
+
+ beta12orEarlier
+ 3D coordinate and associated data for a multi-protein complex; two or more polypeptides chains in a stable, functional association with one another.
+
+
+
+
+
+
+
+
+
+ Protein structural motif
+
+ beta12orEarlier
+ 3D coordinate and associated data for a protein (3D) structural motif; any group of contiguous or non-contiguous amino acid residues but typically those forming a feature with a structural or functional role.
+
+
+
+
+
+
+
+
+
+ Lipid report
+
+ beta12orEarlier
+ Annotation on or information derived from one or more specific lipid 3D structure(s).
+
+
+
+
+
+
+
+
+
+ Secondary structure image
+
+ 1.4
+ beta12orEarlier
+ Image of one or more molecular secondary structures.
+ true
+
+
+
+
+
+
+
+
+
+ Secondary structure report
+
+ Secondary structure-derived report
+ beta12orEarlier
+ true
+ An informative report on general information, properties or features of one or more molecular secondary structures.
+ 1.5
+
+
+
+
+
+
+
+
+
+ DNA features
+
+ beta12orEarlier
+ DNA sequence-specific feature annotation (not in a feature table).
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ RNA features report
+
+ true
+ beta12orEarlier
+ 1.5
+ Features concerning RNA or regions of DNA that encode an RNA molecule.
+ RNA features
+ Nucleic acid features (RNA features)
+
+
+
+
+
+
+
+
+
+ Plot
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Biological data that has been plotted as a graph of some type.
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (polymorphism)
+
+ true
+ DNA polymorphism.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein sequence record
+
+
+ A protein sequence and associated metadata.
+ beta12orEarlier
+ Protein sequence record
+ Sequence record (protein)
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence record
+
+
+ RNA sequence record
+ Nucleotide sequence record
+ A nucleic acid sequence and associated metadata.
+ beta12orEarlier
+ DNA sequence record
+ Sequence record (nucleic acid)
+
+
+
+
+
+
+
+
+
+ Protein sequence record (full)
+
+ A protein sequence and comprehensive metadata (such as a feature table), typically corresponding to a full entry from a molecular sequence database.
+ 1.8
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence record (full)
+
+ true
+ A nucleic acid sequence and comprehensive metadata (such as a feature table), typically corresponding to a full entry from a molecular sequence database.
+ beta12orEarlier
+ 1.8
+
+
+
+
+
+
+
+
+
+ Biological model accession
+
+
+ beta12orEarlier
+ Accession of a mathematical model, typically an entry from a database.
+
+
+
+
+
+
+
+
+
+
+ Cell type name
+
+
+ The name of a type or group of cells.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Cell type accession
+
+
+ beta12orEarlier
+ Accession of a type or group of cells (catalogued in a database).
+
+
+
+
+
+
+
+
+
+
+ Compound accession
+
+
+ Small molecule accession
+ Accession of an entry from a database of chemicals.
+ beta12orEarlier
+ Chemical compound accession
+
+
+
+
+
+
+
+
+
+
+ Drug accession
+
+
+ Accession of a drug.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Toxin name
+
+
+ Name of a toxin.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Toxin accession
+
+
+ beta12orEarlier
+ Accession of a toxin (catalogued in a database).
+
+
+
+
+
+
+
+
+
+
+ Monosaccharide accession
+
+
+ Accession of a monosaccharide (catalogued in a database).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Drug name
+
+
+ beta12orEarlier
+ Common name of a drug.
+
+
+
+
+
+
+
+
+
+
+ Carbohydrate accession
+
+
+ Accession of an entry from a database of carbohydrates.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Molecule accession
+
+
+ Accession of a specific molecule (catalogued in a database).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Data resource definition accession
+
+
+ beta12orEarlier
+ Accession of a data definition (catalogued in a database).
+
+
+
+
+
+
+
+
+
+
+ Genome accession
+
+
+ An accession of a particular genome (in a database).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Map accession
+
+
+ An accession of a map of a molecular sequence (deposited in a database).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Lipid accession
+
+
+ beta12orEarlier
+ Accession of an entry from a database of lipids.
+
+
+
+
+
+
+
+
+
+
+ Peptide ID
+
+
+ beta12orEarlier
+ Accession of a peptide deposited in a database.
+
+
+
+
+
+
+
+
+
+
+ Protein accession
+
+
+ Protein accessions
+ beta12orEarlier
+ Accession of a protein deposited in a database.
+
+
+
+
+
+
+
+
+
+
+ Organism accession
+
+
+ An accession of annotation on a (group of) organisms (catalogued in a database).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Organism name
+
+
+ Moby:Organism_Name
+ Moby:OrganismsShortName
+ Moby:OccurrenceRecord
+ Moby:BriefOccurrenceRecord
+ Moby:FirstEpithet
+ Moby:InfraspecificEpithet
+ beta12orEarlier
+ Moby:OrganismsLongName
+ The name of an organism (or group of organisms).
+
+
+
+
+
+
+
+
+
+
+ Protein family accession
+
+
+ beta12orEarlier
+ Accession of a protein family (that is deposited in a database).
+
+
+
+
+
+
+
+
+
+
+ Transcription factor accession
+
+
+
+ beta12orEarlier
+ Accession of an entry from a database of transcription factors or binding sites.
+
+
+
+
+
+
+
+
+
+
+ Strain accession
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identifier of a strain of an organism variant, typically a plant, virus or bacterium.
+
+
+
+
+
+
+
+
+
+
+ Virus identifier
+
+ An accession of annotation on a (group of) viruses (catalogued in a database).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Sequence features metadata
+
+ beta12orEarlier
+ Metadata on sequence features.
+
+
+
+
+
+
+
+
+
+ Gramene identifier
+
+ beta12orEarlier
+ Identifier of a Gramene database entry.
+
+
+
+
+
+
+
+
+
+
+ DDBJ accession
+
+ beta12orEarlier
+ DDBJ accession number
+ DDBJ identifier
+ DDBJ ID
+ An identifier of an entry from the DDBJ sequence database.
+
+
+
+
+
+
+
+
+
+
+ ConsensusPathDB identifier
+
+ beta12orEarlier
+ An identifier of an entity from the ConsensusPathDB database.
+
+
+
+
+
+
+
+
+
+
+ Sequence data
+
+ This is a broad data type and is used as a placeholder for other, more specific types.
+ 1.8
+ beta12orEarlier
+ true
+ Data concerning, extracted from, or derived from the analysis of molecular sequence(s).
+
+
+
+
+
+
+
+
+
+ Codon usage
+
+ beta12orEarlier
+ true
+ beta13
+ Data concerning codon usage.
+ This is a broad data type and is used as a placeholder for other, more specific types.
+
+
+
+
+
+
+
+
+
+ Article report
+
+ beta12orEarlier
+ 1.5
+ Data derived from the analysis of a scientific text such as a full text article from a scientific journal.
+ true
+
+
+
+
+
+
+
+
+
+ Sequence report
+
+ An informative report of information about molecular sequence(s), including basic information (metadata), and reports generated from molecular sequence analysis, including positional features and non-positional properties.
+ beta12orEarlier
+ Sequence-derived report
+
+
+
+
+
+
+
+
+
+ Protein secondary structure report
+
+ An informative report about the properties or features of one or more protein secondary structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Hopp and Woods plot
+
+
+ A Hopp and Woods plot of predicted antigenicity of a peptide or protein.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid melting curve
+
+
+ Shows the proportion of nucleic acid which is double-stranded versus temperature.
+ A melting curve of a double-stranded nucleic acid molecule (DNA or DNA/RNA).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid probability profile
+
+ A probability profile of a double-stranded nucleic acid molecule (DNA or DNA/RNA).
+ beta12orEarlier
+ Shows the probability of a base pair not being melted (i.e. remaining as double-stranded DNA) at a specified temperature
+
+
+
+
+
+
+
+
+
+ Nucleic acid temperature profile
+
+ A temperature profile of a double-stranded nucleic acid molecule (DNA or DNA/RNA).
+ Plots melting temperature versus base position.
+ beta12orEarlier
+ Melting map
+
+
+
+
+
+
+
+
+
+ Gene regulatory network report
+
+ 1.8
+ A report typically including a map (diagram) of a gene regulatory network.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ 2D PAGE gel report
+
+ An informative report on a two-dimensional (2D PAGE) gel.
+ 2D PAGE image report
+ 1.8
+ true
+ 2D PAGE gel annotation
+ beta12orEarlier
+ 2D PAGE image annotation
+
+
+
+
+
+
+
+
+
+ Oligonucleotide probe sets annotation
+
+ beta12orEarlier
+ General annotation on a set of oligonucleotide probes, such as the gene name with which the probe set is associated and which probes belong to the set.
+
+
+
+
+
+
+
+
+
+ Microarray image
+
+ 1.5
+ beta12orEarlier
+ Gene expression image
+ An image from a microarray experiment which (typically) allows a visualisation of probe hybridisation and gene-expression data.
+ true
+
+
+
+
+
+
+
+
+
+ Image
+
+ http://semanticscience.org/resource/SIO_000081
+ Biological or biomedical data has been rendered into an image, typically for display on screen.
+ http://semanticscience.org/resource/SIO_000079
+ Image data
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence image
+
+
+ Image of a molecular sequence, possibly with sequence features or properties shown.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein hydropathy data
+
+ Protein hydropathy report
+ A report on protein properties concerning hydropathy.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Workflow data
+
+ beta12orEarlier
+ beta13
+ Data concerning a computational workflow.
+ true
+
+
+
+
+
+
+
+
+
+ Workflow
+
+ true
+ beta12orEarlier
+ 1.5
+ A computational workflow.
+
+
+
+
+
+
+
+
+
+ Secondary structure data
+
+ beta13
+ true
+ beta12orEarlier
+ Data concerning molecular secondary structure data.
+
+
+
+
+
+
+
+
+
+ Protein sequence (raw)
+
+
+ Raw protein sequence
+ beta12orEarlier
+ Raw sequence (protein)
+ A raw protein sequence (string of characters).
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence (raw)
+
+
+ Nucleic acid raw sequence
+ beta12orEarlier
+ Nucleotide sequence (raw)
+ Raw sequence (nucleic acid)
+ A raw nucleic acid sequence.
+
+
+
+
+
+
+
+
+
+ Protein sequence
+
+ One or more protein sequences, possibly with associated annotation.
+ Protein sequences
+ beta12orEarlier
+ http://purl.org/biotop/biotop.owl#AminoAcidSequenceInformation
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence
+
+ One or more nucleic acid sequences, possibly with associated annotation.
+ beta12orEarlier
+ DNA sequence
+ Nucleotide sequence
+ Nucleotide sequences
+ Nucleic acid sequences
+ http://purl.org/biotop/biotop.owl#NucleotideSequenceInformation
+
+
+
+
+
+
+
+
+
+ Reaction data
+
+ Enzyme kinetics annotation
+ This is a broad data type and is used as a placeholder for other, more specific types.
+ beta12orEarlier
+ Reaction annotation
+ Data concerning a biochemical reaction, typically data and more general annotation on the kinetics of enzyme-catalysed reaction.
+
+
+
+
+
+
+
+
+
+ Peptide property
+
+ beta12orEarlier
+ Peptide data
+ Data concerning small peptides.
+
+
+
+
+
+
+
+
+
+ Protein classification
+
+ This is a broad data type and is used as a placeholder for other, more specific types.
+ Protein classification data
+ An informative report concerning the classification of protein sequences or structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+ Sequence motif data
+
+ true
+ 1.8
+ Data concerning specific or conserved pattern in molecular sequences.
+ beta12orEarlier
+ This is a broad data type and is used as a placeholder for other, more specific types.
+
+
+
+
+
+
+
+
+
+ Sequence profile data
+
+ beta12orEarlier
+ true
+ This is a broad data type and is used as a placeholder for other, more specific types.
+ beta13
+ Data concerning models representing a (typically multiple) sequence alignment.
+
+
+
+
+
+
+
+
+
+ Pathway or network data
+
+ Data concerning a specific biological pathway or network.
+ beta13
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Pathway or network report
+
+
+
+
+
+
+
+ beta12orEarlier
+ An informative report concerning or derived from the analysis of a biological pathway or network, such as a map (diagram) or annotation.
+
+
+
+
+
+
+
+
+
+ Nucleic acid thermodynamic data
+
+ Nucleic acid property (thermodynamic or kinetic)
+ A thermodynamic or kinetic property of a nucleic acid molecule.
+ Nucleic acid thermodynamic property
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid classification
+
+ This is a broad data type and is used as a placeholder for other, more specific types.
+ beta12orEarlier
+ Data concerning the classification of nucleic acid sequences or structures.
+ Nucleic acid classification data
+
+
+
+
+
+
+
+
+ Classification report
+
+ This can include an entire classification, components such as classifiers, assignments of entities to a classification and so on.
+ beta12orEarlier
+ true
+ Classification data
+ A report on a classification of molecular sequences, structures or other entities.
+ 1.5
+
+
+
+
+
+
+
+
+
+ Protein features report (key folding sites)
+
+ beta12orEarlier
+ key residues involved in protein folding.
+ 1.8
+ true
+
+
+
+
+
+
+
+
+
+ Protein torsion angle data
+
+ Torsion angle data
+ Torsion angle data for a protein structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein structure image
+
+
+ An image of protein structure.
+ beta12orEarlier
+ Structure image (protein)
+
+
+
+
+
+
+
+
+
+ Phylogenetic character weights
+
+ Weights for sequence positions or characters in phylogenetic analysis where zero is defined as unweighted.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Annotation track
+
+ beta12orEarlier
+ Genomic track
+ Annotation of one particular positional feature on a biomolecular (typically genome) sequence, suitable for import and display in a genome browser.
+ Genome annotation track
+ Genome-browser track
+ Genome track
+ Sequence annotation track
+
+
+
+
+
+
+
+
+
+ UniProt accession
+
+
+
+
+
+
+
+ UniProtKB accession number
+ beta12orEarlier
+ P43353|Q7M1G0|Q9C199|A5A6J6
+ UniProt entry accession
+ [OPQ][0-9][A-Z0-9]{3}[0-9]|[A-NR-Z][0-9]([A-Z][A-Z0-9]{2}[0-9]){1,2}
+ Swiss-Prot entry accession
+ TrEMBL entry accession
+ Accession number of a UniProt (protein sequence) database entry.
+ UniProtKB accession
+ UniProt accession number
+
+
+
+
+
+
+
+
+
+
+ NCBI genetic code ID
+
+
+ Identifier of a genetic code in the NCBI list of genetic codes.
+ [1-9][0-9]?
+ 16
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Ontology concept identifier
+
+
+
+
+
+
+
+ Identifier of a concept in an ontology of biological or bioinformatics concepts and relations.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ GO concept name (biological process)
+
+ true
+ The name of a concept for a biological process from the GO ontology.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GO concept name (molecular function)
+
+ true
+ beta12orEarlier
+ The name of a concept for a molecular function from the GO ontology.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Taxonomy
+
+
+
+
+
+
+
+ This is a broad data type and is used as a placeholder for other, more specific types.
+ beta12orEarlier
+ Data concerning the classification, identification and naming of organisms.
+ Taxonomic data
+
+
+
+
+
+
+
+
+
+ Protein ID (EMBL/GenBank/DDBJ)
+
+ beta13
+ EMBL/GENBANK/DDBJ coding feature protein identifier, issued by International collaborators.
+ This qualifier consists of a stable ID portion (3+5 format with 3 position letters and 5 numbers) plus a version number after the decimal point. When the protein sequence encoded by the CDS changes, only the version number of the /protein_id value is incremented; the stable part of the /protein_id remains unchanged and as a result will permanently be associated with a given protein; this qualifier is valid only on CDS features which translate into a valid protein.
+
+
+
+
+
+
+
+
+
+
+ Core data
+
+ Core data entities typically have a format and may be identified by an accession number.
+ A type of data that (typically) corresponds to entries from the primary biological databases and which is (typically) the primary input or output of a tool, i.e. the data the tool processes or generates, as distinct from metadata and identifiers which describe and identify such core data, parameters that control the behaviour of tools, reports of derivative data generated by tools and annotation.
+ 1.5
+ true
+ beta13
+
+
+
+
+
+
+
+
+
+ Sequence feature identifier
+
+
+
+
+
+
+
+ beta13
+ Name or other identifier of molecular sequence feature(s).
+
+
+
+
+
+
+
+
+
+
+ Structure identifier
+
+
+
+
+
+
+
+ beta13
+ An identifier of a molecular tertiary structure, typically an entry from a structure database.
+
+
+
+
+
+
+
+
+
+
+ Matrix identifier
+
+
+
+
+
+
+
+ An identifier of an array of numerical values, such as a comparison matrix.
+ beta13
+
+
+
+
+
+
+
+
+
+
+ Protein sequence composition
+
+ beta13
+ 1.8
+ true
+ A report (typically a table) on character or word composition / frequency of protein sequence(s).
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence composition (report)
+
+ 1.8
+ A report (typically a table) on character or word composition / frequency of nucleic acid sequence(s).
+ true
+ beta13
+
+
+
+
+
+
+
+
+
+ Protein domain classification node
+
+ beta13
+ A node from a classification of protein structural domain(s).
+ true
+ 1.5
+
+
+
+
+
+
+
+
+
+ CAS number
+
+ beta13
+ CAS registry number
+ Unique numerical identifier of chemicals in the scientific literature, as assigned by the Chemical Abstracts Service.
+
+
+
+
+
+
+
+
+
+
+ ATC code
+
+ Unique identifier of a drug conforming to the Anatomical Therapeutic Chemical (ATC) Classification System, a drug classification system controlled by the WHO Collaborating Centre for Drug Statistics Methodology (WHOCC).
+ beta13
+
+
+
+
+
+
+
+
+
+
+ UNII
+
+ beta13
+ A unique, unambiguous, alphanumeric identifier of a chemical substance as catalogued by the Substance Registration System of the Food and Drug Administration (FDA).
+ Unique Ingredient Identifier
+
+
+
+
+
+
+
+
+
+
+ Geotemporal metadata
+
+ 1.5
+ beta13
+ true
+ Basic information concerning geographical location or time.
+
+
+
+
+
+
+
+
+
+ System metadata
+
+ Metadata concerning the software, hardware or other aspects of a computer system.
+ beta13
+
+
+
+
+
+
+
+
+
+ Sequence feature name
+
+
+ A name of a sequence feature, e.g. the name of a feature to be displayed to an end-user.
+ beta13
+
+
+
+
+
+
+
+
+
+
+ Experimental measurement
+
+ beta13
+ Raw data such as measurements or other results from laboratory experiments, as generated from laboratory hardware.
+ Experimental measurement data
+ Measurement
+ This is a broad data type and is used as a placeholder for other, more specific types. It is primarily intended to help navigation of EDAM and would not typically be used for annotation.
+ Measured data
+ Experimentally measured data
+ Measurement metadata
+ Measurement data
+ Raw experimental data
+
+
+
+
+
+
+
+
+
+ Raw microarray data
+
+
+ beta13
+ Raw data (typically MIAME-compliant) for hybridisations from a microarray experiment.
+ Such data as found in Affymetrix CEL or GPR files.
+
+
+
+
+
+
+
+
+
+ Processed microarray data
+
+
+
+
+
+
+
+ Data generated from processing and analysis of probe set data from a microarray experiment.
+ Gene annotation (expression)
+ Microarray probe set data
+ beta13
+ Gene expression report
+ Such data as found in Affymetrix .CHP files or data from other software such as RMA or dChip.
+
+
+
+
+
+
+
+
+
+ Gene expression matrix
+
+
+ This combines data from all hybridisations.
+ beta13
+ Normalised microarray data
+ The final processed (normalised) data for a set of hybridisations in a microarray experiment.
+ Gene expression data matrix
+
+
+
+
+
+
+
+
+
+ Sample annotation
+
+ Annotation on a biological sample, for example experimental factors and their values.
+ This might include compound and dose in a dose response experiment.
+ beta13
+
+
+
+
+
+
+
+
+
+ Microarray metadata
+
+ This might include gene identifiers, genomic coordinates, probe oligonucleotide sequences etc.
+ Annotation on the array itself used in a microarray experiment.
+ beta13
+
+
+
+
+
+
+
+
+
+ Microarray protocol annotation
+
+ true
+ This might describe e.g. the normalisation methods used to process the raw data.
+ beta13
+ 1.8
+ Annotation on laboratory and/or data processing protocols used in an microarray experiment.
+
+
+
+
+
+
+
+
+
+ Microarray hybridisation data
+
+ Data concerning the hybridisations measured during a microarray experiment.
+ beta13
+
+
+
+
+
+
+
+
+
+ Protein features report (topological domains)
+
+ 1.8
+ beta13
+ topological domains such as cytoplasmic regions in a protein.
+ true
+
+
+
+
+
+
+
+
+
+ Sequence features (compositionally-biased regions)
+
+ 1.5
+ beta13
+ true
+ A report of regions in a molecular sequence that are biased to certain characters.
+
+
+
+
+
+
+
+
+
+ Nucleic acid features (difference and change)
+
+ beta13
+ A report on features in a nucleic acid sequence that indicate changes to or differences between sequences.
+ 1.5
+ true
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (expression signal)
+
+ true
+ beta13
+ regions within a nucleic acid sequence containing a signal that alters a biological function.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (binding)
+
+ nucleic acids binding to some other molecule.
+ 1.8
+ true
+ beta13
+ This includes ribosome binding sites (Shine-Dalgarno sequence in prokaryotes).
+
+
+
+
+
+
+
+
+
+ Nucleic acid repeats (report)
+
+ true
+ repetitive elements within a nucleic acid sequence.
+ 1.8
+ beta13
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (replication and recombination)
+
+ beta13
+ true
+ 1.8
+ DNA replication or recombination.
+
+
+
+
+
+
+
+
+
+ Nucleic acid structure report
+
+
+ A report on regions within a nucleic acid sequence which form secondary or tertiary (3D) structures.
+ Stem loop (report)
+ d-loop (report)
+ Nucleic acid features (structure)
+ Quadruplexes (report)
+ beta13
+
+
+
+
+
+
+
+
+
+ Protein features report (repeats)
+
+ 1.8
+ short repetitive subsequences (repeat sequences) in a protein sequence.
+ beta13
+ true
+
+
+
+
+
+
+
+
+
+ Sequence motif matches (protein)
+
+ Report on the location of matches to profiles, motifs (conserved or functional patterns) or other signatures in one or more protein sequences.
+ 1.8
+ beta13
+ true
+
+
+
+
+
+
+
+
+
+ Sequence motif matches (nucleic acid)
+
+ Report on the location of matches to profiles, motifs (conserved or functional patterns) or other signatures in one or more nucleic acid sequences.
+ beta13
+ true
+ 1.8
+
+
+
+
+
+
+
+
+
+ Nucleic acid features (d-loop)
+
+ beta13
+ true
+ 1.5
+ A report on displacement loops in a mitochondrial DNA sequence.
+ A displacement loop is a region of mitochondrial DNA in which one of the strands is displaced by an RNA molecule.
+
+
+
+
+
+
+
+
+
+ Nucleic acid features (stem loop)
+
+ beta13
+ true
+ A report on stem loops in a DNA sequence.
+ 1.5
+ A stem loop is a hairpin structure; a double-helical structure formed when two complementary regions of a single strand of RNA or DNA molecule form base-pairs.
+
+
+
+
+
+
+
+
+
+ Gene transcript report
+
+ This includes 5'untranslated region (5'UTR), coding sequences (CDS), exons, intervening sequences (intron) and 3'untranslated regions (3'UTR).
+ Nucleic acid features (mRNA features)
+ beta13
+ Transcript (report)
+ mRNA features
+ Gene transcript annotation
+ Clone or EST (report)
+ mRNA (report)
+ An informative report on features of a messenger RNA (mRNA) molecules including precursor RNA, primary (unprocessed) transcript and fully processed molecules. This includes reports on a specific gene transcript, clone or EST.
+
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (signal or transit peptide)
+
+ true
+ coding sequences for a signal or transit peptide.
+ 1.8
+ beta13
+
+
+
+
+
+
+
+
+
+ Non-coding RNA
+
+ beta13
+ true
+ features of non-coding or functional RNA molecules, including tRNA and rRNA.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Transcriptional features (report)
+
+ 1.5
+ true
+ This includes promoters, CAAT signals, TATA signals, -35 signals, -10 signals, GC signals, primer binding sites for initiation of transcription or reverse transcription, enhancer, attenuator, terminators and ribosome binding sites.
+ Features concerning transcription of DNA into RNA including the regulation of transcription.
+ beta13
+
+
+
+
+
+
+
+
+
+ Nucleic acid features report (STS)
+
+ sequence tagged sites (STS) in nucleic acid sequences.
+ 1.8
+ true
+ beta13
+
+
+
+
+
+
+
+
+
+ Nucleic acid features (immunoglobulin gene structure)
+
+ true
+ beta13
+ 1.5
+ A report on predicted or actual immunoglobulin gene structure including constant, switch and variable regions and diversity, joining and variable segments.
+
+
+
+
+
+
+
+
+
+ SCOP class
+
+ 1.5
+ beta13
+ true
+ Information on a 'class' node from the SCOP database.
+
+
+
+
+
+
+
+
+
+ SCOP fold
+
+ beta13
+ Information on a 'fold' node from the SCOP database.
+ 1.5
+ true
+
+
+
+
+
+
+
+
+
+ SCOP superfamily
+
+ beta13
+ Information on a 'superfamily' node from the SCOP database.
+ 1.5
+ true
+
+
+
+
+
+
+
+
+
+ SCOP family
+
+ 1.5
+ true
+ Information on a 'family' node from the SCOP database.
+ beta13
+
+
+
+
+
+
+
+
+
+ SCOP protein
+
+ Information on a 'protein' node from the SCOP database.
+ true
+ beta13
+ 1.5
+
+
+
+
+
+
+
+
+
+ SCOP species
+
+ 1.5
+ true
+ beta13
+ Information on a 'species' node from the SCOP database.
+
+
+
+
+
+
+
+
+
+ Mass spectrometry experiment
+
+ 1.8
+ true
+ mass spectrometry experiments.
+ beta13
+
+
+
+
+
+
+
+
+
+ Gene family report
+
+ An informative report on a particular family of genes, typically a set of genes with similar sequence that originate from duplication of a common ancestor gene, or any other classification of nucleic acid sequences or structures that reflects gene structure.
+ This includes reports on gene homologues between species.
+ beta13
+ Gene annotation (homology information)
+ Homology information
+ Gene annotation (homology)
+ Nucleic acid classification
+ Gene family annotation
+ Gene homology (report)
+
+
+
+
+
+
+
+
+
+ Protein image
+
+ beta13
+ An image of a protein.
+
+
+
+
+
+
+
+
+
+ Protein alignment
+
+ An alignment of protein sequences and/or structures.
+ beta13
+
+
+
+
+
+
+
+
+
+ NGS experiment
+
+ 1.8
+ 1.0
+ sequencing experiment, including samples, sampling, preparation, sequencing, and analysis.
+ true
+
+
+
+
+
+
+
+
+
+ Sequence assembly report
+
+ An informative report about a DNA sequence assembly.
+ 1.1
+ This might include an overall quality assessment of the assembly and summary statistics including counts, average length and number of bases for reads, matches and non-matches, contigs, reads in pairs etc.
+ Assembly report
+
+
+
+
+
+
+
+
+
+ Genome index
+
+ 1.1
+ Many sequence alignment tasks involving many or very large sequences rely on a precomputed index of the sequence to accelerate the alignment.
+ An index of a genome sequence.
+
+
+
+
+
+
+
+
+
+ GWAS report
+
+ 1.8
+ 1.1
+ Report concerning genome-wide association study experiments.
+ true
+ Genome-wide association study
+
+
+
+
+
+
+
+
+
+ Cytoband position
+
+ 1.2
+ The position of a cytogenetic band in a genome.
+ Information might include start and end position in a chromosome sequence, chromosome identifier, name of band and so on.
+
+
+
+
+
+
+
+
+
+ Cell type ontology ID
+
+
+ CL ID
+ Cell type ontology concept ID.
+ CL_[0-9]{7}
+ 1.2
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Kinetic model
+
+ 1.2
+ Mathematical model of a network, that contains biochemical kinetics.
+
+
+
+
+
+
+
+
+
+ COSMIC ID
+
+ COSMIC identifier
+ cosmic ID
+ Identifier of a COSMIC database entry.
+ cosmic identifier
+ cosmic id
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ HGMD ID
+
+ Identifier of a HGMD database entry.
+ hgmd ID
+ hgmd identifier
+ beta12orEarlier
+ hgmd id
+ HGMD identifier
+
+
+
+
+
+
+
+
+
+
+ Sequence assembly ID
+
+ Sequence assembly version
+ Unique identifier of sequence assembly.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Sequence feature type
+
+ true
+ A label (text token) describing a type of sequence feature such as gene, transcript, cds, exon, repeat, simple, misc, variation, somatic variation, structural variation, somatic structural variation, constrained or regulatory.
+ 1.3
+ 1.5
+
+
+
+
+
+
+
+
+
+ Gene homology (report)
+
+ beta12orEarlier
+ true
+ An informative report on gene homologues between species.
+ 1.5
+
+
+
+
+
+
+
+
+
+ Ensembl gene tree ID
+
+
+ ENSGT00390000003602
+ Ensembl ID (gene tree)
+ Unique identifier for a gene tree from the Ensembl database.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Gene tree
+
+ 1.3
+ A phylogenetic tree that is an estimate of the character's phylogeny.
+
+
+
+
+
+
+
+
+
+ Species tree
+
+ A phylogenetic tree that reflects phylogeny of the taxa from which the characters (used in calculating the tree) were sampled.
+ 1.3
+
+
+
+
+
+
+
+
+
+ Sample ID
+
+
+
+
+
+
+
+
+ 1.3
+ Sample accession
+ Name or other identifier of an entry from a biosample database.
+
+
+
+
+
+
+
+
+
+
+ MGI accession
+
+
+ Identifier of an object from the MGI database.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Phenotype name
+
+
+ 1.3
+ Name of a phenotype.
+ Phenotypes
+ Phenotype
+
+
+
+
+
+
+
+
+
+
+ Transition matrix
+
+ A HMM transition matrix contains the probabilities of switching from one HMM state to another.
+ Consider for example an HMM with two states (AT-rich and GC-rich). The transition matrix will hold the probabilities of switching from the AT-rich to the GC-rich state, and vica versa.
+ HMM transition matrix
+ 1.4
+
+
+
+
+
+
+
+
+ Emission matrix
+
+ A HMM emission matrix holds the probabilities of choosing the four nucleotides (A, C, G and T) in each of the states of a HMM.
+ 1.4
+ Consider for example an HMM with two states (AT-rich and GC-rich). The emission matrix holds the probabilities of choosing each of the four nucleotides (A, C, G and T) in the AT-rich state and in the GC-rich state.
+ HMM emission matrix
+
+
+
+
+
+
+
+
+ Hidden Markov model
+
+ A statistical Markov model of a system which is assumed to be a Markov process with unobserved (hidden) states.
+ 1.4
+
+
+
+
+
+
+
+
+ Format identifier
+
+ An identifier of a data format.
+ 1.4
+
+
+
+
+
+
+
+
+ Raw image
+
+ 1.5
+ Amino acid data
+ http://semanticscience.org/resource/SIO_000081
+ beta12orEarlier
+ Image data
+ Raw biological or biomedical image generated by some experimental technique.
+
+
+
+
+
+
+
+
+
+ Carbohydrate property
+
+ Carbohydrate data
+ Data concerning the intrinsic physical (e.g. structural) or chemical properties of one, more or all carbohydrates.
+ 1.5
+
+
+
+
+
+
+
+
+
+ Proteomics experiment report
+
+ true
+ 1.8
+ Report concerning proteomics experiments.
+ 1.5
+
+
+
+
+
+
+
+
+
+ RNAi report
+
+ 1.5
+ RNAi experiments.
+ true
+ 1.8
+
+
+
+
+
+
+
+
+
+ Simulation experiment report
+
+ 1.5
+ biological computational model experiments (simulation), for example the minimum information required in order to permit its correct interpretation and reproduction.
+ true
+ 1.8
+
+
+
+
+
+
+
+
+
+ MRI image
+
+
+
+
+
+
+
+ MRT image
+ 1.7
+ Magnetic resonance tomography image
+ Nuclear magnetic resonance imaging image
+
+ Magnetic resonance imaging image
+
+ NMRI image
+ An imaging technique that uses magnetic fields and radiowaves to form images, typically to investigate the anatomy and physiology of the human body.
+
+
+
+
+
+
+
+
+
+ Cell migration track image
+
+
+
+
+
+
+
+ 1.7
+ An image from a cell migration track assay.
+
+
+
+
+
+
+
+
+
+ Rate of association
+
+ kon
+ 1.7
+ Rate of association of a protein with another protein or some other molecule.
+
+
+
+
+
+
+
+
+
+ Gene order
+
+ Such data are often used for genome rearrangement tools and phylogenetic tree labeling.
+ Multiple gene identifiers in a specific order.
+ 1.7
+
+
+
+
+
+
+
+
+
+ Spectrum
+
+ 1.7
+ The spectrum of frequencies of electromagnetic radiation emitted from a molecule as a result of some spectroscopy experiment.
+ Spectra
+
+
+
+
+
+
+
+
+
+ NMR spectrum
+
+
+
+
+
+
+
+ Spectral information for a molecule from a nuclear magnetic resonance experiment.
+ 1.7
+ NMR spectra
+
+
+
+
+
+
+
+
+
+ Chemical structure sketch
+
+ Chemical structure sketches are used for presentational purposes but also as inputs to various analysis software.
+ 1.8
+ Small molecule sketch
+ A sketch of a small molecule made with some specialised drawing package.
+
+
+
+
+
+
+
+
+
+ Nucleic acid signature
+
+ 1.8
+ An informative report about a specific or conserved nucleic acid sequence pattern.
+
+
+
+
+
+
+
+
+
+ DNA sequence
+
+ DNA sequences
+ 1.8
+ A DNA sequence.
+
+
+
+
+
+
+
+
+
+ RNA sequence
+
+ An RNA sequence.
+ DNA sequences
+ RNA sequences
+ 1.8
+
+
+
+
+
+
+
+
+
+ RNA sequence (raw)
+
+
+ Raw sequence (RNA)
+ 1.8
+ A raw RNA sequence.
+ RNA raw sequence
+
+
+
+
+
+
+
+
+
+ DNA sequence (raw)
+
+
+ Raw sequence (DNA)
+ A raw DNA sequence.
+ 1.8
+ DNA raw sequence
+
+
+
+
+
+
+
+
+
+ Sequence variations
+
+
+
+
+
+
+
+ 1.8
+ Data on gene sequence variations resulting from large-scale genotyping and DNA sequencing projects.
+ Gene sequence variations
+ Variations are stored along with a reference genome.
+
+
+
+
+
+
+
+
+
+ Bibliography
+
+ 1.8
+ A list of publications such as scientific papers or books.
+
+
+
+
+
+
+
+
+
+ Ontology mapping
+
+ A mapping of supplied textual terms or phrases to ontology concepts (URIs).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Image metadata
+
+ Image-associated data
+ This can include basic provenance and technical information about the image, scientific annotation and so on.
+ Any data concerning a specific biological or biomedical image.
+ 1.9
+ Image data
+ Image-related data
+
+
+
+
+
+
+
+
+
+ Clinical trial report
+
+ Clinical trial information
+ A report concerning a clinical trial.
+ 1.9
+
+
+
+
+
+
+
+
+
+ Reference sample report
+
+ 1.10
+ A report about a biosample.
+ Biosample report
+
+
+
+
+
+
+
+
+
+ Gene Expression Atlas Experiment ID
+
+ Accession number of an entry from the Gene Expression Atlas.
+ 1.10
+
+
+
+
+
+
+
+
+
+
+ SMILES
+
+
+ Chemical structure specified in Simplified Molecular Input Line Entry System (SMILES) line notation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+
+
+ InChI
+
+
+ Chemical structure specified in IUPAC International Chemical Identifier (InChI) line notation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ mf
+
+
+ Chemical structure specified by Molecular Formula (MF), including a count of each element in a compound.
+ beta12orEarlier
+ The general MF query format consists of a series of valid atomic symbols, with an optional number or range.
+
+
+
+
+
+
+
+
+
+ inchikey
+
+
+ The InChIKey (hashed InChI) is a fixed length (25 character) condensed digital representation of an InChI chemical structure specification. It uniquely identifies a chemical compound.
+ beta12orEarlier
+ An InChI identifier is not human-readable but is more suitable for web searches than an InChI chemical structure specification.
+
+
+
+
+
+
+
+
+
+ smarts
+
+ SMILES ARbitrary Target Specification (SMARTS) format for chemical structure specification, which is a subset of the SMILES line notation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ unambiguous pure
+
+
+ beta12orEarlier
+ Alphabet for a molecular sequence with possible unknown positions but without ambiguity or non-sequence characters.
+
+
+
+
+
+
+
+
+
+ nucleotide
+
+
+ Non-sequence characters may be used for example for gaps.
+ http://onto.eva.mpg.de/ontologies/gfo-bio.owl#Nucleotide_sequence
+ beta12orEarlier
+ Alphabet for a nucleotide sequence with possible ambiguity, unknown positions and non-sequence characters.
+
+
+
+
+
+
+
+
+
+ protein
+
+
+ Alphabet for a protein sequence with possible ambiguity, unknown positions and non-sequence characters.
+ beta12orEarlier
+ Non-sequence characters may be used for gaps and translation stop.
+ http://onto.eva.mpg.de/ontologies/gfo-bio.owl#Amino_acid_sequence
+
+
+
+
+
+
+
+
+
+ consensus
+
+
+ beta12orEarlier
+ Alphabet for the consensus of two or more molecular sequences.
+
+
+
+
+
+
+
+
+
+ pure nucleotide
+
+
+ beta12orEarlier
+ Alphabet for a nucleotide sequence with possible ambiguity and unknown positions but without non-sequence characters.
+
+
+
+
+
+
+
+
+
+ unambiguous pure nucleotide
+
+
+ beta12orEarlier
+                Alphabet for a nucleotide sequence (characters ACGTU only) with possible unknown positions but without ambiguity or non-sequence characters.
+
+
+
+
+
+
+
+
+
+ dna
+
+ beta12orEarlier
+ http://onto.eva.mpg.de/ontologies/gfo-bio.owl#DNA_sequence
+ Alphabet for a DNA sequence with possible ambiguity, unknown positions and non-sequence characters.
+
+
+
+
+
+
+
+
+
+ rna
+
+ Alphabet for an RNA sequence with possible ambiguity, unknown positions and non-sequence characters.
+ http://onto.eva.mpg.de/ontologies/gfo-bio.owl#RNA_sequence
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ unambiguous pure dna
+
+
+ Alphabet for a DNA sequence (characters ACGT only) with possible unknown positions but without ambiguity or non-sequence characters.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ pure dna
+
+
+ Alphabet for a DNA sequence with possible ambiguity and unknown positions but without non-sequence characters.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ unambiguous pure rna sequence
+
+
+ Alphabet for an RNA sequence (characters ACGU only) with possible unknown positions but without ambiguity or non-sequence characters.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ pure rna
+
+
+ Alphabet for an RNA sequence with possible ambiguity and unknown positions but without non-sequence characters.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ unambiguous pure protein
+
+
+ beta12orEarlier
+ Alphabet for any protein sequence with possible unknown positions but without ambiguity or non-sequence characters.
+
+
+
+
+
+
+
+
+
+ pure protein
+
+
+ beta12orEarlier
+ Alphabet for any protein sequence with possible ambiguity and unknown positions but without non-sequence characters.
+
+
+
+
+
+
+
+
+
+ UniGene entry format
+
+ beta12orEarlier
+ Format of an entry from UniGene.
+ A UniGene entry includes a set of transcript sequences assigned to the same transcription locus (gene or expressed pseudogene), with information on protein similarities, gene expression, cDNA clone reagents, and genomic location.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ COG sequence cluster format
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Format of an entry from the COG database of clusters of (related) protein sequences.
+
+
+
+
+
+
+
+
+
+ EMBL feature location
+
+
+ beta12orEarlier
+ Feature location
+ Format for sequence positions (feature location) as used in DDBJ/EMBL/GenBank database.
+
+
+
+
+
+
+
+
+
+ quicktandem
+
+
+ Report format for tandem repeats in a nucleotide sequence (format generated by the Sanger Centre quicktandem program).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sanger inverted repeats
+
+
+ beta12orEarlier
+ Report format for inverted repeats in a nucleotide sequence (format generated by the Sanger Centre inverted program).
+
+
+
+
+
+
+
+
+
+ EMBOSS repeat
+
+
+ Report format for tandem repeats in a sequence (an EMBOSS report format).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ est2genome format
+
+
+ beta12orEarlier
+ Format of a report on exon-intron structure generated by EMBOSS est2genome.
+
+
+
+
+
+
+
+
+
+ restrict format
+
+
+ Report format for restriction enzyme recognition sites used by EMBOSS restrict program.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ restover format
+
+
+ beta12orEarlier
+ Report format for restriction enzyme recognition sites used by EMBOSS restover program.
+
+
+
+
+
+
+
+
+
+ REBASE restriction sites
+
+
+ beta12orEarlier
+ Report format for restriction enzyme recognition sites used by REBASE database.
+
+
+
+
+
+
+
+
+
+ FASTA search results format
+
+
+ Format of results of a sequence database search using FASTA.
+ beta12orEarlier
+ This includes (typically) score data, alignment data and a histogram (of observed and expected distribution of E values.)
+
+
+
+
+
+
+
+
+
+ BLAST results
+
+
+ Format of results of a sequence database search using some variant of BLAST.
+ beta12orEarlier
+ This includes score data, alignment data and summary table.
+
+
+
+
+
+
+
+
+
+ mspcrunch
+
+
+ beta12orEarlier
+ Format of results of a sequence database search using some variant of MSPCrunch.
+
+
+
+
+
+
+
+
+
+ Smith-Waterman format
+
+
+ beta12orEarlier
+ Format of results of a sequence database search using some variant of Smith Waterman.
+
+
+
+
+
+
+
+
+
+ dhf
+
+
+ The hits are relatives to a SCOP or CATH family and are found from a search of a sequence database.
+ beta12orEarlier
+ Format of EMBASSY domain hits file (DHF) of hits (sequences) with domain classification information.
+
+
+
+
+
+
+
+
+
+ lhf
+
+
+ beta12orEarlier
+ Format of EMBASSY ligand hits file (LHF) of database hits (sequences) with ligand classification information.
+ The hits are putative ligand-binding sequences and are found from a search of a sequence database.
+
+
+
+
+
+
+
+
+
+ InterPro hits format
+
+
+ Results format for searches of the InterPro database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ InterPro protein view report format
+
+ Format of results of a search of the InterPro database showing matches of query protein sequence(s) to InterPro entries.
+ The report includes a classification of regions in a query protein sequence which are assigned to a known InterPro protein family or group.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ InterPro match table format
+
+ Format of results of a search of the InterPro database showing matches between protein sequence(s) and signatures for an InterPro entry.
+ beta12orEarlier
+                The table presents matches between query proteins (rows) and signature methods (columns) for this entry. Alternatively the sequence(s) might be from the InterPro entry itself. The match position in the protein sequence and match status (true positive, false positive etc) are indicated.
+
+
+
+
+
+
+
+
+
+ HMMER Dirichlet prior
+
+
+ beta12orEarlier
+ Dirichlet distribution HMMER format.
+
+
+
+
+
+
+
+
+
+ MEME Dirichlet prior
+
+
+ beta12orEarlier
+ Dirichlet distribution MEME format.
+
+
+
+
+
+
+
+
+
+ HMMER emission and transition
+
+
+ Format of a report from the HMMER package on the emission and transition counts of a hidden Markov model.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ prosite-pattern
+
+
+ Format of a regular expression pattern from the Prosite database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ EMBOSS sequence pattern
+
+
+ Format of an EMBOSS sequence pattern.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ meme-motif
+
+
+ A motif in the format generated by the MEME program.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ prosite-profile
+
+
+ Sequence profile (sequence classifier) format used in the PROSITE database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ JASPAR format
+
+
+ beta12orEarlier
+ A profile (sequence classifier) in the format used in the JASPAR database.
+
+
+
+
+
+
+
+
+
+ MEME background Markov model
+
+
+ Format of the model of random sequences used by MEME.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HMMER format
+
+
+ Format of a hidden Markov model representation used by the HMMER package.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HMMER-aln
+
+
+
+ beta12orEarlier
+ FASTA-style format for multiple sequences aligned by HMMER package to an HMM.
+
+
+
+
+
+
+
+
+
+ DIALIGN format
+
+
+ Format of multiple sequences aligned by DIALIGN package.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ daf
+
+
+ The format is clustal-like and includes annotation of domain family classification information.
+ EMBASSY 'domain alignment file' (DAF) format, containing a sequence alignment of protein domains belonging to the same SCOP or CATH family.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence-MEME profile alignment
+
+
+ beta12orEarlier
+ Format for alignment of molecular sequences to MEME profiles (position-dependent scoring matrices) as generated by the MAST tool from the MEME package.
+
+
+
+
+
+
+
+
+
+ HMMER profile alignment (sequences versus HMMs)
+
+
+ Format used by the HMMER package for an alignment of a sequence against a hidden Markov model database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HMMER profile alignment (HMM versus sequences)
+
+
+                Format used by the HMMER package for an alignment of a hidden Markov model against a sequence database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylip distance matrix
+
+
+ Data Type must include the distance matrix, probably as pairs of sequence identifiers with a distance (integer or float).
+ beta12orEarlier
+ Format of PHYLIP phylogenetic distance matrix data.
+
+
+
+
+
+
+
+
+
+ ClustalW dendrogram
+
+
+ beta12orEarlier
+ Dendrogram (tree file) format generated by ClustalW.
+
+
+
+
+
+
+
+
+
+ Phylip tree raw
+
+
+ Raw data file format used by Phylip from which a phylogenetic tree is directly generated or plotted.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylip continuous quantitative characters
+
+
+ beta12orEarlier
+ PHYLIP file format for continuous quantitative character data.
+
+
+
+
+
+
+
+
+
+ Phylogenetic property values format
+
+ Format of phylogenetic property data.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Phylip character frequencies format
+
+
+ beta12orEarlier
+ PHYLIP file format for phylogenetics character frequency data.
+
+
+
+
+
+
+
+
+
+ Phylip discrete states format
+
+
+ Format of PHYLIP discrete states data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylip cliques format
+
+
+ beta12orEarlier
+ Format of PHYLIP cliques data.
+
+
+
+
+
+
+
+
+
+ Phylip tree format
+
+
+ Phylogenetic tree data format used by the PHYLIP program.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ TreeBASE format
+
+
+ beta12orEarlier
+ The format of an entry from the TreeBASE database of phylogenetic data.
+
+
+
+
+
+
+
+
+
+ TreeFam format
+
+
+ beta12orEarlier
+ The format of an entry from the TreeFam database of phylogenetic data.
+
+
+
+
+
+
+
+
+
+ Phylip tree distance format
+
+
+ Format for distances, such as Branch Score distance, between two or more phylogenetic trees as used by the Phylip package.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ dssp
+
+
+ beta12orEarlier
+ The DSSP database is built using the DSSP application which defines secondary structure, geometrical features and solvent exposure of proteins, given atomic coordinates in PDB format.
+ Format of an entry from the DSSP database (Dictionary of Secondary Structure in Proteins).
+
+
+
+
+
+
+
+
+
+ hssp
+
+
+ Entry format of the HSSP database (Homology-derived Secondary Structure in Proteins).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Dot-bracket format
+
+
+ beta12orEarlier
+ Format of RNA secondary structure in dot-bracket notation, originally generated by the Vienna RNA package/server.
+ Vienna RNA secondary structure format
+ Vienna RNA format
+
+
+
+
+
+
+
+
+
+ Vienna local RNA secondary structure format
+
+
+ Format of local RNA secondary structure components with free energy values, generated by the Vienna RNA package/server.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PDB database entry format
+
+
+
+
+
+
+
+ beta12orEarlier
+ PDB entry format
+ Format of an entry (or part of an entry) from the PDB database.
+
+
+
+
+
+
+
+
+
+ PDB
+
+
+ PDB format
+ beta12orEarlier
+ Entry format of PDB database in PDB format.
+
+
+
+
+
+
+
+
+
+ mmCIF
+
+
+ Chemical MIME (http://www.ch.ic.ac.uk/chemime): chemical/x-mmcif
+ Entry format of PDB database in mmCIF format.
+ beta12orEarlier
+ mmcif
+
+
+
+
+
+
+
+
+
+ PDBML
+
+
+ Entry format of PDB database in PDBML (XML) format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Domainatrix 3D-1D scoring matrix format
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Format of a matrix of 3D-1D scores used by the EMBOSS Domainatrix applications.
+
+
+
+
+
+
+
+
+ aaindex
+
+
+ Amino acid index format used by the AAindex database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ IntEnz enzyme report format
+
+ beta12orEarlier
+ beta12orEarlier
+ Format of an entry from IntEnz (The Integrated Relational Enzyme Database).
+ IntEnz is the master copy of the Enzyme Nomenclature, the recommendations of the NC-IUBMB on the Nomenclature and Classification of Enzyme-Catalysed Reactions.
+ true
+
+
+
+
+
+
+
+
+
+ BRENDA enzyme report format
+
+ true
+ Format of an entry from the BRENDA enzyme database.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ KEGG REACTION enzyme report format
+
+ true
+ beta12orEarlier
+ Format of an entry from the KEGG REACTION database of biochemical reactions.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ KEGG ENZYME enzyme report format
+
+ beta12orEarlier
+ true
+ Format of an entry from the KEGG ENZYME database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ REBASE proto enzyme report format
+
+ Format of an entry from the proto section of the REBASE enzyme database.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ REBASE withrefm enzyme report format
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Format of an entry from the withrefm section of the REBASE enzyme database.
+
+
+
+
+
+
+
+
+
+ Pcons report format
+
+
+ Format of output of the Pcons Model Quality Assessment Program (MQAP).
+ beta12orEarlier
+ Pcons ranks protein models by assessing their quality based on the occurrence of recurring common three-dimensional structural patterns. Pcons returns a score reflecting the overall global quality and a score for each individual residue in the protein reflecting the local residue quality.
+
+
+
+
+
+
+
+
+
+ ProQ report format
+
+
+ beta12orEarlier
+ ProQ is a neural network-based predictor that predicts the quality of a protein model based on the number of structural features.
+ Format of output of the ProQ protein model quality predictor.
+
+
+
+
+
+
+
+
+
+ SMART domain assignment report format
+
+ beta12orEarlier
+ true
+ Format of SMART domain assignment data.
+ The SMART output file includes data on genetically mobile domains / analysis of domain architectures, including phyletic distributions, functional class, tertiary structures and functionally important residues.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ BIND entry format
+
+ Entry format for the BIND database of protein interaction.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ IntAct entry format
+
+ beta12orEarlier
+ beta12orEarlier
+ Entry format for the IntAct database of protein interaction.
+ true
+
+
+
+
+
+
+
+
+
+ InterPro entry format
+
+ Entry format for the InterPro database of protein signatures (sequence classifiers) and classified sequences.
+ true
+ beta12orEarlier
+ This includes signature metadata, sequence references and a reference to the signature itself. There is normally a header (entry accession numbers and name), abstract, taxonomy information, example proteins etc. Each entry also includes a match list which give a number of different views of the signature matches for the sequences in each InterPro entry.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ InterPro entry abstract format
+
+ true
+ beta12orEarlier
+ References are included and a functional inference is made where possible.
+ beta12orEarlier
+ Entry format for the textual abstract of signatures in an InterPro entry and its protein matches.
+
+
+
+
+
+
+
+
+
+ Gene3D entry format
+
+ Entry format for the Gene3D protein secondary database.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PIRSF entry format
+
+ beta12orEarlier
+ Entry format for the PIRSF protein secondary database.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PRINTS entry format
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Entry format for the PRINTS protein secondary database.
+
+
+
+
+
+
+
+
+
+ Panther Families and HMMs entry format
+
+ beta12orEarlier
+ beta12orEarlier
+ Entry format for the Panther library of protein families and subfamilies.
+ true
+
+
+
+
+
+
+
+
+
+ Pfam entry format
+
+ Entry format for the Pfam protein secondary database.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ SMART entry format
+
+ true
+ beta12orEarlier
+ Entry format for the SMART protein secondary database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Superfamily entry format
+
+ Entry format for the Superfamily protein secondary database.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ TIGRFam entry format
+
+ beta12orEarlier
+ true
+ Entry format for the TIGRFam protein secondary database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ ProDom entry format
+
+ Entry format for the ProDom protein domain classification database.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ FSSP entry format
+
+ Entry format for the FSSP database.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ findkm
+
+
+ beta12orEarlier
+ A report format for the kinetics of enzyme-catalysed reaction(s) in a format generated by EMBOSS findkm. This includes Michaelis Menten plot, Hanes Woolf plot, Michaelis Menten constant (Km) and maximum velocity (Vmax).
+
+
+
+
+
+
+
+
+
+ Ensembl gene report format
+
+ beta12orEarlier
+ Entry format of Ensembl genome database.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ DictyBase gene report format
+
+ true
+ beta12orEarlier
+ Entry format of DictyBase genome database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ CGD gene report format
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Entry format of Candida Genome database.
+
+
+
+
+
+
+
+
+
+ DragonDB gene report format
+
+ beta12orEarlier
+ Entry format of DragonDB genome database.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ EcoCyc gene report format
+
+ Entry format of EcoCyc genome database.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ FlyBase gene report format
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Entry format of FlyBase genome database.
+
+
+
+
+
+
+
+
+
+ Gramene gene report format
+
+ beta12orEarlier
+ beta12orEarlier
+ Entry format of Gramene genome database.
+ true
+
+
+
+
+
+
+
+
+
+ KEGG GENES gene report format
+
+ true
+ beta12orEarlier
+ Entry format of KEGG GENES genome database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ MaizeGDB gene report format
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Entry format of the Maize genetics and genomics database (MaizeGDB).
+
+
+
+
+
+
+
+
+
+ MGD gene report format
+
+ Entry format of the Mouse Genome Database (MGD).
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ RGD gene report format
+
+ true
+ beta12orEarlier
+ Entry format of the Rat Genome Database (RGD).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ SGD gene report format
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Entry format of the Saccharomyces Genome Database (SGD).
+
+
+
+
+
+
+
+
+
+ GeneDB gene report format
+
+ Entry format of the Sanger GeneDB genome database.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ TAIR gene report format
+
+ beta12orEarlier
+ beta12orEarlier
+ Entry format of The Arabidopsis Information Resource (TAIR) genome database.
+ true
+
+
+
+
+
+
+
+
+
+ WormBase gene report format
+
+ Entry format of the WormBase genomes database.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ ZFIN gene report format
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Entry format of the Zebrafish Information Network (ZFIN) genome database.
+
+
+
+
+
+
+
+
+
+ TIGR gene report format
+
+ true
+ Entry format of the TIGR genome database.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ dbSNP polymorphism report format
+
+ beta12orEarlier
+ Entry format for the dbSNP database.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ OMIM entry format
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Format of an entry from the OMIM database of genotypes and phenotypes.
+
+
+
+
+
+
+
+
+
+ HGVbase entry format
+
+ true
+ Format of a record from the HGVbase database of genotypes and phenotypes.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HIVDB entry format
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Format of a record from the HIVDB database of genotypes and phenotypes.
+
+
+
+
+
+
+
+
+
+ KEGG DISEASE entry format
+
+ beta12orEarlier
+ Format of an entry from the KEGG DISEASE database.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Primer3 primer
+
+
+ Report format on PCR primers and hybridization oligos as generated by Whitehead primer3 program.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ ABI
+
+
+ A format of raw sequence read data from an Applied Biosystems sequencing machine.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ mira
+
+
+ Format of MIRA sequence trace information file.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ CAF
+
+
+ Common Assembly Format (CAF). A sequence assembly format including contigs, base-call qualities, and other metadata.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ exp
+
+
+ Sequence assembly project file EXP format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ SCF
+
+
+ Staden Chromatogram Files format (SCF) of base-called sequence reads, qualities, and other metadata.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ PHD
+
+
+ beta12orEarlier
+ PHD sequence trace format to store serialised chromatogram data (reads).
+
+
+
+
+
+
+
+
+
+
+
+ dat
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format of Affymetrix data file of raw image data.
+ Affymetrix image data file format
+
+
+
+
+
+
+
+
+
+ cel
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Affymetrix probe raw data format
+ Format of Affymetrix data file of information about (raw) expression levels of the individual probes.
+
+
+
+
+
+
+
+
+
+ affymetrix
+
+
+ Format of affymetrix gene cluster files (hc-genes.txt, hc-chips.txt) from hierarchical clustering.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ ArrayExpress entry format
+
+ beta12orEarlier
+ true
+ Entry format for the ArrayExpress microarrays database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ affymetrix-exp
+
+
+ Affymetrix data file format for information about experimental conditions and protocols.
+ Affymetrix experimental conditions data file format
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ CHP
+
+
+
+
+
+
+
+
+ Affymetrix probe normalised data format
+ beta12orEarlier
+ Format of Affymetrix data file of information about (normalised) expression levels of the individual probes.
+
+
+
+
+
+
+
+
+
+ EMDB entry format
+
+ beta12orEarlier
+ Format of an entry from the Electron Microscopy DataBase (EMDB).
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ KEGG PATHWAY entry format
+
+ beta12orEarlier
+ beta12orEarlier
+ The format of an entry from the KEGG PATHWAY database of pathway maps for molecular interactions and reaction networks.
+ true
+
+
+
+
+
+
+
+
+
+ MetaCyc entry format
+
+ true
+ beta12orEarlier
+ The format of an entry from the MetaCyc metabolic pathways database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HumanCyc entry format
+
+ The format of a report from the HumanCyc metabolic pathways database.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ INOH entry format
+
+ beta12orEarlier
+ true
+ The format of an entry from the INOH signal transduction pathways database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PATIKA entry format
+
+ beta12orEarlier
+ The format of an entry from the PATIKA biological pathways database.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Reactome entry format
+
+ beta12orEarlier
+ The format of an entry from the reactome biological pathways database.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ aMAZE entry format
+
+ beta12orEarlier
+ true
+ The format of an entry from the aMAZE biological pathways and molecular interactions database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ CPDB entry format
+
+ The format of an entry from the CPDB database.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Panther Pathways entry format
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ The format of an entry from the Panther Pathways database.
+
+
+
+
+
+
+
+
+
+ Taverna workflow format
+
+
+ Format of Taverna workflows.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ BioModel mathematical model format
+
+ beta12orEarlier
+ beta12orEarlier
+ Format of mathematical models from the BioModel database.
+ true
+ Models are annotated and linked to relevant data resources, such as publications, databases of compounds and pathways, controlled vocabularies, etc.
+
+
+
+
+
+
+
+
+
+ KEGG LIGAND entry format
+
+ The format of an entry from the KEGG LIGAND chemical database.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ KEGG COMPOUND entry format
+
+ beta12orEarlier
+ The format of an entry from the KEGG COMPOUND database.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ KEGG PLANT entry format
+
+ beta12orEarlier
+ beta12orEarlier
+ The format of an entry from the KEGG PLANT database.
+ true
+
+
+
+
+
+
+
+
+
+ KEGG GLYCAN entry format
+
+ true
+ beta12orEarlier
+ The format of an entry from the KEGG GLYCAN database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PubChem entry format
+
+ beta12orEarlier
+ The format of an entry from PubChem.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ ChemSpider entry format
+
+ beta12orEarlier
+ The format of an entry from a database of chemical structures and property predictions.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ ChEBI entry format
+
+ beta12orEarlier
+ beta12orEarlier
+ The format of an entry from Chemical Entities of Biological Interest (ChEBI).
+ true
+ ChEBI includes an ontological classification defining relations between entities or classes of entities.
+
+
+
+
+
+
+
+
+
+ MSDchem ligand dictionary entry format
+
+ The format of an entry from the MSDchem ligand dictionary.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HET group dictionary entry format
+
+
+ The format of an entry from the HET group dictionary (HET groups from PDB files).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ KEGG DRUG entry format
+
+ The format of an entry from the KEGG DRUG database.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PubMed citation
+
+
+ beta12orEarlier
+ Format of bibliographic reference as used by the PubMed database.
+
+
+
+
+
+
+
+
+
+ Medline Display Format
+
+
+ beta12orEarlier
+ Format for abstracts of scientific articles from the Medline database.
+ Bibliographic reference information including citation information is included
+
+
+
+
+
+
+
+
+
+ CiteXplore-core
+
+
+ beta12orEarlier
+ CiteXplore 'core' citation format including title, journal, authors and abstract.
+
+
+
+
+
+
+
+
+
+ CiteXplore-all
+
+
+ CiteXplore 'all' citation format includes all known details such as Mesh terms and cross-references.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ pmc
+
+
+ beta12orEarlier
+ Article format of the PubMed Central database.
+
+
+
+
+
+
+
+
+
+ iHOP text mining abstract format
+
+
+ beta12orEarlier
+ iHOP abstract format.
+
+
+
+
+
+
+
+
+
+ Oscar3
+
+
+ Oscar 3 performs chemistry-specific parsing of chemical documents. It attempts to identify chemical names, ontology concepts and chemical data from a document.
+ Text mining abstract format from the Oscar 3 application.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PDB atom record format
+
+ true
+ beta13
+ beta12orEarlier
+ Format of an ATOM record (describing data for an individual atom) from a PDB file.
+
+
+
+
+
+
+
+
+
+ CATH chain report format
+
+ The report (for example http://www.cathdb.info/chain/1cukA) includes chain identifiers, domain identifiers and CATH codes for domains in a given protein chain.
+ beta12orEarlier
+ Format of CATH domain classification information for a polypeptide chain.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ CATH PDB report format
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Format of CATH domain classification information for a protein PDB file.
+ The report (for example http://www.cathdb.info/pdb/1cuk) includes chain identifiers, domain identifiers and CATH codes for domains in a given PDB file.
+
+
+
+
+
+
+
+
+
+ NCBI gene report format
+
+ true
+ Entry (gene) format of the NCBI database.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GeneIlluminator gene report format
+
+ Report format for biological functions associated with a gene name and its alternative names (synonyms, homonyms), as generated by the GeneIlluminator service.
+ This includes a gene name and abbreviation of the name which may be in a name space indicating the gene status and relevant organisation.
+ beta12orEarlier
+ beta12orEarlier
+ Moby:GI_Gene
+ true
+
+
+
+
+
+
+
+
+
+ BacMap gene card format
+
+ Format of a report on the DNA and protein sequences for a given gene label from a bacterial chromosome maps from the BacMap database.
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Moby:BacMapGeneCard
+
+
+
+
+
+
+
+
+
+ ColiCard report format
+
+ Format of a report on Escherichia coli genes, proteins and molecules from the CyberCell Database (CCDB).
+ true
+ beta12orEarlier
+ Moby:ColiCard
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PlasMapper TextMap
+
+
+ beta12orEarlier
+ Map of a plasmid (circular DNA) in PlasMapper TextMap format.
+
+
+
+
+
+
+
+
+
+ newick
+
+
+ nh
+ beta12orEarlier
+ Phylogenetic tree Newick (text) format.
+
+
+
+
+
+
+
+
+
+ TreeCon format
+
+
+ beta12orEarlier
+ Phylogenetic tree TreeCon (text) format.
+
+
+
+
+
+
+
+
+
+ Nexus format
+
+
+ Phylogenetic tree Nexus (text) format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Format
+
+
+
+ http://en.wikipedia.org/wiki/File_format
+ http://purl.org/biotop/biotop.owl#MachineLanguage
+ File format
+ Data model
+ http://www.onto-med.de/ontologies/gfo.owl#Symbol_structure
+ Exchange format
+ "http://purl.obolibrary.org/obo/IAO_0000098"
+ http://semanticscience.org/resource/SIO_000612
+ http://semanticscience.org/resource/SIO_000618
+ beta12orEarlier
+ http://www.ifomis.org/bfo/1.1/snap#Continuant
+ http://www.loa-cnr.it/ontologies/DOLCE-Lite.owl#quality
+ "http://purl.org/dc/elements/1.1/format"
+ http://wsio.org/compression_004
+ A defined way or layout of representing and structuring data in a computer file, blob, string, message, or elsewhere.
+ http://en.wikipedia.org/wiki/List_of_file_formats
+ http://www.ifomis.org/bfo/1.1/snap#Quality
+ Data format
+ http://purl.org/biotop/biotop.owl#Quality
+ The main focus in EDAM lies on formats as means of structuring data exchanged between different tools or resources. The serialisation, compression, or encoding of concrete data formats/models is not in scope of EDAM. Format 'is format of' Data.
+ http://www.onto-med.de/ontologies/gfo.owl#Perpetuant
+
+
+
+
+ A defined data format has its implicit or explicit data model, and EDAM does not distinguish the two. Some data models however do not have any standard way of serialisation into an exchange format, and those are thus not considered formats in EDAM. (Remark: even broader - or closely related - term to 'Data model' would be an 'Information model'.)
+ Data model
+
+
+
+
+ File format denotes only formats of a computer file, but the same formats apply also to data blobs or exchanged messages.
+ File format
+
+
+
+
+
+
+
+
+
+ Atomic data format
+
+ beta12orEarlier
+ beta13
+ Data format for an individual atom.
+ true
+
+
+
+
+
+
+
+
+
+ Sequence record format
+
+
+
+
+
+
+
+ Data format for a molecular sequence record.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence feature annotation format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Data format for molecular sequence feature information.
+
+
+
+
+
+
+
+
+
+ Alignment format
+
+
+
+
+
+
+
+ Data format for molecular sequence alignment information.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ acedb
+
+ beta12orEarlier
+ ACEDB sequence format.
+
+
+
+
+
+
+
+
+
+ clustal sequence format
+
+ true
+ beta12orEarlier
+ Clustalw output format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ codata
+
+
+ Codata entry format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ dbid
+
+ beta12orEarlier
+ Fasta format variant with database name before ID.
+
+
+
+
+
+
+
+
+
+ EMBL format
+
+
+ EMBL entry format.
+ EMBL sequence format
+ EMBL
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Staden experiment format
+
+
+ Staden experiment file format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ FASTA
+
+
+ beta12orEarlier
+ FASTA format
+ FASTA sequence format
+ FASTA format including NCBI-style IDs.
+
+
+
+
+
+
+
+
+
+ FASTQ
+
+ FASTQ short read format ignoring quality scores.
+ beta12orEarlier
+ FASTAQ
+ fq
+
+
+
+
+
+
+
+
+
+ FASTQ-illumina
+
+ FASTQ Illumina 1.3 short read format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ FASTQ-sanger
+
+ FASTQ short read format with phred quality.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ FASTQ-solexa
+
+ FASTQ Solexa/Illumina 1.0 short read format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ fitch program
+
+
+ Fitch program format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GCG
+
+
+ GCG SSF
+ beta12orEarlier
+ GCG SSF (single sequence file) file format.
+ GCG sequence file format.
+
+
+
+
+
+
+
+
+
+ GenBank format
+
+
+ beta12orEarlier
+ Genbank entry format.
+
+
+
+
+
+
+
+
+
+ genpept
+
+ beta12orEarlier
+ Genpept protein entry format.
+ Currently identical to refseqp format
+
+
+
+
+
+
+
+
+
+ GFF2-seq
+
+
+ GFF feature file format with sequence in the header.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GFF3-seq
+
+
+ GFF3 feature file format with sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ giFASTA format
+
+ FASTA sequence format including NCBI-style GIs.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ hennig86
+
+
+ beta12orEarlier
+ Hennig86 output sequence format.
+
+
+
+
+
+
+
+
+
+ ig
+
+
+ Intelligenetics sequence format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ igstrict
+
+
+ beta12orEarlier
+ Intelligenetics sequence format (strict version).
+
+
+
+
+
+
+
+
+
+ jackknifer
+
+
+ Jackknifer interleaved and non-interleaved sequence format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ mase format
+
+
+ beta12orEarlier
+ Mase program sequence format.
+
+
+
+
+
+
+
+
+
+ mega-seq
+
+
+ beta12orEarlier
+ Mega interleaved and non-interleaved sequence format.
+
+
+
+
+
+
+
+
+
+ MSF
+
+ GCG MSF
+ beta12orEarlier
+ GCG MSF (multiple sequence file) file format.
+
+
+
+
+
+
+
+
+
+ nbrf/pir
+
+ NBRF/PIR entry sequence format.
+ nbrf
+ beta12orEarlier
+ pir
+
+
+
+
+
+
+
+
+
+ nexus-seq
+
+
+
+ beta12orEarlier
+ Nexus/paup interleaved sequence format.
+
+
+
+
+
+
+
+
+
+ pdbatom
+
+
+
+ pdb format in EMBOSS.
+ beta12orEarlier
+ PDB sequence format (ATOM lines).
+
+
+
+
+
+
+
+
+
+ pdbatomnuc
+
+
+
+ beta12orEarlier
+ pdbnuc format in EMBOSS.
+ PDB nucleotide sequence format (ATOM lines).
+
+
+
+
+
+
+
+
+
+ pdbseqresnuc
+
+
+
+ pdbnucseq format in EMBOSS.
+ PDB nucleotide sequence format (SEQRES lines).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ pdbseqres
+
+
+
+ PDB sequence format (SEQRES lines).
+ beta12orEarlier
+ pdbseq format in EMBOSS.
+
+
+
+
+
+
+
+
+
+ Pearson format
+
+ beta12orEarlier
+ Plain old FASTA sequence format (unspecified format for IDs).
+
+
+
+
+
+
+
+
+
+ phylip sequence format
+
+ beta12orEarlier
+ Phylip interleaved sequence format.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ phylipnon sequence format
+
+ true
+ Phylip non-interleaved sequence format.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ raw
+
+
+ beta12orEarlier
+ Raw sequence format with no non-sequence characters.
+
+
+
+
+
+
+
+
+
+ refseqp
+
+
+ beta12orEarlier
+ Refseq protein entry sequence format.
+ Currently identical to genpept format
+
+
+
+
+
+
+
+
+
+ selex sequence format
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Selex sequence format.
+
+
+
+
+
+
+
+
+
+ Staden format
+
+
+ beta12orEarlier
+ Staden suite sequence format.
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Stockholm format
+
+
+ Stockholm multiple sequence alignment format (used by Pfam and Rfam).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ strider format
+
+
+ DNA strider output sequence format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ UniProtKB format
+
+ UniProt format
+ SwissProt format
+ beta12orEarlier
+ UniProtKB entry sequence format.
+
+
+
+
+
+
+
+
+
+ plain text format (unformatted)
+
+
+ beta12orEarlier
+ Plain text sequence format (essentially unformatted).
+
+
+
+
+
+
+
+
+
+ treecon sequence format
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Treecon output sequence format.
+
+
+
+
+
+
+
+
+
+ ASN.1 sequence format
+
+
+ NCBI ASN.1-based sequence format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ DAS format
+
+
+ das sequence format
+ DAS sequence (XML) format (any type).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ dasdna
+
+
+ beta12orEarlier
+ DAS sequence (XML) format (nucleotide-only).
+ The use of this format is deprecated.
+
+
+
+
+
+
+
+
+
+ debug-seq
+
+
+ EMBOSS debugging trace sequence format of full internal data content.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ jackknifernon
+
+
+ beta12orEarlier
+ Jackknifer output sequence non-interleaved format.
+
+
+
+
+
+
+
+
+
+ meganon sequence format
+
+ beta12orEarlier
+ beta12orEarlier
+ Mega non-interleaved output sequence format.
+ true
+
+
+
+
+
+
+
+
+
+ NCBI format
+
+ NCBI FASTA sequence format with NCBI-style IDs.
+ beta12orEarlier
+ There are several variants of this.
+
+
+
+
+
+
+
+
+
+ nexusnon
+
+
+
+ Nexus/paup non-interleaved sequence format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GFF2
+
+ beta12orEarlier
+ General Feature Format (GFF) of sequence features.
+
+
+
+
+
+
+
+
+
+
+
+ GFF3
+
+ beta12orEarlier
+ Generic Feature Format version 3 (GFF3) of sequence features.
+
+
+
+
+
+
+
+
+
+
+
+ pir
+
+ true
+ 1.7
+ PIR feature format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ swiss feature
+
+ true
+ Swiss-Prot feature format.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ DASGFF
+
+
+ DAS GFF (XML) feature format.
+ das feature
+ DASGFF feature
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ debug-feat
+
+
+ EMBOSS debugging trace feature format of full internal data content.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ EMBL feature
+
+ beta12orEarlier
+ EMBL feature format.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GenBank feature
+
+ beta12orEarlier
+ Genbank feature format.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ ClustalW format
+
+
+ clustal
+ beta12orEarlier
+ ClustalW format for (aligned) sequences.
+
+
+
+
+
+
+
+
+
+ debug
+
+
+ EMBOSS alignment format for debugging trace of full internal data content.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ FASTA-aln
+
+
+ beta12orEarlier
+ Fasta format for (aligned) sequences.
+
+
+
+
+
+
+
+
+
+ markx0
+
+ beta12orEarlier
+ Pearson MARKX0 alignment format.
+
+
+
+
+
+
+
+
+
+ markx1
+
+ Pearson MARKX1 alignment format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ markx10
+
+ beta12orEarlier
+ Pearson MARKX10 alignment format.
+
+
+
+
+
+
+
+
+
+ markx2
+
+ beta12orEarlier
+ Pearson MARKX2 alignment format.
+
+
+
+
+
+
+
+
+
+ markx3
+
+ beta12orEarlier
+ Pearson MARKX3 alignment format.
+
+
+
+
+
+
+
+
+
+ match
+
+
+ Alignment format for start and end of matches between sequence pairs.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ mega
+
+ Mega format for (typically aligned) sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ meganon
+
+ Mega non-interleaved format for (typically aligned) sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ msf alignment format
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ MSF format for (aligned) sequences.
+
+
+
+
+
+
+
+
+
+ nexus alignment format
+
+ Nexus/paup format for (aligned) sequences.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ nexusnon alignment format
+
+ beta12orEarlier
+ true
+ Nexus/paup non-interleaved format for (aligned) sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ pair
+
+ EMBOSS simple sequence pair alignment format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PHYLIP format
+
+ phy
+ beta12orEarlier
+ ph
+ http://www.bioperl.org/wiki/PHYLIP_multiple_alignment_format
+ PHYLIP interleaved format
+ Phylip format for (aligned) sequences.
+
+
+
+
+
+
+
+
+
+ phylipnon
+
+ http://www.bioperl.org/wiki/PHYLIP_multiple_alignment_format
+ beta12orEarlier
+ PHYLIP sequential format
+ Phylip non-interleaved format for (aligned) sequences.
+
+
+
+
+
+
+
+
+
+ scores format
+
+
+ Alignment format for score values for pairs of sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ selex
+
+
+
+ beta12orEarlier
+ SELEX format for (aligned) sequences.
+
+
+
+
+
+
+
+
+
+ EMBOSS simple format
+
+
+ EMBOSS simple multiple alignment format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ srs format
+
+
+ beta12orEarlier
+ Simple multiple sequence (alignment) format for SRS.
+
+
+
+
+
+
+
+
+
+ srspair
+
+
+ beta12orEarlier
+ Simple sequence pair (alignment) format for SRS.
+
+
+
+
+
+
+
+
+
+ T-Coffee format
+
+
+ T-Coffee program alignment format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ TreeCon-seq
+
+
+
+ Treecon format for (aligned) sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree format
+
+
+
+
+
+
+
+ Data format for a phylogenetic tree.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Biological pathway or network format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Data format for a biological pathway or network.
+
+
+
+
+
+
+
+
+
+ Sequence-profile alignment format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Data format for a sequence-profile alignment.
+
+
+
+
+
+
+
+
+
+ Sequence-profile alignment (HMM) format
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Data format for a sequence-HMM profile alignment.
+
+
+
+
+
+
+
+
+
+ Amino acid index format
+
+
+
+
+
+
+
+ Data format for an amino acid index.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Article format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Literature format
+ Data format for a full-text scientific article.
+
+
+
+
+
+
+
+
+
+ Text mining report format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Data format for an abstract (report) from text mining.
+
+
+
+
+
+
+
+
+
+ Enzyme kinetics report format
+
+
+
+
+
+
+
+ Data format for reports on enzyme kinetics.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Small molecule report format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Chemical compound annotation format
+ Format of a report on a chemical compound.
+
+
+
+
+
+
+
+
+
+ Gene annotation format
+
+
+
+
+
+
+
+ Format of a report on a particular locus, gene, gene system or groups of genes.
+ beta12orEarlier
+ Gene features format
+
+
+
+
+
+
+
+
+
+ Workflow format
+
+ beta12orEarlier
+ Format of a workflow.
+
+
+
+
+
+
+
+
+
+ Tertiary structure format
+
+ beta12orEarlier
+ Data format for a molecular tertiary structure.
+
+
+
+
+
+
+
+
+
+ Biological model format
+
+ Data format for a biological model.
+ beta12orEarlier
+ 1.2
+ true
+
+
+
+
+
+
+
+
+
+ Chemical formula format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Text format of a chemical formula.
+
+
+
+
+
+
+
+
+
+ Phylogenetic character data format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format of raw (unplotted) phylogenetic data.
+
+
+
+
+
+
+
+
+
+ Phylogenetic continuous quantitative character format
+
+
+
+
+
+
+
+ Format of phylogenetic continuous quantitative character data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic discrete states format
+
+
+
+
+
+
+
+ Format of phylogenetic discrete states data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree report (cliques) format
+
+
+
+
+
+
+
+ Format of phylogenetic cliques data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree report (invariants) format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format of phylogenetic invariants data.
+
+
+
+
+
+
+
+
+
+ Electron microscopy model format
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Annotation format for electron microscopy models.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree report (tree distances) format
+
+
+
+
+
+
+
+ Format for phylogenetic tree distance data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Polymorphism report format
+
+ beta12orEarlier
+ true
+ 1.0
+ Format for sequence polymorphism data.
+
+
+
+
+
+
+
+
+ Protein family report format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format for reports on a protein family.
+
+
+
+
+
+
+
+
+
+ Protein interaction format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format for molecular interaction data.
+ Molecular interaction format
+
+
+
+
+
+
+
+
+
+ Sequence assembly format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format for sequence assembly data.
+
+
+
+
+
+
+
+
+
+ Microarray experiment data format
+
+ Format for information about a microarray experimental per se (not the data generated from that experiment).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence trace format
+
+
+
+
+
+
+
+ Format for sequence trace data (i.e. including base call information).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene expression report format
+
+
+
+
+
+
+
+ Gene expression data format
+ Format of a file of gene expression data, e.g. a gene expression matrix or profile.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Genotype and phenotype annotation format
+
+ beta12orEarlier
+ true
+ Format of a report on genotype / phenotype information.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Map format
+
+
+
+
+
+
+
+ Format of a map of (typically one) molecular sequence annotated with features.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid features (primers) format
+
+ beta12orEarlier
+ Format of a report on PCR primers or hybridization oligos in a nucleic acid sequence.
+
+
+
+
+
+
+
+
+
+ Protein report format
+
+
+
+
+
+
+
+ Format of a report of general information about a specific protein.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein report (enzyme) format
+
+ Format of a report of general information about a specific enzyme.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ 3D-1D scoring matrix format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format of a matrix of 3D-1D scores (amino acid environment probabilities).
+
+
+
+
+
+
+
+
+
+ Protein structure report (quality evaluation) format
+
+
+
+
+
+
+
+ Format of a report on the quality of a protein three-dimensional model.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Database hits (sequence) format
+
+
+
+
+
+
+
+ Format of a report on sequence hits and associated data from searching a sequence database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence distance matrix format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format of a matrix of genetic distances between molecular sequences.
+
+
+
+
+
+
+
+
+
+ Sequence motif format
+
+
+
+
+
+
+
+ Format of a sequence motif.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence profile format
+
+
+
+
+
+
+
+ Format of a sequence profile.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Hidden Markov model format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format of a hidden Markov model.
+
+
+
+
+
+
+
+
+
+ Dirichlet distribution format
+
+
+
+
+
+
+
+ Data format of a dirichlet distribution.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HMM emission and transition counts format
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Data format for the emission and transition counts of a hidden Markov model.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ RNA secondary structure format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format for secondary structure (predicted or real) of an RNA molecule.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure format
+
+ Format for secondary structure (predicted or real) of a protein molecule.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence range format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format used to specify range(s) of sequence positions.
+
+
+
+
+
+
+
+
+
+ pure
+
+
+ Alphabet for molecular sequence with possible unknown positions but without non-sequence characters.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ unpure
+
+
+ Alphabet for a molecular sequence with possible unknown positions but possibly with non-sequence characters.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ unambiguous sequence
+
+
+ Alphabet for a molecular sequence with possible unknown positions but without ambiguity characters.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ ambiguous
+
+
+ beta12orEarlier
+ Alphabet for a molecular sequence with possible unknown positions and possible ambiguity characters.
+
+
+
+
+
+
+
+
+
+ Sequence features (repeats) format
+
+ beta12orEarlier
+ Format used for map of repeats in molecular (typically nucleotide) sequences.
+
+
+
+
+
+
+
+
+
+ Nucleic acid features (restriction sites) format
+
+ beta12orEarlier
+ Format used for report on restriction enzyme recognition sites in nucleotide sequences.
+
+
+
+
+
+
+
+
+
+ Gene features (coding region) format
+
+ beta12orEarlier
+ Format used for report on coding regions in nucleotide sequences.
+ true
+ 1.10
+
+
+
+
+
+
+
+
+
+ Sequence cluster format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Format used for clusters of molecular sequences.
+
+
+
+
+
+
+
+
+
+ Sequence cluster format (protein)
+
+ Format used for clusters of protein sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence cluster format (nucleic acid)
+
+ Format used for clusters of nucleotide sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene cluster format
+
+ true
+ beta13
+ beta12orEarlier
+ Format used for clusters of genes.
+
+
+
+
+
+
+
+
+
+ EMBL-like (text)
+
+
+ This concept may be used for the many non-standard EMBL-like text formats.
+ beta12orEarlier
+ A text format resembling EMBL entry format.
+
+
+
+
+
+
+
+
+
+ FASTQ-like format (text)
+
+
+ A text format resembling FASTQ short read format.
+ This concept may be used for non-standard FASTQ short read-like formats.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ EMBLXML
+
+ XML format for EMBL entries.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ cdsxml
+
+ XML format for EMBL entries.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ insdxml
+
+ beta12orEarlier
+ XML format for EMBL entries.
+
+
+
+
+
+
+
+
+
+ geneseq
+
+ Geneseq sequence format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ UniProt-like (text)
+
+
+ A text sequence format resembling uniprotkb entry format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ UniProt format
+
+ beta12orEarlier
+ true
+ UniProt entry sequence format.
+ 1.8
+
+
+
+
+
+
+
+
+
+ ipi
+
+ 1.8
+ beta12orEarlier
+ ipi sequence format.
+ true
+
+
+
+
+
+
+
+
+
+ medline
+
+
+ Abstract format used by MedLine database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ontology format
+
+
+
+
+
+
+
+ Format used for ontologies.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ OBO format
+
+ beta12orEarlier
+ A serialisation format conforming to the Open Biomedical Ontologies (OBO) model.
+
+
+
+
+
+
+
+
+
+ OWL format
+
+
+ A serialisation format conforming to the Web Ontology Language (OWL) model.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ FASTA-like (text)
+
+
+ This concept may also be used for the many non-standard FASTA-like formats.
+ http://filext.com/file-extension/FASTA
+ beta12orEarlier
+ A text format resembling FASTA format.
+
+
+
+
+
+
+
+
+
+ Sequence record full format
+
+ 1.8
+ beta12orEarlier
+ Data format for a molecular sequence record, typically corresponding to a full entry from a molecular sequence database.
+ true
+
+
+
+
+
+
+
+
+
+ Sequence record lite format
+
+ true
+ 1.8
+ beta12orEarlier
+ Data format for a molecular sequence record 'lite', typically molecular sequence and minimal metadata, such as an identifier of the sequence and/or a comment.
+
+
+
+
+
+
+
+
+
+ EMBL format (XML)
+
+ beta12orEarlier
+ An XML format for EMBL entries.
+ This is a placeholder for other more specific concepts. It should not normally be used for annotation.
+
+
+
+
+
+
+
+
+
+ GenBank-like format (text)
+
+
+ A text format resembling GenBank entry (plain text) format.
+ This concept may be used for the non-standard GenBank-like text formats.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence feature table format (text)
+
+ Text format for a sequence feature table.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Strain data format
+
+ Format of a report on organism strain data / cell line.
+ beta12orEarlier
+ true
+ 1.0
+
+
+
+
+
+
+
+
+ CIP strain data format
+
+ Format for a report of strain data as used for CIP database entries.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ phylip property values
+
+ true
+ PHYLIP file format for phylogenetic property data.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ STRING entry format (HTML)
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Entry format (HTML) for the STRING database of protein interaction.
+
+
+
+
+
+
+
+
+
+ STRING entry format (XML)
+
+
+ Entry format (XML) for the STRING database of protein interaction.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GFF
+
+
+ GFF feature format (of indeterminate version).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GTF
+
+ Gene Transfer Format (GTF), a restricted version of GFF.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+
+ FASTA-HTML
+
+
+ FASTA format wrapped in HTML elements.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ EMBL-HTML
+
+
+ EMBL entry format wrapped in HTML elements.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ BioCyc enzyme report format
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Format of an entry from the BioCyc enzyme database.
+
+
+
+
+
+
+
+
+
+ ENZYME enzyme report format
+
+ Format of an entry from the Enzyme nomenclature database (ENZYME).
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ PseudoCAP gene report format
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Format of a report on a gene from the PseudoCAP database.
+
+
+
+
+
+
+
+
+
+ GeneCards gene report format
+
+ Format of a report on a gene from the GeneCards database.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Textual format
+
+ http://filext.com/file-extension/TSV
+ http://www.iana.org/assignments/media-types/text/plain
+ Textual format.
+ Data in text format can be compressed into binary format, or can be a value of an XML element or attribute. Markup formats are not considered textual (or more precisely, not plain-textual).
+ txt
+ http://filext.com/file-extension/TXT
+ Plain text
+ http://www.iana.org/assignments/media-types/media-types.xhtml#text
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HTML
+
+
+
+
+
+
+
+ HTML format.
+ beta12orEarlier
+ http://filext.com/file-extension/HTML
+ Hypertext Markup Language
+
+
+
+
+
+
+
+
+
+ XML
+
+ Data in XML format can be serialised into text, or binary format.
+ eXtensible Markup Language (XML) format.
+ beta12orEarlier
+ http://filext.com/file-extension/XML
+ Extensible Markup Language
+
+
+
+
+
+
+
+
+
+ Binary format
+
+ Only specific native binary formats are listed under 'Binary format' in EDAM. Generic binary formats - such as any data being zipped, or any XML data being serialised into the Efficient XML Interchange (EXI) format - are not modelled in EDAM. Refer to http://wsio.org/compression_004.
+ beta12orEarlier
+ Binary format.
+
+
+
+
+
+
+
+
+
+ URI format
+
+ beta13
+ true
+ Typical textual representation of a URI.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ NCI-Nature pathway entry format
+
+ beta12orEarlier
+ true
+ The format of an entry from the NCI-Nature pathways database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Format (typed)
+
+ This concept exists only to assist EDAM maintenance and navigation in graphical browsers. It does not add semantic information. The concept branch under 'Format (typed)' provides an alternative organisation of the concepts nested under the other top-level branches ('Binary', 'HTML', 'RDF', 'Text' and 'XML'. All concepts under here are already included under those branches.
+ beta12orEarlier
+ A broad class of format distinguished by the scientific nature of the data that is identified.
+
+
+
+
+
+
+
+
+
+ BioXSD
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ BioXSD XML format
+ beta12orEarlier
+ BioXSD XML format of basic bioinformatics types of data (sequence records, alignments, feature records, references to resources, and more).
+
+
+
+
+
+
+
+
+
+
+
+ RDF format
+
+
+ beta12orEarlier
+ A serialisation format conforming to the Resource Description Framework (RDF) model.
+
+
+
+
+
+
+
+
+
+ GenBank-HTML
+
+
+ beta12orEarlier
+ Genbank entry format wrapped in HTML elements.
+
+
+
+
+
+
+
+
+
+ Protein features (domains) format
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Format of a report on protein features (domain composition).
+
+
+
+
+
+
+
+
+
+ EMBL-like format
+
+ beta12orEarlier
+ A format resembling EMBL entry (plain text) format.
+ This concept may be used for the many non-standard EMBL-like formats.
+
+
+
+
+
+
+
+
+
+ FASTQ-like format
+
+ A format resembling FASTQ short read format.
+ This concept may be used for non-standard FASTQ short read-like formats.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ FASTA-like
+
+ This concept may be used for the many non-standard FASTA-like formats.
+ beta12orEarlier
+ A format resembling FASTA format.
+
+
+
+
+
+
+
+
+
+ uniprotkb-like format
+
+
+ beta12orEarlier
+ A sequence format resembling uniprotkb entry format.
+
+
+
+
+
+
+
+
+
+ Sequence feature table format
+
+
+
+
+
+
+
+ Format for a sequence feature table.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ OBO
+
+
+ beta12orEarlier
+ OBO ontology text format.
+
+
+
+
+
+
+
+
+
+ OBO-XML
+
+
+ beta12orEarlier
+ OBO ontology XML format.
+
+
+
+
+
+
+
+
+
+ Sequence record format (text)
+
+ Data format for a molecular sequence record.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence record format (XML)
+
+ beta12orEarlier
+ Data format for a molecular sequence record.
+
+
+
+
+
+
+
+
+
+ Sequence feature table format (XML)
+
+ XML format for a sequence feature table.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Alignment format (text)
+
+ Text format for molecular sequence alignment information.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Alignment format (XML)
+
+ XML format for molecular sequence alignment information.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree format (text)
+
+ beta12orEarlier
+ Text format for a phylogenetic tree.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree format (XML)
+
+ beta12orEarlier
+ XML format for a phylogenetic tree.
+
+
+
+
+
+
+
+
+
+ EMBL-like (XML)
+
+
+ An XML format resembling EMBL entry format.
+ This concept may be used for the any non-standard EMBL-like XML formats.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ GenBank-like format
+
+ A format resembling GenBank entry (plain text) format.
+ beta12orEarlier
+ This concept may be used for the non-standard GenBank-like formats.
+
+
+
+
+
+
+
+
+
+ STRING entry format
+
+ beta12orEarlier
+ Entry format for the STRING database of protein interaction.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Sequence assembly format (text)
+
+ beta12orEarlier
+ Text format for sequence assembly data.
+
+
+
+
+
+
+
+
+
+ Amino acid identifier format
+
+ beta13
+ Text format (representation) of amino acid residues.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ completely unambiguous
+
+
+ beta12orEarlier
+ Alphabet for a molecular sequence without any unknown positions or ambiguity characters.
+
+
+
+
+
+
+
+
+
+ completely unambiguous pure
+
+
+ beta12orEarlier
+ Alphabet for a molecular sequence without unknown positions, ambiguity or non-sequence characters.
+
+
+
+
+
+
+
+
+
+ completely unambiguous pure nucleotide
+
+
+ Alphabet for a nucleotide sequence (characters ACGTU only) without unknown positions, ambiguity or non-sequence characters .
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ completely unambiguous pure dna
+
+
+ beta12orEarlier
+ Alphabet for a DNA sequence (characters ACGT only) without unknown positions, ambiguity or non-sequence characters.
+
+
+
+
+
+
+
+
+
+ completely unambiguous pure rna sequence
+
+
+ Alphabet for an RNA sequence (characters ACGU only) without unknown positions, ambiguity or non-sequence characters.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Raw sequence format
+
+
+
+
+
+
+
+ http://www.onto-med.de/ontologies/gfo.owl#Symbol_sequence
+ beta12orEarlier
+ Format of a raw molecular sequence (i.e. the alphabet used).
+
+
+
+
+
+
+
+
+
+ BAM
+
+
+
+ beta12orEarlier
+ BAM format, the binary, BGZF-formatted compressed version of SAM format for alignment of nucleotide sequences (e.g. sequencing reads) to (a) reference sequence(s). May contain base-call and alignment qualities and other data.
+
+
+
+
+
+
+
+
+
+
+
+ SAM
+
+
+
+ The format supports short and long reads (up to 128Mbp) produced by different sequencing platforms and is used to hold mapped data within the GATK and across the Broad Institute, the Sanger Centre, and throughout the 1000 Genomes project.
+ beta12orEarlier
+ Sequence Alignment/Map (SAM) format for alignment of nucleotide sequences (e.g. sequencing reads) to (a) reference sequence(s). May contain base-call and alignment qualities and other data.
+
+
+
+
+
+
+
+
+
+
+
+ SBML
+
+
+ Systems Biology Markup Language (SBML), the standard XML format for models of biological processes such as for example metabolism, cell signaling, and gene regulation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ completely unambiguous pure protein
+
+
+ beta12orEarlier
+ Alphabet for any protein sequence without unknown positions, ambiguity or non-sequence characters.
+
+
+
+
+
+
+
+
+
+ Bibliographic reference format
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Format of a bibliographic reference.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence annotation track format
+
+
+
+
+
+
+
+ Format of a sequence annotation track.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Alignment format (pair only)
+
+
+
+
+
+
+
+ beta12orEarlier
+ Data format for molecular sequence alignment information that can hold sequence alignment(s) of only 2 sequences.
+
+
+
+
+
+
+
+
+
+ Sequence variation annotation format
+
+
+
+
+
+
+
+ Format of sequence variation annotation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ markx0 variant
+
+
+ Some variant of Pearson MARKX alignment format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ mega variant
+
+
+
+ Some variant of Mega format for (typically aligned) sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylip format variant
+
+
+
+ beta12orEarlier
+ Some variant of Phylip format for (aligned) sequences.
+
+
+
+
+
+
+
+
+
+ AB1
+
+
+ beta12orEarlier
+ AB1 binary format of raw DNA sequence reads (output of Applied Biosystems' sequencing analysis software). Contains an electropherogram and the DNA base sequence.
+ AB1 uses the generic binary Applied Biosystems, Inc. Format (ABIF).
+
+
+
+
+
+
+
+
+
+ ACE
+
+
+ ACE sequence assembly format including contigs, base-call qualities, and other metadata (version Aug 1998 and onwards).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ BED
+
+
+ beta12orEarlier
+ BED detail format includes 2 additional columns (http://genome.ucsc.edu/FAQ/FAQformat#format1.7) and BED 15 includes 3 additional columns for experiment scores (http://genomewiki.ucsc.edu/index.php/Microarray_track).
+ Browser Extensible Data (BED) format of sequence annotation track, typically to be displayed in a genome browser.
+
+
+
+
+
+
+
+
+
+
+
+ bigBed
+
+
+ beta12orEarlier
+ bigBed format for large sequence annotation tracks, similar to textual BED format.
+
+
+
+
+
+
+
+
+
+
+
+ WIG
+
+
+ Wiggle format (WIG) of a sequence annotation track that consists of a value for each sequence position. Typically to be displayed in a genome browser.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ bigWig
+
+
+ beta12orEarlier
+ bigWig format for large sequence annotation tracks that consist of a value for each sequence position. Similar to textual WIG format.
+
+
+
+
+
+
+
+
+
+
+
+ PSL
+
+
+
+ PSL format of alignments, typically generated by BLAT or psLayout. Can be displayed in a genome browser like a sequence annotation track.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ MAF
+
+
+
+ Multiple Alignment Format (MAF) supporting alignments of whole genomes with rearrangements, directions, multiple pieces to the alignment, and so forth.
+ Typically generated by Multiz and TBA aligners; can be displayed in a genome browser like a sequence annotation track. This should not be confused with MIRA Assembly Format or Mutation Annotation Format.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ 2bit
+
+
+ beta12orEarlier
+ 2bit binary format of nucleotide sequences using 2 bits per nucleotide. In addition encodes unknown nucleotides and lower-case 'masking'.
+
+
+
+
+
+
+
+
+
+
+
+
+ .nib
+
+
+ beta12orEarlier
+ .nib (nibble) binary format of a nucleotide sequence using 4 bits per nucleotide (including unknown) and its lower-case 'masking'.
+
+
+
+
+
+
+
+
+
+
+
+ genePred
+
+
+ genePred table format for gene prediction tracks.
+ genePred format has 3 main variations (http://genome.ucsc.edu/FAQ/FAQformat#format9 http://www.broadinstitute.org/software/igv/genePred). They reflect UCSC Browser DB tables.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ pgSnp
+
+
+ Personal Genome SNP (pgSnp) format for sequence variation tracks (indels and polymorphisms), supported by the UCSC Genome Browser.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ axt
+
+
+ beta12orEarlier
+ axt format of alignments, typically produced from BLASTZ.
+
+
+
+
+
+
+
+
+
+
+
+ LAV
+
+
+ beta12orEarlier
+ LAV format of alignments generated by BLASTZ and LASTZ.
+
+
+
+
+
+
+
+
+
+
+
+ Pileup
+
+
+ beta12orEarlier
+ Pileup format of alignment of sequences (e.g. sequencing reads) to (a) reference sequence(s). Contains aligned bases per base of the reference sequence(s).
+
+
+
+
+
+
+
+
+
+
+
+ VCF
+
+
+ beta12orEarlier
+ Variant Call Format (VCF) for sequence variation (indels, polymorphisms, structural variation).
+
+
+
+
+
+
+
+
+
+
+
+ SRF
+
+
+ Sequence Read Format (SRF) of sequence trace data. Supports submission to the NCBI Short Read Archive.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ ZTR
+
+
+ ZTR format for storing chromatogram data from DNA sequencing instruments.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ GVF
+
+
+ Genome Variation Format (GVF). A GFF3-compatible format with defined header and attribute tags for sequence variation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+
+ BCF
+
+
+ beta12orEarlier
+ BCF, the binary version of Variant Call Format (VCF) for sequence variation (indels, polymorphisms, structural variation).
+
+
+
+
+
+
+
+
+
+
+ Matrix format
+
+
+
+
+
+
+
+ Format of a matrix (array) of numerical values.
+ beta13
+
+
+
+
+
+
+
+
+
+ Protein domain classification format
+
+
+
+
+
+
+
+ Format of data concerning the classification of the sequences and/or structures of protein structural domain(s).
+ beta13
+
+
+
+
+
+
+
+
+
+ Raw SCOP domain classification format
+
+ Format of raw SCOP domain classification data files.
+ These are the parsable data files provided by SCOP.
+ beta13
+
+
+
+
+
+
+
+
+
+ Raw CATH domain classification format
+
+ These are the parsable data files provided by CATH.
+ beta13
+ Format of raw CATH domain classification data files.
+
+
+
+
+
+
+
+
+
+ CATH domain report format
+
+ Format of summary of domain classification information for a CATH domain.
+ beta13
+ The report (for example http://www.cathdb.info/domain/1cukA01) includes CATH codes for levels in the hierarchy for the domain, level descriptions and relevant data and links.
+
+
+
+
+
+
+
+
+
+ SBRML
+
+
+ 1.0
+ Systems Biology Result Markup Language (SBRML), the standard XML format for simulated or calculated results (e.g. trajectories) of systems biology models.
+
+
+
+
+
+
+
+
+
+
+
+ BioPAX
+
+ BioPAX is an exchange format for pathway data, with its data model defined in OWL.
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+ EBI Application Result XML
+
+
+
+ EBI Application Result XML is a format returned by sequence similarity search Web services at EBI.
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+ PSI MI XML (MIF)
+
+
+ 1.0
+ XML Molecular Interaction Format (MIF), standardised by HUPO PSI MI.
+ MIF
+
+
+
+
+
+
+
+
+
+
+
+ phyloXML
+
+
+ phyloXML is a standardised XML format for phylogenetic trees, networks, and associated data.
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+ NeXML
+
+
+ 1.0
+ NeXML is a standardised XML format for rich phyloinformatic data.
+
+
+
+
+
+
+
+
+
+
+
+ MAGE-ML
+
+
+
+
+
+
+
+
+ 1.0
+ MAGE-ML XML format for microarray expression data, standardised by MGED (now FGED).
+
+
+
+
+
+
+
+
+
+
+
+ MAGE-TAB
+
+
+
+
+
+
+
+
+ MAGE-TAB textual format for microarray expression data, standardised by MGED (now FGED).
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+ GCDML
+
+
+ GCDML XML format for genome and metagenome metadata according to MIGS/MIMS/MIMARKS information standards, standardised by the Genomic Standards Consortium (GSC).
+ 1.0
+
+
+
+
+
+
+
+
+
+
+
+ GTrack
+
+
+ 1.0
+ GTrack is an optimised tabular format for genome/sequence feature tracks unifying the power of other tabular formats (e.g. GFF3, BED, WIG).
+
+
+
+
+
+
+
+
+
+
+
+ Biological pathway or network report format
+
+
+
+
+
+
+
+ Data format for a report of information derived from a biological pathway or network.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Experiment annotation format
+
+
+
+
+
+
+
+ beta12orEarlier
+ Data format for annotation on a laboratory experiment.
+
+
+
+
+
+
+
+
+
+ Cytoband format
+
+
+
+
+
+
+
+
+ 1.2
+ Cytoband format for chromosome cytobands.
+ Reflects a UCSC Browser DB table.
+
+
+
+
+
+
+
+
+
+
+
+ CopasiML
+
+
+
+ 1.2
+ CopasiML, the native format of COPASI.
+
+
+
+
+
+
+
+
+
+
+
+ CellML
+
+
+ CellML, the format for mathematical models of biological and other networks.
+ 1.2
+
+
+
+
+
+
+
+
+
+
+
+ PSI MI TAB (MITAB)
+
+
+ 1.2
+ Tabular Molecular Interaction format (MITAB), standardised by HUPO PSI MI.
+
+
+
+
+
+
+
+
+
+
+
+ PSI-PAR
+
+ Protein affinity format (PSI-PAR), standardised by HUPO PSI MI. It is compatible with PSI MI XML (MIF) and uses the same XML Schema.
+ 1.2
+
+
+
+
+
+
+
+
+
+
+
+ mzML
+
+
+ mzML is the successor and unifier of the mzData format developed by PSI and mzXML developed at the Seattle Proteome Center.
+ 1.2
+ mzML format for raw spectrometer output data, standardised by HUPO PSI MSS.
+
+
+
+
+
+
+
+
+
+
+
+ Mass spectrometry data format
+
+
+
+
+
+
+
+ 1.2
+ Format for mass spectrometry data.
+
+
+
+
+
+
+
+
+
+ TraML
+
+
+ TraML (Transition Markup Language) is the format for mass spectrometry transitions, standardised by HUPO PSI MSS.
+ 1.2
+
+
+
+
+
+
+
+
+
+
+
+ mzIdentML
+
+
+ mzIdentML is the exchange format for peptides and proteins identified from mass spectra, standardised by HUPO PSI PI. It can be used for outputs of proteomics search engines.
+ 1.2
+
+
+
+
+
+
+
+
+
+
+
+ mzQuantML
+
+
+ mzQuantML is the format for quantitation values associated with peptides, proteins and small molecules from mass spectra, standardised by HUPO PSI PI. It can be used for outputs of quantitation software for proteomics.
+ 1.2
+
+
+
+
+
+
+
+
+
+
+
+ GelML
+
+
+ 1.2
+ GelML is the format for describing the process of gel electrophoresis, standardised by HUPO PSI PS.
+
+
+
+
+
+
+
+
+
+
+
+ spML
+
+
+ 1.2
+ spML is the format for describing proteomics sample processing, other than using gels, prior to mass spectrometric protein identification, standardised by HUPO PSI PS. It may also be applicable for metabolomics.
+
+
+
+
+
+
+
+
+
+
+
+ OWL Functional Syntax
+
+
+ A human-readable encoding for the Web Ontology Language (OWL).
+ 1.2
+
+
+
+
+
+
+
+
+
+ Manchester OWL Syntax
+
+
+ A syntax for writing OWL class expressions.
+ 1.2
+ This format was influenced by the OWL Abstract Syntax and the DL style syntax.
+
+
+
+
+
+
+
+
+
+ KRSS2 Syntax
+
+
+ This format is used in Protege 4.
+ A superset of the "Description-Logic Knowledge Representation System Specification from the KRSS Group of the ARPA Knowledge Sharing Effort".
+ 1.2
+
+
+
+
+
+
+
+
+
+ Turtle
+
+
+ The SPARQL Query Language incorporates a very similar syntax.
+ 1.2
+ The Terse RDF Triple Language (Turtle) is a human-friendly serialization format for RDF (Resource Description Framework) graphs.
+
+
+
+
+
+
+
+
+
+ N-Triples
+
+
+ N-Triples should not be confused with Notation 3 which is a superset of Turtle.
+ 1.2
+ A plain text serialisation format for RDF (Resource Description Framework) graphs, and a subset of the Turtle (Terse RDF Triple Language) format.
+
+
+
+
+
+
+
+
+
+ Notation3
+
+
+ N3
+ A shorthand non-XML serialization of Resource Description Framework model, designed with human-readability in mind.
+
+
+
+
+
+
+
+
+
+ RDF/XML
+
+
+
+ RDF
+ Resource Description Framework (RDF) XML format.
+ 1.2
+ http://www.ebi.ac.uk/SWO/data/SWO_3000006
+ RDF/XML is a serialization syntax for OWL DL, but not for OWL Full.
+
+
+
+
+
+
+
+
+
+ OWL/XML
+
+
+ OWL ontology XML serialisation format.
+ 1.2
+ OWL
+
+
+
+
+
+
+
+
+
+ A2M
+
+
+ The A2M format is used as the primary format for multiple alignments of protein or nucleic-acid sequences in the SAM suite of tools. It is a small modification of FASTA format for sequences and is compatible with most tools that read FASTA.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+
+ SFF
+
+
+ Standard flowgram format
+ Standard flowgram format (SFF) is a binary file format used to encode results of pyrosequencing from the 454 Life Sciences platform for high-throughput sequencing.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+
+ MAP
+
+ The MAP file describes SNPs and is used by the Plink package.
+ 1.3
+ Plink MAP
+
+
+
+
+
+
+
+
+
+
+ PED
+
+ Plink PED
+ 1.3
+ The PED file describes individuals and genetic data and is used by the Plink package.
+
+
+
+
+
+
+
+
+
+
+ Individual genetic data format
+
+ Data format for a metadata on an individual and their genetic data.
+ 1.3
+
+
+
+
+
+
+
+
+
+ PED/MAP
+
+
+ The PED/MAP file describes data used by the Plink package.
+ Plink PED/MAP
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ CT
+
+
+ File format of a CT (Connectivity Table) file from the RNAstructure package.
+ beta12orEarlier
+ Connect format
+ Connectivity Table file format
+
+
+
+
+
+
+
+
+
+
+
+ SS
+
+
+ beta12orEarlier
+ XRNA old input style format.
+
+
+
+
+
+
+
+
+
+
+ RNAML
+
+
+
+ RNA Markup Language.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ GDE
+
+
+ Format for the Genetic Data Environment (GDE).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ BLC
+
+ 1.3
+ Block file format
+ A multiple alignment in vertical format, as used in the AMPS (Alignment of Multiple Protein Sequences) package.
+
+
+
+
+
+
+
+
+
+
+ Data index format
+
+
+
+
+
+
+
+
+ 1.3
+
+
+
+
+
+
+
+
+
+ BAI
+
+
+
+
+
+
+
+ 1.3
+ BAM indexing format
+
+
+
+
+
+
+
+
+
+
+ HMMER2
+
+ HMMER profile HMM file for HMMER versions 2.x
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ HMMER3
+
+ 1.3
+ HMMER profile HMM file for HMMER versions 3.x
+
+
+
+
+
+
+
+
+
+
+ PO
+
+ EMBOSS simple sequence pair alignment format.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ BLAST XML results format
+
+
+ XML format as produced by the NCBI Blast package
+ 1.3
+
+
+
+
+
+
+
+
+
+ CRAM
+
+
+ Reference-based compression of alignment format
+ http://www.ebi.ac.uk/ena/software/cram-usage#format_specification http://samtools.github.io/hts-specs/CRAMv2.1.pdf
+ http://www.ebi.ac.uk/ena/software/cram-usage#format_specification http://samtools.github.io/hts-specs/CRAMv2.1.pdf
+ 1.7
+
+
+
+
+
+
+
+
+
+ JSON
+
+ 1.7
+ JavaScript Object Notation format; a lightweight, text-based format to represent structured data using key-value pairs.
+
+
+
+
+
+
+
+
+
+ EPS
+
+ Encapsulated PostScript format
+ 1.7
+
+
+
+
+
+
+
+
+
+ GIF
+
+ 1.7
+ Graphics Interchange Format.
+
+
+
+
+
+
+
+
+
+ xls
+
+
+ Microsoft Excel spreadsheet format.
+ Microsoft Excel format
+ 1.7
+
+
+
+
+
+
+
+
+
+ TSV
+
+ Tabular format
+ http://filext.com/file-extension/CSV
+ http://www.iana.org/assignments/media-types/text/csv
+ Tabular data represented as tab-separated values in a text file.
+ 1.7
+ http://filext.com/file-extension/TSV
+ CSV
+
+
+
+
+
+
+
+
+
+ Gene expression data format
+
+ true
+ 1.10
+ 1.7
+ Format of a file of gene expression data, e.g. a gene expression matrix or profile.
+
+
+
+
+
+
+
+
+
+ Cytoscape input file format
+
+
+ Format of the cytoscape input file of gene expression ratios or values are specified over one or more experiments.
+ 1.7
+
+
+
+
+
+
+
+
+
+ ebwt
+
+
+
+
+
+
+
+ https://github.com/BenLangmead/bowtie/blob/master/MANUAL
+ Bowtie index format
+ 1.7
+ Bowtie format for indexed reference genome for "small" genomes.
+
+
+
+
+
+
+
+
+
+ RSF
+
+ http://www.molbiol.ox.ac.uk/tutorials/Seqlab_GCG.pdf
+ RSF-format files contain one or more sequences that may or may not be related. In addition to the sequence data, each sequence can be annotated with descriptive sequence information (from the GCG manual).
+ Rich sequence format.
+ 1.7
+ GCG RSF
+
+
+
+
+
+
+
+
+
+ GCG format variant
+
+
+
+ 1.7
+ Some format based on the GCG format.
+
+
+
+
+
+
+
+
+
+ BSML
+
+
+ http://rothlab.ucdavis.edu/genhelp/chapter_2_using_sequences.html#_Creating_and_Editing_Single_Sequenc
+ Bioinformatics Sequence Markup Language format.
+ 1.7
+
+
+
+
+
+
+
+
+
+ ebwtl
+
+
+
+
+
+
+
+ 1.7
+ https://github.com/BenLangmead/bowtie/blob/master/MANUAL
+ Bowtie long index format
+ Bowtie format for indexed reference genome for "large" genomes.
+
+
+
+
+
+
+
+
+
+ Ensembl variation file format
+
+
+ Ensembl standard format for variation data.
+ 1.8
+
+
+
+
+
+
+
+
+
+
+ docx
+
+
+ 1.8
+ Microsoft Word format
+ doc
+ Microsoft Word format.
+
+
+
+
+
+
+
+
+
+ Document format
+
+ Portable Document Format
+ Microsoft Word format
+ Format of documents including word processor, spreadsheet and presentation.
+ 1.8
+ doc
+
+
+
+
+
+
+
+
+
+ PDF
+
+
+ 1.8
+ Portable Document Format
+
+
+
+
+
+
+
+
+
+ Image format
+
+
+
+
+
+
+
+ Format used for images and image metadata.
+ 1.9
+
+
+
+
+
+
+
+
+
+ DICOM format
+
+
+ 1.9
+ Medical image format corresponding to the Digital Imaging and Communications in Medicine (DICOM) standard.
+
+
+
+
+
+
+
+
+
+
+
+
+ nii
+
+
+ Medical image and metadata format of the Neuroimaging Informatics Technology Initiative.
+
+
+ NIfTI-1 format
+ 1.9
+
+
+
+
+
+
+
+
+
+
+ mhd
+
+
+ MetaImage format
+ 1.9
+ Text-based tagged file format for medical images generated using the MetaImage software package.
+
+
+
+
+
+
+
+
+
+
+ nrrd
+
+
+ 1.9
+ Nearly Raw Raster Data format designed to support scientific visualization and image processing involving N-dimensional raster data.
+
+
+
+
+
+
+
+
+
+
+ R file format
+
+ File format used for scripts written in the R programming language for execution within the R software environment, typically for statistical computation and graphics.
+
+ 1.9
+
+
+
+
+
+
+
+
+
+ SPSS
+
+ 1.9
+ File format used for scripts for the Statistical Package for the Social Sciences.
+
+
+
+
+
+
+
+
+
+
+ MHT
+ MIME HTML format for Web pages, which can include external resources, including images, Flash animations and so on.
+
+ EMBL entry format wrapped in HTML elements.
+ 1.9
+ MHTML
+
+
+
+
+
+
+
+
+
+ IDAT
+
+
+
+
+
+
+
+
+ Proprietary file format for (raw) BeadArray data used by genomewide profiling platforms from Illumina Inc. This format is output directly from the scanner and stores summary intensities for each probe-type on an array.
+ 1.10
+
+
+
+
+
+
+
+
+
+ JPG
+
+
+ 1.10
+ Joint Picture Group file format for lossy graphics file.
+
+ Sequence of segments with markers. Begins with byte of 0xFF and follows by marker type.
+
+
+
+
+
+
+
+
+
+
+ rcc
+
+
+ 1.10
+ Reporter Code Count-A data file (.csv) output by the Nanostring nCounter Digital Analyzer, which contains gene sample information, probe information and probe counts.
+
+
+
+
+
+
+
+
+
+ arff
+
+ ARFF (Attribute-Relation File Format) is an ASCII text file format that describes a list of instances sharing a set of attributes.
+ 1.11
+ This file format is for machine learning.
+
+
+
+
+
+
+
+
+
+
+
+ afg
+
+
+ 1.11
+ AFG is a single text-based file assembly format that holds read and consensus information together
+
+
+
+
+
+
+
+
+
+
+
+ bedgraph
+
+
+ Holds a tab-delimited chromosome /start /end / datavalue dataset.
+ 1.11
+ The bedGraph format allows display of continuous-valued data in track format. This display type is useful for probability scores and transcriptome data
+
+
+
+
+
+
+
+
+
+
+
+ bedstrict
+
+ Browser Extensible Data (BED) format of sequence annotation track that strictly does not contain non-standard fields beyond the first 3 columns.
+ Galaxy allows BED files to contain non-standard fields beyond the first 3 columns, some other implementations do not.
+ 1.11
+
+
+
+
+
+
+
+
+
+
+
+ bed6
+
+ Tab delimited data in strict BED format - no non-standard columns allowed; column count forced to 6
+ BED file format where each feature is described by chromosome, start, end, name, score, and strand.
+ 1.11
+
+
+
+
+
+
+
+
+
+
+
+ bed12
+
+ 1.11
+ Tab delimited data in strict BED format - no non-standard columns allowed; column count forced to 12
+ A BED file where each feature is described by all twelve columns.
+
+
+
+
+
+
+
+
+
+
+
+ chrominfo
+
+
+ 1.11
+ Tabular format of chromosome names and sizes used by Galaxy.
+ Galaxy allows BED files to contain non-standard fields beyond the first 3 columns, some other implementations do not.
+
+
+
+
+
+
+
+
+
+
+
+ customtrack
+
+
+ 1.11
+ Custom Sequence annotation track format used by Galaxy.
+ Used for tracks/track views within galaxy.
+
+
+
+
+
+
+
+
+
+
+
+ csfasta
+
+
+ Color space FASTA format sequence variant.
+ 1.3
+ FASTA format extended for color space information.
+
+
+
+
+
+
+
+
+
+
+
+ hdf5
+
+ An HDF5 file appears to the user as a directed graph. The nodes of this graph are the higher-level HDF5 objects that are exposed by the HDF5 APIs: Groups, Datasets, Named datatypes. H5py uses straightforward NumPy and Python metaphors, like dictionary and NumPy array syntax.
+ 1.11
+ h5
+ Binary format used by Galaxy for hierarchical data.
+
+
+
+
+
+
+
+
+
+
+
+ tiff
+
+
+ The TIFF format is perhaps the most versatile and diverse bitmap format in existence. Its extensible nature and support for numerous data compression schemes allow developers to customize the TIFF format to fit any peculiar data storage needs.
+
+ A versatile bitmap format.
+ 1.11
+
+
+
+
+
+
+
+
+
+
+ bmp
+
+
+ Standard bitmap storage format in the Microsoft Windows environment.
+ 1.11
+ Although it is based on Windows internal bitmap data structures, it is supported by many non-Windows and non-PC applications.
+
+
+
+
+
+
+
+
+
+
+ im
+
+
+ IM is a format used by LabEye and other applications based on the IFUNC image processing library.
+ IFUNC library reads and writes most uncompressed interchange versions of this format.
+
+ 1.11
+
+
+
+
+
+
+
+
+
+
+ pcd
+
+
+ PCD was developed by Kodak. A PCD file contains five different resolution (ranging from low to high) of a slide or film negative. Due to it PCD is often used by many photographers and graphics professionals for high-end printed applications.
+ 1.11
+ Photo CD format, which is the highest resolution format for images on a CD.
+
+
+
+
+
+
+
+
+
+
+ pcx
+
+
+ 1.11
+ PCX is an image file format that uses a simple form of run-length encoding. It is lossless.
+
+
+
+
+
+
+
+
+
+
+
+ ppm
+
+
+ The PPM format is a lowest common denominator color image file format.
+
+ 1.11
+
+
+
+
+
+
+
+
+
+
+ psd
+
+
+ 1.11
+ PSD (Photoshop Document) is a proprietary file that allows the user to work with the images’ individual layers even after the file has been saved.
+
+
+
+
+
+
+
+
+
+
+ xbm
+
+
+ The XBM format was replaced by XPM for X11 in 1989.
+ 1.11
+ X BitMap is a plain text binary image format used by the X Window System used for storing cursor and icon bitmaps used in the X GUI.
+
+
+
+
+
+
+
+
+
+
+ xpm
+
+
+ 1.11
+ Sequence of segments with markers. Begins with byte of 0xFF and follows by marker type.
+ X PixMap (XPM) is an image file format used by the X Window System, it is intended primarily for creating icon pixmaps, and supports transparent pixels.
+
+
+
+
+
+
+
+
+
+
+
+ rgb
+
+
+ 1.11
+ RGB file format is the native raster graphics file format for Silicon Graphics workstations.
+
+
+
+
+
+
+
+
+
+
+
+ pbm
+
+
+ 1.11
+ The PBM format is a lowest common denominator monochrome file format. It serves as the common language of a large family of bitmap image conversion filters.
+
+
+
+
+
+
+
+
+
+
+
+ pgm
+
+
+ It is designed to be extremely easy to learn and write programs for.
+ The PGM format is a lowest common denominator grayscale file format.
+
+ 1.11
+
+
+
+
+
+
+
+
+
+
+ png
+
+
+ 1.11
+ PNG is a file format for image compression.
+
+ It is expected to replace the Graphics Interchange Format (GIF).
+
+
+
+
+
+
+
+
+
+
+ svg
+
+
+ The SVG specification is an open standard developed by the World Wide Web Consortium (W3C) since 1999.
+ Scalable Vector Graphics (SVG) is an XML-based vector image format for two-dimensional graphics with support for interactivity and animation.
+ 1.11
+
+
+
+
+
+
+
+
+
+
+ rast
+
+
+ Sun Raster is a raster graphics file format used on SunOS by Sun Microsystems
+ 1.11
+ The SVG specification is an open standard developed by the World Wide Web Consortium (W3C) since 1999.
+
+
+
+
+
+
+
+
+
+
+ Sequence quality report format (text)
+
+
+
+
+
+
+
+
+ Textual report format for sequence quality for reports from sequencing machines.
+ 1.11
+
+
+
+
+
+
+
+
+
+ qual
+
+
+ http://en.wikipedia.org/wiki/Phred_quality_score
+ 1.11
+ Phred quality scores are defined as a property which is logarithmically related to the base-calling error probabilities.
+ FASTQ format subset for Phred sequencing quality score data only (no sequences).
+
+
+
+
+
+
+
+
+
+ qualsolexa
+
+
+ Solexa/Illumina 1.0 format can encode a Solexa/Illumina quality score from -5 to 62 using ASCII 59 to 126 (although in raw read data Solexa scores from -5 to 40 only are expected)
+ 1.11
+ FASTQ format subset for Phred sequencing quality score data only (no sequences) for Solexa/Illumina 1.0 format.
+
+
+
+
+
+
+
+
+
+ qualillumina
+
+
+ Starting in Illumina 1.5 and before Illumina 1.8, the Phred scores 0 to 2 have a slightly different meaning. The values 0 and 1 are no longer used and the value 2, encoded by ASCII 66 "B", is used also at the end of reads as a Read Segment Quality Control Indicator.
+ FASTQ format subset for Phred sequencing quality score data only (no sequences) from Illumina 1.5 and before Illumina 1.8.
+ 1.11
+ http://en.wikipedia.org/wiki/Phred_quality_score
+
+
+
+
+
+
+
+
+
+ qualsolid
+
+ For SOLiD data, the sequence is in color space, except the first position. The quality values are those of the Sanger format.
+ FASTQ format subset for Phred sequencing quality score data only (no sequences) for SOLiD data.
+ 1.11
+ http://en.wikipedia.org/wiki/Phred_quality_score
+
+
+
+
+
+
+
+
+
+ qual454
+
+ http://en.wikipedia.org/wiki/Phred_quality_score
+ 1.11
+ FASTQ format subset for Phred sequencing quality score data only (no sequences) from 454 sequencers.
+
+
+
+
+
+
+
+
+
+ ENCODE peak format
+
+ 1.11
+ Human ENCODE peak format.
+ Format that covers both the broad peak format and narrow peak format from ENCODE.
+
+
+
+
+
+
+
+
+
+
+
+ ENCODE narrow peak format
+
+ 1.11
+ Human ENCODE narrow peak format.
+ Format that covers both the broad peak format and narrow peak format from ENCODE.
+
+
+
+
+
+
+
+
+
+
+
+ ENCODE broad peak format
+
+ 1.11
+ Human ENCODE broad peak format.
+
+
+
+
+
+
+
+
+
+
+
+ bgzip
+
+
+ BAM files are compressed using a variant of GZIP (GNU ZIP), into a format called BGZF (Blocked GNU Zip Format).
+ Blocked GNU Zip format.
+ 1.11
+
+
+
+
+
+
+
+
+
+
+ tabix
+
+
+ TAB-delimited genome position file index format.
+ 1.11
+
+
+
+
+
+
+
+
+
+
+
+ Graph format
+
+ Data format for graph data.
+ 1.11
+
+
+
+
+
+
+
+
+
+ xgmml
+
+ XML-based format used to store graph descriptions within Galaxy.
+ 1.11
+
+
+
+
+
+
+
+
+
+
+ sif
+
+ 1.11
+ SIF (simple interaction file) Format - a network/pathway format used for instance in cytoscape.
+
+
+
+
+
+
+
+
+
+
+ xlsx
+
+
+ 1.11
+ MS Excel spreadsheet format consisting of a set of XML documents stored in a ZIP-compressed file.
+
+
+
+
+
+
+
+
+
+ SQLite
+
+ https://www.sqlite.org/fileformat2.html
+ Data format used by the SQLite database.
+ 1.11
+
+
+
+
+
+
+
+
+
+ GeminiSQLite
+
+ https://gemini.readthedocs.org/en/latest/content/quick_start.html
+ 1.11
+ Data format used by the SQLite database conformant to the Gemini schema.
+
+
+
+
+
+
+
+
+
+ Index format
+
+
+
+
+
+
+
+
+ Format of a data index of some type.
+ 1.11
+
+
+
+
+
+
+
+
+
+ snpeffdb
+
+ An index of a genome database, indexed for use by the snpeff tool.
+ 1.11
+
+
+
+
+
+
+
+
+
+ Operation
+
+
+ http://www.onto-med.de/ontologies/gfo.owl#Perpetuant
+ Computational tool
+ A function that processes a set of inputs and results in a set of outputs, or associates arguments (inputs) with values (outputs). Special cases are: a) An operation that consumes no input (has no input arguments). Such operation is either a constant function, or an operation depending only on the underlying state. b) An operation that may modify the underlying state but has no output. c) The singular-case operation with no input or output, that still may modify the underlying state.
+ Function
+ http://purl.org/biotop/biotop.owl#Function
+ http://www.ifomis.org/bfo/1.1/snap#Function
+ http://en.wikipedia.org/wiki/Function_(mathematics)
+ Computational method
+ http://semanticscience.org/resource/SIO_000017
+ http://www.ebi.ac.uk/swo/SWO_0000003
+ Mathematical operation
+ sumo:Function
+ beta12orEarlier
+ Process
+ Computational operation
+ Computational subroutine
+ http://semanticscience.org/resource/SIO_000649
+ http://www.ifomis.org/bfo/1.1/span#Process
+ http://www.ifomis.org/bfo/1.1/snap#Continuant
+ http://onto.eva.mpg.de/ontologies/gfo-bio.owl#Method
+ Computational procedure
+ Mathematical function
+ Lambda abstraction
+ Function (programming)
+ http://www.onto-med.de/ontologies/gfo.owl#Process
+ http://www.loa-cnr.it/ontologies/DOLCE-Lite.owl#quality
+ http://wsio.org/operation_001
+ http://www.loa-cnr.it/ontologies/DOLCE-Lite.owl#process
+ http://www.ifomis.org/bfo/1.1/snap#Quality
+ http://www.onto-med.de/ontologies/gfo.owl#Function
+ http://en.wikipedia.org/wiki/Function_(computer_science)
+ http://en.wikipedia.org/wiki/Subroutine
+
+
+
+
+ Process
+ Process can have a function (as its quality/attribute), and can also perform an operation with inputs and outputs.
+
+
+
+
+ Computational tool provides one or more operations.
+ Computational tool
+
+
+
+
+ Function
+ Operation is a function that is computational. It typically has input(s) and output(s), which are always data.
+
+
+
+
+
+
+
+
+
+ Query and retrieval
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Query
+ Retrieval
+ Search or query a data resource and retrieve entries and / or annotation.
+ Database retrieval
+ Search
+
+
+
+
+
+
+
+
+
+ Data retrieval (database cross-reference)
+
+ beta12orEarlier
+ Search database to retrieve all relevant references to a particular entity or entry.
+ true
+ beta13
+
+
+
+
+
+
+
+
+
+ Annotation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Annotate an entity (typically a biological or biomedical database entity) with terms from a controlled vocabulary.
+ beta12orEarlier
+ This is a broad concept and is used a placeholder for other, more specific concepts.
+
+
+
+
+
+
+
+
+
+ Indexing
+
+
+
+
+
+
+
+ Data indexing
+ beta12orEarlier
+ Generate an index of (typically a file of) biological data.
+ Database indexing
+
+
+
+
+
+
+
+
+
+ Data index analysis
+
+ Database index analysis
+ Analyse an index of biological data.
+ beta12orEarlier
+ true
+ 1.6
+
+
+
+
+
+
+
+
+
+ Annotation retrieval (sequence)
+
+ true
+ beta12orEarlier
+ Retrieve basic information about a molecular sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence generation
+
+
+ beta12orEarlier
+ Generate a molecular sequence by some means.
+
+
+
+
+
+
+
+
+
+ Sequence editing
+
+
+ Edit or change a molecular sequence, either randomly or specifically.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence merging
+
+ beta12orEarlier
+ Merge two or more (typically overlapping) molecular sequences.
+ Sequence splicing
+
+
+
+
+
+
+
+
+
+ Sequence conversion
+
+
+ Convert a molecular sequence from one type to another.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence complexity calculation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate sequence complexity, for example to find low-complexity regions in sequences.
+
+
+
+
+
+
+
+
+
+ Sequence ambiguity calculation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Calculate sequence ambiguity, for example identity regions in protein or nucleotide sequences with many ambiguity codes.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence composition calculation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate character or word composition or frequency of a molecular sequence.
+
+
+
+
+
+
+
+
+
+ Repeat sequence analysis
+
+
+
+
+
+
+
+ Find and/or analyse repeat sequences in (typically nucleotide) sequences.
+ beta12orEarlier
+ Repeat sequences include tandem repeats, inverted or palindromic repeats, DNA microsatellites (Simple Sequence Repeats or SSRs), interspersed repeats, maximal duplications and reverse, complemented and reverse complemented repeats etc. Repeat units can be exact or imperfect, in tandem or dispersed, of specified or unspecified length.
+
+
+
+
+
+
+
+
+
+ Sequence motif discovery
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Motifs and patterns might be conserved or over-represented (occur with improbable frequency).
+ beta12orEarlier
+ Discover new motifs or conserved patterns in sequences or sequence alignments (de-novo discovery).
+ Motif discovery
+
+
+
+
+
+
+
+
+
+ Sequence signature recognition
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Motif search
+ Sequence motif search
+ Protein secondary database search
+ Motif detection
+ Sequence motif recognition
+ Sequence signature detection
+ Sequence profile search
+ Find (scan for) known motifs, patterns and regular expressions in molecular sequence(s).
+ Sequence motif detection
+ Motif recognition
+
+
+
+
+
+
+
+
+
+ Sequence motif comparison
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Find motifs shared by molecular sequences.
+
+
+
+
+
+
+
+
+
+ Transcription regulatory sequence analysis
+
+ beta12orEarlier
+ beta13
+ Analyse the sequence, conformational or physicochemical properties of transcription regulatory elements in DNA sequences.
+ For example transcription factor binding sites (TFBS) analysis to predict accessibility of DNA to binding factors.
+ true
+
+
+
+
+
+
+
+
+
+ Conserved transcription regulatory sequence identification
+
+
+ For example cross-species comparison of transcription factor binding sites (TFBS). Methods might analyse co-regulated or co-expressed genes, or sets of oppositely expressed genes.
+ beta12orEarlier
+ Identify common, conserved (homologous) or synonymous transcriptional regulatory motifs (transcription factor binding sites).
+
+
+
+
+
+
+
+
+
+ Protein property calculation (from structure)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This might be a residue-level search for properties such as solvent accessibility, hydropathy, secondary structure, ligand-binding etc.
+ Extract, calculate or predict non-positional (physical or chemical) properties of a protein from processing a protein (3D) structure.
+ beta12orEarlier
+ Protein structural property calculation
+
+
+
+
+
+
+
+
+
+ Protein flexibility and motion analysis
+
+
+ beta12orEarlier
+ Analyse flexibility and motion in protein structure.
+ Use this concept for analysis of flexible and rigid residues, local chain deformability, regions undergoing conformational change, molecular vibrations or fluctuational dynamics, domain motions or other large-scale structural transitions in a protein structure.
+
+
+
+
+
+
+
+
+
+ Protein structural motif recognition
+
+
+
+
+
+
+
+
+ Identify or screen for 3D structural motifs in protein structure(s).
+ This includes conserved substructures and conserved geometry, such as spatial arrangement of secondary structure or protein backbone. Methods might use structure alignment, structural templates, searches for similar electrostatic potential and molecular surface shape, surface-mapping of phylogenetic information etc.
+ beta12orEarlier
+ Protein structural feature identification
+
+
+
+
+
+
+
+
+
+ Protein domain recognition
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identify structural domains in a protein structure from first principles (for example calculations on structural compactness).
+
+
+
+
+
+
+
+
+
+ Protein architecture analysis
+
+ beta12orEarlier
+ Analyse the architecture (spatial arrangement of secondary structure) of protein structure(s).
+
+
+
+
+
+
+
+
+
+ Residue interaction calculation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ WHATIF: SymShellTenXML
+ WHATIF:ListContactsRelaxed
+ WHATIF: SymShellTwoXML
+ WHATIF:ListSideChainContactsRelaxed
+ beta12orEarlier
+ WHATIF:ListSideChainContactsNormal
+ WHATIF:ListContactsNormal
+ Calculate or extract inter-atomic, inter-residue or residue-atom contacts, distances and interactions in protein structure(s).
+ WHATIF: SymShellFiveXML
+ WHATIF: SymShellOneXML
+
+
+
+
+
+
+
+
+
+ Torsion angle calculation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate, visualise or analyse phi/psi angles of a protein structure.
+
+
+
+
+
+
+
+
+
+ Protein property calculation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Calculate (or predict) physical or chemical properties of a protein, including any non-positional properties of the molecular sequence, from processing a protein sequence.
+ This includes methods to render and visualise the properties of a protein sequence.
+ beta12orEarlier
+ Protein property rendering
+
+
+
+
+
+
+
+
+
+ Peptide immunogenicity prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ This is usually done in the development of peptide-specific antibodies or multi-epitope vaccines. Methods might use sequence data (for example motifs) and / or structural data.
+ Predict antigenicity, allergenicity / immunogenicity, allergic cross-reactivity etc of peptides and proteins.
+
+
+
+
+
+
+
+
+
+ Sequence feature detection
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sequence feature prediction
+ Predict, recognise and identify positional features in molecular sequences such as key functional sites or regions.
+ Sequence feature recognition
+ beta12orEarlier
+ Motif database search
+ SO:0000110
+
+
+
+
+
+
+
+
+
+ Data retrieval (feature table)
+
+ beta13
+ Extract a sequence feature table from a sequence database entry.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Feature table query
+
+ 1.6
+ beta12orEarlier
+ true
+ Query the features (in a feature table) of molecular sequence(s).
+
+
+
+
+
+
+
+
+
+ Sequence feature comparison
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Compare the feature tables of two or more molecular sequences.
+ Feature comparison
+ Feature table comparison
+
+
+
+
+
+
+
+
+
+ Data retrieval (sequence alignment)
+
+ beta12orEarlier
+ true
+ beta13
+ Display basic information about a sequence alignment.
+
+
+
+
+
+
+
+
+
+ Sequence alignment analysis
+
+
+
+
+
+
+
+ Analyse a molecular sequence alignment.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence alignment comparison
+
+
+ Compare (typically by aligning) two molecular sequence alignments.
+ beta12orEarlier
+ See also 'Sequence profile alignment'.
+
+
+
+
+
+
+
+
+
+ Sequence alignment conversion
+
+
+ beta12orEarlier
+ Convert a molecular sequence alignment from one type to another (for example amino acid to coding nucleotide sequence).
+
+
+
+
+
+
+
+
+
+ Nucleic acid property processing
+
+ beta12orEarlier
+ true
+ Process (read and / or write) physicochemical property data of nucleic acids.
+ beta13
+
+
+
+
+
+
+
+
+
+ Nucleic acid property calculation
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate or predict physical or chemical properties of nucleic acid molecules, including any non-positional properties of the molecular sequence.
+
+
+
+
+
+
+
+
+
+ Splice transcript prediction
+
+
+
+
+
+
+
+ beta12orEarlier
+ Predict splicing alternatives or transcript isoforms from analysis of sequence data.
+
+
+
+
+
+
+
+
+
+ Frameshift detection
+
+
+
+
+
+
+
+
+ Detect frameshifts in DNA sequences, including frameshift sites and signals, and frameshift errors from sequencing projects.
+ Frameshift error detection
+ beta12orEarlier
+ Methods include sequence alignment (if related sequences are available) and word-based sequence comparison.
+
+
+
+
+
+
+
+
+
+ Vector sequence detection
+
+
+ beta12orEarlier
+ Detect vector sequences in nucleotide sequence, typically by comparison to a set of known vector sequences.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure prediction
+
+
+
+
+
+
+
+
+
+ Methods might use amino acid composition, local sequence information, multiple sequence alignments, physicochemical features, estimated energy content, statistical algorithms, hidden Markov models, support vector machines, kernel machines, neural networks etc.
+ Predict secondary structure of protein sequences.
+ Secondary structure prediction (protein)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein super-secondary structure prediction
+
+
+
+
+
+
+
+ beta12orEarlier
+ Predict super-secondary structure of protein sequence(s).
+ Super-secondary structures include leucine zippers, coiled coils, Helix-Turn-Helix etc.
+
+
+
+
+
+
+
+
+
+ Transmembrane protein prediction
+
+
+ Predict and/or classify transmembrane proteins or transmembrane (helical) domains or regions in protein sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Transmembrane protein analysis
+
+
+
+
+
+
+
+ beta12orEarlier
+ Analyse transmembrane protein(s), typically by processing sequence and / or structural data, and write an informative report for example about the protein and its transmembrane domains / regions.
+ Use this (or child) concept for analysis of transmembrane domains (buried and exposed faces), transmembrane helices, helix topology, orientation, inter-helical contacts, membrane dipping (re-entrant) loops and other secondary structure etc. Methods might use pattern discovery, hidden Markov models, sequence alignment, structural profiles, amino acid property analysis, comparison to known domains or some combination (hybrid methods).
+
+
+
+
+
+
+
+
+
+ Structure prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Predict tertiary structure of a molecular (biopolymer) sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Residue interaction prediction
+
+
+
+
+
+
+
+
+ Methods usually involve multiple sequence alignment analysis.
+ Predict contacts, non-covalent interactions and distance (constraints) between amino acids in protein sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein interaction raw data analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Analyse experimental protein-protein interaction data from for example yeast two-hybrid analysis, protein microarrays, immunoaffinity chromatography followed by mass spectrometry, phage display etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein-protein interaction prediction (from protein sequence)
+
+
+ beta12orEarlier
+ Identify or predict protein-protein interactions, interfaces, binding sites etc in protein sequences.
+
+
+
+
+
+
+
+
+
+ Protein-protein interaction prediction (from protein structure)
+
+
+
+ beta12orEarlier
+ Identify or predict protein-protein interactions, interfaces, binding sites etc in protein structures.
+
+
+
+
+
+
+
+
+
+ Protein interaction network analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Analyse a network of protein interactions.
+
+
+
+
+
+
+
+
+
+ Protein interaction network comparison
+
+
+ beta12orEarlier
+ Compare two or more networks of protein interactions.
+
+
+
+
+
+
+
+
+
+ RNA secondary structure prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Predict RNA secondary structure (for example knots, pseudoknots, alternative structures etc).
+ beta12orEarlier
+ Methods might use RNA motifs, predicted intermolecular contacts, or RNA sequence-structure compatibility (inverse RNA folding).
+
+
+
+
+
+
+
+
+
+ Nucleic acid folding prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Analyse some aspect of RNA/DNA folding, typically by processing sequence and/or structural data.
+ Nucleic acid folding modelling
+ Nucleic acid folding
+
+
+
+
+
+
+
+
+
+ Data retrieval (restriction enzyme annotation)
+
+ beta13
+ Restriction enzyme information retrieval
+ true
+ Retrieve information on restriction enzymes or restriction enzyme sites.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Genetic marker identification
+
+ true
+ beta12orEarlier
+ beta13
+ Identify genetic markers in DNA sequences.
+ A genetic marker is any DNA sequence of known chromosomal location that is associated with and specific to a particular gene or trait. This includes short sequences surrounding a SNP, Sequence-Tagged Sites (STS) which are well suited for PCR amplification, a longer minisatellites sequence etc.
+
+
+
+
+
+
+
+
+
+ Genetic mapping
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ QTL mapping
+ This includes mapping of the genetic architecture of dynamic complex traits (functional mapping), e.g. by characterization of the underlying quantitative trait loci (QTLs) or nucleotides (QTNs).
+ Linkage mapping
+ Genetic map generation
+ Mapping involves ordering genetic loci along a chromosome and estimating the physical distance between loci. A genetic map shows the relative (not physical) position of known genes and genetic markers.
+ Generate a genetic (linkage) map of a DNA sequence (typically a chromosome) showing the relative positions of genetic markers based on estimation of non-physical distances.
+ Genetic map construction
+ Functional mapping
+
+
+
+
+
+
+
+
+
+ Linkage analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ For example, estimate how close two genes are on a chromosome by calculating how often they are transmitted together to an offspring, ascertain whether two genes are linked and parental linkage, calculate linkage map distance etc.
+ Analyse genetic linkage.
+
+
+
+
+
+
+
+
+
+ Codon usage table generation
+
+
+
+
+
+
+
+
+ Calculate codon usage statistics and create a codon usage table.
+ beta12orEarlier
+ Codon usage table construction
+
+
+
+
+
+
+
+
+
+ Codon usage table comparison
+
+
+ beta12orEarlier
+ Compare two or more codon usage tables.
+
+
+
+
+
+
+
+
+
+ Codon usage analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
 + Codon usage data analysis
+ Process (read and / or write) codon usage data, e.g. analyse codon usage tables or codon usage in molecular sequences.
 + Codon usage table analysis
+
+
+
+
+
+
+
+
+
+ Base position variability plotting
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Identify and plot third base position variability in a nucleotide sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence word comparison
+
+ Find exact character or word matches between molecular sequences without full sequence alignment.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence distance matrix generation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sequence distance matrix construction
+ Phylogenetic distance matrix generation
+ beta12orEarlier
+ Calculate a sequence distance matrix or otherwise estimate genetic distances between molecular sequences.
+
+
+
+
+
+
+
+
+
+ Sequence redundancy removal
+
+
+
+
+
+
+
+ beta12orEarlier
+ Compare two or more molecular sequences, identify and remove redundant sequences based on some criteria.
+
+
+
+
+
+
+
+
+
+ Sequence clustering
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ The clusters may be output or used internally for some other purpose.
+ Sequence cluster construction
+ beta12orEarlier
+ Build clusters of similar sequences, typically using scores from pair-wise alignment or other comparison of the sequences.
+ Sequence cluster generation
+
+
+
+
+
+
+
+
+
+ Sequence alignment
+
+
+
+
+
+
+
+
+
+ Sequence alignment construction
+ beta12orEarlier
+ Align (identify equivalent sites within) molecular sequences.
+ Sequence alignment generation
+ Sequence alignment computation
+
+
+
+
+
+
+
+
+
+ Hybrid sequence alignment construction
+
+ Hybrid sequence alignment
+ true
+ beta13
+ beta12orEarlier
+ Align two or more molecular sequences of different types (for example genomic DNA to EST, cDNA or mRNA).
+ Hybrid sequence alignment generation
+
+
+
+
+
+
+
+
+
+ Structure-based sequence alignment
+
+ Structure-based sequence alignment
+ Sequence alignment generation (structure-based)
+ Structure-based sequence alignment construction
+ beta12orEarlier
+ Sequence alignment (structure-based)
+ Structure-based sequence alignment generation
+ Align molecular sequences using sequence and structural information.
+
+
+
+
+
+
+
+
+
+ Structure alignment
+
+
+
+
+
+
+
+
+
+ Align (superimpose) molecular tertiary structures.
+ Structure alignment generation
+ Structure alignment construction
+ beta12orEarlier
+ Multiple structure alignment construction
+ Multiple structure alignment generation
+
+
+
+
+
+
+
+
+
+ Sequence profile generation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sequence profile construction
+ beta12orEarlier
+ Generate some type of sequence profile (for example a hidden Markov model) from a sequence alignment.
+
+
+
+
+
+
+
+
+
+ 3D profile generation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Structural profile generation
+ Generate some type of structural (3D) profile or template from a structure or structure alignment.
+ Structural profile construction
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Profile-to-profile alignment
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sequence profile alignment
+ beta12orEarlier
+ See also 'Sequence alignment comparison'.
+ Sequence profile alignment construction
+ Align sequence profiles (representing sequence alignments).
+ Sequence profile alignment generation
+
+
+
+
+
+
+
+
+
+ 3D profile-to-3D profile alignment
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ 3D profile alignment (multiple)
+ 3D profile alignment
+ Multiple 3D profile alignment construction
+ Structural profile alignment construction (multiple)
+ Structural profile alignment
+ Structural profile alignment generation
+ Structural profile alignment construction
+ Align structural (3D) profiles or templates (representing structures or structure alignments).
+
+
+
+
+
+
+
+
+
+ Sequence-to-profile alignment
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sequence-profile alignment construction
+ Sequence-profile alignment generation
+ beta12orEarlier
+ Align molecular sequence(s) to sequence profile(s).
+ Sequence-profile alignment
+ A sequence profile typically represents a sequence alignment. Methods might perform one-to-one, one-to-many or many-to-many comparisons.
+
+
+
+
+
+
+
+
+
+ Sequence-to-3D-profile alignment
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Sequence-3D profile alignment construction
+ Align molecular sequence(s) to structural (3D) profile(s) or template(s) (representing a structure or structure alignment).
+ Sequence-3D profile alignment generation
+ Methods might perform one-to-one, one-to-many or many-to-many comparisons.
+ Sequence-3D profile alignment
+
+
+
+
+
+
+
+
+
+ Protein threading
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Align molecular sequence to structure in 3D space (threading).
+ Use this concept for methods that evaluate sequence-structure compatibility by assessing residue interactions in 3D. Methods might perform one-to-one, one-to-many or many-to-many comparisons.
+ Sequence-structure alignment
+
+
+
+
+
+
+
+
+
+ Protein fold recognition
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Protein domain prediction
+ Methods use some type of mapping between sequence and fold, for example secondary structure prediction and alignment, profile comparison, sequence properties, homologous sequence search, kernel machines etc. Domains and folds might be taken from SCOP or CATH.
+ Recognize (predict and identify) known protein structural domains or folds in protein sequence(s).
+ Protein fold prediction
+
+
+
+
+
+
+
+
+
+ Metadata retrieval
+
+
+
+
+
+
+
+ Data retrieval (documentation)
+ Search for and retrieve data concerning or describing some core data, as distinct from the primary data that is being described.
+ Data retrieval (metadata)
+ beta12orEarlier
+ This includes documentation, general information and other metadata on entities such as databases, database entries and tools.
+
+
+
+
+
+
+
+
+
+ Literature search
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Query the biomedical and informatics literature.
+
+
+
+
+
+
+
+
+
+ Text mining
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Text data mining
+ beta12orEarlier
+ Process and analyse text (typically the biomedical and informatics literature) to extract information from it.
+
+
+
+
+
+
+
+
+
+ Virtual PCR
+
+
+
+
+
+
+
+ beta12orEarlier
+ Perform in-silico (virtual) PCR.
+
+
+
+
+
+
+
+
+
+ PCR primer design
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ PCR primer prediction
+ Primer design involves predicting or selecting primers that are specific to a provided PCR template. Primers can be designed with certain properties such as size of product desired, primer size etc. The output might be a minimal or overlapping primer set.
+ Design or predict oligonucleotide primers for PCR and DNA amplification etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Microarray probe design
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Predict and/or optimize oligonucleotide probes for DNA microarrays, for example for transcription profiling of genes, or for genomes and gene families.
+ beta12orEarlier
+ Microarray probe prediction
+
+
+
+
+
+
+
+
+
+ Sequence assembly
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ For example, assemble overlapping reads from paired-end sequencers into contigs (a contiguous sequence corresponding to read overlaps). Or assemble contigs, for example ESTs and genomic DNA fragments, depending on the detected fragment overlaps.
+ Combine (align and merge) overlapping fragments of a DNA sequence to reconstruct the original sequence.
+
+
+
+
+
+
+
+
+
+ Microarray data standardization and normalization
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Standardize or normalize microarray data.
+ This includes statistical analysis, for example of variability amongst microarrays experiments, comparison of heterogeneous microarray platforms etc.
+
+
+
+
+
+
+
+
+
+ Sequencing-based expression profile data processing
+
+ Process (read and / or write) SAGE, MPSS or SBS experimental data.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene expression profile clustering
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Perform cluster analysis of gene expression (microarray) data, for example clustering of similar gene expression profiles.
+
+
+
+
+
+
+
+
+
+ Gene expression profiling
+
+
+
+
+
+
+
+
+ Expression profiling
+ Gene expression profile construction
+ Functional profiling
+ Generate a gene expression profile or pattern, for example from microarray data.
+ beta12orEarlier
+ Gene expression profile generation
+
+
+
+
+
+
+
+
+
+ Gene expression profile comparison
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Compare gene expression profiles or patterns.
+
+
+
+
+
+
+
+
+
+ Functional profiling
+
+ true
+ beta12orEarlier
+ Interpret (in functional terms) and annotate gene expression data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ EST and cDNA sequence analysis
+
+ Analyse EST or cDNA sequences.
+ For example, identify full-length cDNAs from EST sequences or detect potential EST antisense transcripts.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Structural genomics target selection
+
+ beta12orEarlier
+ Identify and select targets for protein structural determination.
+ beta12orEarlier
+ Methods will typically navigate a graph of protein families of known structure.
+ true
+
+
+
+
+
+
+
+
+
+ Protein secondary structure assignment
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Assign secondary structure from protein coordinate or experimental data.
+
+
+
+
+
+
+
+
+
+ Protein structure assignment
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Assign a protein tertiary structure (3D coordinates) from raw experimental data.
+
+
+
+
+
+
+
+
+
+ Protein model validation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
 + Evaluate the quality or correctness of a protein three-dimensional model.
+ Model validation might involve checks for atomic packing, steric clashes (bumps), volume irregularities, agreement with electron density maps, number of amino acid residues, percentage of residues with missing or bad atoms, irregular Ramachandran Z-scores, irregular Chi-1 / Chi-2 normality scores, RMS-Z score on bonds and angles etc.
+ WHATIF: CorrectedPDBasXML
+ Protein structure validation
+ WHATIF: UseFileDB
+ The PDB file format has had difficulties, inconsistencies and errors. Corrections can include identifying a meaningful sequence, removal of alternate atoms, correction of nomenclature problems, removal of incomplete residues and spurious waters, addition or removal of water, modelling of missing side chains, optimisation of cysteine bonds, regularisation of bond lengths, bond angles and planarities etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Molecular model refinement
+
+
+ Protein model refinement
+ WHATIF: CorrectedPDBasXML
+ beta12orEarlier
+ Refine (after evaluation) a model of a molecular structure (typically a protein structure) to reduce steric clashes, volume irregularities etc.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Phylogenetic trees are usually constructed from a set of sequences from which an alignment (or data matrix) is calculated.
+ Phylogenetic tree construction
+ Construct a phylogenetic tree.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Analyse an existing phylogenetic tree or trees, typically to detect features or make predictions.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree comparison
+
+
+ beta12orEarlier
+ Compare two or more phylogenetic trees.
+ For example, to produce a consensus tree, subtrees, supertrees, calculate distances between trees or test topological similarity between trees (e.g. a congruence index) etc.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree editing
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Edit a phylogenetic tree.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic footprinting / shadowing
+
+
+
+
+
+
+
+ A phylogenetic 'shadow' represents the additive differences between individual sequences. By masking or 'shadowing' variable positions a conserved sequence is produced with few or none of the variations, which is then compared to the sequences of interest to identify significant regions of conservation.
+ beta12orEarlier
+ Infer a phylogenetic tree by comparing orthologous sequences in different species, particularly many closely related species (phylogenetic shadowing).
+
+
+
+
+
+
+
+
+
+ Protein folding simulation
+
+ beta12orEarlier
+ Simulate the folding of a protein.
+
+
+
+
+
+
+
+
+
+ Protein folding pathway prediction
+
+
+ Predict the folding pathway(s) or non-native structural intermediates of a protein.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein SNP mapping
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Map and model the effects of single nucleotide polymorphisms (SNPs) on protein structure(s).
+
+
+
+
+
+
+
+
+
+ Protein modelling (mutation)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Methods might predict silent or pathological mutations.
+ Protein mutation modelling
+ Predict the effect of point mutation on a protein structure, in terms of strucural effects and protein folding, stability and function.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Immunogen design
+
+ true
+ Design molecules that elicit an immune response (immunogens).
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Zinc finger prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Predict and optimise zinc finger protein domains for DNA/RNA binding (for example for transcription factors and nucleases).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Enzyme kinetics calculation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate Km, Vmax and derived data for an enzyme reaction.
+
+
+
+
+
+
+
+
+
+ Formatting
+
+ beta12orEarlier
+ Reformat a file of data (or equivalent entity in memory).
+ Format conversion
+ File formatting
+ Reformatting
+ File reformatting
+ File format conversion
+
+
+
+
+
+
+
+
+
+ Format validation
+
+
+ Test and validate the format and content of a data file.
+ File format validation
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Visualisation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Visualise, plot or render (graphically) biomolecular data such as molecular sequences or structures.
+ Rendering
+
+
+
+
+
+
+
+
+
+ Sequence database search
+
+
+
+
+
+
+
+
+
+
+
+
+
+
 + Search a sequence database by sequence comparison and retrieve similar sequences, or retrieve sequences matching a given sequence motif or pattern, such as a Prosite pattern or regular expression.
 +
 +
+ beta12orEarlier
+ This excludes direct retrieval methods (e.g. the dbfetch program).
+
+
+
+
+
+
+
+
+
+ Structure database search
+
+
+
+
+
+
+
+ beta12orEarlier
+ Search a tertiary structure database, typically by sequence and/or structure comparison, or some other means, and retrieve structures and associated data.
+
+
+
+
+
+
+
+
+
+ Protein secondary database search
+
+ 1.8
+ beta12orEarlier
+ true
+ Search a secondary protein database (of classification information) to assign a protein sequence(s) to a known protein family or group.
+
+
+
+
+
+
+
+
+
+ Motif database search
+
+ beta12orEarlier
+ Screen a sequence against a motif or pattern database.
+ true
+ 1.8
+
+
+
+
+
+
+
+
+
+ Sequence profile database search
+
+ true
+ beta12orEarlier
+ Search a database of sequence profiles with a query sequence.
+ 1.4
+
+
+
+
+
+
+
+
+
+ Transmembrane protein database search
+
+ true
+ beta12orEarlier
+ Search a database of transmembrane proteins, for example for sequence or structural similarities.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence retrieval (by code)
+
+ Query a database and retrieve sequences with a given entry code or accession number.
+ true
+ 1.6
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence retrieval (by keyword)
+
+ true
+ Query a database and retrieve sequences containing a given keyword.
+ beta12orEarlier
+ 1.6
+
+
+
+
+
+
+
+
+
+ Sequence similarity search
+
+
+ Structure database search (by sequence)
+ Sequence database search (by sequence)
+ beta12orEarlier
+ Search a sequence database and retrieve sequences that are similar to a query sequence.
+
+
+
+
+
+
+
+
+
+ Sequence database search (by motif or pattern)
+
+ 1.8
+ Search a sequence database and retrieve sequences matching a given sequence motif or pattern, such as a Prosite pattern or regular expression.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Sequence database search (by amino acid composition)
+
+ true
+ Search a sequence database and retrieve sequences of a given amino acid composition.
+ 1.6
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence database search (by property)
+
+ Search a sequence database and retrieve sequences with a specified property, typically a physicochemical or compositional property.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence database search (by sequence using word-based methods)
+
+ beta12orEarlier
+ Word-based methods (for example BLAST, gapped BLAST, MEGABLAST, WU-BLAST etc.) are usually quicker than alignment-based methods. They may or may not handle gaps.
+ 1.6
+ true
+ Sequence similarity search (word-based methods)
+ Search a sequence database and retrieve sequences that are similar to a query sequence using a word-based method.
+
+
+
+
+
+
+
+
+
+ Sequence database search (by sequence using profile-based methods)
+
+ true
+ Sequence similarity search (profile-based methods)
+ Search a sequence database and retrieve sequences that are similar to a query sequence using a sequence profile-based method, or with a supplied profile as query.
+ beta12orEarlier
+ This includes tools based on PSI-BLAST.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Sequence database search (by sequence using local alignment-based methods)
+
+ Search a sequence database for sequences that are similar to a query sequence using a local alignment-based method.
+ 1.6
+ beta12orEarlier
+ true
+ Sequence similarity search (local alignment-based methods)
+ This includes tools based on the Smith-Waterman algorithm or FASTA.
+
+
+
+
+
+
+
+
+
+ Sequence database search (by sequence using global alignment-based methods)
+
+ This includes tools based on the Needleman and Wunsch algorithm.
+ Search sequence(s) or a sequence database for sequences that are similar to a query sequence using a global alignment-based method.
+ 1.6
+ Sequence similarity search (global alignment-based methods)
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Sequence database search (by sequence for primer sequences)
+
+ true
+ beta12orEarlier
+ Search a DNA database (for example a database of conserved sequence tags) for matches to Sequence-Tagged Site (STS) primer sequences.
+ 1.6
+ STSs are genetic markers that are easily detected by the polymerase chain reaction (PCR) using specific primers.
+ Sequence similarity search (primer sequences)
+
+
+
+
+
+
+
+
+
+ Sequence database search (by molecular weight)
+
+ Search sequence(s) or a sequence database for sequences which match a set of peptide masses, for example a peptide mass fingerprint from mass spectrometry.
+ 1.6
+ Protein fingerprinting
+ true
+ beta12orEarlier
+ Peptide mass fingerprinting
+
+
+
+
+
+
+
+
+
+ Sequence database search (by isoelectric point)
+
+ 1.6
+ beta12orEarlier
+ Search sequence(s) or a sequence database for sequences of a given isoelectric point.
+ true
+
+
+
+
+
+
+
+
+
+ Structure retrieval (by code)
+
+ Query a tertiary structure database and retrieve entries with a given entry code or accession number.
+ 1.6
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Structure retrieval (by keyword)
+
+ true
+ 1.6
+ Query a tertiary structure database and retrieve entries containing a given keyword.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure database search (by sequence)
+
+ beta12orEarlier
+ true
+ Search a tertiary structure database and retrieve structures with a sequence similar to a query sequence.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Structural similarity search
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Search a database of molecular structure and retrieve structures that are similar to a query structure.
+ Structure database search (by structure)
+ Structure retrieval by structure
+
+
+
+
+
+
+
+
+
+ Sequence annotation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Annotate a molecular sequence record with terms from a controlled vocabulary.
+
+
+
+
+
+
+
+
+
+ Genome annotation
+
+ beta12orEarlier
+ Annotate a genome sequence with terms from a controlled vocabulary.
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence reverse and complement
+
+ beta12orEarlier
+ Generate the reverse and / or complement of a nucleotide sequence.
+
+
+
+
+
+
+
+
+
+ Random sequence generation
+
+ Generate a random sequence, for example, with a specific character composition.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid restriction digest
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Generate digest fragments for a nucleotide sequence containing restriction sites.
+
+
+
+
+
+
+
+
+
+ Protein sequence cleavage
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Cleave a protein sequence into peptide fragments (by enzymatic or chemical cleavage) and calculate the fragment masses.
+
+
+
+
+
+
+
+
+
+ Sequence mutation and randomization
+
+ beta12orEarlier
 + Mutate a molecular sequence by a specified amount or shuffle it to produce a randomized sequence with the same overall composition.
+
+
+
+
+
+
+
+
+
+ Sequence masking
+
+ Mask characters in a molecular sequence (replacing those characters with a mask character).
+ For example, SNPs or repeats in a DNA sequence might be masked.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence cutting
+
+ Cut (remove) characters or a region from a molecular sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Restriction site creation
+
+ Create (or remove) restriction sites in sequences, for example using silent mutations.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ DNA translation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Translate a DNA sequence into protein.
+
+
+
+
+
+
+
+
+
+ DNA transcription
+
+
+
+
+
+
+
+ beta12orEarlier
+ Transcribe a nucleotide sequence into mRNA sequence(s).
+
+
+
+
+
+
+
+
+
+ Sequence composition calculation (nucleic acid)
+
+ true
+ Calculate base frequency or word composition of a nucleotide sequence.
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence composition calculation (protein)
+
+ 1.8
+ Calculate amino acid frequency or word composition of a protein sequence.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Repeat sequence detection
+
+
+ beta12orEarlier
+ Find (and possibly render) short repetitive subsequences (repeat sequences) in (typically nucleotide) sequences.
+
+
+
+
+
+
+
+
+
+ Repeat sequence organisation analysis
+
+
+ beta12orEarlier
+ Analyse repeat sequence organization such as periodicity.
+
+
+
+
+
+
+
+
+
+ Protein hydropathy calculation (from structure)
+
+
+
+ Analyse the hydrophobic, hydrophilic or charge properties of a protein structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein solvent accessibility calculation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate solvent accessible or buried surface areas in protein structures.
+
+
+
+
+
+
+
+
+
+ Protein hydropathy cluster calculation
+
+
+ beta12orEarlier
+ Identify clusters of hydrophobic or charged residues in a protein structure.
+
+
+
+
+
+
+
+
+
+ Protein dipole moment calculation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate whether a protein structure has an unusually large net charge (dipole moment).
+
+
+
+
+
+
+
+
+
+ Protein surface and interior calculation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identify the protein surface and interior, surface accessible pockets, interior inaccessible cavities etc.
+
+
+
+
+
+
+
+
+
+ Protein binding site prediction (from structure)
+
+
+ Identify or predict catalytic residues, active sites or other ligand-binding sites in protein structures.
+ beta12orEarlier
+ Ligand-binding and active site prediction (from structure)
+ Binding site prediction (from structure)
+
+
+
+
+
+
+
+
+
+ Protein-nucleic acid binding site analysis
+
+
+
+
+
+
+
+ Analyse RNA or DNA-binding sites in protein structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein peeling
+
+ beta12orEarlier
+ Decompose a structure into compact or globular fragments (protein peeling).
+
+
+
+
+
+
+
+
+
+ Protein distance matrix calculation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate a matrix of distance between residues (for example the C-alpha atoms) in a protein structure.
+
+
+
+
+
+
+
+
+
+ Protein contact map calculation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate a residue contact map (typically all-versus-all inter-residue contacts) for a protein structure.
+
+
+
+
+
+
+
+
+
+ Protein residue cluster calculation
+
+
+
+
+
+
+
+ Cluster of contacting residues might be key structural residues.
+ Calculate clusters of contacting residues in protein structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Hydrogen bond calculation
+
+
+
+
+
+
+
+ WHATIF:ShowHydrogenBonds
+ WHATIF:HasHydrogenBonds
+ The output might include the atoms involved in the bond, bond geometric parameters and bond enthalpy.
+ beta12orEarlier
+ WHATIF:ShowHydrogenBondsM
+ Identify potential hydrogen bonds between amino acids and other groups.
+
+
+
+
+
+
+
+
+
+ Residue non-canonical interaction detection
+
+
+ beta12orEarlier
+ Calculate non-canonical atomic interactions in protein structures.
+
+
+
+
+
+
+
+
+
+ Ramachandran plot calculation
+
+
+
+
+
+
+
+ Calculate a Ramachandran plot of a protein structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ramachandran plot validation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Validate a Ramachandran plot of a protein structure.
+
+
+
+
+
+
+
+
+
+ Protein molecular weight calculation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Calculate the molecular weight of a protein sequence or fragments.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein extinction coefficient calculation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Predict extinction coefficients or optical density of a protein sequence.
+
+
+
+
+
+
+
+
+
+ Protein pH-dependent property calculation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Calculate pH-dependent properties from pKa calculations of a protein sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein hydropathy calculation (from sequence)
+
+
+ Hydropathy calculation on a protein sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein titration curve plotting
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Plot a protein titration curve.
+
+
+
+
+
+
+
+
+
+ Protein isoelectric point calculation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate isoelectric point of a protein sequence.
+
+
+
+
+
+
+
+
+
+ Protein hydrogen exchange rate calculation
+
+
+
+
+
+
+
+ Estimate hydrogen exchange rate of a protein sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein hydrophobic region calculation
+
+ Calculate hydrophobic or hydrophilic / charged regions of a protein sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein aliphatic index calculation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate aliphatic index (relative volume occupied by aliphatic side chains) of a protein.
+
+
+
+
+
+
+
+
+
+ Protein hydrophobic moment plotting
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Hydrophobic moment is a peptide's hydrophobicity measured for different angles of rotation.
+ Calculate the hydrophobic moment of a peptide sequence and recognize amphiphilicity.
+
+
+
+
+
+
+
+
+
+ Protein globularity prediction
+
+
+
+
+
+
+
+ Predict the stability or globularity of a protein sequence, whether it is intrinsically unfolded etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein solubility prediction
+
+
+
+
+
+
+
+ Predict the solubility or atomic solvation energy of a protein sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein crystallizability prediction
+
+
+
+
+
+
+
+ beta12orEarlier
+ Predict crystallizability of a protein sequence.
+
+
+
+
+
+
+
+
+
+ Protein signal peptide detection (eukaryotes)
+
+ beta12orEarlier
+ Detect or predict signal peptides (and typically predict subcellular localization) of eukaryotic proteins.
+
+
+
+
+
+
+
+
+
+ Protein signal peptide detection (bacteria)
+
+ Detect or predict signal peptides (and typically predict subcellular localization) of bacterial proteins.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ MHC peptide immunogenicity prediction
+
+ Predict MHC class I or class II binding peptides, promiscuous binding peptides, immunogenicity etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein feature prediction (from sequence)
+
+ Methods typically involve scanning for known motifs, patterns and regular expressions.
+ beta12orEarlier
+ true
+ Sequence feature detection (protein)
+ 1.6
+ Predict, recognise and identify positional features in protein sequences such as functional sites or regions and secondary structure.
+
+
+
+
+
+
+
+
+
+ Nucleic acid feature detection
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sequence feature detection (nucleic acid)
+ Predict, recognise and identify features in nucleotide sequences such as functional sites or regions, typically by scanning for known motifs, patterns and regular expressions.
+ Methods typically involve scanning for known motifs, patterns and regular expressions.
+ beta12orEarlier
+ Nucleic acid feature recognition
+ Nucleic acid feature prediction
+
+
+
+
+
+
+
+
+
+ Epitope mapping
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Predict antigenic determinant sites (epitopes) in protein sequences.
+ Epitope mapping is commonly done during vaccine design.
+
+
+
+
+
+
+
+
+
+ Protein post-translation modification site prediction
+
+
+
+
+
+
+
+ Predict post-translation modification sites in protein sequences.
+ beta12orEarlier
+ Methods might predict sites of methylation, N-terminal myristoylation, N-terminal acetylation, sumoylation, palmitoylation, phosphorylation, sulfation, glycosylation, glycosylphosphatidylinositol (GPI) modification sites (GPI lipid anchor signals) etc.
+
+
+
+
+
+
+
+
+
+ Protein signal peptide detection
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Methods might use sequence motifs and features, amino acid composition, profiles, machine-learned classifiers, etc.
+ Detect or predict signal peptides and signal peptide cleavage sites in protein sequences.
+
+
+
+
+
+
+
+
+
+ Protein binding site prediction (from sequence)
+
+
+ Binding site prediction (from sequence)
+ Predict catalytic residues, active sites or other ligand-binding sites in protein sequences.
+ Ligand-binding and active site prediction (from sequence)
+ Protein binding site detection
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein-nucleic acid binding prediction
+
+ beta12orEarlier
+ Predict RNA- and DNA-binding sites in protein sequences.
+
+
+
+
+
+
+
+
+
+ Protein folding site prediction
+
+
+ Predict protein sites that are key to protein folding, such as possible sites of nucleation or stabilization.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein cleavage site prediction
+
+
+
+
+
+
+
+ beta12orEarlier
+ Detect or predict cleavage sites (enzymatic or chemical) in protein sequences.
+
+
+
+
+
+
+
+
+
+ Epitope mapping (MHC Class I)
+
+ 1.8
+ true
+ beta12orEarlier
+ Predict epitopes that bind to MHC class I molecules.
+
+
+
+
+
+
+
+
+
+ Epitope mapping (MHC Class II)
+
+ Predict epitopes that bind to MHC class II molecules.
+ 1.8
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Whole gene prediction
+
+ beta12orEarlier
+ Detect, predict and identify whole gene structure in DNA sequences. This includes protein coding regions, exon-intron structure, regulatory regions etc.
+
+
+
+
+
+
+
+
+
+ Gene component prediction
+
+ Methods for gene prediction might be ab initio, based on phylogenetic comparisons, use motifs, sequence features, support vector machine, alignment etc.
+ beta12orEarlier
+ Detect, predict and identify genetic elements such as promoters, coding regions, splice sites, etc in DNA sequences.
+
+
+
+
+
+
+
+
+
+ Transposon prediction
+
+ beta12orEarlier
+ Detect or predict transposons, retrotransposons / retrotransposition signatures etc.
+
+
+
+
+
+
+
+
+
+ PolyA signal detection
+
+ Detect polyA signals in nucleotide sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Quadruplex formation site detection
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Quadruplex structure prediction
+ Detect quadruplex-forming motifs in nucleotide sequences.
+ Quadruplex (4-stranded) structures are formed by guanine-rich regions and are implicated in various important biological processes and as therapeutic targets.
+
+
+
+
+
+
+
+
+
+ CpG island and isochore detection
+
+
+
+
+
+
+
+ An isochore is a long region (> 3 KB) of DNA with very uniform GC content, in contrast to the rest of the genome. Isochores tend to have more genes, higher local melting or denaturation temperatures, and different flexibility. Methods might calculate fractional GC content or variation of GC content, predict methylation status of CpG islands etc. This includes methods that visualise CpG rich regions in a nucleotide sequence, for example plot isochores in a genome sequence.
+ beta12orEarlier
+ Find CpG rich regions in a nucleotide sequence or isochores in genome sequences.
+ CpG island and isochores rendering
+ CpG island and isochores detection
+
+
+
+
+
+
+
+
+
+ Restriction site recognition
+
+
+
+
+
+
+
+ beta12orEarlier
+ Find and identify restriction enzyme cleavage sites (restriction sites) in (typically) DNA sequences, for example to generate a restriction map.
+
+
+
+
+
+
+
+
+
+ Nucleosome formation or exclusion sequence prediction
+
+ beta12orEarlier
+ Identify or predict nucleosome exclusion sequences (nucleosome free regions) in DNA.
+
+
+
+
+
+
+
+
+
+ Splice site prediction
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identify, predict or analyse splice sites in nucleotide sequences.
+ Methods might require a pre-mRNA or genomic DNA sequence.
+
+
+
+
+
+
+
+
+
+ Integrated gene prediction
+
+ Predict whole gene structure using a combination of multiple methods to achieve better predictions.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Operon prediction
+
+ Find operons (operators, promoters and genes) in bacterial genes.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Coding region prediction
+
+ Predict protein-coding regions (CDS or exon) or open reading frames in nucleotide sequences.
+ ORF prediction
+ ORF finding
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Selenocysteine insertion sequence (SECIS) prediction
+
+
+
+
+
+
+
+ Predict selenocysteine insertion sequence (SECIS) in a DNA sequence.
+ SECIS elements are around 60 nucleotides in length with a stem-loop structure that directs the cell to translate UGA codons as selenocysteines.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Regulatory element prediction
+
+
+
+
+
+
+
+ Identify or predict transcription regulatory motifs, patterns, elements or regions in DNA sequences.
+ Translational regulatory element prediction
+ Transcription regulatory element prediction
+ This includes promoters, enhancers, silencers and boundary elements / insulators, regulatory protein or transcription factor binding sites etc. Methods might be specific to a particular genome and use motifs, word-based / grammatical methods, position-specific frequency matrices, discriminative pattern analysis etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Translation initiation site prediction
+
+
+
+
+
+
+
+ Predict translation initiation sites, possibly by searching a database of sites.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Promoter prediction
+
+ Identify or predict whole promoters or promoter elements (transcription start sites, RNA polymerase binding site, transcription factor binding sites, promoter enhancers etc) in DNA sequences.
+ Methods might recognize CG content, CpG islands, splice sites, polyA signals etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Transcription regulatory element prediction (DNA-cis)
+
+ beta12orEarlier
+ Cis-regulatory elements (cis-elements) regulate the expression of genes located on the same strand. Cis-elements are found in the 5' promoter region of the gene, in an intron, or in the 3' untranslated region. Cis-elements are often binding sites of one or more trans-acting factors.
+ Identify, predict or analyse cis-regulatory elements (TATA box, Pribnow box, SOS box, CAAT box, CCAAT box, operator etc.) in DNA sequences.
+
+
+
+
+
+
+
+
+
+ Transcription regulatory element prediction (RNA-cis)
+
+ Cis-regulatory elements (cis-elements) regulate genes located on the same strand from which the element was transcribed. A riboswitch is a region of an mRNA molecule that bind a small target molecule that regulates the gene's activity.
+ Identify, predict or analyse cis-regulatory elements (for example riboswitches) in RNA sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Transcription regulatory element prediction (trans)
+
+
+
+
+
+
+
+ beta12orEarlier
+ Trans-regulatory elements regulate genes distant from the gene from which they were transcribed.
+ Identify or predict functional RNA sequences with a gene regulatory role (trans-regulatory elements) or targets.
+ Functional RNA identification
+
+
+
+
+
+
+
+
+
+ Matrix/scaffold attachment site prediction
+
+ MAR/SAR sites often flank a gene or gene cluster and are found nearby cis-regulatory sequences. They might contribute to transcription regulation.
+ Identify matrix/scaffold attachment regions (MARs/SARs) in DNA sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Transcription factor binding site prediction
+
+ beta12orEarlier
+ Identify or predict transcription factor binding sites in DNA sequences.
+
+
+
+
+
+
+
+
+
+ Exonic splicing enhancer prediction
+
+
+
+
+
+
+
+ An exonic splicing enhancer (ESE) is 6-base DNA sequence motif in an exon that enhances or directs splicing of pre-mRNA or hetero-nuclear RNA (hnRNA) into mRNA.
+ Identify or predict exonic splicing enhancers (ESE) in exons.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence alignment validation
+
+
+ Evaluation might be purely sequence-based or use structural information.
+ Sequence alignment quality evaluation
+ Evaluate molecular sequence alignment accuracy.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence alignment analysis (conservation)
+
+ beta12orEarlier
+ Analyse character conservation in a molecular sequence alignment, for example to derive a consensus sequence.
+ Residue conservation analysis
+ Use this concept for methods that calculate substitution rates, estimate relative site variability, identify sites with biased properties, derive a consensus sequence, or identify highly conserved or very poorly conserved sites, regions, blocks etc.
+
+
+
+
+
+
+
+
+
+ Sequence alignment analysis (site correlation)
+
+
+ Analyse correlations between sites in a molecular sequence alignment.
+ This is typically done to identify possible covarying positions and predict contacts or structural constraints in protein structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Chimeric sequence detection
+
+ beta12orEarlier
+ A chimera includes regions from two or more phylogenetically distinct sequences. They are usually artifacts of PCR and are thought to occur when a prematurely terminated amplicon reanneals to another DNA strand and is subsequently copied to completion in later PCR cycles.
+ Detect chimeric sequences (chimeras) from a sequence alignment.
+ Sequence alignment analysis (chimeric sequence detection)
+
+
+
+
+
+
+
+
+
+ Recombination detection
+
+ Sequence alignment analysis (recombination detection)
+ beta12orEarlier
+ Detect recombination (hotspots and coldspots) and identify recombination breakpoints in a sequence alignment.
+ Tools might use a genetic algorithm, quartet-mapping, bootscanning, graphical methods, random forest model and so on.
+
+
+
+
+
+
+
+
+
+ Indel detection
+
+ beta12orEarlier
+ Sequence alignment analysis (indel detection)
+ Tools might use a genetic algorithm, quartet-mapping, bootscanning, graphical methods, random forest model and so on.
+ Identify insertion, deletion and duplication events from a sequence alignment.
+
+
+
+
+
+
+
+
+
+ Nucleosome formation potential prediction
+
+ true
+ beta12orEarlier
+ Predict nucleosome formation potential of DNA sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid thermodynamic property calculation
+
+
+
+
+
+
+
+ Calculate a thermodynamic property of DNA or DNA/RNA, such as melting temperature, enthalpy and entropy.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid melting profile plotting
+
+
+
+
+
+
+
+
+ Calculate and plot a DNA or DNA/RNA melting profile.
+ A melting profile is used to visualise and analyse partly melted DNA conformations.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid stitch profile plotting
+
+
+
+
+
+
+
+ A stitch profile represents the alternative conformations that partly melted DNA can adopt in a temperature range.
+ beta12orEarlier
+ Calculate and plot a DNA or DNA/RNA stitch profile.
+
+
+
+
+
+
+
+
+
+ Nucleic acid melting curve plotting
+
+
+
+
+
+
+
+ Calculate and plot a DNA or DNA/RNA melting curve.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid probability profile plotting
+
+
+
+
+
+
+
+ beta12orEarlier
+ Calculate and plot a DNA or DNA/RNA probability profile.
+
+
+
+
+
+
+
+
+
+ Nucleic acid temperature profile plotting
+
+
+
+
+
+
+
+ Calculate and plot a DNA or DNA/RNA temperature profile.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid curvature calculation
+
+
+
+
+
+
+
+ Calculate curvature and flexibility / stiffness of a nucleotide sequence.
+ beta12orEarlier
+ This includes properties such as.
+
+
+
+
+
+
+
+
+
+ microRNA detection
+
+ Identify or predict microRNA sequences (miRNA) and precursors or microRNA targets / binding sites in a DNA sequence.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ tRNA gene prediction
+
+
+
+
+
+
+
+ Identify or predict tRNA genes in genomic sequences (tRNA).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ siRNA binding specificity prediction
+
+
+
+
+
+
+
+ beta12orEarlier
+ Assess binding specificity of putative siRNA sequence(s), for example for a functional assay, typically with respect to designing specific siRNA sequences.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure prediction (integrated)
+
+ Predict secondary structure of protein sequence(s) using multiple methods to achieve better predictions.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein secondary structure prediction (helices)
+
+ beta12orEarlier
+ Predict helical secondary structure of protein sequences.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure prediction (turns)
+
+ Predict turn structure (for example beta hairpin turns) of protein sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein secondary structure prediction (coils)
+
+ beta12orEarlier
+ Predict open coils, non-regular secondary structure and intrinsically disordered / unstructured regions of protein sequences.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure prediction (disulfide bonds)
+
+ beta12orEarlier
+ Predict cysteine bonding state and disulfide bond partners in protein sequences.
+
+
+
+
+
+
+
+
+
+ GPCR prediction
+
+
+ beta12orEarlier
+ G protein-coupled receptor (GPCR) prediction
+ Predict G protein-coupled receptors (GPCR).
+
+
+
+
+
+
+
+
+
+ GPCR analysis
+
+
+
+
+
+
+
+ Analyse G-protein coupled receptor proteins (GPCRs).
+ beta12orEarlier
+ G protein-coupled receptor (GPCR) analysis
+
+
+
+
+
+
+
+
+
+ Protein structure prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Predict tertiary structure (backbone and side-chain conformation) of protein sequences.
+
+
+
+
+
+
+
+
+
+ Nucleic acid structure prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Methods might identify thermodynamically stable or evolutionarily conserved structures.
+ Predict tertiary structure of DNA or RNA.
+
+
+
+
+
+
+
+
+
+ Ab initio structure prediction
+
+ Predict tertiary structure of protein sequence(s) without homologs of known structure.
+ de novo structure prediction
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein modelling
+
+
+
+
+
+
+
+
+
+ Comparative modelling
+ beta12orEarlier
+ Build a three-dimensional protein model based on known (for example homologs) structures.
+ The model might be of a whole, part or aspect of protein structure. Molecular modelling methods might use sequence-structure alignment, structural templates, molecular dynamics, energy minimization etc.
+ Homology modelling
+ Homology structure modelling
+ Protein structure comparative modelling
+
+
+
+
+
+
+
+
+
+ Molecular docking
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Model the structure of a protein in complex with a small molecule or another macromolecule.
+ beta12orEarlier
+ This includes protein-protein interactions, protein-nucleic acid, protein-ligand binding etc. Methods might predict whether the molecules are likely to bind in vivo, their conformation when bound, the strength of the interaction, possible mutations to achieve bonding and so on.
+ Docking simulation
+ Protein docking
+
+
+
+
+
+
+
+
+
+ Protein modelling (backbone)
+
+ Model protein backbone conformation.
+ Methods might require a preliminary C(alpha) trace.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein modelling (side chains)
+
+ beta12orEarlier
+ Methods might use a residue rotamer library.
+ Model, analyse or edit amino acid side chain conformation in protein structure, optimize side-chain packing, hydrogen bonding etc.
+
+
+
+
+
+
+
+
+
+ Protein modelling (loops)
+
+ beta12orEarlier
+ Model loop conformation in protein structures.
+
+
+
+
+
+
+
+
+
+ Protein-ligand docking
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Methods aim to predict the position and orientation of a ligand bound to a protein receptor or enzyme.
+ Ligand-binding simulation
+ Model protein-ligand (for example protein-peptide) binding using comparative modelling or other techniques.
+ Virtual ligand screening
+
+
+
+
+
+
+
+
+
+ Structured RNA prediction and optimisation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Nucleic acid folding family identification
+ RNA inverse folding
+ beta12orEarlier
+ Predict or optimise RNA sequences (sequence pools) with likely secondary and tertiary structure for in vitro selection.
+
+
+
+
+
+
+
+
+
+ SNP detection
+
+
+
+ Find single nucleotide polymorphisms (SNPs) between sequences.
+ Single nucleotide polymorphism detection
+ beta12orEarlier
+ This includes functional SNPs for large-scale genotyping purposes, disease-associated non-synonymous SNPs etc.
+
+
+
+
+
+
+
+
+
+ Radiation Hybrid Mapping
+
+
+
+
+
+
+
+ Generate a physical (radiation hybrid) map of genetic markers in a DNA sequence using provided radiation hybrid (RH) scores for one or more markers.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Functional mapping
+
+ beta12orEarlier
+ true
+ This can involve characterization of the underlying quantitative trait loci (QTLs) or nucleotides (QTNs).
+ Map the genetic architecture of dynamic complex traits.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Haplotype mapping
+
+
+
+
+
+
+
+
+ Haplotype map generation
+ Haplotype inference
+ Infer haplotypes, either alleles at multiple loci that are transmitted together on the same chromosome, or a set of single nucleotide polymorphisms (SNPs) on a single chromatid that are statistically associated.
+ beta12orEarlier
+ Haplotype inference can help in population genetic studies and the identification of complex disease genes, and is typically based on aligned single nucleotide polymorphism (SNP) fragments. Haplotype comparison is a useful way to characterize the genetic variation between individuals. An individual's haplotype describes which nucleotide base occurs at each position for a set of common SNPs. Tools might use combinatorial functions (for example parsimony) or a likelihood function or model with optimization such as minimum error correction (MEC) model, expectation-maximization algorithm (EM), genetic algorithm or Markov chain Monte Carlo (MCMC).
+ Haplotype reconstruction
+
+
+
+
+
+
+
+
+
+ Linkage disequilibrium calculation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Linkage disequilibrium is identified where a combination of alleles (or genetic markers) occurs more or less frequently in a population than expected by chance formation of haplotypes.
+ Calculate linkage disequilibrium; the non-random association of alleles or polymorphisms at two or more loci (not necessarily on the same chromosome).
+
+
+
+
+
+
+
+
+
+ Genetic code prediction
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Predict genetic code from analysis of codon usage data.
+
+
+
+
+
+
+
+
+
+ Dotplot plotting
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Draw a dotplot of sequence similarities identified from word-matching or character comparison.
+
+
+
+
+
+
+
+
+
+ Pairwise sequence alignment
+
+
+
+
+
+
+
+ Pairwise sequence alignment generation
+ Pairwise sequence alignment
+ Methods might perform one-to-one, one-to-many or many-to-many comparisons.
+ Align exactly two molecular sequences.
+ Pairwise sequence alignment construction
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Multiple sequence alignment
+
+ Multiple sequence alignment construction
+ Align two or more molecular sequences.
+ This includes methods that use an existing alignment, for example to incorporate sequences into an alignment, or combine several multiple alignments into a single, improved alignment.
+ Multiple sequence alignment
+ beta12orEarlier
+ Multiple sequence alignment generation
+
+
+
+
+
+
+
+
+
+ Pairwise sequence alignment generation (local)
+
+ beta12orEarlier
+ Local pairwise sequence alignment construction
+ Locally align exactly two molecular sequences.
+ Pairwise sequence alignment (local)
+ true
+ Local alignment methods identify regions of local similarity.
+ 1.6
+ Pairwise sequence alignment construction (local)
+
+
+
+
+
+
+
+
+
+
+ Pairwise sequence alignment generation (global)
+
+ Pairwise sequence alignment construction (global)
+ Global pairwise sequence alignment construction
+ 1.6
+ true
+ Globally align exactly two molecular sequences.
+ beta12orEarlier
+ Global alignment methods identify similarity across the entire length of the sequences.
+ Pairwise sequence alignment (global)
+
+
+
+
+
+
+
+
+
+
+ Local sequence alignment
+
+ Multiple sequence alignment (local)
+ Local multiple sequence alignment construction
+ beta12orEarlier
+ Local alignment methods identify regions of local similarity.
+ Multiple sequence alignment construction (local)
+ Sequence alignment generation (local)
+ Sequence alignment (local)
+ Locally align two or more molecular sequences.
+
+
+
+
+
+
+
+
+
+ Global sequence alignment
+
+ Global multiple sequence alignment construction
+ Multiple sequence alignment (global)
+ beta12orEarlier
+ Sequence alignment (global)
+ Multiple sequence alignment construction (global)
+ Globally align two or more molecular sequences.
+ Sequence alignment generation (global)
+ Global alignment methods identify similarity across the entire length of the sequences.
+
+
+
+
+
+
+
+
+
+ Constrained sequence alignment
+
+ beta12orEarlier
+ Align two or more molecular sequences with user-defined constraints.
+ Multiple sequence alignment construction (constrained)
+ Sequence alignment generation (constrained)
+ Multiple sequence alignment (constrained)
+ Sequence alignment (constrained)
+ Constrained multiple sequence alignment construction
+
+
+
+
+
+
+
+
+
+ Consensus-based sequence alignment
+
+ Consensus multiple sequence alignment construction
+ Sequence alignment (consensus)
+ beta12orEarlier
+ Align two or more molecular sequences using multiple methods to achieve higher quality.
+ Sequence alignment generation (consensus)
+ Multiple sequence alignment construction (consensus)
+ Multiple sequence alignment (consensus)
+
+
+
+
+
+
+
+
+
+ Tree-based sequence alignment
+
+
+
+
+
+
+
+ Sequence alignment generation (phylogenetic tree-based)
+ This is supposed to give a more biologically meaningful alignment than standard alignments.
+ beta12orEarlier
+ Phylogenetic tree-based multiple sequence alignment construction
+ Align multiple sequences using relative gap costs calculated from neighbors in a supplied phylogenetic tree.
+ Sequence alignment (phylogenetic tree-based)
+ Multiple sequence alignment construction (phylogenetic tree-based)
+ Multiple sequence alignment (phylogenetic tree-based)
+
+
+
+
+
+
+
+
+
+ Secondary structure alignment generation
+
+ beta12orEarlier
+ 1.6
+ Secondary structure alignment construction
+ Secondary structure alignment
+ true
+ Align molecular secondary structure (represented as a 1D string).
+
+
+
+
+
+
+
+
+
+ Protein secondary structure alignment generation
+
+
+
+
+
+
+
+
+ Protein secondary structure alignment construction
+ Align protein secondary structures.
+ beta12orEarlier
+ Secondary structure alignment (protein)
+ Protein secondary structure alignment
+
+
+
+
+
+
+
+
+
+ RNA secondary structure alignment
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ RNA secondary structure alignment generation
+ RNA secondary structure alignment
+ Align RNA secondary structures.
+ RNA secondary structure alignment construction
+ Secondary structure alignment (RNA)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Pairwise structure alignment
+
+ beta12orEarlier
+ Pairwise structure alignment generation
+ Pairwise structure alignment construction
+ Align (superimpose) exactly two molecular tertiary structures.
+
+
+
+
+
+
+
+
+
+ Multiple structure alignment construction
+
+ Align (superimpose) two or more molecular tertiary structures.
+ This includes methods that use an existing alignment.
+ 1.6
+ true
+ Multiple structure alignment
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure alignment (protein)
+
+ beta13
+ true
+ beta12orEarlier
+ Align protein tertiary structures.
+
+
+
+
+
+
+
+
+
+ Structure alignment (RNA)
+
+ beta13
+ true
+ Align RNA tertiary structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Pairwise structure alignment generation (local)
+
+ Locally align (superimpose) exactly two molecular tertiary structures.
+ Pairwise structure alignment (local)
+ Local alignment methods identify regions of local similarity, common substructures etc.
+ Pairwise structure alignment construction (local)
+ 1.6
+ true
+ Local pairwise structure alignment construction
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Pairwise structure alignment generation (global)
+
+ Global pairwise structure alignment construction
+ Global alignment methods identify similarity across the entire structures.
+ true
+ beta12orEarlier
+ 1.6
+ Pairwise structure alignment construction (global)
+ Globally align (superimpose) exactly two molecular tertiary structures.
+ Pairwise structure alignment (global)
+
+
+
+
+
+
+
+
+
+
+ Local structure alignment
+
+ Local multiple structure alignment construction
+ Local alignment methods identify regions of local similarity, common substructures etc.
+ Structure alignment construction (local)
+ beta12orEarlier
+ Locally align (superimpose) two or more molecular tertiary structures.
+ Multiple structure alignment construction (local)
+ Multiple structure alignment (local)
+ Structure alignment generation (local)
+
+
+
+
+
+
+
+
+
+ Global structure alignment
+
+ Structure alignment construction (global)
+ Multiple structure alignment (global)
+ Structure alignment generation (global)
+ Multiple structure alignment construction (global)
+ beta12orEarlier
+ Global alignment methods identify similarity across the entire structures.
+ Global multiple structure alignment construction
+ Globally align (superimpose) two or more molecular tertiary structures.
+
+
+
+
+
+
+
+
+
+ Profile-to-profile alignment (pairwise)
+
+ Sequence alignment generation (pairwise profile)
+ Methods might perform one-to-one, one-to-many or many-to-many comparisons.
+ Pairwise sequence profile alignment construction
+ Sequence profile alignment construction (pairwise)
+ Sequence profile alignment (pairwise)
+ beta12orEarlier
+ Align exactly two molecular profiles.
+ Sequence profile alignment generation (pairwise)
+
+
+
+
+
+
+
+
+
+ Sequence alignment generation (multiple profile)
+
+ Align two or more molecular profiles.
+ 1.6
+ true
+ Sequence profile alignment generation (multiple)
+ beta12orEarlier
+ Sequence profile alignment (multiple)
+ Sequence profile alignment construction (multiple)
+ Multiple sequence profile alignment construction
+
+
+
+
+
+
+
+
+
+ 3D profile-to-3D profile alignment (pairwise)
+
+ Methods might perform one-to-one, one-to-many or many-to-many comparisons.
+ Pairwise structural (3D) profile alignment construction
+ Structural (3D) profile alignment (pairwise)
+ Structural profile alignment construction (pairwise)
+ Align exactly two molecular Structural (3D) profiles.
+ beta12orEarlier
+ Structural profile alignment generation (pairwise)
+
+
+
+
+
+
+
+
+
+ Structural profile alignment generation (multiple)
+
+ true
+ Structural profile alignment construction (multiple)
+ Align two or more molecular 3D profiles.
+ Multiple structural (3D) profile alignment construction
+ beta12orEarlier
+ Structural (3D) profile alignment (multiple)
+ 1.6
+
+
+
+
+
+
+
+
+
+ Data retrieval (tool metadata)
+
+ Data retrieval (tool annotation)
+ 1.6
+ Search and retrieve names of or documentation on bioinformatics tools, for example by keyword or which perform a particular function.
+ beta12orEarlier
+ true
+ Tool information retrieval
+
+
+
+
+
+
+
+
+
+ Data retrieval (database metadata)
+
+ beta12orEarlier
+ true
+ Data retrieval (database annotation)
+ Search and retrieve names of or documentation on bioinformatics databases or query terms, for example by keyword.
+ Database information retrieval
+ 1.6
+
+
+
+
+
+
+
+
+
+ PCR primer design (for large scale sequencing)
+
+
+
+
+
+
+
+ Predict primers for large scale sequencing.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PCR primer design (for genotyping polymorphisms)
+
+ beta12orEarlier
+ Predict primers for genotyping polymorphisms, for example single nucleotide polymorphisms (SNPs).
+
+
+
+
+
+
+
+
+
+ PCR primer design (for gene transcription profiling)
+
+
+
+
+
+
+
+ Predict primers for gene transcription profiling.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PCR primer design (for conserved primers)
+
+ Predict primers that are conserved across multiple genomes or species.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PCR primer design (based on gene structure)
+
+
+
+
+
+
+
+ Predict primers based on gene structure, promoters, exon-exon junctions etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ PCR primer design (for methylation PCRs)
+
+ beta12orEarlier
+ Predict primers for methylation PCRs.
+
+
+
+
+
+
+
+
+
+ Sequence assembly (mapping assembly)
+
+ Sequence assembly by combining fragments using an existing backbone sequence, typically a reference genome.
+ beta12orEarlier
+ The final sequence will resemble the backbone sequence. Mapping assemblers are usually much faster and less memory intensive than de-novo assemblers.
+
+
+
+
+
+
+
+
+
+ Sequence assembly (de-novo assembly)
+
+ Sequence assembly by combining fragments without the aid of a reference sequence or genome.
+ De-novo assemblers are much slower and more memory intensive than mapping assemblers.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence assembly (genome assembly)
+
+ Sequence assembly capable on a very large scale such as assembly of whole genomes.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence assembly (EST assembly)
+
+ beta12orEarlier
+ Sequence assembly for EST sequences (transcribed mRNA).
+ Assemblers must handle (or be complicated by) alternative splicing, trans-splicing, single-nucleotide polymorphism (SNP), recoding, and post-transcriptional modification.
+
+
+
+
+
+
+
+
+
+ Tag mapping
+
+
+
+
+
+
+
+
+ Tag mapping might assign experimentally obtained tags to known transcripts or annotate potential virtual tags in a genome.
+ Tag to gene assignment
+ Make gene to tag assignments (tag mapping) of SAGE, MPSS and SBS data, by annotating tags with ontology concepts.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ SAGE data processing
+
+ beta12orEarlier
+ Serial analysis of gene expression data processing
+ beta12orEarlier
+ Process (read and / or write) serial analysis of gene expression (SAGE) data.
+ true
+
+
+
+
+
+
+
+
+
+ MPSS data processing
+
+ beta12orEarlier
+ Process (read and / or write) massively parallel signature sequencing (MPSS) data.
+ true
+ Massively parallel signature sequencing data processing
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ SBS data processing
+
+ beta12orEarlier
+ Sequencing by synthesis data processing
+ beta12orEarlier
+ Process (read and / or write) sequencing by synthesis (SBS) data.
+ true
+
+
+
+
+
+
+
+
+
+ Heat map generation
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ The heat map usually uses a coloring scheme to represent clusters. They can show how expression of mRNA by a set of genes was influenced by experimental conditions.
+ Heat map construction
+ Generate a heat map of gene expression from microarray data.
+
+
+
+
+
+
+
+
+
+ Gene expression profile analysis
+
+ true
+ Functional profiling
+ beta12orEarlier
+ Analyse one or more gene expression profiles, typically to interpret them in functional terms.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Gene expression profile pathway mapping
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Map a gene expression profile to known biological pathways, for example, to identify or reconstruct a pathway.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure assignment (from coordinate data)
+
+
+ beta12orEarlier
+ Assign secondary structure from protein coordinate data.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure assignment (from CD data)
+
+
+
+
+
+
+
+ Assign secondary structure from circular dichroism (CD) spectroscopic data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein structure assignment (from X-ray crystallographic data)
+
+ true
+ 1.7
+ Assign a protein tertiary structure (3D coordinates) from raw X-ray crystallography data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein structure assignment (from NMR data)
+
+ beta12orEarlier
+ Assign a protein tertiary structure (3D coordinates) from raw NMR spectroscopy data.
+ true
+ 1.7
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation (data centric)
+
+ Phylogenetic tree construction (data centric)
+ beta12orEarlier
+ Construct a phylogenetic tree from a specific type of data.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation (method centric)
+
+ Phylogenetic tree construction (method centric)
+ Construct a phylogenetic tree using a specific method.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation (from molecular sequences)
+
+
+ Phylogenetic tree construction from molecular sequences.
+ beta12orEarlier
+ Phylogenetic tree construction (from molecular sequences)
+ Methods typically compare multiple molecular sequence and estimate evolutionary distances and relationships to infer gene families or make functional predictions.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation (from continuous quantitative characters)
+
+
+
+
+
+
+
+ Phylogenetic tree construction (from continuous quantitative characters)
+ beta12orEarlier
+ Phylogenetic tree construction from continuous quantitative character data.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation (from gene frequencies)
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree construction (from gene frequencies)
+ Phylogenetic tree construction from gene frequency data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree construction (from polymorphism data)
+
+
+
+
+
+
+
+ Phylogenetic tree construction from polymorphism data including microsatellites, RFLP (restriction fragment length polymorphisms), RAPD (random-amplified polymorphic DNA) and AFLP (amplified fragment length polymorphisms) data.
+ Phylogenetic tree generation (from polymorphism data)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic species tree construction
+
+ Construct a phylogenetic species tree, for example, from a genome-wide sequence comparison.
+ Phylogenetic species tree generation
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation (parsimony methods)
+
+ Phylogenetic tree construction (parsimony methods)
+ Construct a phylogenetic tree by computing a sequence alignment and searching for the tree with the fewest number of character-state changes from the alignment.
+ This includes evolutionary parsimony (invariants) methods.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation (minimum distance methods)
+
+ This includes neighbor joining (NJ) clustering method.
+ beta12orEarlier
+ Phylogenetic tree construction (minimum distance methods)
+ Construct a phylogenetic tree by computing (or using precomputed) distances between sequences and searching for the tree with minimal discrepancies between pairwise distances.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation (maximum likelihood and Bayesian methods)
+
+ Phylogenetic tree construction (maximum likelihood and Bayesian methods)
+ Construct a phylogenetic tree by relating sequence data to a hypothetical tree topology using a model of sequence evolution.
+ Maximum likelihood methods search for a tree that maximizes a likelihood function, i.e. that is most likely given the data and model. Bayesian analyses estimate the probability of a tree for branch lengths and topology, typically using a Monte Carlo algorithm.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation (quartet methods)
+
+ beta12orEarlier
+ Phylogenetic tree construction (quartet methods)
+ Construct a phylogenetic tree by computing four-taxon trees (4-trees) and searching for the phylogeny that matches most closely.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation (AI methods)
+
+ Construct a phylogenetic tree by using artificial-intelligence methods, for example genetic algorithms.
+ Phylogenetic tree construction (AI methods)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ DNA substitution modelling
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sequence alignment analysis (phylogenetic modelling)
+ beta12orEarlier
+ Identify a plausible model of DNA substitution that explains a DNA sequence alignment.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree analysis (shape)
+
+ Phylogenetic tree topology analysis
+ Analyse the shape (topology) of a phylogenetic tree.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree bootstrapping
+
+
+ Apply bootstrapping or other measures to estimate confidence of a phylogenetic tree.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree analysis (gene family prediction)
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Predict families of genes and gene function based on their position in a phylogenetic tree.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree analysis (natural selection)
+
+ beta12orEarlier
+ Stabilizing/purifying (directional) selection favors a single phenotype and tends to decrease genetic diversity as a population stabilizes on a particular trait, selecting out trait extremes or deleterious mutations. In contrast, balancing selection maintains genetic polymorphisms (or multiple alleles), whereas disruptive (or diversifying) selection favors individuals at both extremes of a trait.
+ Analyse a phylogenetic tree to identify allele frequency distribution and change that is subject to evolutionary pressures (natural selection, genetic drift, mutation and gene flow). Identify type of natural selection (such as stabilizing, balancing or disruptive).
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree generation (consensus)
+
+
+ Compare two or more phylogenetic trees to produce a consensus tree.
+ Methods typically test for topological similarity between trees using for example a congruence index.
+ beta12orEarlier
+ Phylogenetic tree construction (consensus)
+
+
+
+
+
+
+
+
+
+ Phylogenetic sub/super tree detection
+
+ beta12orEarlier
+ Compare two or more phylogenetic trees to detect subtrees or supertrees.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree distances calculation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Compare two or more phylogenetic trees to calculate distances between trees.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree annotation
+
+ beta12orEarlier
+ http://www.evolutionaryontology.org/cdao.owl#CDAOAnnotation
+ Annotate a phylogenetic tree with terms from a controlled vocabulary.
+
+
+
+
+
+
+
+
+
+ Immunogenicity prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Peptide immunogen prediction
+ Predict and optimise peptide ligands that elicit an immunological response.
+
+
+
+
+
+
+
+
+
+ DNA vaccine design
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Predict or optimise DNA to elicit (via DNA vaccination) an immunological response.
+
+
+
+
+
+
+
+
+
+ Sequence formatting
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Reformat (a file or other report of) molecular sequence(s).
+ Sequence file format conversion
+
+
+
+
+
+
+
+
+
+ Sequence alignment formatting
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Reformat (a file or other report of) molecular sequence alignment(s).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Codon usage table formatting
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Reformat a codon usage table.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence visualisation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Visualise, format or render a molecular sequence, possibly with sequence features or properties shown.
+ Sequence rendering
+
+
+
+
+
+
+
+
+
+ Sequence alignment visualisation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sequence alignment rendering
+ Visualise, format or print a molecular sequence alignment.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence cluster visualisation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Sequence cluster rendering
+ beta12orEarlier
+ Visualise, format or render sequence clusters.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree visualisation
+
+
+
+
+
+
+
+
+ Render or visualise a phylogenetic tree.
+ Phylogenetic tree rendering
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ RNA secondary structure visualisation
+
+
+
+
+
+
+
+
+ RNA secondary structure rendering
+ Visualise RNA secondary structure, knots, pseudoknots etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein secondary structure rendering
+ Protein secondary structure visualisation
+
+
+
+
+
+
+
+ Render and visualise protein secondary structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure visualisation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Structure rendering
+ Visualise or render a molecular tertiary structure, for example a high-quality static picture or animation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Microarray data rendering
+
+
+
+
+
+
+
+
+
+ Visualise microarray data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein interaction network rendering
+ Protein interaction network visualisation
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identify and analyse networks of protein interactions.
+
+
+
+
+
+
+
+
+
+ Map drawing
+
+
+
+
+
+
+
+ beta12orEarlier
+ DNA map drawing
+ Map rendering
+ Draw or visualise a DNA map.
+
+
+
+
+
+
+
+
+
+ Sequence motif rendering
+
+ Render a sequence with motifs.
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Restriction map drawing
+
+
+
+
+
+
+
+
+ Draw or visualise restriction maps in DNA sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ DNA linear map rendering
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Draw a linear map of DNA.
+
+
+
+
+
+
+
+
+
+ Plasmid map drawing
+
+ beta12orEarlier
+ DNA circular map rendering
+ Draw a circular map of DNA, for example a plasmid map.
+
+
+
+
+
+
+
+
+
+ Operon drawing
+
+
+
+
+
+
+
+ Visualise operon structure etc.
+ beta12orEarlier
+ Operon rendering
+
+
+
+
+
+
+
+
+
+ Nucleic acid folding family identification
+
+ true
+ beta12orEarlier
+ Identify folding families of related RNAs.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid folding energy calculation
+
+ beta12orEarlier
+ Compute energies of nucleic acid folding, e.g. minimum folding energies for DNA or RNA sequences or energy landscape of RNA mutants.
+
+
+
+
+
+
+
+
+
+ Annotation retrieval
+
+ beta12orEarlier
+ Use this concept for tools which retrieve pre-existing annotations, not for example prediction methods that might make annotations.
+ Retrieve existing annotation (or documentation), typically annotation on a database entity.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Protein function prediction
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Predict general functional properties of a protein.
+ For functional properties that can be mapped to a sequence, use 'Sequence feature detection (protein)' instead.
+
+
+
+
+
+
+
+
+
+ Protein function comparison
+
+
+
+
+
+
+
+
+ Compare the functional properties of two or more proteins.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence submission
+
+ Submit a molecular sequence to a database.
+ beta12orEarlier
+ 1.6
+ true
+
+
+
+
+
+
+
+
+
+ Gene regulatory network analysis
+
+
+
+
+
+
+
+ beta12orEarlier
+ Analyse a known network of gene regulation.
+
+
+
+
+
+
+
+
+
+
+ Loading
+
+
+
+
+
+
+
+ Data loading
+ WHATIF:UploadPDB
+ Prepare or load a user-specified data file so that it is available for use.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence retrieval
+
+ This includes direct retrieval methods (e.g. the dbfetch program) but not those that perform calculations on the sequence.
+ Data retrieval (sequences)
+ 1.6
+ Query a sequence data resource (typically a database) and retrieve sequences and / or annotation.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Structure retrieval
+
+ true
+ WHATIF:EchoPDB
+ beta12orEarlier
+ WHATIF:DownloadPDB
+ This includes direct retrieval methods but not those that perform calculations on the sequence or structure.
+ Query a tertiary structure data resource (typically a database) and retrieve structures, structure-related data and annotation.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Surface rendering
+
+
+ beta12orEarlier
+ WHATIF:GetSurfaceDots
+ Calculate the positions of dots that are homogeneously distributed over the surface of a molecule.
+ A dot has three coordinates (x,y,z) and (typically) a color.
+
+
+
+
+
+
+
+
+
+ Protein atom surface calculation (accessible)
+
+ beta12orEarlier
+ WHATIF:AtomAccessibilitySolventPlus
+ WHATIF:AtomAccessibilitySolvent
+ Calculate the solvent accessibility ('accessible surface') for each atom in a structure.
+ Waters are not considered.
+
+
+
+
+
+
+
+
+
+ Protein atom surface calculation (accessible molecular)
+
+ beta12orEarlier
+ Calculate the solvent accessibility ('accessible molecular surface') for each atom in a structure.
+ Waters are not considered.
+ WHATIF:AtomAccessibilityMolecular
+ WHATIF:AtomAccessibilityMolecularPlus
+
+
+
+
+
+
+
+
+
+ Protein residue surface calculation (accessible)
+
+ WHATIF:ResidueAccessibilitySolvent
+ beta12orEarlier
+ Solvent accessibility might be calculated for the backbone, sidechain and total (backbone plus sidechain).
+ Calculate the solvent accessibility ('accessible surface') for each residue in a structure.
+
+
+
+
+
+
+
+
+
+ Protein residue surface calculation (vacuum accessible)
+
+ Solvent accessibility might be calculated for the backbone, sidechain and total (backbone plus sidechain).
+ Calculate the solvent accessibility ('vacuum accessible surface') for each residue in a structure. This is the accessibility of the residue when taken out of the protein together with the backbone atoms of any residue it is covalently bound to.
+ WHATIF:ResidueAccessibilityVacuum
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein residue surface calculation (accessible molecular)
+
+ Calculate the solvent accessibility ('accessible molecular surface') for each residue in a structure.
+ WHATIF:ResidueAccessibilityMolecular
+ Solvent accessibility might be calculated for the backbone, sidechain and total (backbone plus sidechain).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein residue surface calculation (vacuum molecular)
+
+ Solvent accessibility might be calculated for the backbone, sidechain and total (backbone plus sidechain).
+ beta12orEarlier
+ Calculate the solvent accessibility ('vacuum molecular surface') for each residue in a structure. This is the accessibility of the residue when taken out of the protein together with the backbone atoms of any residue it is covalently bound to.
+ WHATIF:ResidueAccessibilityVacuumMolecular
+
+
+
+
+
+
+
+
+
+ Protein surface calculation (accessible molecular)
+
+ WHATIF:TotAccessibilityMolecular
+ beta12orEarlier
+ Calculate the solvent accessibility ('accessible molecular surface') for a structure as a whole.
+
+
+
+
+
+
+
+
+
+ Protein surface calculation (accessible)
+
+ WHATIF:TotAccessibilitySolvent
+ Calculate the solvent accessibility ('accessible surface') for a structure as a whole.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Backbone torsion angle calculation
+
+ beta12orEarlier
+ WHATIF:ResidueTorsionsBB
+ Calculate for each residue in a protein structure all its backbone torsion angles.
+
+
+
+
+
+
+
+
+
+ Full torsion angle calculation
+
+ beta12orEarlier
+ Calculate for each residue in a protein structure all its torsion angles.
+ WHATIF:ResidueTorsions
+
+
+
+
+
+
+
+
+
+ Cysteine torsion angle calculation
+
+ beta12orEarlier
+ Calculate for each cysteine (bridge) all its torsion angles.
+ WHATIF:CysteineTorsions
+
+
+
+
+
+
+
+
+
+ Tau angle calculation
+
+ WHATIF:ShowTauAngle
+ beta12orEarlier
+ Tau is the backbone angle N-Calpha-C (angle over the C-alpha).
+ For each amino acid in a protein structure calculate the backbone angle tau.
+
+
+
+
+
+
+
+
+
+ Cysteine bridge detection
+
+ WHATIF:ShowCysteineBridge
+ Detect cysteine bridges (from coordinate data) in a protein structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Free cysteine detection
+
+ beta12orEarlier
+ A free cysteine is neither involved in a cysteine bridge, nor functions as a ligand to a metal.
+ Detect free cysteines in a protein structure.
+ WHATIF:ShowCysteineFree
+
+
+
+
+
+
+
+
+
+ Metal-bound cysteine detection
+
+ beta12orEarlier
+ WHATIF:ShowCysteineMetal
+ Detect cysteines that are bound to metal in a protein structure.
+
+
+
+
+
+
+
+
+
+ Residue contact calculation (residue-nucleic acid)
+
+
+
+ beta12orEarlier
+ WHATIF:ShowProteiNucleicContacts
+ Calculate protein residue contacts with nucleic acids in a structure.
+ WHATIF:HasNucleicContacts
+
+
+
+
+
+
+
+
+
+ Residue contact calculation (residue-metal)
+
+
+ WHATIF:HasMetalContacts
+ beta12orEarlier
+ Calculate protein residue contacts with metal in a structure.
+ WHATIF:HasMetalContactsPlus
+
+
+
+
+
+
+
+
+
+ Residue contact calculation (residue-negative ion)
+
+ Calculate ion contacts in a structure (all ions for all side chain atoms).
+ WHATIF:HasNegativeIonContactsPlus
+ beta12orEarlier
+ WHATIF:HasNegativeIonContacts
+
+
+
+
+
+
+
+
+
+ Residue bump detection
+
+ WHATIF:ShowBumps
+ beta12orEarlier
+ Detect 'bumps' between residues in a structure, i.e. those with pairs of atoms whose Van der Waals' radii interpenetrate more than a defined distance.
+
+
+
+
+
+
+
+
+
+ Residue symmetry contact calculation
+
+ Calculate the number of symmetry contacts made by residues in a protein structure.
+ WHATIF:SymmetryContact
+ A symmetry contact is a contact between two atoms in different asymmetric units.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Residue contact calculation (residue-ligand)
+
+
+ beta12orEarlier
+ Calculate contacts between residues and ligands in a protein structure.
+ WHATIF:ShowDrugContactsShort
+ WHATIF:ShowLigandContacts
+ WHATIF:ShowDrugContacts
+
+
+
+
+
+
+
+
+
+ Salt bridge calculation
+
+ Salt bridges are interactions between oppositely charged atoms in different residues. The output might include the inter-atomic distance.
+ WHATIF:HasSaltBridgePlus
+ WHATIF:ShowSaltBridges
+ beta12orEarlier
+ WHATIF:HasSaltBridge
+ WHATIF:ShowSaltBridgesH
+ Calculate (and possibly score) salt bridges in a protein structure.
+
+
+
+
+
+
+
+
+
+ Rotamer likelihood prediction
+
+ WHATIF:ShowLikelyRotamers
+ WHATIF:ShowLikelyRotamers500
+ Predict rotamer likelihoods for all 20 amino acid types at each position in a protein structure.
+ WHATIF:ShowLikelyRotamers800
+ WHATIF:ShowLikelyRotamers600
+ WHATIF:ShowLikelyRotamers900
+ Output typically includes, for each residue position, the likelihoods for the 20 amino acid types with estimated reliability of the 20 likelihoods.
+ WHATIF:ShowLikelyRotamers700
+ WHATIF:ShowLikelyRotamers400
+ WHATIF:ShowLikelyRotamers300
+ WHATIF:ShowLikelyRotamers200
+ WHATIF:ShowLikelyRotamers100
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Proline mutation value calculation
+
+ Calculate for each position in a protein structure the chance that a proline, when introduced at this position, would increase the stability of the whole protein.
+ WHATIF:ProlineMutationValue
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Residue packing validation
+
+ beta12orEarlier
+ Identify poorly packed residues in protein structures.
+ WHATIF: PackingQuality
+
+
+
+
+
+
+
+
+
+ Dihedral angle validation
+
+ WHATIF: ImproperQualitySum
+ Identify for each residue in a protein structure any improper dihedral (phi/psi) angles.
+ beta12orEarlier
+ WHATIF: ImproperQualityMax
+
+
+
+
+
+
+
+
+
+ PDB file sequence retrieval
+
+ Extract a molecular sequence from a PDB file.
+ beta12orEarlier
+ WHATIF: PDB_sequence
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ HET group detection
+
+ Identify HET groups in PDB files.
+ WHATIF: HETGroupNames
+ beta12orEarlier
+ A HET group usually corresponds to ligands, lipids, but might also (not consistently) include groups that are attached to amino acids. Each HET group is supposed to have a unique three letter code and a unique name which might be given in the output.
+
+
+
+
+
+
+
+
+
+ DSSP secondary structure assignment
+
+ Determine for each residue the DSSP-determined secondary structure in three-state (HSC).
+ beta12orEarlier
+ WHATIF: ResidueDSSP
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Structure formatting
+
+ Reformat (a file or other report of) tertiary structure data.
+ beta12orEarlier
+ WHATIF: PDBasXML
+
+
+
+
+
+
+
+
+
+ Protein cysteine and disulfide bond assignment
+
+
+
+
+
+
+
+ Assign cysteine bonding state and disulfide bond partners in protein structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Residue validation
+
+ Identify poor quality amino acid positions in protein structures.
+ beta12orEarlier
+ WHATIF: UseResidueDB
+ The scoring function to identify poor quality residues may consider residues with bad atoms or atoms with high B-factor, residues in the N- or C-terminal position, adjacent to an unstructured residue, non-canonical residues, glycine and proline (or adjacent to these such residues).
+
+
+
+
+
+
+
+
+
+ Structure retrieval (water)
+
+ beta12orEarlier
+ 1.6
+ WHATIF:MovedWaterPDB
+ true
+ Query a tertiary structure database and retrieve water molecules.
+
+
+
+
+
+
+
+
+
+ siRNA duplex prediction
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identify or predict siRNA duplexes in RNA.
+
+
+
+
+
+
+
+
+
+ Sequence alignment refinement
+
+
+ Refine an existing sequence alignment.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Listfile processing
+
+ 1.6
+ Process an EMBOSS listfile (list of EMBOSS Uniform Sequence Addresses).
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence file editing
+
+
+ beta12orEarlier
+ Perform basic (non-analytical) operations on a report or file of sequences (which might include features), such as file concatenation, removal or ordering of sequences, creation of subset or a new file of sequences.
+
+
+
+
+
+
+
+
+
+ Sequence alignment file processing
+
+ beta12orEarlier
+ Perform basic (non-analytical) operations on a sequence alignment file, such as copying or removal and ordering of sequences.
+ 1.6
+ true
+
+
+
+
+
+
+
+
+
+ Small molecule data processing
+
+ beta13
+ true
+ beta12orEarlier
+ Process (read and / or write) physicochemical property data for small molecules.
+
+
+
+
+
+
+
+
+
+ Data retrieval (ontology annotation)
+
+ beta13
+ Ontology information retrieval
+ true
+ Search and retrieve documentation on a bioinformatics ontology.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Data retrieval (ontology concept)
+
+ Query an ontology and retrieve concepts or relations.
+ true
+ beta13
+ beta12orEarlier
+ Ontology retrieval
+
+
+
+
+
+
+
+
+
+ Representative sequence identification
+
+
+
+
+
+
+
+ Identify a representative sequence from a set of sequences, typically using scores from pair-wise alignment or other comparison of the sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure file processing
+
+ Perform basic (non-analytical) operations on a file of molecular tertiary structural data.
+ 1.6
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Data retrieval (sequence profile)
+
+ Query a profile data resource and retrieve one or more profile(s) and / or associated annotation.
+ true
+ This includes direct retrieval methods that retrieve a profile by, e.g. the profile name.
+ beta13
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Statistical calculation
+
+ Statistical analysis
+ Perform a statistical data operation of some type, e.g. calibration or validation.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ 3D-1D scoring matrix generation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ 3D-1D scoring matrix construction
+ A 3D-1D scoring matrix scores the probability of amino acids occurring in different structural environments.
+ Calculate a 3D-1D scoring matrix from analysis of protein sequence and structural data.
+
+
+
+
+
+
+
+
+
+ Transmembrane protein visualisation
+
+
+
+
+
+
+
+
+ Visualise transmembrane proteins, typically the transmembrane regions within a sequence.
+ beta12orEarlier
+ Transmembrane protein rendering
+
+
+
+
+
+
+
+
+
+ Demonstration
+
+ beta12orEarlier
+ true
+ An operation performing purely illustrative (pedagogical) purposes.
+ beta13
+
+
+
+
+
+
+
+
+ Data retrieval (pathway or network)
+
+ beta12orEarlier
+ true
+ Query a biological pathways database and retrieve annotation on one or more pathways.
+ beta13
+
+
+
+
+
+
+
+
+
+ Data retrieval (identifier)
+
+ beta12orEarlier
+ Query a database and retrieve one or more data identifiers.
+ beta13
+ true
+
+
+
+
+
+
+
+
+
+ Nucleic acid density plotting
+
+
+ beta12orEarlier
+ Calculate a density plot (of base composition) for a nucleotide sequence.
+
+
+
+
+
+
+
+
+
+ Sequence analysis
+
+
+
+
+
+
+
+ Analyse one or more known molecular sequences.
+ beta12orEarlier
+ Sequence analysis (general)
+
+
+
+
+
+
+
+
+
+ Sequence motif processing
+
+ true
+ 1.6
+ Process (read and / or write) molecular sequence motifs.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein interaction data processing
+
+ 1.6
+ Process (read and / or write) protein interaction data.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein structure analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Structure analysis (protein)
+ beta12orEarlier
+ Analyse protein tertiary structural data.
+
+
+
+
+
+
+
+
+
+ Annotation processing
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Process (read and / or write) annotation of some type, typically annotation on an entry from a biological or biomedical database entity.
+
+
+
+
+
+
+
+
+
+ Sequence feature analysis
+
+ beta12orEarlier
+ true
+ Analyse features in molecular sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Utility operation
+
+
+
+
+
+
+
+ Basic (non-analytical) operations of some data, either a file or equivalent entity in memory.
+ File processing
+ beta12orEarlier
+ Report handling
+ File handling
+ Data file processing
+
+
+
+
+
+
+
+
+
+ Gene expression analysis
+
+ Analyse gene expression and regulation data.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structural profile processing
+
+ beta12orEarlier
+ 1.6
+ Process (read and / or write) one or more structural (3D) profile(s) or template(s) of some type.
+ 3D profile processing
+ true
+
+
+
+
+
+
+
+
+
+ Data index processing
+
+ Database index processing
+ true
+ Process (read and / or write) an index of (typically a file of) biological data.
+ 1.6
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence profile processing
+
+ true
+ beta12orEarlier
+ Process (read and / or write) some type of sequence profile.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Protein function analysis
+
+
+
+
+
+
+
+ This is a broad concept and is used as a placeholder for other, more specific concepts.
+ beta12orEarlier
+ Analyse protein function, typically by processing protein sequence and/or structural data, and generate an informative report.
+
+
+
+
+
+
+
+
+
+ Protein folding analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This is a broad concept and is used as a placeholder for other, more specific concepts.
+ Analyse protein folding, typically by processing sequence and / or structural data, and write an informative report.
+ Protein folding modelling
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein secondary structure analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Analyse known protein secondary structure data.
+ beta12orEarlier
+ Secondary structure analysis (protein)
+
+
+
+
+
+
+
+
+
+ Physicochemical property data processing
+
+ beta13
+ true
+ Process (read and / or write) data on the physicochemical property of a molecule.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Primer and probe design
+
+
+
+
+
+
+
+
+ Primer and probe prediction
+ beta12orEarlier
+ Predict oligonucleotide primers or probes.
+
+
+
+
+
+
+
+
+
+ Operation (typed)
+
+ Computation
+ Calculation
+ Processing
+ Process (read and / or write) data of a specific type, for example applying analytical methods.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Database search
+
+
+
+
+
+
+
+ beta12orEarlier
+ Typically the query is compared to each entry and high scoring matches (hits) are returned. For example, a BLAST search of a sequence database.
+ Search a database (or other data resource) with a supplied query and retrieve entries (or parts of entries) that are similar to the query.
+
+
+
+
+
+
+
+
+
+ Data retrieval
+
+
+
+
+
+
+
+ Information retrieval
+ beta12orEarlier
+ Retrieve an entry (or part of an entry) from a data resource that matches a supplied query. This might include some primary data and annotation. The query is a data identifier or other indexed term. For example, retrieve a sequence record with the specified accession number, or matching supplied keywords.
+
+
+
+
+
+
+
+
+
+ Prediction and recognition
+
+ beta12orEarlier
+ Recognition
+ Prediction
+ Predict, recognise, detect or identify some properties of a biomolecule.
+ Detection
+
+
+
+
+
+
+
+
+
+ Comparison
+
+ beta12orEarlier
+ Compare two or more things to identify similarities.
+
+
+
+
+
+
+
+
+
+ Optimisation and refinement
+
+ beta12orEarlier
+ Refine or optimise some data model.
+
+
+
+
+
+
+
+
+
+ Modelling and simulation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Model or simulate some biological entity or system.
+
+
+
+
+
+
+
+
+
+ Data handling
+
+ true
+ beta12orEarlier
+ Perform basic operations on some data or a database.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Validation
+
+ beta12orEarlier
+ Validation and standardisation
+ Validate some data.
+
+
+
+
+
+
+
+
+
+ Mapping
+
+ This is a broad concept and is used as a placeholder for other, more specific concepts.
+ Map properties to positions on a biological entity (typically a molecular sequence or structure), or assemble such an entity from constituent parts.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Design
+
+ beta12orEarlier
+ Design a biological entity (typically a molecular sequence or structure) with specific properties.
+ true
+
+
+
+
+
+
+
+
+
+ Microarray data processing
+
+ beta12orEarlier
+ Process (read and / or write) microarray data.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Codon usage table processing
+
+ Process (read and / or write) a codon usage table.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Data retrieval (codon usage table)
+
+ Retrieve a codon usage table and / or associated annotation.
+ beta12orEarlier
+ true
+ beta13
+
+
+
+
+
+
+
+
+
+ Gene expression profile processing
+
+ 1.6
+ Process (read and / or write) a gene expression profile.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Functional enrichment
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Gene expression profile annotation
+ The Gene Ontology (GO) is invariably used, the input is a set of Gene IDs and the output of the analysis is typically a ranked list of GO terms, each associated with a p-value.
+ Analyse a set of genes (genes corresponding to an expression profile, or any other set) with respect to concepts from an ontology of gene functions.
+ GO term enrichment
+
+
+
+
+
+
+
+
+
+ Gene regulatory network prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Predict a network of gene regulation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Pathway or network processing
+
+
+
+
+
+
+
+ Generate, analyse or handle a biological pathway or network.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ RNA secondary structure analysis
+
+
+
+
+
+
+
+ beta12orEarlier
+ Process (read and / or write) RNA secondary structure data.
+
+
+
+
+
+
+
+
+
+ Structure processing (RNA)
+
+ Process (read and / or write) RNA tertiary structure data.
+ beta12orEarlier
+ beta13
+ true
+
+
+
+
+
+
+
+
+
+ RNA structure prediction
+
+
+
+
+
+
+
+ beta12orEarlier
+ Predict RNA tertiary structure.
+
+
+
+
+
+
+
+
+
+ DNA structure prediction
+
+
+
+
+
+
+
+ Predict DNA tertiary structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree processing
+
+
+
+
+
+
+
+ beta12orEarlier
+ Process (read and / or write) a phylogenetic tree.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure processing
+
+ Process (read and / or write) protein secondary structure data.
+ 1.6
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein interaction network processing
+
+ true
+ beta12orEarlier
+ Process (read and / or write) a network of protein interactions.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Sequence processing
+
+ Sequence processing (general)
+ Process (read and / or write) one or more molecular sequences and associated annotation.
+ true
+ beta12orEarlier
+ 1.6
+
+
+
+
+
+
+
+
+
+ Sequence processing (protein)
+
+ Process (read and / or write) a protein sequence and associated annotation.
+ beta12orEarlier
+ true
+ 1.6
+
+
+
+
+
+
+
+
+
+ Sequence processing (nucleic acid)
+
+ 1.6
+ true
+ beta12orEarlier
+ Process (read and / or write) a nucleotide sequence and associated annotation.
+
+
+
+
+
+
+
+
+
+ Sequence comparison
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Compare two or more molecular sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence cluster processing
+
+ Process (read and / or write) a sequence cluster.
+ true
+ beta12orEarlier
+ 1.6
+
+
+
+
+
+
+
+
+
+ Feature table processing
+
+ Process (read and / or write) a sequence feature table.
+ 1.6
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Gene and gene component prediction
+ beta12orEarlier
+ Detect, predict and identify genes or components of genes in DNA sequences.
+ Gene finding
+
+
+
+
+
+
+
+
+
+ GPCR classification
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ G protein-coupled receptor (GPCR) classification
+ Classify G-protein coupled receptors (GPCRs) into families and subfamilies.
+
+
+
+
+
+
+
+
+
+ GPCR coupling selectivity prediction
+
+
+
+
+
+
+
+
+
+ Predict G-protein coupled receptor (GPCR) coupling selectivity.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure processing (protein)
+
+ true
+ 1.6
+ beta12orEarlier
+ Process (read and / or write) a protein tertiary structure.
+
+
+
+
+
+
+
+
+
+ Protein atom surface calculation
+
+ Waters are not considered.
+ Calculate the solvent accessibility for each atom in a structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein residue surface calculation
+
+ beta12orEarlier
+ Calculate the solvent accessibility for each residue in a structure.
+
+
+
+
+
+
+
+
+
+ Protein surface calculation
+
+ beta12orEarlier
+ Calculate the solvent accessibility of a structure as a whole.
+
+
+
+
+
+
+
+
+
+ Sequence alignment processing
+
+ beta12orEarlier
+ true
+ Process (read and / or write) a molecular sequence alignment.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Protein-protein interaction prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Identify or predict protein-protein interactions, interfaces, binding sites etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure processing
+
+ true
+ 1.6
+ Process (read and / or write) a molecular tertiary structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Map annotation
+
+ Annotate a DNA map of some type with terms from a controlled vocabulary.
+ true
+ beta12orEarlier
+ 1.6
+
+
+
+
+
+
+
+
+
+ Data retrieval (protein annotation)
+
+ Retrieve information on a protein.
+ beta13
+ true
+ Protein information retrieval
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Data retrieval (phylogenetic tree)
+
+ beta12orEarlier
+ beta13
+ Retrieve a phylogenetic tree from a data resource.
+ true
+
+
+
+
+
+
+
+
+
+ Data retrieval (protein interaction annotation)
+
+ Retrieve information on a protein interaction.
+ true
+ beta13
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Data retrieval (protein family annotation)
+
+ beta12orEarlier
+ Protein family information retrieval
+ beta13
+ Retrieve information on a protein family.
+ true
+
+
+
+
+
+
+
+
+
+ Data retrieval (RNA family annotation)
+
+ true
+ Retrieve information on an RNA family.
+ RNA family information retrieval
+ beta12orEarlier
+ beta13
+
+
+
+
+
+
+
+
+
+ Data retrieval (gene annotation)
+
+ beta12orEarlier
+ Gene information retrieval
+ Retrieve information on a specific gene.
+ true
+ beta13
+
+
+
+
+
+
+
+
+
+ Data retrieval (genotype and phenotype annotation)
+
+ Retrieve information on a specific genotype or phenotype.
+ Genotype and phenotype information retrieval
+ beta12orEarlier
+ beta13
+ true
+
+
+
+
+
+
+
+
+
+ Protein architecture comparison
+
+
+ Compare the architecture of two or more protein structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein architecture recognition
+
+
+
+ beta12orEarlier
+ Includes methods that try to suggest the most likely biological unit for a given protein X-ray crystal structure based on crystal symmetry and scoring of putative protein-protein interfaces.
+ Identify the architecture of a protein structure.
+
+
+
+
+
+
+
+
+
+ Molecular dynamics simulation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Simulate molecular (typically protein) conformation using a computational model of physical forces and computer simulation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Analyse a nucleic acid sequence (using methods that are only applicable to nucleic acid sequences).
+ beta12orEarlier
+ Sequence analysis (nucleic acid)
+
+
+
+
+
+
+
+
+
+ Protein sequence analysis
+
+
+
+
+
+
+
+
+ Analyse a protein sequence (using methods that are only applicable to protein sequences).
+ Sequence analysis (protein)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure analysis
+
+
+
+
+
+
+
+ beta12orEarlier
+ Analyse known molecular tertiary structures.
+
+
+
+
+
+
+
+
+
+ Nucleic acid structure analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Analyse nucleic acid tertiary structural data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Secondary structure processing
+
+ 1.6
+ Process (read and / or write) a molecular secondary structure.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Structure comparison
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Compare two or more molecular tertiary structures.
+
+
+
+
+
+
+
+
+
+ Helical wheel drawing
+
+
+
+
+
+
+
+ Helical wheel rendering
+ beta12orEarlier
+ Render a helical wheel representation of protein secondary structure.
+
+
+
+
+
+
+
+
+
+ Topology diagram drawing
+
+
+
+
+
+
+
+ Topology diagram rendering
+ beta12orEarlier
+ Render a topology diagram of protein secondary structure.
+
+
+
+
+
+
+
+
+
+ Protein structure comparison
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Structure comparison (protein)
+ Methods might identify structural neighbors, find structural similarities or define a structural core.
+ Compare protein tertiary structures.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure comparison
+
+
+
+ Compare protein secondary structures.
+ beta12orEarlier
+ Secondary structure comparison (protein)
+ Protein secondary structure
+
+
+
+
+
+
+
+
+
+ Protein subcellular localization prediction
+
+
+
+
+
+
+
+
+ The prediction might include subcellular localization (nuclear, cytoplasmic, mitochondrial, chloroplast, plastid, membrane etc) or export (extracellular proteins) of a protein.
+ Predict the subcellular localization of a protein sequence.
+ Protein targeting prediction
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Residue contact calculation (residue-residue)
+
+ beta12orEarlier
+ Calculate contacts between residues in a protein structure.
+
+
+
+
+
+
+
+
+
+ Hydrogen bond calculation (inter-residue)
+
+
+ Identify potential hydrogen bonds between amino acid residues.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein interaction prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Predict the interactions of proteins with other molecules.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Codon usage data processing
+
+ beta12orEarlier
+ beta13
+ Process (read and / or write) codon usage data.
+ true
+
+
+
+
+
+
+
+
+
+ Gene expression data analysis
+
+
+
+
+
+
+
+ beta12orEarlier
+ Gene expression profile analysis
+ Gene expression (microarray) data processing
+ Microarray data processing
+ Gene expression data processing
+ Process (read and / or write) gene expression (typically microarray) data, including analysis of one or more gene expression profiles, typically to interpret them in functional terms.
+
+
+
+
+
+
+
+
+
+ Gene regulatory network processing
+
+ 1.6
+ beta12orEarlier
+ Process (read and / or write) a network of gene regulation.
+ true
+
+
+
+
+
+
+
+
+ Pathway or network analysis
+
+
+
+
+
+
+
+ Analyse a known biological pathway or network.
+ Pathway analysis
+ Network analysis
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequencing-based expression profile data analysis
+
+ Analyse SAGE, MPSS or SBS experimental data, typically to identify or quantify mRNA transcripts.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Splicing model analysis
+
+
+
+
+
+
+
+
+
+ Analyse, characterize and model alternative splicing events from comparing multiple nucleic acid sequences.
+ Splicing analysis
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Microarray raw data analysis
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Analyse raw microarray data.
+
+
+
+
+
+
+
+
+
+ Nucleic acid analysis
+
+
+
+
+
+
+
+ Process (read and / or write) nucleic acid sequence or structural data.
+ Nucleic acid data processing
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein analysis
+
+
+
+
+
+
+
+ beta12orEarlier
+ Protein data processing
+ Process (read and / or write) protein sequence or structural data.
+
+
+
+
+
+
+
+
+
+ Sequence data processing
+
+ beta12orEarlier
+ Process (read and / or write) molecular sequence data.
+ beta13
+ true
+
+
+
+
+
+
+
+
+ Structural data processing
+
+ Process (read and / or write) molecular structural data.
+ beta13
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Text processing
+
+ true
+ beta12orEarlier
+ Process (read and / or write) text.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Protein sequence alignment analysis
+
+
+
+
+
+
+
+
+
+ Analyse a protein sequence alignment, typically to detect features or make predictions.
+ beta12orEarlier
+ Sequence alignment analysis (protein)
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence alignment analysis
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Sequence alignment analysis (nucleic acid)
+ Analyse a nucleic acid sequence alignment, typically to detect features or make predictions.
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence comparison
+
+
+
+ Sequence comparison (nucleic acid)
+ Compare two or more nucleic acid sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein sequence comparison
+
+
+
+ beta12orEarlier
+ Sequence comparison (protein)
+ Compare two or more protein sequences.
+
+
+
+
+
+
+
+
+
+ DNA back-translation
+
+
+
+
+
+
+
+ beta12orEarlier
+ Back-translate a protein sequence into DNA.
+
+
+
+
+
+
+
+
+
+ Sequence editing (nucleic acid)
+
+ 1.8
+ true
+ Edit or change a nucleic acid sequence, either randomly or specifically.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence editing (protein)
+
+ Edit or change a protein sequence, either randomly or specifically.
+ beta12orEarlier
+ true
+ 1.8
+
+
+
+
+
+
+
+
+
+ Sequence generation (nucleic acid)
+
+ Generate a nucleic acid sequence by some means.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence generation (protein)
+
+
+ Generate a protein sequence by some means.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence visualisation
+
+ Visualise, format or render a nucleic acid sequence.
+ true
+ Various nucleic acid sequence analysis methods might generate a sequence rendering but are not (for brevity) listed under here.
+ 1.8
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein sequence visualisation
+
+ true
+ beta12orEarlier
+ Visualise, format or render a protein sequence.
+ 1.8
+ Various protein sequence analysis methods might generate a sequence rendering but are not (for brevity) listed under here.
+
+
+
+
+
+
+
+
+
+ Nucleic acid structure comparison
+
+
+
+ Compare nucleic acid tertiary structures.
+ beta12orEarlier
+ Structure comparison (nucleic acid)
+
+
+
+
+
+
+
+
+
+ Structure processing (nucleic acid)
+
+ 1.6
+ beta12orEarlier
+ true
+ Process (read and / or write) nucleic acid tertiary structure data.
+
+
+
+
+
+
+
+
+
+ DNA mapping
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Generate a map of a DNA sequence annotated with positional or non-positional features of some type.
+
+
+
+
+
+
+
+
+
+ Map data processing
+
+ DNA map data processing
+ Process (read and / or write) a DNA map of some type.
+ beta12orEarlier
+ true
+ 1.6
+
+
+
+
+
+
+
+
+
+ Protein hydropathy calculation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Analyse the hydrophobic, hydrophilic or charge properties of a protein (from analysis of sequence or structural information).
+
+
+
+
+
+
+
+
+
+ Protein binding site prediction
+
+
+
+
+
+
+
+ Ligand-binding and active site prediction
+ beta12orEarlier
+ Binding site prediction
+ Identify or predict catalytic residues, active sites or other ligand-binding sites in protein sequences or structures.
+
+
+
+
+
+
+
+
+
+ Sequence tagged site (STS) mapping
+
+
+
+
+
+
+
+ beta12orEarlier
+ Sequence mapping
+ An STS is a short subsequence of known sequence and location that occurs only once in the chromosome or genome that is being mapped. Sources of STSs include expressed sequence tags (ESTs), simple sequence length polymorphisms (SSLPs), and random genomic sequences from cloned genomic DNA or database sequences.
+ Generate a physical DNA map (sequence map) from analysis of sequence tagged sites (STS).
+
+
+
+
+
+
+
+
+
+ Alignment
+
+
+
+
+
+
+
+
+ Compare two or more entities, typically the sequence or structure (or derivatives) of macromolecules, to identify equivalent subunits.
+ Alignment
+ Alignment generation
+ beta12orEarlier
+ Alignment construction
+
+
+
+
+
+
+
+
+
+ Protein fragment weight comparison
+
+
+ beta12orEarlier
+ Calculate the molecular weight of a protein (or fragments) and compare it to another protein or reference data.
+
+
+
+
+
+
+
+
+
+ Protein property comparison
+
+
+
+
+
+
+
+ Compare the physicochemical properties of two or more proteins (or reference data).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Secondary structure comparison
+
+
+
+
+
+
+
+ Compare two or more molecular secondary structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Hopp and Woods plotting
+
+
+ beta12orEarlier
+ Generate a Hopp and Woods plot of antigenicity of a protein.
+
+
+
+
+
+
+
+
+
+ Microarray cluster textual view generation
+
+ beta12orEarlier
+ Visualise gene clusters with gene names.
+
+
+
+
+
+
+
+
+
+ Microarray wave graph plotting
+
+ Microarray wave graph rendering
+ Microarray cluster temporal graph rendering
+ beta12orEarlier
+ This view can be rendered as a pie graph. The distance matrix is sorted by cluster number and typically represented as a diagonal matrix with distance values displayed in different color shades.
+ Visualise clustered gene expression data as a set of waves, where each wave corresponds to a gene across samples on the X-axis.
+
+
+
+
+
+
+
+
+
+ Microarray dendrograph plotting
+
+ Microarray dendrograph rendering
+ Generate a dendrograph of raw, preprocessed or clustered microarray data.
+ beta12orEarlier
+ Microarray checks view rendering
+ Microarray view rendering
+
+
+
+
+
+
+
+
+
+ Microarray proximity map plotting
+
+ beta12orEarlier
+ Microarray distance map rendering
+ Generate a plot of distances (distance matrix) between genes.
+ Microarray proximity map rendering
+
+
+
+
+
+
+
+
+
+ Microarray tree or dendrogram rendering
+
+ Microarray 2-way dendrogram rendering
+ beta12orEarlier
+ Visualise clustered gene expression data using a gene tree, array tree and color coded band of gene expression.
+ Microarray matrix tree plot rendering
+
+
+
+
+
+
+
+
+
+ Microarray principal component plotting
+
+ beta12orEarlier
+ Microarray principal component rendering
+ Generate a line graph drawn as sum of principal components (Eigen value) and individual expression values.
+
+
+
+
+
+
+
+
+
+ Microarray scatter plot plotting
+
+ Generate a scatter plot of microarray data, typically after principal component analysis.
+ beta12orEarlier
+ Microarray scatter plot rendering
+
+
+
+
+
+
+
+
+
+ Whole microarray graph plotting
+
+ Visualise gene expression data where each band (or line graph) corresponds to a sample.
+ beta12orEarlier
+ Whole microarray graph rendering
+
+
+
+
+
+
+
+
+
+ Microarray tree-map rendering
+
+ beta12orEarlier
+ Visualise gene expression data after hierarchical clustering for representing hierarchical relationships.
+
+
+
+
+
+
+
+
+
+ Microarray Box-Whisker plot plotting
+
+ beta12orEarlier
+ Visualise raw and pre-processed gene expression data, via a plot showing over- and under-expression along with mean, upper and lower quartiles.
+
+
+
+
+
+
+
+
+
+ Physical mapping
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Generate a physical (sequence) map of a DNA sequence showing the physical distance (base pairs) between features or landmarks such as restriction sites, cloned DNA fragments, genes and other genetic markers.
+
+
+
+
+
+
+
+
+
+ Analysis
+
+ Apply analytical methods to existing data of a specific type.
+ For non-analytical operations, see the 'Processing' branch.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Alignment analysis
+
+ Process or analyse an alignment of molecular sequences or structures.
+ true
+ beta12orEarlier
+ 1.8
+
+
+
+
+
+
+
+
+
+ Article analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Analyse a body of scientific text (typically a full text article from a scientific journal).
+ beta12orEarlier
+ Article analysis
+
+
+
+
+
+
+
+
+
+ Molecular interaction analysis
+
+ Analyse the interactions of two or more molecules (or parts of molecules) that are known to interact.
+ beta12orEarlier
+ beta13
+ true
+
+
+
+
+
+
+
+
+ Protein interaction analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Analyse known protein-protein, protein-DNA/RNA or protein-ligand interactions.
+
+
+
+
+
+
+
+
+
+ Residue contact calculation
+
+ Calculate contacts between residues and some other group in a protein structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Alignment processing
+
+ true
+ Process (read and / or write) an alignment of two or more molecular sequences, structures or derived data.
+ 1.6
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Structure alignment processing
+
+ Process (read and / or write) a molecular tertiary (3D) structure alignment.
+ 1.6
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Codon usage bias calculation
+
+
+
+
+
+
+
+ Calculate codon usage bias.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Codon usage bias plotting
+
+
+
+
+
+
+
+
+ beta12orEarlier
+ Generate a codon usage bias plot.
+
+
+
+
+
+
+
+
+
+ Codon usage fraction calculation
+
+
+
+
+
+
+
+ Calculate the differences in codon usage fractions between two sequences, sets of sequences, codon usage tables etc.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Classification
+
+ beta12orEarlier
+ Assign molecular sequences, structures or other biological data to a specific group or category according to qualities it shares with that group or category.
+
+
+
+
+
+
+
+
+
+ Molecular interaction data processing
+
+ beta13
+ true
+ beta12orEarlier
+ Process (read and / or write) molecular interaction data.
+
+
+
+
+
+
+
+
+ Sequence classification
+
+
+ beta12orEarlier
+ Assign molecular sequence(s) to a group or category.
+
+
+
+
+
+
+
+
+
+ Structure classification
+
+
+ Assign molecular structure(s) to a group or category.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein comparison
+
+ Compare two or more proteins (or some aspect) to identify similarities.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid comparison
+
+ beta12orEarlier
+ Compare two or more nucleic acids to identify similarities.
+
+
+
+
+
+
+
+
+
+ Prediction and recognition (protein)
+
+ beta12orEarlier
+ Predict, recognise, detect or identify some properties of proteins.
+
+
+
+
+
+
+
+
+
+ Prediction and recognition (nucleic acid)
+
+ beta12orEarlier
+ Predict, recognise, detect or identify some properties of nucleic acids.
+
+
+
+
+
+
+
+
+
+ Structure editing
+
+
+
+
+
+
+
+ beta13
+ Edit, convert or otherwise change a molecular tertiary structure, either randomly or specifically.
+
+
+
+
+
+
+
+
+
+ Sequence alignment editing
+
+ Edit, convert or otherwise change a molecular sequence alignment, either randomly or specifically.
+ beta13
+
+
+
+
+
+
+
+
+
+ Pathway or network visualisation
+
+
+
+
+
+
+
+
+ Render (visualise) a biological pathway or network.
+ Pathway or network rendering
+ beta13
+
+
+
+
+
+
+
+
+
+ Protein function prediction (from sequence)
+
+ beta13
+ true
+ Predict general (non-positional) functional properties of a protein from analysing its sequence.
+ For functional properties that are positional, use 'Protein site detection' instead.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Protein sequence feature detection
+
+
+
+ Protein site recognition
+ Predict, recognise and identify functional or other key sites within protein sequences, typically by scanning for known motifs, patterns and regular expressions.
+ Protein site prediction
+ Sequence profile database search
+ Protein site detection
+ Protein secondary database search
+ Sequence feature detection (protein)
+ beta13
+
+
+
+
+
+
+
+
+
+ Protein property calculation (from sequence)
+
+
+ beta13
+ Calculate (or predict) physical or chemical properties of a protein, including any non-positional properties of the molecular sequence, from processing a protein sequence.
+
+
+
+
+
+
+
+
+
+ Protein feature prediction (from structure)
+
+ beta13
+ 1.6
+ true
+ Predict, recognise and identify positional features in proteins from analysing protein structure.
+
+
+
+
+
+
+
+
+
+ Protein feature detection
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Features includes functional sites or regions, secondary structure, structural domains and so on. Methods might use fingerprints, motifs, profiles, hidden Markov models, sequence alignment etc to provide a mapping of a query protein sequence to a discriminatory element. This includes methods that search a secondary protein database (Prosite, Blocks, ProDom, Prints, Pfam etc.) to assign a protein sequence(s) to a known protein family or group.
+
+ Predict, recognise and identify positional features in proteins from analysing protein sequences or structures.
+ beta13
+ Protein feature recognition
+ Protein feature prediction
+
+
+
+
+
+
+
+
+
+ Database search (by sequence)
+
+ Sequence screening
+ true
+ 1.6
+ Screen a molecular sequence(s) against a database (of some type) to identify similarities between the sequence and database entries.
+ beta13
+
+
+
+
+
+
+
+
+
+ Protein interaction network prediction
+
+
+
+
+
+
+
+
+
+
+
+
+
+ beta13
+ Predict a network of protein interactions.
+
+
+
+
+
+
+
+
+
+ Nucleic acid design
+
+
+ beta13
+ Design (or predict) nucleic acid sequences with specific chemical or physical properties.
+
+
+
+
+
+
+
+
+
+ Editing
+
+ beta13
+ Edit a data entity, either randomly or specifically.
+
+
+
+
+
+
+
+
+
+ Sequence assembly validation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1.1
+ Evaluate a DNA sequence assembly, typically for purposes of quality control.
+
+
+
+
+
+
+
+
+
+ Genome alignment
+
+ Align two or more (typically huge) molecular sequences that represent genomes.
+ Genome alignment construction
+ 1.1
+ Genome alignment
+
+
+
+
+
+
+
+
+
+ Localized reassembly
+
+ Reconstruction of a sequence assembly in a localised area.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Sequence assembly visualisation
+
+ Assembly rendering
+ Sequence assembly rendering
+ Render and visualise a DNA sequence assembly.
+ 1.1
+ Assembly visualisation
+
+
+
+
+
+
+
+
+
+ Base-calling
+
+
+
+
+
+
+
+ Phred base calling
+ 1.1
+ Identify base (nucleobase) sequence from a fluorescence 'trace' data generated by an automated DNA sequencer.
+ Base calling
+ Phred base-calling
+
+
+
+
+
+
+
+
+
+ Bisulfite mapping
+
+ 1.1
+ Bisulfite mapping follows high-throughput sequencing of DNA which has undergone bisulfite treatment followed by PCR amplification; unmethylated cytosines are specifically converted to thymine, allowing the methylation status of cytosine in the DNA to be detected.
+ The mapping of methylation sites in a DNA (genome) sequence.
+ Bisulfite sequence alignment
+ Bisulfite sequence mapping
+
+
+
+
+
+
+
+
+
+ Sequence contamination filtering
+
+
+
+
+
+
+
+ beta12orEarlier
+ Identify and filter a (typically large) sequence data set to remove sequences from contaminants in the sample that was sequenced.
+
+
+
+
+
+
+
+
+
+ Trim ends
+
+ 1.1
+ Trim sequences (typically from an automated DNA sequencer) to remove misleading ends.
+ For example trim polyA tails, introns and primer sequence flanking the sequence of amplified exons, or other unwanted sequence.
+
+
+
+
+
+
+
+
+
+ Trim vector
+
+ Trim sequences (typically from an automated DNA sequencer) to remove sequence-specific end regions, typically contamination from vector sequences.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Trim to reference
+
+ 1.1
+ Trim sequences (typically from an automated DNA sequencer) to remove the sequence ends that extend beyond an assembled reference sequence.
+
+
+
+
+
+
+
+
+
+ Sequence trimming
+
+ 1.1
+ Cut (remove) the end from a molecular sequence.
+
+
+
+
+
+
+
+
+
+ Genome feature comparison
+
+ Genomic elements that might be compared include genes, indels, single nucleotide polymorphisms (SNPs), retrotransposons, tandem repeats and so on.
+ Compare the features of two genome sequences.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Sequencing error detection
+
+
+
+
+
+
+
+ Short read error correction
+ Short-read error correction
+ beta12orEarlier
+ Detect errors in DNA sequences generated from sequencing projects.
+
+
+
+
+
+
+
+
+
+ Genotyping
+
+ 1.1
+ Methods might consider cytogenetic analyses, copy number polymorphism (and calculate copy number calls for copy-number variation(CNV) regions), single nucleotide polymorphism (SNP), rare copy number variation (CNV) identification, loss of heterozygosity data and so on.
+ Analyse DNA sequence data to identify differences between the genetic composition (genotype) of an individual compared to other individual's or a reference sequence.
+
+
+
+
+
+
+
+
+
+ Genetic variation analysis
+
+
+ 1.1
+ Sequence variation analysis
+ Genetic variation annotation provides contextual interpretation of coding SNP consequences in transcripts. It allows comparisons to be made between variation data in different populations or strains for the same transcript.
+ Genetic variation annotation
+ Analyse a genetic variation, for example to annotate its location, alleles, classification, and effects on individual transcripts predicted for a gene model.
+
+
+
+
+
+
+
+
+
+ Read mapping
+
+
+ Short oligonucleotide alignment
+ Oligonucleotide mapping
+ Oligonucleotide alignment generation
+ Short read mapping
+ Oligonucleotide alignment construction
+ The purpose of read mapping is to identify the location of sequenced fragments within a reference genome and assumes that there is, in fact, at least local similarity between the fragment and reference sequences.
+ Oligonucleotide alignment
+ Read alignment
+ 1.1
+ Short read alignment
+ Align short oligonucleotide sequences (reads) to a larger (genomic) sequence.
+ Short sequence read mapping
+
+
+
+
+
+
+
+
+
+ Split read mapping
+
+ A variant of oligonucleotide mapping where a read is mapped to two separate locations because of possible structural variation.
+ 1.1
+
+
+
+
+
+
+
+
+
+ DNA barcoding
+
+ Analyse DNA sequences in order to identify a DNA barcode; short fragment(s) of DNA that are useful to diagnose the taxa of biological organisms.
+ 1.1
+ Sample barcoding
+
+
+
+
+
+
+
+
+
+ SNP calling
+
+ Identify single nucleotide change in base positions in sequencing data that differ from a reference genome and which might, especially by reference to population frequency or functional data, indicate a polymorphism.
+ Operations usually score confidence in the prediction or some other statistical measure of evidence.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Mutation detection
+
+ Polymorphism detection
+ Detect mutations in multiple DNA sequences, for example, from the alignment and comparison of the fluorescent traces produced by DNA sequencing hardware.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Chromatogram visualisation
+
+ Visualise, format or render an image of a Chromatogram.
+ Chromatogram viewing
+ 1.1
+
+
+
+
+
+
+
+
+
+ Methylation analysis
+
+ 1.1
+ Determine cytosine methylation states in nucleic acid sequences.
+
+
+
+
+
+
+
+
+
+ Methylation calling
+
+
+ 1.1
+ Determine cytosine methylation status of specific positions in a nucleic acid sequences.
+
+
+
+
+
+
+
+
+
+ Methylation level analysis (global)
+
+ 1.1
+ Global methylation analysis
+ Measure the overall level of methyl cytosines in a genome from analysis of experimental data, typically from chromatographic methods and methyl accepting capacity assay.
+
+
+
+
+
+
+
+
+
+ Methylation level analysis (gene-specific)
+
+ Gene-specific methylation analysis
+ Many different techniques are available for this.
+ Measure the level of methyl cytosines in specific genes.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Genome visualisation
+
+ 1.1
+ Genome visualization
+ Visualise, format or render a nucleic acid sequence that is part of (and in context of) a complete genome sequence.
+ Genome rendering
+ Genome visualisation
+ Genome viewing
+ Genome browsing
+
+
+
+
+
+
+
+
+
+ Genome comparison
+
+ Compare the sequence or features of two or more genomes, for example, to find matching regions.
+ 1.1
+ Genomic region matching
+
+
+
+
+
+
+
+
+
+ Genome indexing
+
+
+
+
+
+
+
+ Many sequence alignment tasks involving many or very large sequences rely on a precomputed index of the sequence to accelerate the alignment.
+ Generate an index of a genome sequence.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Genome indexing (Burrows-Wheeler)
+
+ The Burrows-Wheeler Transform (BWT) is a permutation of the genome based on a suffix array algorithm.
+ Generate an index of a genome sequence using the Burrows-Wheeler algorithm.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Genome indexing (suffix arrays)
+
+ 1.1
+ Generate an index of a genome sequence using a suffix arrays algorithm.
+ suffix arrays
+ A suffix array consists of the lexicographically sorted list of suffixes of a genome.
+
+
+
+
+
+
+
+
+
+ Spectral analysis
+
+ Spectral analysis
+ 1.1
+ Spectrum analysis
+ Analyse a spectrum from a mass spectrometry (or other) experiment.
+ Mass spectrum analysis
+
+
+
+
+
+
+
+
+
+ Peak detection
+
+
+
+
+
+
+
+ 1.1
+ Peak finding
+ Peak assignment
+ Identify peaks in a spectrum from a mass spectrometry, NMR, or some other spectrum-generating experiment.
+
+
+
+
+
+
+
+
+
+ Scaffolding
+
+
+
+
+
+
+
+
+ Scaffold construction
+ Link together a non-contiguous series of genomic sequences into a scaffold, consisting of sequences separated by gaps of known length. The sequences that are linked are typically contigs; contiguous sequences corresponding to read overlaps.
+ 1.1
+ Scaffold may be positioned along a chromosome physical map to create a "golden path".
+ Scaffold generation
+
+
+
+
+
+
+
+
+
+ Scaffold gap completion
+
+ Fill the gaps in a sequence assembly (scaffold) by merging in additional sequences.
+ Different techniques are used to generate gap sequences to connect contigs, depending on the size of the gap. For small (5-20kb) gaps, PCR amplification and sequencing is used. For large (>20kb) gaps, fragments are cloned (e.g. in BAC (Bacterial artificial chromosomes) vectors) and then sequenced.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Sequencing quality control
+
+
+ Raw sequence data quality control.
+ Analyse raw sequence data from a sequencing pipeline and identify problems.
+ Sequencing QC
+ 1.1
+
+
+
+
+
+
+
+
+
+ Read pre-processing
+
+
+ Sequence read pre-processing
+ This is a broad concept and is used a placeholder for other, more specific concepts. For example process paired end reads to trim low quality ends, remove short sequences, identify sequence inserts, detect chimeric reads, or remove low quality sequences including vector, adaptor, low complexity and contaminant sequences. Sequences might come from genomic DNA library, EST libraries, SSH library and so on.
+ Pre-process sequence reads to ensure (or improve) quality and reliability.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Species frequency estimation
+
+
+
+
+
+
+
+ Estimate the frequencies of different species from analysis of the molecular sequences, typically of DNA recovered from environmental samples.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Peak calling
+
+ Chip-sequencing combines chromatin immunoprecipitation (ChIP) with massively parallel DNA sequencing to generate a set of reads, which are aligned to a genome sequence. The enriched areas contain the binding sites of DNA-associated proteins. For example, a transcription factor binding site. ChIP-on-chip in contrast combines chromatin immunoprecipitation ('ChIP') with microarray ('chip').
+ Identify putative protein-binding regions in a genome sequence from analysis of Chip-sequencing data or ChIP-on-chip data.
+ Protein binding peak detection
+ 1.1
+
+
+
+
+
+
+
+
+
+ Differential expression analysis
+
+ Identify (typically from analysis of microarray or RNA-seq data) genes whose expression levels are significantly different between two sample groups.
+ Differentially expressed gene identification
+ Differential expression analysis is used, for example, to identify which genes are up-regulated (increased expression) or down-regulated (decreased expression) between a group treated with a drug and a control groups.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Gene set testing
+
+ 1.1
+ Gene sets can be defined beforehand by biological function, chromosome locations and so on.
+ Analyse gene expression patterns (typically from DNA microarray datasets) to identify sets of genes that are associated with a specific trait, condition, clinical outcome etc.
+
+
+
+
+
+
+
+
+
+ Variant classification
+
+
+ Classify variants based on their potential effect on genes, especially functional effects on the expressed proteins.
+ 1.1
+ Variants are typically classified by their position (intronic, exonic, etc.) in a gene transcript and (for variants in coding exons) by their effect on the protein sequence (synonymous, non-synonymous, frameshifting, etc.)
+
+
+
+
+
+
+
+
+
+ Variant prioritization
+
+ Variant prioritization can be used for example to produce a list of variants responsible for 'knocking out' genes in specific genomes. Methods include amino acid substitution, aggregative approaches, probabilistic approach, inheritance and unified likelihood-frameworks.
+ Identify biologically interesting variants by prioritizing individual variants, for example, homozygous variants absent in control genomes.
+ 1.1
+
+
+
+
+
+
+
+
+
+ Variant calling
+
+ Variant mapping
+ 1.1
+ Identify and map genomic alterations, including single nucleotide polymorphisms, short indels and structural variants, in a genome sequence.
+ Methods often utilise a database of aligned reads.
+
+
+
+
+
+
+
+
+
+ Structural variation discovery
+
+ Detect large regions in a genome subject to copy-number variation, or other structural variations in genome(s).
+ 1.1
+ Methods might involve analysis of whole-genome array comparative genome hybridization or single-nucleotide polymorphism arrays, paired-end mapping of sequencing data, or from analysis of short reads from new sequencing technologies.
+
+
+
+
+
+
+
+
+
+ Exome analysis
+
+ 1.1
+ Targeted exome capture
+ Exome sequencing is considered a cheap alternative to whole genome sequencing.
+ Exome sequence analysis
+ Analyse sequencing data from experiments aiming to selectively sequence the coding regions of the genome.
+
+
+
+
+
+
+
+
+
+ Read depth analysis
+
+ 1.1
+ Analyse mapping density (read depth) of (typically) short reads from sequencing platforms, for example, to detect deletions and duplications.
+
+
+
+
+
+
+
+
+
+ Gene expression QTL analysis
+
+
+
+
+
+
+
+ expression quantitative trait loci profiling
+ 1.1
+ eQTL profiling
+ Combine classical quantitative trait loci (QTL) analysis with gene expression profiling, for example, to describe cis- and trans-controlling elements for the expression of phenotype associated genes.
+ expression QTL profiling
+
+
+
+
+
+
+
+
+
+ Copy number estimation
+
+ Methods typically implement some statistical model for hypothesis testing, and methods estimate total copy number, i.e. do not distinguish the two inherited chromosomes quantities (specific copy number).
+ Transcript copy number estimation
+ 1.1
+ Estimate the number of copies of loci of particular gene(s) in DNA sequences typically from gene-expression profiling technology based on microarray hybridization-based experiments. For example, estimate copy number (or marker dosage) of a dominant marker in samples from polyploid plant cells or tissues, or chromosomal gains and losses in tumors.
+
+
+
+
+
+
+
+
+
+ Primer removal
+
+ 1.2
+ Remove forward and/or reverse primers from nucleic acid sequences (typically PCR products).
+
+
+
+
+
+
+
+
+
+ Transcriptome assembly
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Infer a transcriptome sequence by analysis of short sequence reads.
+ 1.2
+
+
+
+
+
+
+
+
+
+ Transcriptome assembly (de novo)
+
+ de novo transcriptome assembly
+ true
+ 1.6
+ 1.2
+ Infer a transcriptome sequence without the aid of a reference genome, i.e. by comparing short sequences (reads) to each other.
+
+
+
+
+
+
+
+
+
+ Transcriptome assembly (mapping)
+
+ Infer a transcriptome sequence by mapping short reads to a reference genome.
+ 1.6
+ 1.2
+ true
+
+
+
+
+
+
+
+
+
+ Sequence coordinate conversion
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1.3
+ Convert one set of sequence coordinates to another, e.g. convert coordinates of one assembly to another, cDNA to genomic, CDS to genomic, protein translation to genomic etc.
+
+
+
+
+
+
+
+
+
+ Document similarity calculation
+
+ Calculate similarity between 2 or more documents.
+ 1.3
+
+
+
+
+
+
+
+
+
+ Document clustering
+
+
+ Cluster (group) documents on the basis of their calculated similarity.
+ 1.3
+
+
+
+
+
+
+
+
+
+ Named entity recognition
+
+
+ Entity identification
+ Entity chunking
+ Entity extraction
+ Recognise named entities (text tokens) within documents.
+ 1.3
+
+
+
+
+
+
+
+
+
+ ID mapping
+
+
+ Identifier mapping
+ The mapping can be achieved by comparing identifier values or some other means, e.g. exact matches to a provided sequence.
+ 1.3
+ Accession mapping
+ Map data identifiers to one another for example to establish a link between two biological databases for the purposes of data integration.
+
+
+
+
+
+
+
+
+
+ Anonymisation
+
+ Process data in such a way that makes it hard to trace to the person which the data concerns.
+ 1.3
+ Data anonymisation
+
+
+
+
+
+
+
+
+
+ ID retrieval
+
+
+
+
+
+
+
+ id retrieval
+ Data retrieval (accession)
+ Data retrieval (ID)
+ Identifier retrieval
+ Data retrieval (id)
+ Accession retrieval
+ Search for and retrieve a data identifier of some kind, e.g. a database entry accession.
+ 1.3
+
+
+
+
+
+
+
+
+
+ Sequence checksum generation
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Generate a checksum of a molecular sequence.
+ 1.4
+
+
+
+
+
+
+
+
+
+ Bibliography generation
+
+
+
+
+
+
+
+ Bibliography construction
+ Construct a bibliography from the scientific literature.
+ 1.4
+
+
+
+
+
+
+
+
+
+ Protein quaternary structure prediction
+
+ 1.4
+ Predict the structure of a multi-subunit protein and particularly how the subunits fit together.
+
+
+
+
+
+
+
+
+
+ Protein surface analysis
+
+ 1.4
+ Analyse the surface properties of proteins.
+
+
+
+
+
+
+
+
+
+ Ontology comparison
+
+ 1.4
+ Compare two or more ontologies, e.g. identify differences.
+
+
+
+
+
+
+
+
+
+ Ontology comparison
+
+ 1.4
+ Compare two or more ontologies, e.g. identify differences.
+ 1.9
+
+
+
+
+
+
+
+
+
+ Format detection
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Recognition of which format the given data is in.
+ 1.4
+ Format identification
+ Format recognition
+ 'Format recognition' is not a bioinformatics-specific operation, but of great relevance in bioinformatics. Should be removed from EDAM if/when captured satisfactorily in a suitable domain-generic ontology.
+ Format inference
+
+
+
+
+
+ The has_input "Data" (data_0006) may cause visualisation or other problems although ontologically correct. But on the other hand it may be useful to distinguish from nullary operations without inputs.
+
+
+
+
+
+
+
+
+
+
+ Splitting
+
+ File splitting
+ Split a file containing multiple data items into many files, each containing one item.
+ 1.4
+
+
+
+
+
+
+
+
+
+ Generation
+
+ Construction
+ beta12orEarlier
+ For non-analytical operations, see the 'Processing' branch.
+ Construct some data entity.
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence feature detection
+
+
+ Nucleic acid site prediction
+ Predict, recognise and identify functional or other key sites within nucleic acid sequences, typically by scanning for known motifs, patterns and regular expressions.
+ Nucleic acid site recognition
+ 1.6
+ Nucleic acid site detection
+
+
+
+
+
+
+
+
+
+ Deposition
+
+ Deposit some data in a database or some other type of repository or software system.
+ 1.6
+ Database submission
+ Submission
+ Data submission
+ Data deposition
+ Database deposition
+ For non-analytical operations, see the 'Processing' branch.
+
+
+
+
+
+
+
+
+
+ Clustering
+
+ 1.6
+ Group together some data entities on the basis of similarities such that entities in the same group (cluster) are more similar to each other than to those in other groups (clusters).
+
+
+
+
+
+
+
+
+
+ Assembly
+
+ 1.6
+ Construct some entity (typically a molecule sequence) from component pieces.
+
+
+
+
+
+
+
+
+
+ Conversion
+
+ 1.6
+ Non-analytical data conversion.
+
+
+
+
+
+
+
+
+
+ Standardization and normalization
+
+ 1.6
+ Standardize or normalize data.
+
+
+
+
+
+
+
+
+
+ Aggregation
+
+ Combine multiple files or data items into a single file or object.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Article comparison
+
+ Compare two or more scientific articles.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Calculation
+
+ Mathematical determination of the value of something, typically a property of a molecule.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Pathway or network prediction
+
+
+ 1.6
+ Predict a molecular pathway or network.
+
+
+
+
+
+
+
+
+
+ Genome assembly
+
+ 1.6
+ The process of assembling many short DNA sequences together such that they represent the original chromosomes from which the DNA originated.
+
+
+
+
+
+
+
+
+
+ Plotting
+
+ Generate a graph, or other visual representation, of data, showing the relationship between two or more variables.
+ 1.6
+
+
+
+
+
+
+
+
+
+ Image analysis
+
+
+
+
+
+
+
+ 1.7
+ The analysis of an image (typically a digital image) of some type in order to extract information from it.
+ Image processing
+
+
+
+
+
+
+
+
+
+
+ Diffraction data analysis
+
+ 1.7
+ Analysis of data from a diffraction experiment.
+
+
+
+
+
+
+
+
+
+ Cell migration analysis
+
+
+
+
+
+
+
+ 1.7
+ Analysis of cell migration images in order to study cell migration, typically in order to study the processes that play a role in the disease progression.
+
+
+
+
+
+
+
+
+
+ Diffraction data reduction
+
+ 1.7
+ Processing of diffraction data into a corrected, ordered, and simplified form.
+
+
+
+
+
+
+
+
+
+ Neurite measurement
+
+
+
+
+
+
+
+ Measurement of neurites; projections (axons or dendrites) from the cell body of a neuron, from analysis of neuron images.
+ 1.7
+
+
+
+
+
+
+
+
+
+ Diffraction data integration
+
+ 1.7
+ Diffraction summation integration
+ Diffraction profile fitting
+ The evaluation of diffraction intensities and integration of diffraction maxima from a diffraction experiment.
+
+
+
+
+
+
+
+
+
+ Phasing
+
+ Phase a macromolecular crystal structure, for example by using molecular replacement or experimental phasing methods.
+ 1.7
+
+
+
+
+
+
+
+
+
+ Molecular replacement
+
+ 1.7
+ A technique used to construct an atomic model of an unknown structure from diffraction data, based upon an atomic model of a known structure, either a related protein or the same protein from a different crystal form.
+ The technique solves the phase problem, i.e. retrieve information concerning phases of the structure.
+
+
+
+
+
+
+
+
+
+ Rigid body refinement
+
+ 1.7
+ Rigid body refinement usually follows molecular replacement in the assignment of a structure from diffraction data.
+ A method used to refine a structure by moving the whole molecule or parts of it as a rigid unit, rather than moving individual atoms.
+
+
+
+
+
+
+
+
+
+ Single particle analysis
+
+
+
+
+
+
+
+
+ An image processing technique that combines and analyze multiple images of a particulate sample, in order to produce an image with clearer features that are more easily interpreted.
+ 1.7
+ Single particle analysis is used to improve the information that can be obtained by relatively low resolution techniques, e.g. an image of a protein or virus from transmission electron microscopy (TEM).
+
+
+
+
+
+
+
+
+
+ Single particle alignment and classification
+
+
+ Compare (align and classify) multiple particle images from a micrograph in order to produce a representative image of the particle.
+ 1.7
+ A micrograph can include particles in multiple different orientations and/or conformations. Particles are compared and organised into sets based on their similarity. Typically iterations of classification and alignment and are performed to optimise the final image; average images produced by classification are used as a reference image for subsequent alignment of the whole image set.
+
+
+
+
+
+
+
+
+
+ Functional clustering
+
+
+
+
+
+
+
+ 1.7
+ Clustering of molecular sequences on the basis of their function, typically using information from an ontology of gene function, or some other measure of functional phenotype.
+ Functional sequence clustering
+
+
+
+
+
+
+
+
+
+ Taxonomic classification
+
+ 1.7
+ Classification of molecular sequences by assignment to some taxonomic hierarchy.
+
+
+
+
+
+
+
+
+
+ Virulence prediction
+
+
+
+
+
+
+
+
+ Pathogenicity prediction
+ The prediction of the degree of pathogenicity of a microorganism from analysis of molecular sequences.
+ 1.7
+
+
+
+
+
+
+
+
+
+ Gene expression correlation analysis
+
+
+ 1.7
+ Gene co-expression network analysis
+ Analyse the correlation patterns among genes across a variety of experiments, microarray samples etc.
+
+
+
+
+
+
+
+
+
+
+ Correlation
+
+
+
+
+
+
+
+ 1.7
+ Identify a correlation, i.e. a statistical relationship between two random variables or two sets of data.
+
+
+
+
+
+
+
+
+
+ RNA structure covariance model generation
+
+
+
+
+
+
+
+
+ Compute the covariance model for (a family of) RNA secondary structures.
+ 1.7
+
+
+
+
+
+
+
+
+
+ RNA secondary structure prediction (shape-based)
+
+ RNA shape prediction
+ Predict RNA secondary structure by analysis, e.g. probabilistic analysis, of the shape of RNA folds.
+ 1.7
+
+
+
+
+
+
+
+
+
+ Nucleic acid alignment folding prediction (alignment-based)
+
+ 1.7
+ Prediction of nucleic-acid folding using sequence alignments as a source of data.
+
+
+
+
+
+
+
+
+
+ k-mer counting
+
+ Count k-mers (substrings of length k) in DNA sequence data.
+ 1.7
+ k-mer counting is used in genome and transcriptome assembly, metagenomic sequencing, and for error correction of sequence reads.
+
+
+
+
+
+
+
+
+
+ Phylogenetic tree reconstruction
+
+
+
+
+
+
+
+ Reconstructing the inner node labels of a phylogenetic tree from its leaves.
+ Note that this is somewhat different from simply analysing an existing tree or constructing a completely new one.
+ 1.7
+
+
+
+
+
+
+
+
+
+ Probabilistic data generation
+
+ Generate some data from a chosen probabilistic model, possibly to evaluate algorithms.
+ 1.7
+
+
+
+
+
+
+
+
+
+ Probabilistic sequence generation
+
+
+ 1.7
+ Generate sequences from some probabilistic model, e.g. a model that simulates evolution.
+
+
+
+
+
+
+
+
+
+ Antimicrobial resistance prediction
+
+
+
+
+
+
+
+
+ 1.7
+ Identify or predict causes for antibiotic resistance from molecular sequence analysis.
+
+
+
+
+
+
+
+
+
+ Enrichment
+
+
+
+
+
+
+
+
+ A relevant ontology will be used. The input is typically a set of identifiers or other data, and the output of the analysis is typically a ranked list of ontology terms, each associated with a p-value.
+ Term enrichment
+ 1.8
+ Analyse a dataset with respect to concepts from an ontology.
+
+
+
+
+
+
+
+
+
+ Chemical class enrichment
+
+
+
+
+
+
+
+
+ 1.8
+ Analyse a dataset with respect to concepts from an ontology of chemical structure.
+
+
+
+
+
+
+
+
+
+ Incident curve plotting
+
+ 1.8
+ Plot an incident curve such as a survival curve, death curve, mortality curve.
+
+
+
+
+
+
+
+
+
+ Variant pattern analysis
+
+ Methods often utilise a database of aligned reads.
+ Identify and map patterns of genomic variations.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Mathematical modelling
+
+ Model some biological system using mathematical techniques including dynamical systems, statistical models, differential equations, and game theoretic models.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Microscope image visualisation
+
+
+
+
+
+
+
+ Visualise images resulting from various types of microscopy.
+ 1.9
+ Microscopy image visualisation
+
+
+
+
+
+
+
+
+
+ Image annotation
+
+ 1.9
+ Annotate an image of some sort, typically with terms from a controlled vocabulary.
+
+
+
+
+
+
+
+
+
+ Imputation
+
+ Data imputation
+ Replace missing data with substituted values, usually by using some statistical or other mathematical approach.
+ true
+ 1.9
+
+
+
+
+
+
+
+
+
+ Ontology visualisation
+
+ 1.9
+ Visualise, format or render data from an ontology, typically a tree of terms.
+ Ontology browsing
+
+
+
+
+
+
+
+
+
+ Maximum occurence analysis
+
+ A method for making numerical assessments about the maximum percent of time that a conformer of a flexible macromolecule can exist and still be compatible with the experimental data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Database comparison
+
+
+ 1.9
+ Data model comparison
+ Compare the models or schemas used by two or more databases, or any other general comparison of databases rather than a detailed comparison of the entries themselves.
+ Schema comparison
+
+
+
+
+
+
+
+
+
+ Network simulation
+
+
+
+
+
+
+
+ Simulate the behaviour of a biological pathway or network.
+ Pathway simulation
+ Network topology simulation
+ 1.9
+
+
+
+
+
+
+
+
+
+ RNA-seq read count analysis
+
+ Analyze read counts from RNA-seq experiments.
+ 1.9
+
+
+
+
+
+
+
+
+
+ Chemical redundancy removal
+
+ 1.9
+ Identify and remove redundancy from a set of small molecule structures.
+
+
+
+
+
+
+
+
+
+ RNA-seq time series data analysis
+
+ 1.9
+ Analyze time series data from an RNA-seq experiment.
+
+
+
+
+
+
+
+
+
+ Simulated gene expression data generation
+
+ 1.9
+ Simulate gene expression data, e.g. for purposes of benchmarking.
+
+
+
+
+
+
+
+
+
+ Topic
+
+ http://purl.org/biotop/biotop.owl#Quality
+ http://bioontology.org/ontologies/ResearchArea.owl#Area_of_Research
+ http://www.onto-med.de/ontologies/gfo.owl#Category
+ http://www.ifomis.org/bfo/1.1/snap#Quality
+ http://www.onto-med.de/ontologies/gfo.owl#Perpetuant
+ A category denoting a rather broad domain or field of interest, of study, application, work, data, or technology. Topics have no clearly defined borders between each other.
+ http://www.loa-cnr.it/ontologies/DOLCE-Lite.owl#quality
+ beta12orEarlier
+ http://www.ifomis.org/bfo/1.1/snap#Continuant
+ sumo:FieldOfStudy
+ http://onto.eva.mpg.de/ontologies/gfo-bio.owl#Method
+
+
+
+
+
+
+
+
+
+ Nucleic acids
+
+ The processing and analysis of nucleic acid sequence, structural and other data.
+ Nucleic acid bioinformatics
+ Nucleic acid analysis
+ Nucleic acid informatics
+ http://purl.bioontology.org/ontology/MSH/D017423
+ Nucleic acid properties
+ Nucleic acid physicochemistry
+ http://purl.bioontology.org/ontology/MSH/D017422
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Proteins
+
+ Protein bioinformatics
+ Protein informatics
+ Protein databases
+ Protein analysis
+ http://purl.bioontology.org/ontology/MSH/D020539
+ Archival, processing and analysis of protein data, typically molecular sequence and structural data.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Metabolites
+
+ Metabolite structures
+ This concept excludes macromolecules such as proteins and nucleic acids.
+ The structures of reactants or products of metabolism, for example small molecules including vitamins, polyols, nucleotides and amino acids.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence analysis
+
+ beta12orEarlier
+ Sequence databases
+ Sequences
+ http://purl.bioontology.org/ontology/MSH/D017421
+ The archival, processing and analysis of molecular sequences (monomer composition of polymers) including molecular sequence data resources, sequence sites, alignments, motifs and profiles.
+
+
+
+
+
+
+
+
+
+
+ Structure analysis
+
+ Computational structural biology
+ The curation, processing and analysis of the structure of biological molecules, typically proteins and nucleic acids and other macromolecules.
+ http://purl.bioontology.org/ontology/MSH/D015394
+ Structure analysis
+ Structural bioinformatics
+ Structure databases
+ This includes related concepts such as structural properties, alignments and structural motifs.
+ Structure data resources
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Structure prediction
+
+ beta12orEarlier
+ The prediction of molecular (secondary or tertiary) structure.
+
+
+
+
+
+
+
+
+
+ Alignment
+
+ beta12orEarlier
+ true
+ The alignment (equivalence between sites) of molecular sequences, structures or profiles (representing a sequence or structure alignment).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Phylogeny
+
+ Phylogeny reconstruction
+ Phylogenetic stratigraphy
+ beta12orEarlier
+ Phylogenetic dating
+ Phylogenetic clocks
+ http://purl.bioontology.org/ontology/MSH/D010802
+ The study of evolutionary relationships amongst organisms.
+ Phylogenetic simulation
+ This includes diverse phylogenetic methods, including phylogenetic tree construction, typically from molecular sequence or morphological data, methods that simulate DNA sequence evolution, a phylogenetic tree or the underlying data, or which estimate or use molecular clock and stratigraphic (age) data, methods for studying gene evolution etc.
+
+
+
+
+
+
+
+
+
+
+ Functional genomics
+
+
+ beta12orEarlier
+ The study of gene or protein functions and their interactions in totality in a given organism, tissue, cell etc.
+
+
+
+
+
+
+
+
+
+
+ Ontology and terminology
+
+ Terminology
+ beta12orEarlier
+ http://purl.bioontology.org/ontology/MSH/D002965
+ Applied ontology
+ Ontology
+ The conceptualisation, categorisation and nomenclature (naming) of entities or phenomena within biology or bioinformatics. This includes formal ontologies, controlled vocabularies, structured glossary, symbols and terminology or other related resource.
+ Ontologies
+
+
+
+
+
+
+
+
+
+
+ Information retrieval
+
+ beta12orEarlier
+ Data retrieval
+ The search and query of data sources (typically databases or ontologies) in order to retrieve entries or other information.
+ This includes, for example, search, query and retrieval of molecular sequences and associated data.
+ Data search
+ VT 1.3.3 Information retrieval
+ Data query
+
+
+
+
+
+
+
+
+
+ Bioinformatics
+
+ This includes data processing in general, including basic handling of files and databases, datatypes, workflows and annotation.
+ VT 1.5.6 Bioinformatics
+ The archival, curation, processing and analysis of complex biological data.
+ http://purl.bioontology.org/ontology/MSH/D016247
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Data visualisation
+
+ Data rendering
+ Rendering (drawing on a computer screen) or visualisation of molecular sequences, structures or other biomolecular data.
+ VT 1.2.5 Computer graphics
+ beta12orEarlier
+ Computer graphics
+
+
+
+
+
+
+
+
+
+ Nucleic acid thermodynamics
+
+ true
+ The study of the thermodynamic properties of a nucleic acid.
+ 1.3
+
+
+
+
+
+
+
+
+
+ Nucleic acid structure analysis
+
+ Includes secondary and tertiary nucleic acid structural data, nucleic acid thermodynamic, thermal and conformational properties including DNA or DNA/RNA denaturation (melting) etc.
+ DNA melting
+ Nucleic acid denaturation
+ RNA alignment
+ The archival, curation, processing and analysis of nucleic acid structural information, such as whole structures, structural features and alignments, and associated annotation.
+ RNA structure alignment
+ beta12orEarlier
+ Nucleic acid structure
+ Nucleic acid thermodynamics
+ RNA structure
+
+
+
+
+
+
+
+
+
+ RNA
+
+ beta12orEarlier
+ RNA sequences and structures.
+
+
+
+
+
+
+
+
+
+ Nucleic acid restriction
+
+ 1.3
+ beta12orEarlier
+ Topic for the study of restriction enzymes, their cleavage sites and the restriction of nucleic acids.
+ true
+
+
+
+
+
+
+
+
+
+ Mapping
+
+ Genetic linkage
+ Linkage
+ Linkage mapping
+ Synteny
+ DNA mapping
+ beta12orEarlier
+ The mapping of complete (typically nucleotide) sequences.
+ This includes resources that aim to identify, map or analyse genetic markers in DNA sequences, for example to produce a genetic (linkage) map of a chromosome or genome or to analyse genetic linkage and synteny. It also includes resources for physical (sequence) maps of a DNA sequence showing the physical distance (base pairs) between features or landmarks such as restriction sites, cloned DNA fragments, genes and other genetic markers.
+
+
+
+
+
+
+
+
+
+ Genetic codes and codon usage
+
+ beta12orEarlier
+ true
+ 1.3
+ Codon usage analysis
+ The study of codon usage in nucleotide sequence(s), genetic codes and so on.
+
+
+
+
+
+
+
+
+
+ Protein expression
+
+ Translation
+ The translation of mRNA into protein and subsequent protein processing in the cell.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene finding
+
+ 1.3
+ This includes the study of promoters, coding regions, splice sites, etc. Methods for gene prediction might be ab initio, based on phylogenetic comparisons, use motifs, sequence features, support vector machine, alignment etc.
+ Gene discovery
+ Methods that aims to identify, predict, model or analyse genes or gene structure in DNA sequences.
+ beta12orEarlier
+ Gene prediction
+ true
+
+
+
+
+
+
+
+
+
+ Transcription
+
+ 1.3
+ The transcription of DNA into mRNA.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Promoters
+
+ true
+ beta12orEarlier
+ Promoters in DNA sequences (region of DNA that facilitates the transcription of a particular gene by binding RNA polymerase and transcription factor proteins).
+ beta13
+
+
+
+
+
+
+
+
+
+ Nucleic acid folding
+
+ beta12orEarlier
+ The folding (in 3D space) of nucleic acid molecules.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene structure
+
+
+ This includes the study of promoters, coding regions etc.
+ beta12orEarlier
+ Gene features
+ Gene structure, regions which make an RNA product and features such as promoters, coding regions, gene fusion, splice sites etc.
+
+
+
+
+
+
+
+
+
+
+ Proteomics
+
+ beta12orEarlier
+ Protein and peptide identification, especially in the study of whole proteomes of organisms.
+ Protein and peptide identification
+ Peptide identification
+ Proteomics includes any methods (especially high-throughput) that separate, characterize and identify expressed proteins such as mass spectrometry, two-dimensional gel electrophoresis and protein microarrays, as well as in-silico methods that perform proteolytic or mass calculations on a protein sequence and other analyses of protein expression data, for example in different cells or tissues.
+ http://purl.bioontology.org/ontology/MSH/D040901
+ Protein expression
+
+
+
+
+
+
+
+
+
+
+ Structural genomics
+
+
+ beta12orEarlier
+ The elucidation of the three dimensional structure for all (available) proteins in a given organism.
+
+
+
+
+
+
+
+
+
+
+ Protein properties
+
+ The study of the physical and biochemical properties of peptides and proteins, for example the hydrophobic, hydrophilic and charge properties of a protein.
+ Protein hydropathy
+ Protein physicochemistry
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein interactions
+
+
+ Protein-protein, protein-DNA/RNA and protein-ligand interactions, including analysis of known interactions and prediction of putative interactions.
+ Protein-nucleic acid interactions
+ Protein-RNA interaction
+ This includes experimental (e.g. yeast two-hybrid) and computational analysis techniques.
+ Protein-protein interactions
+ Protein-ligand interactions
+ beta12orEarlier
+ Protein-DNA interaction
+
+
+
+
+
+
+
+
+
+ Protein folding, stability and design
+
+ Protein folding
+ Protein stability
+ beta12orEarlier
+ Protein stability, folding (in 3D space) and protein sequence-structure-function relationships. This includes for example study of inter-atomic or inter-residue interactions in protein (3D) structures, the effect of mutation, and the design of proteins with specific properties, typically by designing changes (via site-directed mutagenesis) to an existing protein.
+ Protein residue interactions
+ Protein design
+ Rational protein design
+
+
+
+
+
+
+
+
+
+ Two-dimensional gel electrophoresis
+
+ Two-dimensional gel electrophoresis image and related data.
+ beta13
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Mass spectrometry
+
+ beta12orEarlier
+ An analytical chemistry technique that measures the mass-to-charge ratio and abundance of ions in the gas phase.
+
+
+
+
+
+
+
+
+
+
+ Protein microarrays
+
+ Protein microarray data.
+ true
+ beta12orEarlier
+ beta13
+
+
+
+
+
+
+
+
+
+ Protein hydropathy
+
+ beta12orEarlier
+ true
+ The study of the hydrophobic, hydrophilic and charge properties of a protein.
+ 1.3
+
+
+
+
+
+
+
+
+
+ Protein targeting and localization
+
+ Protein targeting
+ Protein sorting
+ The study of how proteins are transported within and without the cell, including signal peptides, protein subcellular localization and export.
+ Protein localization
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein cleavage sites and proteolysis
+
+ true
+ beta12orEarlier
+ 1.3
+ Enzyme or chemical cleavage sites and proteolytic or mass calculations on a protein sequence.
+
+
+
+
+
+
+
+
+
+ Protein structure comparison
+
+ The comparison of two or more protein structures.
+ beta12orEarlier
+ true
+ Use this concept for methods that are exclusively for protein structure.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Protein residue interactions
+
+ The processing and analysis of inter-atomic or inter-residue interactions in protein (3D) structures.
+ Protein residue interactions
+ true
+ 1.3
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein-protein interactions
+
+ Protein interaction networks
+ true
+ Protein-protein interactions, individual interactions and networks, protein complexes, protein functional coupling etc.
+ beta12orEarlier
+ 1.3
+
+
+
+
+
+
+
+
+
+ Protein-ligand interactions
+
+ beta12orEarlier
+ true
+ 1.3
+ Protein-ligand (small molecule) interactions.
+
+
+
+
+
+
+
+
+
+ Protein-nucleic acid interactions
+
+ beta12orEarlier
+ 1.3
+ Protein-DNA/RNA interactions.
+ true
+
+
+
+
+
+
+
+
+
+ Protein design
+
+ 1.3
+ beta12orEarlier
+ The design of proteins with specific properties, typically by designing changes (via site-directed mutagenesis) to an existing protein.
+ true
+
+
+
+
+
+
+
+
+
+ G protein-coupled receptors (GPCR)
+
+ G-protein coupled receptors (GPCRs).
+ true
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Carbohydrates
+
+ beta12orEarlier
+ Carbohydrates, typically including structural information.
+
+
+
+
+
+
+
+
+
+ Lipids
+
+ beta12orEarlier
+ Lipids and their structures.
+
+
+
+
+
+
+
+
+
+ Small molecules
+
+ Small molecules of biological significance, typically archival, curation, processing and analysis of structural information.
+ Small molecules include organic molecules, metal-organic compounds, small polypeptides, small polysaccharides and oligonucleotides. Structural data is usually included.
+ CHEBI:23367
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence editing
+
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Edit, convert or otherwise change a molecular sequence, either randomly or specifically.
+
+
+
+
+
+
+
+
+
+
+ Sequence composition, complexity and repeats
+
+ Sequence complexity
+ Repeat sequences
+ The archival, processing and analysis of the basic character composition of molecular sequences, for example character or word frequency, ambiguity, complexity, particularly regions of low complexity, and repeats or the repetitive nature of molecular sequences.
+ beta12orEarlier
+ Sequence repeats
+ Low complexity sequences
+ Sequence composition
+
+
+
+
+
+
+
+
+
+ Sequence motifs
+
+ beta12orEarlier
+ Motifs
+ true
+ 1.3
+ Conserved patterns (motifs) in molecular sequences, that (typically) describe functional or other key sites.
+
+
+
+
+
+
+
+
+
+ Sequence comparison
+
+ The comparison might be on the basis of sequence, physico-chemical or some other properties of the sequences.
+ beta12orEarlier
+ The comparison of two or more molecular sequences, for example sequence alignment and clustering.
+
+
+
+
+
+
+
+
+
+ Sequence sites, features and motifs
+
+ Sequence features
+ The archival, detection, prediction and analysis of
+positional features such as functional and other key sites, in molecular sequences and the conserved patterns (motifs, profiles etc.) that may be used to describe them.
+ Functional sites
+ Sequence motifs
+ Sequence profiles
+ Sequence sites
+ HMMs
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence database search
+
+ beta12orEarlier
+ Search and retrieve molecular sequences that are similar to a sequence-based query (typically a simple sequence).
+ beta12orEarlier
+ true
+ The query is a sequence-based entity such as another sequence, a motif or profile.
+
+
+
+
+
+
+
+
+
+ Sequence clustering
+
+ This includes systems that generate, process and analyse sequence clusters.
+ beta12orEarlier
+ true
+ 1.7
+ The comparison and grouping together of molecular sequences on the basis of their similarities.
+ Sequence clusters
+
+
+
+
+
+
+
+
+
+ Protein structural motifs and surfaces
+
+
+ This includes conformation of conserved substructures, conserved geometry (spatial arrangement) of secondary structure or protein backbone, solvent-exposed surfaces, internal cavities, the analysis of shape, hydropathy, electrostatic patches, role and functions etc.
+ Protein structural features
+ Structural motifs
+ Protein 3D motifs
+ beta12orEarlier
+ Protein structural motifs
+ Structural features or common 3D motifs within protein structures, including the surface of a protein structure, such as biological interfaces with other molecules.
+ Protein surfaces
+
+
+
+
+
+
+
+
+
+ Structural (3D) profiles
+
+ The processing, analysis or use of some type of structural (3D) profile or template; a computational entity (typically a numerical matrix) that is derived from and represents a structure or structure alignment.
+ true
+ beta12orEarlier
+ 1.3
+ Structural profiles
+
+
+
+
+
+
+
+
+
+ Protein structure prediction
+
+
+ beta12orEarlier
+ The prediction, modelling, recognition or design of protein secondary or tertiary structure or other structural features.
+
+
+
+
+
+
+
+
+
+ Nucleic acid structure prediction
+
+
+ The folding of nucleic acid molecules and the prediction or design of nucleic acid (typically RNA) sequences with specific conformations.
+ DNA structure prediction
+ Nucleic acid design
+ RNA structure prediction
+ beta12orEarlier
+ Nucleic acid folding
+
+
+
+
+
+
+
+
+
+ Ab initio structure prediction
+
+ 1.7
+ The prediction of three-dimensional structure of a (typically protein) sequence from first principles, using a physics-based or empirical scoring function and without using explicit structural templates.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Homology modelling
+
+ 1.4
+ The modelling of the three-dimensional structure of a protein using known sequence and structural data.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Molecular dynamics
+
+ This includes resources concerning flexibility and motion in protein and other molecular structures.
+ Protein dynamics
+ Molecular flexibility
+ Molecular motions
+ beta12orEarlier
+ The study and simulation of molecular (typically protein) conformation using a computational model of physical forces and computer simulation.
+
+
+
+
+
+
+
+
+
+ Molecular docking
+
+ beta12orEarlier
+ The modelling of the structure of proteins in complex with small molecules or other macromolecules.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure prediction
+
+ beta12orEarlier
+ 1.3
+ The prediction of secondary or supersecondary structure of protein sequences.
+ true
+
+
+
+
+
+
+
+
+
+
+ Protein tertiary structure prediction
+
+ 1.3
+ true
+ The prediction of tertiary structure of protein sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Protein fold recognition
+
+ For example threading, or the alignment of molecular sequences to structures, structural (3D) profiles or templates (representing a structure or structure alignment).
+ The recognition (prediction and assignment) of known protein structural domains or folds in protein sequence(s).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence alignment
+
+ This includes the generation of alignments (the identification of equivalent sites), the analysis of alignments, editing, visualisation, alignment databases, the alignment (equivalence between sites) of sequence profiles (representing sequence alignments) and so on.
+ beta12orEarlier
+ 1.7
+ The alignment of molecular sequences or sequence profiles (representing sequence alignments).
+ true
+
+
+
+
+
+
+
+
+
+ Structure alignment
+
+ The superimposition of molecular tertiary structures or structural (3D) profiles (representing a structure or structure alignment).
+ This includes the generation, storage, analysis, rendering etc. of structure alignments.
+ true
+ 1.7
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Threading
+
+ Sequence-structure alignment
+ 1.3
+ beta12orEarlier
+ The alignment of molecular sequences to structures, structural (3D) profiles or templates (representing a structure or structure alignment).
+ true
+
+
+
+
+
+
+
+
+
+ Sequence profiles and HMMs
+
+ true
+ Sequence profiles; typically a positional, numerical matrix representing a sequence alignment.
+ beta12orEarlier
+ 1.3
+ Sequence profiles include position-specific scoring matrix (position weight matrix), hidden Markov models etc.
+
+
+
+
+
+
+
+
+
+ Phylogeny reconstruction
+
+ The reconstruction of a phylogeny (evolutionary relatedness amongst organisms), for example, by building a phylogenetic tree.
+ 1.3
+ true
+ Currently too specific for the topic sub-ontology (but might be unobsoleted).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Phylogenomics
+
+
+ beta12orEarlier
+ The integrated study of evolutionary relationships and whole genome data, for example, in the analysis of species trees, horizontal gene transfer and evolutionary reconstruction.
+
+
+
+
+
+
+
+
+
+
+ Virtual PCR
+
+ beta13
+ Polymerase chain reaction
+ beta12orEarlier
+ Simulated polymerase chain reaction (PCR).
+ PCR
+ true
+
+
+
+
+
+
+
+
+
+ Sequence assembly
+
+ Assembly
+ The assembly of fragments of a DNA sequence to reconstruct the original sequence.
+ beta12orEarlier
+ This covers for example the alignment of sequences of (typically millions) of short reads to a reference genome.
+
+
+
+
+
+
+
+
+
+ Genetic variation
+
+
+ http://purl.bioontology.org/ontology/MSH/D014644
+ Stable, naturally occurring mutations in a nucleotide sequence including alleles, naturally occurring mutations such as single base nucleotide substitutions, deletions and insertions, RFLPs and other polymorphisms.
+ DNA variation
+ Mutation
+ Polymorphism
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Microarrays
+
+ true
+ http://purl.bioontology.org/ontology/MSH/D046228
+ Microarrays, for example, to process microarray data or design probes and experiments.
+ 1.3
+ DNA microarrays
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Pharmacology
+
+ Computational pharmacology
+ beta12orEarlier
+ Pharmacoinformatics
+ The study of drugs and their effects or responses in living systems.
+ VT 3.1.7 Pharmacology and pharmacy
+
+
+
+
+
+
+
+
+
+
+ Gene expression
+
+ This includes the study of codon usage in nucleotide sequence(s), genetic codes and so on.
+ Gene expression profiling
+ Expression profiling
+ beta12orEarlier
+ http://edamontology.org/topic_0197
+ Gene expression levels are analysed by identifying, quantifying or comparing mRNA transcripts, for example using microarrays, RNA-seq, northern blots, gene-indexed expression profiles etc.
+ http://purl.bioontology.org/ontology/MSH/D015870
+ Gene expression analysis
+ DNA microarrays
+ The analysis of levels and patterns of synthesis of gene products (proteins and functional RNA) including interpretation in functional terms of gene expression data.
+ Codon usage
+
+
+
+
+
+
+
+
+
+
+ Gene regulation
+
+ beta12orEarlier
+ The regulation of gene expression.
+
+
+
+
+
+
+
+
+
+ Pharmacogenomics
+
+
+ beta12orEarlier
+ The influence of genotype on drug response, for example by correlating gene expression or single-nucleotide polymorphisms with drug efficacy or toxicity.
+
+
+
+
+
+
+
+
+
+
+ Medicinal chemistry
+
+
+ VT 3.1.4 Medicinal chemistry
+ The design and chemical synthesis of bioactive molecules, for example drugs or potential drug compounds, for medicinal purposes.
+ This includes methods that search compound collections, generate or analyse drug 3D conformations, identify drug targets with structural docking etc.
+ Drug design
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Fish
+
+ beta12orEarlier
+ true
+ 1.3
+ Information on a specific fish genome including molecular sequences, genes and annotation.
+
+
+
+
+
+
+
+
+
+ Flies
+
+ 1.3
+ true
+ beta12orEarlier
+ Information on a specific fly genome including molecular sequences, genes and annotation.
+
+
+
+
+
+
+
+
+
+ Mice or rats
+
+ Information on a specific mouse or rat genome including molecular sequences, genes and annotation.
+ The resource may be specific to a group of mice / rats or all mice / rats.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Worms
+
+ true
+ 1.3
+ beta12orEarlier
+ Information on a specific worm genome including molecular sequences, genes and annotation.
+
+
+
+
+
+
+
+
+
+ Literature analysis
+
+ beta12orEarlier
+ 1.3
+ The processing and analysis of the bioinformatics literature and bibliographic data, such as literature search and query.
+ true
+
+
+
+
+
+
+
+
+
+ Data mining
+
+ beta12orEarlier
+ Text data mining
+ The analysis of the biomedical and informatics literature.
+ Literature analysis
+ Text mining
+ Literature mining
+
+
+
+
+
+
+
+
+
+
+ Data deposition, annotation and curation
+
+ Deposition and curation of database accessions, including annotation, typically with terms from a controlled vocabulary.
+ Database curation
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Document, record and content management
+
+ Document management
+ File management
+ This includes editing, reformatting, conversion, transformation, validation, debugging, indexing and so on.
+ Content management
+ The management and manipulation of digital documents, including database records, files and reports.
+ VT 1.3.6 Multimedia, hypermedia
+ Record management
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Sequence annotation
+
+ beta12orEarlier
+ beta12orEarlier
+ true
+ Annotation of a molecular sequence.
+
+
+
+
+
+
+
+
+
+ Genome annotation
+
+ Annotation of a genome.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ NMR
+
+
+ ROESY
+ NOESY
+ Nuclear Overhauser Effect Spectroscopy
+ An analytical technique that exploits the magnetic properties of certain atomic nuclei to provide information on the structure, dynamics, reaction state and chemical environment of molecules.
+ HOESY
+ beta12orEarlier
+ Heteronuclear Overhauser Effect Spectroscopy
+ Nuclear magnetic resonance spectroscopy
+ Spectroscopy
+ NMR spectroscopy
+ Rotational Frame Nuclear Overhauser Effect Spectroscopy
+
+
+
+
+
+
+
+
+
+
+ Sequence classification
+
+ beta12orEarlier
+ The classification of molecular sequences based on some measure of their similarity.
+ Methods including sequence motifs, profile and other diagnostic elements which (typically) represent conserved patterns (of residues or properties) in molecular sequences.
+
+
+
+
+
+
+
+
+
+ Protein classification
+
+ 1.3
+ true
+ beta12orEarlier
+ Primarily the classification of proteins (from sequence or structural data) into clusters, groups, families etc.
+
+
+
+
+
+
+
+
+
+ Sequence motif or profile
+
+ beta12orEarlier
+ true
+ Sequence motifs, or sequence profiles derived from an alignment of molecular sequences of a particular type.
+ This includes comparison, discovery, recognition etc. of sequence motifs.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Protein modifications
+
+ GO:0006464
+ Protein chemical modifications, e.g. post-translational modifications.
+ Protein post-translational modification
+ MOD:00000
+ EDAM does not describe all possible protein modifications. For fine-grained annotation of protein modification use the Gene Ontology (children of concept GO:0006464) and/or the Protein Modifications ontology (children of concept MOD:00000)
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Molecular interactions, pathways and networks
+
+ Biological networks
+ Network or pathway analysis
+ beta13
+ Molecular interactions
+ Biological models
+ Molecular interactions, biological pathways, networks and other models.
+ Biological pathways
+ http://edamontology.org/topic_3076
+
+
+
+
+
+
+
+
+
+
+ Informatics
+
+ The study and practice of information processing and use of computer information systems.
+ VT 1.3.99 Other
+ Knowledge management
+ VT 1.3.4 Information management
+ beta12orEarlier
+ Information management
+ VT 1.3.5 Knowledge management
+ VT 1.3.3 Information retrieval
+ VT 1.3 Information sciences
+ Information science
+
+
+
+
+
+
+
+
+ Literature data resources
+
+ Data resources for the biological or biomedical literature, either a primary source of literature or some derivative.
+ true
+ 1.3
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Laboratory information management
+
+ Laboratory management and resources, for example, catalogues of biological resources for use in the lab including cell lines, viruses, plasmids, phages, DNA probes and primers and so on.
+ beta12orEarlier
+ Laboratory resources
+
+
+
+
+
+
+
+
+
+
+
+ Cell and tissue culture
+
+ Tissue culture
+ 1.3
+ true
+ General cell culture or data on a specific cell lines.
+ Cell culture
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Ecology
+
+ The ecological and environmental sciences and especially the application of information technology (ecoinformatics).
+ http://purl.bioontology.org/ontology/MSH/D004777
+ Ecological informatics
+ VT 1.5.15 Ecology
+ Computational ecology
+ beta12orEarlier
+ Ecoinformatics
+ Environmental science
+
+
+
+
+
+
+
+
+
+
+ Electron microscopy
+
+
+ SEM
+ Scanning electron microscopy
+ TEM
+ The study of matter by studying the interference pattern from firing electrons at a sample, to analyse structures at resolutions higher than can be achieved using light.
+
+ Transmission electron microscopy
+ beta12orEarlier
+ Electron crystallography
+ Electron diffraction experiment
+ Single particle electron microscopy
+
+
+
+
+
+
+
+
+
+
+ Cell cycle
+
+ beta13
+ beta12orEarlier
+ true
+ The cell cycle including key genes and proteins.
+
+
+
+
+
+
+
+
+
+ Peptides and amino acids
+
+ beta12orEarlier
+ The physicochemical, biochemical or structural properties of amino acids or peptides.
+ Amino acids
+ Peptides
+
+
+
+
+
+
+
+
+
+ Organelles
+
+ Cell membrane
+ Cytoplasm
+ Organelle genes and proteins
+ Smooth endoplasmic reticulum
+ beta12orEarlier
+ Lysosome
+ Centriole
+ Ribosome
+ Nucleus
+ true
+ A specific organelle, or organelles in general, typically the genes and proteins (or genome and proteome).
+ Mitochondria
+ Golgi apparatus
+ Rough endoplasmic reticulum
+ 1.3
+
+
+
+
+
+
+
+
+
+ Ribosomes
+
+ beta12orEarlier
+ Ribosomes, typically of ribosome-related genes and proteins.
+ Ribosome genes and proteins
+ 1.3
+ true
+
+
+
+
+
+
+
+
+
+ Scents
+
+ A database about scents.
+ beta12orEarlier
+ beta13
+ true
+
+
+
+
+
+
+
+
+
+ Drugs and target structures
+
+
+ Drug structures
+ beta12orEarlier
+ The structures of drugs, drug target, their interactions and binding affinities.
+ Target structures
+
+
+
+
+
+
+
+
+
+
+ Model organisms
+
+ This may include information on the genome (including molecular sequences and map, genes and annotation), proteome, as well as more general information about an organism.
+ beta12orEarlier
+ A specific organism, or group of organisms, used to study a particular aspect of biology.
+ Organisms
+
+
+
+
+
+
+
+
+
+
+ Genomics
+
+ http://purl.bioontology.org/ontology/MSH/D023281
+ beta12orEarlier
+ Whole genomes of one or more organisms, or genomes in general, such as meta-information on genomes, genome projects, gene names etc.
+
+
+
+
+
+
+
+
+
+
+ Gene families
+
+ Particular gene(s), gene family or other gene group or system and their encoded proteins.
+ beta12orEarlier
+ Gene family
+ Gene system
+ Genes, gene family or system
+ Gene and protein families
+
+
+
+
+
+
+
+
+
+
+ Chromosomes
+
+ beta12orEarlier
+ Study of chromosomes.
+
+
+
+
+
+
+
+
+
+ Genotype and phenotype
+
+ Genotype and phenotype resources
+ The study of genetic constitution of a living entity, such as an individual, and organism, a cell and so on, typically with respect to a particular observable phenotypic traits, or resources concerning such traits, which might be an aspect of biochemistry, physiology, morphology, anatomy, development and so on.
+ Genotyping
+ Phenotyping
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Gene expression and microarray
+
+ true
+ beta12orEarlier
+ beta12orEarlier
+ Gene expression e.g. microarray data, northern blots, gene-indexed expression profiles etc.
+
+
+
+
+
+
+
+
+
+
+ Sequence design
+
+ Probes
+ This includes the design of primers for PCR and DNA amplification or the design of molecular probes.
+ http://purl.bioontology.org/ontology/MSH/D015335
+ Gene design
+ Molecular probes (e.g. a peptide probe or DNA microarray probe) or primers (e.g. for PCR).
+ Probe design
+ in silico cloning
+ Primer design
+ Primers
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Pathology
+
+ Diseases, including diseases in general and the genes, gene variations and proteins involved in one or more specific diseases.
+ beta12orEarlier
+ Diseases
+ VT 3.1.6 Pathology
+
+
+
+
+
+
+
+
+
+
+ Specific protein resources
+
+ 1.3
+ A particular protein, protein family or other group of proteins.
+ true
+ Specific protein
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Taxonomy
+
+ beta12orEarlier
+ VT 1.5.25 Taxonomy
+ Organism classification, identification and naming.
+
+
+
+
+
+
+
+
+
+ Protein sequence analysis
+
+ beta12orEarlier
+ Archival, processing and analysis of protein sequences and sequence-based entities such as alignments, motifs and profiles.
+ 1.8
+ true
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence analysis
+
+ beta12orEarlier
+ 1.8
+ true
+ The archival, processing and analysis of nucleotide sequences and sequence-based entities such as alignments, motifs and profiles.
+
+
+
+
+
+
+
+
+
+
+ Repeat sequences
+
+ true
+ The repetitive nature of molecular sequences.
+ beta12orEarlier
+ 1.3
+
+
+
+
+
+
+
+
+
+ Low complexity sequences
+
+ true
+ The (character) complexity of molecular sequences, particularly regions of low complexity.
+ 1.3
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Proteome
+
+ A specific proteome including protein sequences and annotation.
+ beta12orEarlier
+ beta13
+ true
+
+
+
+
+
+
+
+
+
+ DNA
+
+ DNA analysis
+ beta12orEarlier
+ DNA sequences and structure, including processes such as methylation and replication.
+ The DNA sequences might be coding or non-coding sequences.
+
+
+
+
+
+
+
+
+
+ Coding RNA
+
+ EST
+ cDNA
+ mRNA
+ This includes expressed sequence tag (EST) or complementary DNA (cDNA) sequences.
+ Protein-coding regions including coding sequences (CDS), exons, translation initiation sites and open reading frames
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Functional, regulatory and non-coding RNA
+
+
+ ncRNA
+ Non-coding RNA
+ Functional RNA
+ Non-coding or functional RNA sequences, including regulatory RNA sequences, ribosomal RNA (rRNA) and transfer RNA (tRNA).
+ Regulatory RNA
+ Non-coding RNA includes piwi-interacting RNA (piRNA), small nuclear RNA (snRNA) and small nucleolar RNA (snoRNA). Regulatory RNA includes microRNA (miRNA) - short single stranded RNA molecules that regulate gene expression, and small interfering RNA (siRNA).
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ rRNA
+
+ 1.3
+ One or more ribosomal RNA (rRNA) sequences.
+ true
+
+
+
+
+
+
+
+
+
+ tRNA
+
+ 1.3
+ true
+ One or more transfer RNA (tRNA) sequences.
+
+
+
+
+
+
+
+
+
+ Protein secondary structure
+
+ true
+ beta12orEarlier
+ 1.8
+ Protein secondary structure or secondary structure alignments.
+ This includes assignment, analysis, comparison, prediction, rendering etc. of secondary structure data.
+
+
+
+
+
+
+
+
+
+ RNA structure
+
+ 1.3
+ RNA secondary or tertiary structure and alignments.
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Protein tertiary structure
+
+ 1.8
+ true
+ Protein tertiary structures.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nucleic acid classification
+
+ Classification of nucleic acid sequences and structures.
+ 1.3
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein families
+
+ beta12orEarlier
+ Protein sequence classification
+ Protein secondary databases
+ A protein families database might include the classifier (e.g. a sequence profile) used to build the classification.
+ Primarily the classification of proteins (from sequence or structural data) into clusters, groups, families etc., curation of a particular protein or protein family, or any other proteins that have been classified as members of a common group.
+
+
+
+
+
+
+
+
+
+
+ Protein domains and folds
+
+ beta12orEarlier
+ Protein folds
+ Protein tertiary structural domains and folds.
+ Protein domains
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequence alignment
+
+ beta12orEarlier
+ true
+ 1.3
+ Nucleotide sequence alignments.
+
+
+
+
+
+
+
+
+
+ Protein sequence alignment
+
+ 1.3
+ Protein sequence alignments.
+ beta12orEarlier
+ true
+ A sequence profile typically represents a sequence alignment.
+
+
+
+
+
+
+
+
+
+ Nucleic acid sites and features
+
+ beta12orEarlier
+ 1.3
+ true
+ The archival, detection, prediction and analysis of
+positional features such as functional sites in nucleotide sequences.
+
+
+
+
+
+
+
+
+
+
+ Protein sites and features
+
+ beta12orEarlier
+ The detection, identification and analysis of positional features in proteins, such as functional sites.
+ 1.3
+ true
+
+
+
+
+
+
+
+
+
+
+ Transcription factors and regulatory sites
+
+
+
+ Transcription factor proteins either promote (as an activator) or block (as a repressor) the binding to DNA of RNA polymerase. Regulatory sites including transcription factor binding site as well as promoters, enhancers, silencers and boundary elements / insulators.
+ Proteins that bind to DNA and control transcription of DNA to mRNA (transcription factors) and also transcriptional regulatory sites, elements and regions (such as promoters, enhancers, silencers and boundary elements / insulators) in nucleotide sequences.
+ Transcriptional regulatory sites
+ TFBS
+ Transcription factors
+ beta12orEarlier
+ Transcription factor binding sites
+
+
+
+
+
+
+
+
+
+ Phosphorylation sites
+
+ 1.0
+ Protein phosphorylation and phosphorylation sites in protein sequences.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Metabolic pathways
+
+ beta12orEarlier
+ Metabolic pathways.
+
+
+
+
+
+
+
+
+
+ Signaling pathways
+
+ Signaling pathways.
+ Signal transduction pathways
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Protein and peptide identification
+
+ 1.3
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Workflows
+
+ Biological or biomedical analytical workflows or pipelines.
+ beta12orEarlier
+ true
+ 1.0
+
+
+
+
+
+
+
+
+ Data types and objects
+
+ Structuring data into basic types and (computational) objects.
+ beta12orEarlier
+ 1.0
+ true
+
+
+
+
+
+
+
+
+
+ Theoretical biology
+
+ 1.3
+ true
+
+
+
+
+
+
+
+
+
+ Mitochondria
+
+ beta12orEarlier
+ true
+ Mitochondria, typically of mitochondrial genes and proteins.
+ 1.3
+
+
+
+
+
+
+
+
+
+ Plants
+
+ The resource may be specific to a plant, a group of plants or all plants.
+ Plant science
+ Plants, e.g. information on a specific plant genome including molecular sequences, genes and annotation.
+ Plant biology
+ Botany
+ VT 1.5.22 Plant science
+ Plant
+ VT 1.5.10 Botany
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Viruses
+
+ Virology
+ VT 1.5.28 Virology
+ beta12orEarlier
+ Viruses, e.g. sequence and structural data, interactions of viral proteins, or a viral genome including molecular sequences, genes and annotation.
+ The resource may be specific to a virus, a group of viruses or all viruses.
+
+
+
+
+
+
+
+
+
+ Fungi
+
+ Mycology
+ beta12orEarlier
+ The resource may be specific to a fungus, a group of fungi or all fungi.
+ Yeast
+ VT 1.5.21 Mycology
+ Fungi and molds, e.g. information on a specific fungal genome including molecular sequences, genes and annotation.
+
+
+
+
+
+
+
+
+
+ Pathogens
+
+ Pathogens, e.g. information on a specific vertebrate genome including molecular sequences, genes and annotation.
+ beta12orEarlier
+ The resource may be specific to a pathogen, a group of pathogens or all pathogens.
+
+
+
+
+
+
+
+
+
+ Arabidopsis
+
+ beta12orEarlier
+ Arabidopsis-specific data.
+ 1.3
+ true
+
+
+
+
+
+
+
+
+
+ Rice
+
+ Rice-specific data.
+ true
+ 1.3
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Genetic mapping and linkage
+
+ Linkage mapping
+ beta12orEarlier
+ 1.3
+ true
+ Genetic linkage
+ Informatics resources that aim to identify, map or analyse genetic markers in DNA sequences, for example to produce a genetic (linkage) map of a chromosome or genome or to analyse genetic linkage and synteny.
+
+
+
+
+
+
+
+
+
+ Comparative genomics
+
+ The study (typically comparison) of the sequence, structure or function of multiple genomes.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Mobile genetic elements
+
+
+ Transposons
+ beta12orEarlier
+ Mobile genetic elements, such as transposons, Plasmids, Bacteriophage elements and Group II introns.
+
+
+
+
+
+
+
+
+
+ Human disease
+
+ Human diseases, typically describing the genes, mutations and proteins implicated in disease.
+ beta13
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Immunology
+
+ VT 3.1.3 Immunology
+ Immunoinformatics
+ http://purl.bioontology.org/ontology/MSH/D007120
+ http://purl.bioontology.org/ontology/MSH/D007125
+ beta12orEarlier
+ Computational immunology
+ The application of information technology to immunology such as immunological processes, immunological genes, proteins and peptide ligands, antigens and so on.
+
+
+
+
+
+
+
+
+
+
+ Membrane and lipoproteins
+
+ Lipoproteins (protein-lipid assemblies), and proteins or region of a protein that spans or are associated with a membrane.
+ beta12orEarlier
+ Membrane proteins
+ Lipoproteins
+ Transmembrane proteins
+
+
+
+
+
+
+
+
+
+ Enzymes
+
+ Proteins that catalyze chemical reaction, the kinetics of enzyme-catalysed reactions, enzyme nomenclature etc.
+ beta12orEarlier
+ Enzymology
+
+
+
+
+
+
+
+
+
+ Primers
+
+ PCR primers and hybridization oligos in a nucleic acid sequence.
+ Nucleic acid features (primers)
+ beta12orEarlier
+ Primer binding sites
+
+
+
+
+
+
+
+
+
+
+ PolyA signal or sites
+
+ beta12orEarlier
+ Nucleic acid features (PolyA signal or site)
+ PolyA signal
+ A polyA signal is required for endonuclease cleavage of an RNA transcript that is followed by polyadenylation. A polyA site is a site on an RNA transcript to which adenine residues will be added during post-transcriptional polyadenylation.
+ PolyA site
+ Regions or sites in a eukaryotic and eukaryotic viral RNA sequence which directs endonuclease cleavage or polyadenylation of an RNA transcript.
+
+
+
+
+
+
+
+
+
+
+ CpG island and isochores
+
+ beta12orEarlier
+ Nucleic acid features (CpG island and isochore)
+ CpG rich regions (isochores) in a nucleotide sequence.
+
+
+
+
+
+
+
+
+
+
+ Restriction sites
+
+ Restriction enzyme recognition sites (restriction sites) in a nucleic acid sequence.
+ Nucleic acid features (restriction sites)
+ beta12orEarlier
+ Nucleic acid restriction sites (report)
+
+
+
+
+
+
+
+
+
+
+ Splice sites
+
+
+ Nucleic acid features (splice sites)
+ Nucleic acid report (RNA splicing)
+ beta12orEarlier
+ Splice sites in a nucleotide sequence or alternative RNA splicing events.
+ Nucleic acid report (RNA splice model)
+
+
+
+
+
+
+
+
+
+
+ Matrix/scaffold attachment sites
+
+ Nucleic acid features (matrix/scaffold attachment sites)
+ beta12orEarlier
+ Matrix/scaffold attachment regions (MARs/SARs) in a DNA sequence.
+
+
+
+
+
+
+
+
+
+
+ Operon
+
+ Gene features (operon)
+ beta12orEarlier
+ Nucleic acid features (operon)
+ The report for a query sequence or gene might include the predicted operon leader and trailer gene, gene composition of the operon and associated information, as well as information on the query.
+ Operons (operators, promoters and genes) from a bacterial genome.
+
+
+
+
+
+
+
+
+
+
+ Promoters
+
+ Whole promoters or promoter elements (transcription start sites, RNA polymerase binding site, transcription factor binding sites, promoter enhancers etc) in a DNA sequence.
+ beta12orEarlier
+ Nucleic acid features (promoters)
+
+
+
+
+
+
+
+
+
+
+ Structural biology
+
+ Structural assignment
+ Structure determination
+ This includes experimental methods for biomolecular structure determination, such as X-ray crystallography, nuclear magnetic resonance (NMR), circular dichroism (CD) spectroscopy, microscopy etc., including the assignment or modelling of molecular structure from such data.
+ 1.3
+ This includes Informatics concerning data generated from the use of microscopes, including optical, electron and scanning probe microscopy. Includes methods for digitizing microscope images and viewing the produced virtual slides and associated data on a computer screen.
+ The molecular structure of biological molecules, particularly macromolecules such as proteins and nucleic acids.
+ VT 1.5.24 Structural biology
+ Structural determination
+
+
+
+
+
+
+
+
+
+
+ Protein membrane regions
+
+
+ 1.8
+ Protein features (membrane regions)
+ This might include the location and size of the membrane spanning segments and intervening loop regions, transmembrane region IN/OUT orientation relative to the membrane, plus the following data for each amino acid: A Z-coordinate (the distance to the membrane center), the free energy of membrane insertion (calculated in a sliding window over the sequence) and a reliability score. The z-coordinate implies information about re-entrant helices, interfacial helices, the tilt of a transmembrane helix and loop lengths.
+ Intramembrane regions
+ Trans- or intra-membrane regions of a protein, typically describing physicochemical properties of the secondary structure elements.
+ Protein transmembrane regions
+ Transmembrane regions
+
+
+
+
+
+
+
+
+
+
+ Structure comparison
+
+ This might involve comparison of secondary or tertiary (3D) structural information.
+ The comparison of two or more molecular structures, for example structure alignment and clustering.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Function analysis
+
+ Protein function prediction
+ The study of gene and protein function including the prediction of functional properties of a protein.
+ Protein function analysis
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Prokaryotes and archae
+
+ The resource may be specific to a prokaryote, a group of prokaryotes or all prokaryotes.
+ VT 1.5.2 Bacteriology
+ Bacteriology
+ beta12orEarlier
+ Specific bacteria or archaea, e.g. information on a specific prokaryote genome including molecular sequences, genes and annotation.
+
+
+
+
+
+
+
+
+
+ Protein databases
+
+ true
+ 1.3
+ Protein data resources.
+ beta12orEarlier
+ Protein data resources
+
+
+
+
+
+
+
+
+
+ Structure determination
+
+ Experimental methods for biomolecular structure determination, such as X-ray crystallography, nuclear magnetic resonance (NMR), circular dichroism (CD) spectroscopy, microscopy etc., including the assignment or modelling of molecular structure from such data.
+ beta12orEarlier
+ true
+ 1.3
+
+
+
+
+
+
+
+
+
+ Cell biology
+
+ beta12orEarlier
+ VT 1.5.11 Cell biology
+ Cells, such as key genes and proteins involved in the cell cycle.
+
+
+
+
+
+
+
+
+
+ Classification
+
+ beta13
+ beta12orEarlier
+ Topic focused on identifying, grouping, or naming things in a structured way according to some schema based on observable relationships.
+ true
+
+
+
+
+
+
+
+
+
+ Lipoproteins
+
+ true
+ 1.3
+ beta12orEarlier
+ Lipoproteins (protein-lipid assemblies).
+
+
+
+
+
+
+
+
+
+ Phylogeny visualisation
+
+ true
+ Visualise a phylogeny, for example, render a phylogenetic tree.
+ beta12orEarlier
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Cheminformatics
+
+ The application of information technology to chemistry in biological research environment.
+ Chemical informatics
+ beta12orEarlier
+ Chemoinformatics
+
+
+
+
+
+
+
+
+
+
+ Systems biology
+
+ http://en.wikipedia.org/wiki/Systems_biology
+ This includes databases of models and methods to construct or analyse a model.
+ Biological models
+ http://purl.bioontology.org/ontology/MSH/D049490
+ beta12orEarlier
+ Biological modelling
+ Biological system modelling
+ The holistic modelling and analysis of complex biological systems and the interactions therein.
+
+
+
+
+
+
+
+
+
+
+ Statistics and probability
+
+ Biostatistics
+ The application of statistical methods to biological problems.
+ http://en.wikipedia.org/wiki/Biostatistics
+ beta12orEarlier
+ http://purl.bioontology.org/ontology/MSH/D056808
+
+
+
+
+
+
+
+
+
+
+ Structure database search
+
+ The query is a structure-based entity such as another structure, a 3D (structural) motif, 3D profile or template.
+ beta12orEarlier
+ Search for and retrieve molecular structures that are similar to a structure-based query (typically another structure or part of a structure).
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Molecular modelling
+
+ Homology modeling
+ Comparative modeling
+ Comparative modelling
+ beta12orEarlier
+ Homology modelling
+ Molecular modeling
+ The construction, analysis, evaluation, refinement etc. of models of a molecules properties or behaviour.
+
+
+
+
+
+
+
+
+
+ Protein function prediction
+
+ 1.2
+ beta12orEarlier
+ true
+ The prediction of functional properties of a protein.
+
+
+
+
+
+
+
+
+
+ SNP
+
+ Single nucleotide polymorphisms (SNP) and associated data, for example, the discovery and annotation of SNPs.
+ beta12orEarlier
+ Single nucleotide polymorphism
+ A SNP is a DNA sequence variation where a single nucleotide differs between members of a species or paired chromosomes in an individual.
+
+
+
+
+
+
+
+
+
+ Transmembrane protein prediction
+
+ Predict transmembrane domains and topology in protein sequences.
+ beta12orEarlier
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+
+ Nucleic acid structure comparison
+
+ The comparison two or more nucleic acid (typically RNA) secondary or tertiary structures.
+ beta12orEarlier
+ true
+ beta12orEarlier
+ Use this concept for methods that are exclusively for nucleic acid structures.
+
+
+
+
+
+
+
+
+
+
+ Exons
+
+ Gene features (exon)
+ beta12orEarlier
+ Exons in a nucleotide sequences.
+
+
+
+
+
+
+
+
+
+
+ Gene transcription features
+
+ GC signals (report)
+ CAAT signals (report)
+ -35 signals (report)
+ Gene transcriptional features
+ This includes promoters, CAAT signals, TATA signals, -35 signals, -10 signals, GC signals, primer binding sites for initiation of transcription or reverse transcription, enhancer, attenuator, terminators and ribosome binding sites.
+ Enhancers (report)
+ Terminators (report)
+ Transcription of DNA into RNA including the regulation of transcription.
+ Ribosome binding sites (report)
+ -10 signals (report)
+ beta12orEarlier
+ TATA signals (report)
+ Attenuators (report)
+
+
+
+
+
+
+
+
+
+
+ DNA mutation
+
+
+ Mutation annotation
+ beta12orEarlier
+ DNA mutation.
+ Nucleic acid features (mutation)
+
+
+
+
+
+
+
+
+
+
+ Oncology
+
+ beta12orEarlier
+ VT 3.2.16 Oncology
+ Cancer
+ The study of cancer, for example, genes and proteins implicated in cancer.
+ Cancer biology
+
+
+
+
+
+
+
+
+
+
+ Toxins and targets
+
+
+ Toxins
+ Targets
+ beta12orEarlier
+ Structural and associated data for toxic chemical substances.
+
+
+
+
+
+
+
+
+
+
+ Introns
+
+ Gene features (intron)
+ Nucleic acid features (intron)
+ Introns in a nucleotide sequences.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Tool topic
+
+ beta12orEarlier
+ A topic concerning primarily bioinformatics software tools, typically the broad function or purpose of a tool.
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Study topic
+
+ A general area of bioinformatics study, typically the broad scope or category of content of a bioinformatics journal or conference proceeding.
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Nomenclature
+
+ true
+ 1.3
+ beta12orEarlier
+ Biological nomenclature (naming), symbols and terminology.
+
+
+
+
+
+
+
+
+
+ Disease genes and proteins
+
+ 1.3
+ true
+ beta12orEarlier
+ The genes, gene variations and proteins involved in one or more specific diseases.
+
+
+
+
+
+
+
+
+
+ Protein structure analysis
+
+ Protein structure
+ Protein secondary or tertiary structural data and/or associated annotation.
+ http://edamontology.org/topic_3040
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Humans
+
+ beta12orEarlier
+ true
+ The human genome, including molecular sequences, genes, annotation, maps and viewers, the human proteome or human beings in general.
+
+
+
+
+
+
+
+
+
+ Gene resources
+
+ Gene resource
+ beta12orEarlier
+ 1.3
+ Informatics resource (typically a database) primarily focussed on genes.
+ Gene database
+ true
+
+
+
+
+
+
+
+
+
+ Yeast
+
+ beta12orEarlier
+ Yeast, e.g. information on a specific yeast genome including molecular sequences, genes and annotation.
+ true
+ 1.3
+
+
+
+
+
+
+
+
+
+ Eukaryotes
+
+ Eukaryote
+ Eukaryotes or data concerning eukaryotes, e.g. information on a specific eukaryote genome including molecular sequences, genes and annotation.
+ The resource may be specific to a eukaryote, a group of eukaryotes or all eukaryotes.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Invertebrates
+
+ The resource may be specific to an invertebrate, a group of invertebrates or all invertebrates.
+ beta12orEarlier
+ Invertebrates, e.g. information on a specific invertebrate genome including molecular sequences, genes and annotation.
+
+
+
+
+
+
+
+
+
+ Vertebrates
+
+ The resource may be specific to a vertebrate, a group of vertebrates or all vertebrates.
+ Vertebrates, e.g. information on a specific vertebrate genome including molecular sequences, genes and annotation.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Unicellular eukaryotes
+
+ Unicellular eukaryotes, e.g. information on a unicellular eukaryote genome including molecular sequences, genes and annotation.
+ beta12orEarlier
+ The resource may be specific to a unicellular eukaryote, a group of unicellular eukaryotes or all unicellular eukaryotes.
+
+
+
+
+
+
+
+
+
+ Protein structure alignment
+
+ Protein secondary or tertiary structure alignments.
+ beta12orEarlier
+ true
+ 1.3
+
+
+
+
+
+
+
+
+
+ X-ray diffraction
+
+
+ The study of matter and their structure by means of the diffraction of X-rays, typically the diffraction pattern caused by the regularly spaced atoms of a crystalline sample.
+ beta12orEarlier
+ X-ray microscopy
+ Crystallography
+ X-ray crystallography
+
+
+
+
+
+
+
+
+
+
+ Ontologies, nomenclature and classification
+
+ true
+ Conceptualisation, categorisation and naming of entities or phenomena within biology or bioinformatics.
+ 1.3
+ http://purl.bioontology.org/ontology/MSH/D002965
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Immunoproteins, genes and antigens
+
+
+ Immunopeptides
+ Immunity-related genes, proteins and their ligands.
+ Antigens
+ This includes T cell receptors (TR), major histocompatibility complex (MHC), immunoglobulin superfamily (IgSF) / antibodies, major histocompatibility complex superfamily (MhcSF), etc."
+ beta12orEarlier
+ Immunoproteins
+ Immunogenes
+
+
+
+
+
+
+
+
+
+
+ Molecules
+
+ CHEBI:23367
+ beta12orEarlier
+ beta12orEarlier
+ Specific molecules, including large molecules built from repeating subunits (macromolecules) and small molecules of biological significance.
+ true
+
+
+
+
+
+
+
+
+
+ Toxicology
+
+
+ Toxins and the adverse effects of these chemical substances on living organisms.
+ VT 3.1.9 Toxicology
+ Toxicoinformatics
+ Toxicology
+ beta12orEarlier
+ Computational toxicology
+
+
+
+
+
+
+
+
+
+
+ High-throughput sequencing
+
+ Next-generation sequencing
+ beta13
+ true
+ beta12orEarlier
+ Parallelized sequencing processes that are capable of sequencing many thousands of sequences simultaneously.
+
+
+
+
+
+
+
+
+
+ Structural clustering
+
+ The comparison and grouping together of molecular structures on the basis of similarity; generate, process or analyse structural clusters.
+ 1.7
+ Structure classification
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Gene regulatory networks
+
+
+ Gene regulatory networks.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ Disease (specific)
+
+ Informatics resources dedicated to one or more specific diseases (not diseases in general).
+ beta12orEarlier
+ true
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ VNTR
+
+ Nucleic acid features (VNTR)
+ Variable number of tandem repeat polymorphism
+ Variable number of tandem repeat (VNTR) polymorphism in a DNA sequence.
+ beta12orEarlier
+ VNTR annotation
+ VNTRs occur in non-coding regions of DNA and consists sub-sequence that is repeated a multiple (and varied) number of times.
+
+
+
+
+
+
+
+
+
+
+ Microsatellites
+
+ beta12orEarlier
+ Nucleic acid features (microsatellite)
+ A microsatellite polymorphism is a very short subsequence that is repeated a variable number of times between individuals. These repeats consist of the nucleotides cytosine and adenosine.
+ Microsatellite annotation
+ Microsatellite polymorphism in a DNA sequence.
+
+
+
+
+
+
+
+
+
+
+ RFLP
+
+ Restriction fragment length polymorphisms (RFLP) in a DNA sequence.
+ An RFLP is defined by the presence or absence of a specific restriction site of a bacterial restriction enzyme.
+ RFLP annotation
+ beta12orEarlier
+ Nucleic acid features (RFLP)
+
+
+
+
+
+
+
+
+
+
+ DNA polymorphism
+
+
+ Nucleic acid features (polymorphism)
+ DNA polymorphism.
+ Polymorphism annotation
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+
+ Nucleic acid design
+
+ Topic for the design of nucleic acid sequences with specific conformations.
+ 1.3
+ beta12orEarlier
+ true
+
+
+
+
+
+
+
+
+
+ Primer or probe design
+
+ 1.3
+ true
+ beta13
+ The design of primers for PCR and DNA amplification or the design of molecular probes.
+
+
+
+
+
+
+
+
+
+ Structure databases
+
+ beta13
+ true
+ 1.2
+ Structure data resources
+ Molecular secondary or tertiary (3D) structural data resources, typically of proteins and nucleic acids.
+
+
+
+
+
+
+
+
+
+ Nucleic acid structure
+
+ true
+ beta13
+ Nucleic acid (secondary or tertiary) structure, such as whole structures, structural features and associated annotation.
+ 1.2
+
+
+
+
+
+
+
+
+
+ Sequence databases
+
+ Molecular sequence data resources, including sequence sites, alignments, motifs and profiles.
+ true
+ beta13
+ Sequence data resources
+ Sequence data
+ Sequence data resource
+ 1.3
+
+
+
+
+
+
+
+
+
+ Nucleic acid sequences
+
+ Nucleotide sequences and associated concepts such as sequence sites, alignments, motifs and profiles.
+ beta13
+ 1.3
+ true
+ Nucleotide sequences
+
+
+
+
+
+
+
+
+
+ Protein sequences
+
+ Protein sequences and associated concepts such as sequence sites, alignments, motifs and profiles.
+ beta13
+ 1.3
+ true
+
+
+
+
+
+
+
+
+
+ Protein interaction networks
+
+ 1.3
+ true
+
+
+
+
+
+
+
+
+
+ Molecular biology
+
+ VT 1.5.4 Biochemistry and molecular biology
+ beta13
+ The molecular basis of biological activity, particularly the macromolecules (e.g. proteins and nucleic acids) that are essential to life.
+
+
+
+
+
+
+
+
+
+
+ Mammals
+
+ true
+ beta13
+ 1.3
+ Mammals, e.g. information on a specific mammal genome including molecular sequences, genes and annotation.
+
+
+
+
+
+
+
+
+
+ Biodiversity
+
+ The degree of variation of life forms within a given ecosystem, biome or an entire planet.
+ beta13
+ VT 1.5.5 Biodiversity conservation
+ http://purl.bioontology.org/ontology/MSH/D044822
+
+
+
+
+
+
+
+
+
+
+ Sequence clusters and classification
+
+ This includes the results of sequence clustering, ortholog identification, assignment to families, annotation etc.
+ The comparison, grouping together and classification of macromolecules on the basis of sequence similarity.
+ Sequence families
+ 1.3
+ true
+ Sequence clusters
+ beta13
+
+
+
+
+
+
+
+
+
+ Genetics
+
+ http://purl.bioontology.org/ontology/MSH/D005823
+ The study of genes, genetic variation and heredity in living organisms.
+ beta13
+ Heredity
+
+
+
+
+
+
+
+
+
+
+ Quantitative genetics
+
+ beta13
+ The genes and genetic mechanisms such as Mendelian inheritance that underly continuous phenotypic traits (such as height or weight).
+
+
+
+
+
+
+
+
+
+ Population genetics
+
+ The distribution of allele frequencies in a population of organisms and its change subject to evolutionary processes including natural selection, genetic drift, mutation and gene flow.
+ beta13
+
+
+
+
+
+
+
+
+
+
+ Regulatory RNA
+
+ 1.3
+ Regulatory RNA sequences including microRNA (miRNA) and small interfering RNA (siRNA).
+ true
+ beta13
+
+
+
+
+
+
+
+
+
+ Documentation and help
+
+ The documentation of resources such as tools, services and databases and how to get help.
+ Help
+ beta13
+ Documentation
+
+
+
+
+
+
+
+
+
+
+ Genetic organisation
+
+ The structural and functional organisation of genes and other genetic elements.
+ 1.3
+ beta13
+ true
+
+
+
+
+
+
+
+
+
+ Medical informatics
+
+ Health informatics
+ Clinical informatics
+ Biomedical informatics
+ Translational medicine
+ The application of information technology to health, disease and biomedicine.
+ Healthcare informatics
+ beta13
+ Health and disease
+ Molecular medicine
+
+
+
+
+
+
+
+
+
+
+ Developmental biology
+
+ VT 1.5.14 Developmental biology
+ beta13
+ How organisms grow and develop.
+
+
+
+
+
+
+
+
+
+
+ Embryology
+
+ beta13
+ The development of organisms between the one-cell stage (typically the zygote) and the end of the embryonic stage.
+
+
+
+
+
+
+
+
+
+
+ Anatomy
+
+ VT 3.1.1 Anatomy and morphology
+ beta13
+ The form and function of the structures of living organisms.
+
+
+
+
+
+
+
+
+
+
+ Literature and reference
+
+ Literature search
+ beta13
+ The scientific literature, reference information and documentation.
+ Literature sources
+ http://purl.bioontology.org/ontology/MSH/D011642
+
+
+
+
+
+
+
+
+
+
+ Biology
+
+ VT 1.5.8 Biology
+ beta13
+ VT 1.5 Biological sciences
+ VT 1.5.23 Reproductive biology
+ Cryobiology
+ Biological rhythms
+ A particular biological science, especially observable traits such as aspects of biochemistry, physiology, morphology, anatomy, development and so on.
+ VT 1.5.7 Biological rhythm
+ Biological science
+ Aerobiology
+ VT 1.5.99 Other
+ Chronobiology
+ VT 1.5.13 Cryobiology
+
+ VT 1.5.1 Aerobiology
+ VT 1.5.3 Behavioural biology
+ Reproductive biology
+ Behavioural biology
+
+
+
+
+
+
+
+
+
+
+ Data management
+
+ The development and use of architectures, policies, practices and procedures for management of data.
+ beta13
+ Data handling
+ http://purl.bioontology.org/ontology/MSH/D030541
+ VT 1.3.1 Data management
+
+
+
+
+
+
+
+
+
+
+ Sequence feature detection
+
+ 1.3
+ true
+ beta13
+ The detection of the positional features, such as functional and other key sites, in molecular sequences.
+ http://purl.bioontology.org/ontology/MSH/D058977
+
+
+
+
+
+
+
+
+
+ Nucleic acid feature detection
+
+ The detection of positional features such as functional sites in nucleotide sequences.
+ true
+ beta13
+ 1.3
+
+
+
+
+
+
+
+
+
+ Protein feature detection
+
+ The detection, identification and analysis of positional protein sequence features, such as functional sites.
+ beta13
+ 1.3
+ true
+
+
+
+
+
+
+
+
+
+ Biological system modelling
+
+ 1.2
+ true
+ beta13
+ Topic for modelling biological systems in mathematical terms.
+
+
+
+
+
+
+
+
+
+ Data acquisition
+
+ The acquisition of data, typically measurements of physical systems using any type of sampling system, or by another other means.
+ beta13
+
+
+
+
+
+
+
+
+
+ Genes and proteins resources
+
+ 1.3
+ Gene family
+ beta13
+ Gene and protein families
+ Specific genes and/or their encoded proteins or a family or other grouping of related genes and proteins.
+ true
+
+
+
+
+
+
+
+
+
+ Protein topological domains
+
+
+ Topological domains such as cytoplasmic regions in a protein.
+ Protein features (topological domains)
+ 1.8
+
+
+
+
+
+
+
+
+
+
+ Protein variants
+
+ protein sequence variants produced e.g. from alternative splicing, alternative promoter usage, alternative initiation and ribosomal frameshifting.
+ beta13
+
+
+
+
+
+
+
+
+
+
+ Expression signals
+
+
+ beta13
+ Nucleic acid features (expression signal)
+ Regions within a nucleic acid sequence containing a signal that alters a biological function.
+
+
+
+
+
+
+
+
+
+
+ DNA binding sites
+
+
+ This includes ribosome binding sites (Shine-Dalgarno sequence in prokaryotes).
+ beta13
+ Nucleic acid features (binding)
+ Nucleic acids binding to some other molecule.
+
+
+
+
+
+
+
+
+
+
+ Nucleic acid repeats
+
+ beta13
+ This includes long terminal repeats (LTRs); sequences (typically retroviral) directly repeated at both ends of a defined sequence and other types of repeating unit.
+ Repetitive elements within a nucleic acid sequence.
+
+
+
+
+
+
+
+
+
+
+ DNA replication and recombination
+
+ DNA replication or recombination.
+ This includes binding sites for initiation of replication (origin of replication), regions where transfer is initiated during the conjugation or mobilization (origin of transfer), starting sites for DNA duplication (origin of replication) and regions which are eliminated through any of kind of recombination.
+ Nucleosome exclusion sequences
+ Nucleic acid features (replication and recombination)
+ beta13
+
+
+
+
+
+
+
+
+
+
+ Signal or transit peptide
+
+ beta13
+ Nucleic acid features (signal or transit peptide)
+ A signal peptide coding sequence encodes an N-terminal domain of a secreted protein, which is involved in attaching the polypeptide to a membrane leader sequence. A transit peptide coding sequence encodes an N-terminal domain of a nuclear-encoded organellar protein; which is involved in import of the protein into the organelle.
+ Coding sequences for a signal or transit peptide.
+
+
+
+
+
+
+
+
+
+
+ Sequence tagged sites
+
+ Nucleic acid features (STS)
+ beta13
+ Sequence tagged sites are short DNA sequences that are unique within a genome and serve as a mapping landmark, detectable by PCR they allow a genome to be mapped via an ordering of STSs.
+ Sequence tagged sites (STS) in nucleic acid sequences.
+
+
+
+
+
+
+
+
+
+
+ Sequencing
+
+ http://purl.bioontology.org/ontology/MSH/D059014
+ 1.1
+ NGS
+ Next generation sequencing
+ The determination of complete (typically nucleotide) sequences, including those of genomes (full genome sequencing, de novo sequencing and resequencing), amplicons and transcriptomes.
+ Next gen sequencing
+
+
+
+
+
+
+
+
+
+
+ ChIP-seq
+
+ true
+ 1.3
+ Chip sequencing
+ Chip seq
+ 1.1
+ The analysis of protein-DNA interactions where chromatin immunoprecipitation (ChIP) is used in combination with massively parallel DNA sequencing to identify the binding sites of DNA-associated proteins.
+ Chip-sequencing
+
+
+
+
+
+
+
+
+
+ RNA-Seq
+
+ Small RNA-seq
+ Whole transcriptome shotgun sequencing
+ RNA-seq
+ 1.1
+ 1.3
+ A topic concerning high-throughput sequencing of cDNA to measure the RNA content (transcriptome) of a sample, for example, to investigate how different alleles of a gene are expressed, detect post-transcriptional mutations or identify gene fusions.
+ Small RNA-Seq
+ WTSS
+ This includes small RNA profiling (small RNA-Seq), for example to find novel small RNAs, characterize mutations and analyze expression of small RNAs.
+ true
+
+
+
+
+
+
+
+
+
+ DNA methylation
+
+ true
+ DNA methylation including bisulfite sequencing, methylation sites and analysis, for example of patterns and profiles of DNA methylation in a population, tissue etc.
+ 1.3
+ http://purl.bioontology.org/ontology/MSH/D019175
+ 1.1
+
+
+
+
+
+
+
+
+
+ Metabolomics
+
+ The systematic study of metabolites, the chemical processes they are involved, and the chemical fingerprints of specific cellular processes in a whole cell, tissue, organ or organism.
+ http://purl.bioontology.org/ontology/MSH/D055432
+ 1.1
+
+
+
+
+
+
+
+
+
+
+ Epigenomics
+
+
+ Epigenetics concerns the heritable changes in gene expression owing to mechanisms other than DNA sequence variation.
+ 1.1
+ http://purl.bioontology.org/ontology/MSH/D057890
+ The study of the epigenetic modifications of a whole cell, tissue, organism etc.
+
+
+
+
+
+
+
+
+
+
+ Metagenomics
+
+
+ http://purl.bioontology.org/ontology/MSH/D056186
+ Ecogenomics
+ Community genomics
+ Environmental genomics
+ 1.1
+ The study of genetic material recovered from environmental samples, and associated environmental data.
+
+
+
+
+
+
+
+
+
+
+ Structural variation
+
+
+ 1.1
+ Variation in chromosome structure including microscopic and submicroscopic types of variation such as deletions, duplications, copy-number variants, insertions, inversions and translocations.
+ Genomic structural variation
+
+
+
+
+
+
+
+
+
+ DNA packaging
+
+ beta12orEarlier
+ DNA-histone complexes (chromatin), organisation of chromatin into nucleosomes and packaging into higher-order structures.
+ http://purl.bioontology.org/ontology/MSH/D042003
+
+
+
+
+
+
+
+
+
+ DNA-Seq
+
+ 1.1
+ A topic concerning high-throughput sequencing of randomly fragmented genomic DNA, for example, to investigate whole-genome sequencing and resequencing, SNP discovery, identification of copy number variations and chromosomal rearrangements.
+ 1.3
+ DNA-seq
+ true
+
+
+
+
+
+
+
+
+
+ RNA-Seq alignment
+
+ true
+ 1.3
+ RNA-seq alignment
+ The alignment of sequences of (typically millions) of short reads to a reference genome. This is a specialised topic within sequence alignment, especially because of complications arising from RNA splicing.
+ beta12orEarlier
+
+
+
+
+
+
+
+
+
+ ChIP-on-chip
+
+ true
+ 1.3
+ 1.1
+ Experimental techniques that combine chromatin immunoprecipitation ('ChIP') with microarray ('chip'). ChIP-on-chip is used for high-throughput study protein-DNA interactions.
+ ChIP-chip
+
+
+
+
+
+
+
+
+
+ Data security
+
+ 1.3
+ Data privacy
+ The protection of data, such as patient health data, from damage or unwanted access from unauthorized users.
+
+
+
+
+
+
+
+
+
+ Sample collections
+
+ samples
+ biobanking
+ 1.3
+ biosamples
+ Biological samples and specimens.
+ Specimen collections
+
+
+
+
+
+
+
+
+
+
+ Biochemistry
+
+
+ VT 1.5.4 Biochemistry and molecular biology
+ Chemical biology
+ 1.3
+ Biological chemistry
+ Chemical substances and physico-chemical processes and that occur within living organisms.
+
+
+
+
+
+
+
+
+
+
+ Phylogenetics
+
+
+ The study of evolutionary relationships amongst organisms from analysis of genetic information (typically gene or protein sequences).
+ 1.3
+ http://purl.bioontology.org/ontology/MSH/D010802
+
+
+
+
+
+
+
+
+
+ Epigenetics
+
+ Topic concerning the study of heritable changes, for example in gene expression or phenotype, caused by mechanisms other than changes in the DNA sequence.
+ DNA methylation
+ This includes sub-topics such as histone modification and DNA methylation.
+ http://purl.bioontology.org/ontology/MSH/D019175
+ Histone modification
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Biotechnology
+
+ 1.3
+ The exploitation of biological process, structure and function for industrial purposes, for example the genetic manipulation of microorganisms for the antibody production.
+
+
+
+
+
+
+
+
+
+
+ Phenomics
+
+
+ Phenomes, or the study of the change in phenotype (the physical and biochemical traits of organisms) in response to genetic and environmental factors.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Evolutionary biology
+
+ VT 1.5.16 Evolutionary biology
+ 1.3
+ The evolutionary processes, from the genetic to environmental scale, that produced life in all its diversity.
+
+
+
+
+
+
+
+
+
+
+ Physiology
+
+ The functions of living organisms and their constituent parts.
+ 1.3
+ VT 3.1.8 Physiology
+
+
+
+
+
+
+
+
+
+
+ Microbiology
+
+ The biology of microorganisms.
+ 1.3
+ VT 1.5.20 Microbiology
+
+
+
+
+
+
+
+
+
+
+ Parasitology
+
+ 1.3
+ The biology of parasites.
+
+
+
+
+
+
+
+
+
+
+ Medicine
+
+ General medicine
+ Research in support of healing by diagnosis, treatment, and prevention of disease.
+ 1.3
+ VT 3.1 Basic medicine
+ VT 3.2.9 General and internal medicine
+ Experimental medicine
+ Biomedical research
+ Clinical medicine
+ VT 3.2 Clinical medicine
+ Internal medicine
+
+
+
+
+
+
+
+
+
+
+ Neurobiology
+
+ Neuroscience
+ 1.3
+ The study of the nervous system and brain; its anatomy, physiology and function.
+ VT 3.1.5 Neuroscience
+
+
+
+
+
+
+
+
+
+
+ Public health and epidemiology
+
+ VT 3.3.1 Epidemiology
+ Topic concerning the patterns, cause, and effect of disease within populations.
+ 1.3
+ Public health
+ Epidemiology
+
+
+
+
+
+
+
+
+
+
+ Biophysics
+
+
+ 1.3
+ VT 1.5.9 Biophysics
+ The use of physics to study biological systems.
+
+
+
+
+
+
+
+
+
+
+ Computational biology
+
+
+ VT 1.5.19 Mathematical biology
+ VT 1.5.12 Computational biology
+ This includes the modeling and treatment of biological processes and systems in mathematical terms (theoretical biology).
+ Mathematical biology
+ VT 1.5.26 Theoretical biology
+ Theoretical biology
+ 1.3
+ The development and application of theory, analytical methods, mathematical models and computational simulation of biological systems.
+ Biomathematics
+
+
+
+
+
+
+
+
+
+
+ Transcriptomics
+
+
+ The analysis of transcriptomes, or a set of all the RNA molecules in a specific cell, tissue etc.
+ Transcriptome
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Chemistry
+
+ VT 1.7.10 Polymer science
+ VT 1.7.7 Mathematical chemistry
+ VT 1.7.3 Colloid chemistry
+ 1.3
+ Mathematical chemistry
+ Physical chemistry
+ VT 1.7.9 Physical chemistry
+ Polymer science
+ Chemical science
+ Organic chemistry
+ VT 1.7.6 Inorganic and nuclear chemistry
+ VT 1.7 Chemical sciences
+ VT 1.7.5 Electrochemistry
+ Inorganic chemistry
+ VT 1.7.2 Chemistry
+ Nuclear chemistry
+ VT 1.7.8 Organic chemistry
+ The composition and properties of matter, reactions, and the use of reactions to create new substances.
+
+
+
+
+
+
+
+
+
+
+ Mathematics
+
+ The study of numbers (quantity) and other topics including structure, space, and change.
+ VT 1.1 Mathematics
+ Maths
+ VT 1.1.99 Other
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Computer science
+
+ 1.3
+ VT 1.2 Computer sciences
+ VT 1.2.99 Other
+ The theory and practical use of computer systems.
+
+
+
+
+
+
+
+
+
+
+ Physics
+
+ The study of matter, space and time, and related concepts such as energy and force.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ RNA splicing
+
+
+ RNA splicing; post-transcription RNA modification involving the removal of introns and joining of exons.
+ This includes the study of splice sites, splicing patterns, splice alternatives or variants, isoforms, etc.
+ 1.3
+
+
+
+
+
+
+
+
+
+ Molecular genetics
+
+ 1.3
+ The structure and function of genes at a molecular level.
+
+
+
+
+
+
+
+
+
+
+ Respiratory medicine
+
+ VT 3.2.25 Respiratory systems
+ Pulmonology
+ The study of respiratory system.
+ Pulmonary medicine
+ Respiratory disease
+ 1.3
+ Pulmonary disorders
+
+
+
+
+
+
+
+
+
+
+ Metabolic disease
+
+ The study of metabolic diseases.
+ 1.4
+ 1.3
+ true
+
+
+
+
+
+
+
+
+
+ Infectious disease
+
+ Transmissable disease
+ VT 3.3.4 Infectious diseases
+ Communicable disease
+ The branch of medicine that deals with the prevention, diagnosis and management of transmissible disease with clinically evident illness resulting from infection with pathogenic biological agents (viruses, bacteria, fungi, protozoa, parasites and prions).
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Rare diseases
+
+ 1.3
+ The study of rare diseases.
+
+
+
+
+
+
+
+
+
+
+ Computational chemistry
+
+
+ 1.3
+ VT 1.7.4 Computational chemistry
+ Topic concerning the development and application of theory, analytical methods, mathematical models and computational simulation of chemical systems.
+
+
+
+
+
+
+
+
+
+
+ Neurology
+
+ Neurological disorders
+ 1.3
+ The branch of medicine that deals with the anatomy, functions and disorders of the nervous system.
+
+
+
+
+
+
+
+
+
+
+ Cardiology
+
+ Cardiovascular disease
+ VT 3.2.4 Cardiac and Cardiovascular systems
+ 1.3
+ Cardiovascular medicine
+ Heart disease
+ VT 3.2.22 Peripheral vascular disease
+ The diseases and abnormalities of the heart and circulatory system.
+
+
+
+
+
+
+
+
+
+
+ Drug discovery
+
+
+ The discovery and design of drugs or potential drug compounds.
+ This includes methods that search compound collections, generate or analyse drug 3D conformations, identify drug targets with structural docking etc.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Biobank
+
+ biobanking
+ 1.3
+ Repositories of biological samples, typically human, for basic biological and clinical research.
+ Tissue collection
+
+
+
+
+
+
+
+
+
+
+ Mouse clinic
+
+ 1.3
+ Laboratory study of mice, for example, phenotyping, and mutagenesis of mouse cell lines.
+
+
+
+
+
+
+
+
+
+
+ Microbial collection
+
+ Collections of microbial cells including bacteria, yeasts and moulds.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Cell culture collection
+
+ 1.3
+ Collections of cells grown under laboratory conditions, specifically, cells from multi-cellular eukaryotes and especially animal cells.
+
+
+
+
+
+
+
+
+
+
+ Clone library
+
+ 1.3
+ Collections of DNA, including both collections of cloned molecules, and populations of micro-organisms that store and propagate cloned DNA.
+
+
+
+
+
+
+
+
+
+
+ Translational medicine
+
+ 'translating' the output of basic and biomedical research into better diagnostic tools, medicines, medical procedures, policies and advice.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Compound libraries and screening
+
+ Translational medicine
+ Chemical library
+ Collections of chemicals, typically for use in high-throughput screening experiments.
+ Compound library
+ Chemical screening
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Biomedical science
+
+
+ Topic concerning biological science that is (typically) performed in the context of medicine.
+ VT 3.3 Health sciences
+ Health science
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Data identity and mapping
+
+ Topic concerning the identity of biological entities, or reports on such entities, and the mapping of entities and records in different databases.
+ 1.3
+
+
+
+
+
+
+
+
+
+
+ Sequence search
+
+ 1.3
+ Sequence database search
+ The search and retrieval from a database on the basis of molecular sequence similarity.
+
+
+
+
+
+
+
+
+
+ Biomarkers
+
+ Diagnostic markers
+ 1.4
+ Objective indicators of biological state often used to assess health, and determine treatment.
+
+
+
+
+
+
+
+
+
+ Laboratory techniques
+
+ The procedures used to conduct an experiment.
+ Lab techniques
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Data architecture, analysis and design
+
+ The development of policies, models and standards that cover data acquisition, storage and integration, such that it can be put to use, typically through a process of systematically applying statistical and / or logical techniques to describe, illustrate, summarise or evaluate data.
+ Data analysis
+ Data design
+ 1.4
+ Data architecture
+
+
+
+
+
+
+
+
+
+
+ Data integration and warehousing
+
+ The combination and integration of data from different sources, for example into a central repository or warehouse, to provide users with a unified view of these data.
+
+
+ Data integration
+ 1.4
+ Data warehousing
+
+
+
+
+
+
+
+
+
+
+ Biomaterials
+
+ Any matter, surface or construct that interacts with a biological system.
+ Diagnostic markers
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Chemical biology
+
+
+ 1.4
+ The use of synthetic chemistry to study and manipulate biological systems.
+
+
+
+
+
+
+
+
+
+
+ Analytical chemistry
+
+ 1.4
+ The study of the separation, identification, and quantification of the chemical components of natural and artificial materials.
+ VT 1.7.1 Analytical chemistry
+
+
+
+
+
+
+
+
+
+
+ Synthetic chemistry
+
+ Synthetic organic chemistry
+ The use of chemistry to create new compounds.
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Software engineering
+
+ VT 1.2.1 Algorithms
+ Programming languages
+ VT 1.2.7 Data structures
+ Software development
+ Software engineering
+ Computer programming
+ 1.4
+ VT 1.2.12 Programming languages
+ The process that leads from an original formulation of a computing problem to executable programs.
+ Data structures
+ Algorithms
+ VT 1.2.14 Software engineering
+
+
+
+
+
+
+
+
+
+
+ Drug development
+
+ 1.4
+ Medicine development
+ The process of bringing a new drug to market once a lead compounds has been identified through drug discovery.
+ Drug development science
+ Medicines development
+
+
+
+
+
+
+
+
+
+
+ Drug formulation and delivery
+
+ The process of formulating and administering a pharmaceutical compound to achieve a therapeutic effect.
+ Drug delivery
+ Drug formulation
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Pharmacokinetics and pharmacodynamics
+
+ Pharmacodynamics
+ Pharmacokinetics
+ Drug distribution
+ 1.4
+ Drug excretion
+ The study of how a drug interacts with the body.
+ Drug absorption
+ ADME
+ Drug metabolism
+ Drug metabolism
+
+
+
+
+
+
+
+
+
+
+ Medicines research and development
+ Medicine research and development
+
+ The discovery, development and approval of medicines.
+ Health care research
+ Drug discovery and development
+ 1.4
+ Health care science
+
+
+
+
+
+
+
+
+
+
+ Safety sciences
+
+ 1.4
+ Drug safety
+ The safety (or lack) of drugs and other medical interventions.
+
+
+
+
+
+
+
+
+
+
+ Pharmacovigilence
+
+ 1.4
+ Pharmacovigilence concerns safety once a drug has gone to market.
+ The detection, assessment, understanding and prevention of adverse effects of medicines.
+
+
+
+
+
+
+
+
+
+
+ Preclinical and clinical studies
+
+ The testing of new medicines, vaccines or procedures on animals (preclinical) and humans (clinical) prior to their approval by regulatory authorities.
+ Preclinical studies
+ 1.4
+ Clinical studies
+
+
+
+
+
+
+
+
+
+
+ Imaging
+
+ This includes diffraction experiments that are based upon the interference of waves, typically electromagnetic waves such as X-rays or visible light, by some object being studied, typically in order to produce an image of the object or determine its structure.
+ Microscopy imaging
+ 1.4
+ Microscopy
+ Diffraction experiment
+ The visual representation of an object.
+
+
+
+
+
+
+
+
+
+
+ Biological imaging
+
+ The use of imaging techniques to understand biology.
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Medical imaging
+
+ VT 3.2.24 Radiology
+ The use of imaging techniques for clinical purposes or medical research.
+ 1.4
+ Radiology
+ VT 3.2.14 Nuclear medicine
+ Nuclear medicine
+ VT 3.2.13 Medical imaging
+
+
+
+
+
+
+
+
+
+
+ Light microscopy
+
+ The use of optical instruments to magnify the image of an object.
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Laboratory animal science
+
+ 1.4
+ The use of animals and alternatives in experimental research.
+
+
+
+
+
+
+
+
+
+
+ Marine biology
+
+ 1.4
+ VT 1.5.18 Marine and Freshwater biology
+ The study of organisms in the ocean or brackish waters.
+
+
+
+
+
+
+
+
+
+
+ Molecular medicine
+
+ The identification of molecular and genetic causes of disease and the development of interventions to correct them.
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Nutritional science
+
+ 1.4
+ VT 3.3.7 Nutrition and Dietetics
+ Dietetics
+ The study of the effects of food components on the metabolism, health, performance and disease resistance of humans and animals. It also includes the study of human behaviours related to food choices.
+ Nutrition science
+
+
+
+
+
+
+
+
+
+
+ Omics
+
+ The collective characterisation and quantification of pools of biological molecules that translate into the structure, function, and dynamics of an organism or organisms.
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Quality affairs
+
+ The processes that need to be in place to ensure the quality of products for human or animal use.
+ Good clinical practice
+ Good manufacturing practice
+ Quality assurance
+ Good laboratory practice
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Regulatory affairs
+
+ The protection of public health by controlling the safety and efficacy of products in areas including pharmaceuticals, veterinary medicine, medical devices, pesticides, agrochemicals, cosmetics, and complementary medicines.
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Regenerative medicine
+
+ Stem cell research
+ Biomedical approaches to clinical interventions that involve the use of stem cells.
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Systems medicine
+
+ 1.4
+ An interdisciplinary field of study that looks at the dynamic systems of the human body as part of an integrated whole, incorporating biochemical, physiological, and environmental interactions that sustain life.
+
+
+
+
+
+
+
+
+
+
+ Veterinary medicine
+
+ 1.4
+ Topic concerning the branch of medicine that deals with the prevention, diagnosis, and treatment of disease, disorder and injury in animals.
+
+
+
+
+
+
+
+
+
+
+ Bioengineering
+
+ 1.4
+ The application of biological concepts and methods to the analytical and synthetic methodologies of engineering.
+ Diagnostic markers
+
+
+
+
+
+
+
+
+
+
+ Geriatric medicine
+
+ The branch of medicine dealing with the diagnosis, treatment and prevention of disease in older people, and the problems specific to aging.
+ VT 3.2.10 Geriatrics and gerontology
+ Ageing
+ Aging
+ Gerontology
+ 1.4
+ Geriatrics
+
+
+
+
+
+
+
+
+
+
+ Allergy, clinical immunology and immunotherapeutics.
+
+ VT 3.2.1 Allergy
+ Health issues related to the immune system and their prevention, diagnosis and management.
+ 1.4
+ Immune disorders
+ Clinical immunology
+ Immunomodulators
+ Allergy
+ Immunotherapeutics
+
+
+
+
+
+
+
+
+
+
+ Pain medicine
+
+ Ageing
+ 1.4
+ Algiatry
+ The prevention of pain and the evaluation, treatment and rehabilitation of persons in pain.
+
+
+
+
+
+
+
+
+
+
+ Anaesthesiology
+
+ Anaesthetics
+ Anaesthesia and anaesthetics.
+ 1.4
+ VT 3.2.2 Anaesthesiology
+
+
+
+
+
+
+
+
+
+
+ Critical care medicine
+
+ Acute medicine
+ Geriatrics
+ VT 3.2.5 Critical care/Emergency medicine
+ Emergency medicine
+ 1.4
+ The multidisciplinary field that cares for patients with acute, life-threatening illness or injury.
+
+
+
+
+
+
+
+
+
+
+ Dermatology
+
+ The branch of medicine that deals with prevention, diagnosis and treatment of disorders of the skin, scalp, hair and nails.
+ Dermatological disorders
+ 1.4
+ VT 3.2.7 Dermatology and venereal diseases
+
+
+
+
+
+
+
+
+
+
+ Dentistry
+
+ 1.4
+ The study, diagnosis, prevention and treatments of disorders of the oral cavity, maxillofacial area and adjacent structures.
+
+
+
+
+
+
+
+
+
+
+ Ear, nose and throat medicine
+
+ Otolaryngology
+ 1.4
+ The branch of medicine that deals with the prevention, diagnosis, and treatment of disorders of the ear, nose and throat.
+ Otorhinolaryngology
+ Head and neck disorders
+ VT 3.2.20 Otorhinolaryngology
+ Audiovestibular medicine
+
+
+
+
+
+
+
+
+
+
+ Endocrinology and metabolism
+
+ 1.4
+ Metabolic disorders
+ Metabolism
+ Endocrinology
+ The branch of medicine dealing with diseases of endocrine organs, hormone systems, their target organs, and disorders of the pathways of glucose and lipid metabolism.
+ Endocrine disorders
+
+
+
+
+
+
+
+
+
+
+ Haematology
+
+ VT 3.2.11 Hematology
+ The branch of medicine that deals with the blood, blood-forming organs and blood diseases.
+ Haematological disorders
+ 1.4
+ Blood disorders
+
+
+
+
+
+
+
+
+
+
+ Gastroenterology
+
+ The branch of medicine that deals with disorders of the oesophagus, stomach, duodenum, jejunum, ileum, large intestine, sigmoid colon and rectum.
+ Gastrointestinal disorders
+ VT 3.2.8 Gastroenterology and hepatology
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Gender medicine
+
+ The study of the biological and physiological differences between males and females and how they effect differences in disease presentation and management.
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Gynaecology and obstetrics
+
+ VT 3.2.15 Obstetrics and gynaecology
+ 1.4
+ Gynaecology
+ The branch of medicine that deals with the health of the female reproductive system, pregnancy and birth.
+ Gynaecological disorders
+ Obstetrics
+
+
+
+
+
+
+
+
+
+
+ Hepatic and biliary medicine
+
+ Hepatobiliary medicine
+ Liver disorders
+ 1.4
+ The branch of medicine that deals with the liver, gallbladder, bile ducts and bile.
+
+
+
+
+
+
+
+
+
+
+ Infectious tropical disease
+
+ The branch of medicine that deals with the infectious diseases of the tropics.
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Trauma medicine
+
+ 1.4
+ The branch of medicine that treats body wounds or shock produced by sudden physical injury, as from violence or accident.
+
+
+
+
+
+
+
+
+
+
+ Medical toxicology
+
+ The branch of medicine that deals with the diagnosis, management and prevention of poisoning and other adverse health effects caused by medications, occupational and environmental toxins, and biological agents.
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Musculoskeletal medicine
+
+ The branch of medicine that deals with the prevention, diagnosis, and treatment of disorders of the muscle, bone and connective tissue. It incorporates aspects of orthopaedics, rheumatology, rehabilitation medicine and pain medicine.
+ VT 3.2.26 Rheumatology
+ VT 3.2.19 Orthopaedics
+ Musculoskeletal disorders
+ Orthopaedics
+ Rheumatology
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Ophthalmology
+
+ Eye disorders
+ VT 3.2.18 Optometry
+ 1.4
+ Optometry
+ VT 3.2.17 Ophthalmology
+ Audiovestibular medicine
+ The branch of medicine that deals with disorders of the eye, including eyelid, optic nerve/visual pathways and ocular muscles.
+
+
+
+
+
+
+
+
+
+
+ Paediatrics
+
+ 1.4
+ The branch of medicine that deals with the medical care of infants, children and adolescents.
+ VT 3.2.21 Paediatrics
+ Child health
+
+
+
+
+
+
+
+
+
+
+ Psychiatry
+
+ The branch of medicine that deals with the management of mental illness, emotional disturbance and abnormal behaviour.
+ 1.4
+ Psychiatric disorders
+ VT 3.2.23 Psychiatry
+ Mental health
+
+
+
+
+
+
+
+
+
+
+ Reproductive health
+
+ Reproductive disorders
+ Audiovestibular medicine
+ VT 3.2.3 Andrology
+ Andrology
+ 1.4
+ Family planning
+ The health of the reproductive processes, functions and systems at all stages of life.
+ Fertility medicine
+
+
+
+
+
+
+
+
+
+
+ Surgery
+
+ Transplantation
+ VT 3.2.28 Transplantation
+ The use of operative, manual and instrumental techniques on a patient to investigate and/or treat a pathological condition or help improve bodily function or appearance.
+ 1.4
+
+
+
+
+
+
+
+
+
+
+ Urology and nephrology
+
+ The branches of medicine and physiology focussing on the function and disorders of the urinary system in males and females, the reproductive system in males, and the kidney.
+ VT 3.2.29 Urology and nephrology
+ 1.4
+ Urology
+ Kidney disease
+ Urological disorders
+ Nephrology
+
+
+
+
+
+
+
+
+
+
+ Complementary medicine
+
+ Medical therapies that fall beyond the scope of conventional medicine but may be used alongside it in the treatment of disease and ill health.
+ VT 3.2.12 Integrative and Complementary medicine
+ Holistic medicine
+ 1.4
+ Alternative medicine
+ Integrative medicine
+
+
+
+
+
+
+
+
+
+
+ MRI
+
+ Nuclear magnetic resonance imaging
+ 1.7
+ MRT
+ Magnetic resonance tomography
+ Techniques that uses magnetic fields and radiowaves to form images, typically to investigate the anatomy and physiology of the human body.
+ NMRI
+ Magnetic resonance imaging
+
+
+
+
+
+
+
+
+
+
+ Neutron diffraction
+
+
+ The study of matter by studying the diffraction pattern from firing neutrons at a sample, typically to determine atomic and/or magnetic structure.
+ Neutron microscopy
+ Elastic neutron scattering
+ 1.7
+ Neutron diffraction experiment
+
+
+
+
+
+
+
+
+
+ Tomography
+
+ X-ray tomography
+ Imaging in sections (sectioning), through the use of a wave-generating device (tomograph) that generates an image (a tomogram).
+ Electron tomography
+ 1.7
+
+
+
+
+
+
+
+
+
+ Data mining
+
+ 1.7
+ VT 1.3.2 Data mining
+ The discovery of patterns in large data sets and the extraction and transformation of those patterns into a useful format.
+ KDD
+ Knowledge discovery in databases
+
+
+
+
+
+
+
+
+
+ Machine learning
+
+ A topic concerning the application of artificial intelligence methods to algorithms, in order to create methods that can learn from data in order to generate an output, rather than relying on explicitly encoded information only.
+ Artificial Intelligence
+ 1.7
+ VT 1.2.2 Artificial Intelligence (expert systems, machine learning, robotics)
+
+
+
+
+
+
+
+
+
+ Database management
+
+ 1.8
+ Data maintenance
+ Databases
+ Database administration
+ The general handling of data stored in digital archives such as databanks, databases proper, web portals and other data resources.
+
+ This includes databases for the results of scientific experiments, the application of high-throughput technology, computational analysis and the scientific literature.
+ Biological databases
+
+
+
+
+
+
+
+
+
+ Animals
+
+ 1.8
+ Animal biology
+ Animals, e.g. information on a specific animal genome including molecular sequences, genes and annotation.
+ Zoology
+ Animal
+ VT 1.5.29 Zoology
+ The resource may be specific to an animal, a group of animals or all animals.
+ Metazoa
+
+
+
+
+
+
+
+
+
+ Protein sites, features and motifs
+
+ Protein sequence features
+ Protein functional sites
+ 1.8
+ The biology, archival, detection, prediction and analysis of positional features such as functional and other key sites, in protein sequences and the conserved patterns (motifs, profiles etc.) that may be used to describe them.
+
+
+
+
+
+
+
+
+
+ Nucleic acid sites, features and motifs
+
+ Nucleic acid sequence features
+ 1.8
+ Nucleic acid functional sites
+ The biology, archival, detection, prediction and analysis of positional features such as functional and other key sites, in nucleic acid sequences and the conserved patterns (motifs, profiles etc.) that may be used to describe them.
+
+
+
+
+
+
+
+
+
+ Gene transcript features
+
+
+ Nucleic acid features (mRNA features)
+ Features of a messenger RNA (mRNA) molecules including precursor RNA, primary (unprocessed) transcript and fully processed molecules.
+ mRNA features
+ This includes 5'untranslated region (5'UTR), coding sequences (CDS), exons, intervening sequences (intron) and 3'untranslated regions (3'UTR).
+ 1.8
+
+
+
+
+
+
+
+
+
+ Protein-ligand interactions
+
+ 1.8
+ Protein-ligand (small molecule) interaction(s).
+
+
+
+
+
+
+
+
+
+ Protein-drug interactions
+
+ 1.8
+ Protein-drug interaction(s).
+
+
+
+
+
+
+
+
+
+ Genotyping experiment
+
+ 1.8
+ Genotype experiment including case control, population, and family studies. These might use array based methods and re-sequencing methods.
+
+
+
+
+
+
+
+
+
+ GWAS study
+
+ 1.8
+ Genome-wide association study experiments.
+ Genome-wide association study
+
+
+
+
+
+
+
+
+
+ Microarray experiment
+
+ 1.8
+ This might specify which raw data file relates to which sample and information on hybridisations, e.g. which are technical and which are biological replicates.
+ Microarray experiments including conditions, protocol, sample:data relationships etc.
+
+
+
+
+
+
+
+
+
+ PCR experiment
+
+ 1.8
+ PCR experiments, e.g. quantitative real-time PCR.
+
+
+
+
+
+
+
+
+
+ Proteomics experiment
+
+ Proteomics experiments.
+ 1.8
+
+
+
+
+
+
+
+
+
+ 2D PAGE experiment
+
+ Two-dimensional gel electrophoresis experiments, gels or spots in a gel.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Northern blot experiment
+
+ Northern Blot experiments.
+ 1.8
+
+
+
+
+
+
+
+
+
+ RNAi experiment
+
+ 1.8
+ RNAi experiments.
+
+
+
+
+
+
+
+
+
+ Simulation experiment
+
+ 1.8
+ Biological computational model experiments (simulation), for example the minimum information required in order to permit its correct interpretation and reproduction.
+
+
+
+
+
+
+
+
+
+ Protein-nucleic acid interactions
+
+ 1.8
+ Protein-DNA/RNA interaction(s).
+
+
+
+
+
+
+
+
+
+ Protein-protein interactions
+
+ Domain-domain interactions
+ Protein-protein interaction(s), including interactions between protein domains.
+ 1.8
+ Protein interaction networks
+
+
+
+
+
+
+
+
+
+ Cellular process pathways
+
+ 1.8
+ Cellular process pathways.
+
+
+
+
+
+
+
+
+
+ Disease pathways
+
+ Disease pathways, typically of human disease.
+ Pathway or network (disease)
+ 1.8
+
+
+
+
+
+
+
+
+
+ Environmental information processing pathways
+
+ Environmental information processing pathways.
+ 1.8
+ Pathway or network (environmental information processing)
+
+
+
+
+
+
+
+
+
+ Genetic information processing pathways
+
+ Pathway or network (genetic information processing)
+ 1.8
+ Genetic information processing pathways.
+
+
+
+
+
+
+
+
+
+ Protein super-secondary structure
+
+ Super-secondary structure of protein sequence(s).
+ Protein features (super-secondary)
+ 1.8
+ Super-secondary structures include leucine zippers, coiled coils, Helix-Turn-Helix etc.
+
+
+
+
+
+
+
+
+
+ Protein active sites
+
+ Enzyme active site
+ 1.8
+ Protein features (active sites)
+ Catalytic residues (active site) of an enzyme.
+
+
+
+
+
+
+
+
+
+ Protein binding sites
+
+ Ligand-binding (non-catalytic) residues of a protein, such as sites that bind metal, prosthetic groups or lipids.
+ 1.8
+ Protein features (binding sites)
+
+
+
+
+
+
+
+
+
+ Protein-nucleic acid binding sites
+
+ RNA and DNA-binding proteins and binding sites in protein sequences.
+ 1.8
+ Protein features (nucleic acid binding sites)
+
+
+
+
+
+
+
+
+
+ Protein cleavage sites
+
+ Cleavage sites (for a proteolytic enzyme or agent) in a protein sequence.
+ Protein features (cleavage sites)
+ 1.8
+
+
+
+
+
+
+
+
+
+ Protein chemical modifications
+
+ Chemical modification of a protein.
+ Protein features (chemical modifications)
+ MOD:00000
+ 1.8
+ GO:0006464
+
+
+
+
+
+
+
+
+
+ Protein disordered structure
+
+ Disordered structure in a protein.
+ 1.8
+ Protein features (disordered structure)
+
+
+
+
+
+
+
+
+
+ Protein domains
+
+
+ The report will typically include a graphic of the location of domains in a sequence, with associated data such as lists of related sequences, literature references, etc.
+ Structural domains or 3D folds in a protein or polypeptide chain.
+ 1.8
+ Protein structural domains
+ Protein features (domains)
+
+
+
+
+
+
+
+
+
+ Protein key folding sites
+
+
+ Protein features (key folding sites)
+ 1.8
+ Key residues involved in protein folding.
+
+
+
+
+
+
+
+
+
+ Protein post-translational modifications
+
+ Protein features (post-translation modifications)
+ Post-translation modifications
+ Post-translation modifications in a protein sequence, typically describing the specific sites involved.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Protein secondary structure
+
+
+ The location and size of the secondary structure elements and intervening loop regions is typically given. The report can include disulphide bonds and post-translationally formed peptide bonds (crosslinks).
+ Secondary structure (predicted or real) of a protein.
+ Protein features (secondary structure)
+ 1.8
+
+
+
+
+
+
+
+
+
+ Protein sequence repeats
+
+ 1.8
+ Protein features (repeats)
+ Short repetitive subsequences (repeat sequences) in a protein sequence.
+ Protein repeats
+
+
+
+
+
+
+
+
+
+ Protein signal peptides
+
+ Protein features (signal peptides)
+ Signal peptides or signal peptide cleavage sites in protein sequences.
+ 1.8
+
+
+
+
+
+
+
+
+
+ Applied mathematics
+
+ VT 1.1.1 Applied mathematics
+ The application of mathematics to specific problems in science, typically by the formulation and analysis of mathematical models.
+ 1.10
+
+
+
+
+
+
+
+
+
+ Pure mathematics
+
+ VT 1.1.1 Pure mathematics
+ The study of abstract mathematical concepts.
+ 1.10
+
+
+
+
+
+
+
+
+
+ Data governance
+
+ Data handling
+ http://purl.bioontology.org/ontology/MSH/D030541
+ The control of data entry and maintenance to ensure the data meets defined standards, qualities or constraints.
+ 1.10
+ Data stewardship
+
+
+
+
+
+
+
+
+
+ Data quality management
+
+ http://purl.bioontology.org/ontology/MSH/D030541
+ 1.10
+ Data quality
+ Data integrity
+ Data clean-up
+ Data enrichment
+ The quality, integrity, cleaning up and enrichment of data.
+
+
+
+
+
+
+
+
+
+ Freshwater biology
+
+ 1.10
+ VT 1.5.18 Marine and Freshwater biology
+ The study of organisms in freshwater ecosystems.
+
+
+
+
+
+
+
+
+
+
+ Human genetics
+
+ The study of inheritance in human beings.
+ VT 3.1.2 Human genetics
+ 1.10
+
+
+
+
+
+
+
+
+
+
+ Tropical medicine
+
+ 1.10
+ Health problems that are prevalent in tropical and subtropical regions.
+ VT 3.3.14 Tropical medicine
+
+
+
+
+
+
+
+
+
+
+ Medical biotechnology
+
+ 1.10
+ VT 3.4.1 Biomedical devices
+ VT 3.4.2 Health-related biotechnology
+ VT 3.4 Medical biotechnology
+ VT 3.3.14 Tropical medicine
+ Pharmaceutical biotechnology
+ Biotechnology applied to the medical sciences and the development of medicines.
+
+
+
+
+
+
+
+
+
+
+ Personalized medicine
+
+ 1.10
+ The use of information about an individual's molecular and genetic profile to tailor medical decisions and treatments to that individual.
+ Molecular diagnostics
+ VT 3.4.5 Molecular diagnostics
+
+
+
+
+
+
+
+
+
+
+ Obsolete concept (EDAM)
+
+
+ 1.2
+ Needed for conversion to the OBO format.
+ An obsolete concept (redefined in EDAM).
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/unit/tools/cwl_tools/v1.0/Hello.java b/test/unit/tools/cwl_tools/v1.0/Hello.java
new file mode 100644
index 000000000000..6afc99f0d362
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/Hello.java
@@ -0,0 +1 @@
+public class Hello {}
diff --git a/test/unit/tools/cwl_tools/v1.0/abc.json b/test/unit/tools/cwl_tools/v1.0/abc.json
new file mode 100644
index 000000000000..72e106961230
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/abc.json
@@ -0,0 +1,3 @@
+{
+ "ids": ["a", "b", "c"]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/any-type-compat.cwl b/test/unit/tools/cwl_tools/v1.0/any-type-compat.cwl
new file mode 100644
index 000000000000..0eaa71590f13
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/any-type-compat.cwl
@@ -0,0 +1,22 @@
+cwlVersion: v1.0
+class: Workflow
+
+steps: []
+inputs:
+ input1:
+ type: Any
+ input2:
+ type: Any[]
+ input3:
+ type: Any
+
+outputs:
+ - id: output1
+ type: string[]
+ outputSource: input1
+ - id: output2
+ type: string[]
+ outputSource: input2
+ - id: output3
+ type: string
+ outputSource: input3
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/any-type-job.json b/test/unit/tools/cwl_tools/v1.0/any-type-job.json
new file mode 100644
index 000000000000..d5309e47d743
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/any-type-job.json
@@ -0,0 +1,5 @@
+{
+ "input1": ["hello", "world"],
+ "input2": ["foo", "bar"],
+ "input3": "hello"
+}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/args.py b/test/unit/tools/cwl_tools/v1.0/args.py
new file mode 100755
index 000000000000..1baf6a090404
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/args.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+import sys
+import json
+import os
+args = [os.path.basename(a) for a in sys.argv[1:]]
+with open("cwl.output.json", "w") as f:
+ json.dump({"args": args}, f)
diff --git a/test/unit/tools/cwl_tools/v1.0/arguments-job.yml b/test/unit/tools/cwl_tools/v1.0/arguments-job.yml
new file mode 100644
index 000000000000..9e56989a85cb
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/arguments-job.yml
@@ -0,0 +1,3 @@
+src:
+ class: File
+ path: Hello.java
diff --git a/test/unit/tools/cwl_tools/v1.0/array-of-strings-job.yml b/test/unit/tools/cwl_tools/v1.0/array-of-strings-job.yml
new file mode 100644
index 000000000000..113dbbfe220a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/array-of-strings-job.yml
@@ -0,0 +1,5 @@
+array_input:
+ - class: File
+ path: ./hello.txt
+ - class: File
+ path: ./hello.2.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/basename-fields-job.yml b/test/unit/tools/cwl_tools/v1.0/basename-fields-job.yml
new file mode 100644
index 000000000000..b9fd71586897
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/basename-fields-job.yml
@@ -0,0 +1,5 @@
+cwlVersion: v1.0
+tool:
+ class: File
+ path: echo-tool.cwl # could have been any file, this isn't a secret CWL feature :-)
+
diff --git a/test/unit/tools/cwl_tools/v1.0/basename-fields-test.cwl b/test/unit/tools/cwl_tools/v1.0/basename-fields-test.cwl
new file mode 100644
index 000000000000..b46cc47a0e72
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/basename-fields-test.cwl
@@ -0,0 +1,33 @@
+cwlVersion: v1.0
+class: Workflow
+
+requirements:
+ - class: StepInputExpressionRequirement
+
+inputs:
+ tool: File
+
+outputs:
+ rootFile:
+ type: File
+ outputSource: root/out
+ extFile:
+ type: File
+ outputSource: ext/out
+
+steps:
+ root:
+ run: echo-file-tool.cwl
+ in:
+ tool: tool
+ in:
+ valueFrom: $(inputs.tool.nameroot)
+ out: [out]
+ ext:
+ run: echo-file-tool.cwl
+ in:
+ tool: tool
+ in:
+ valueFrom: $(inputs.tool.nameext)
+ out: [out]
+
diff --git a/test/unit/tools/cwl_tools/v1.0/binding-test.cwl b/test/unit/tools/cwl_tools/v1.0/binding-test.cwl
new file mode 100755
index 000000000000..d2e2e54717ed
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/binding-test.cwl
@@ -0,0 +1,32 @@
+#!/usr/bin/env cwl-runner
+
+class: CommandLineTool
+cwlVersion: v1.0
+hints:
+ - class: DockerRequirement
+ dockerPull: python:2-slim
+inputs:
+ - id: reference
+ type: File
+ inputBinding: { position: 2 }
+
+ - id: reads
+ type:
+ type: array
+ items: File
+ inputBinding: { prefix: "-YYY" }
+ inputBinding: { position: 3, prefix: "-XXX" }
+
+ - id: "#args.py"
+ type: File
+ default:
+ class: File
+ location: args.py
+ inputBinding:
+ position: -1
+
+outputs:
+ args: string[]
+
+baseCommand: python
+arguments: ["bwa", "mem"]
diff --git a/test/unit/tools/cwl_tools/v1.0/bool-empty-inputbinding-job.json b/test/unit/tools/cwl_tools/v1.0/bool-empty-inputbinding-job.json
new file mode 100644
index 000000000000..a89b76acdbde
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/bool-empty-inputbinding-job.json
@@ -0,0 +1,3 @@
+{
+ "flag": true
+}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/bool-empty-inputbinding.cwl b/test/unit/tools/cwl_tools/v1.0/bool-empty-inputbinding.cwl
new file mode 100644
index 000000000000..43dbb139dd40
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/bool-empty-inputbinding.cwl
@@ -0,0 +1,22 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+hints:
+ - class: DockerRequirement
+ dockerPull: python:2-slim
+inputs:
+- id: flag
+ type: boolean
+ inputBinding: {}
+- id: "args.py"
+ type: File
+ default:
+ class: File
+ location: args.py
+ inputBinding:
+ position: -1
+outputs:
+- id: args
+ type: string[]
+baseCommand: python
+arguments: []
diff --git a/test/unit/tools/cwl_tools/v1.0/bwa-mem-job.json b/test/unit/tools/cwl_tools/v1.0/bwa-mem-job.json
new file mode 100644
index 000000000000..f3e900deafa0
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/bwa-mem-job.json
@@ -0,0 +1,25 @@
+{
+ "reference": {
+ "class": "File",
+ "location": "chr20.fa",
+ "size": 123,
+ "checksum": "sha1$hash"
+ },
+ "reads": [
+ {
+ "class": "File",
+ "location": "example_human_Illumina.pe_1.fastq"
+ },
+ {
+ "class": "File",
+ "location": "example_human_Illumina.pe_2.fastq"
+ }
+ ],
+ "min_std_max_min": [
+ 1,
+ 2,
+ 3,
+ 4
+ ],
+ "minimum_seed_length": 3
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/bwa-mem-tool.cwl b/test/unit/tools/cwl_tools/v1.0/bwa-mem-tool.cwl
new file mode 100755
index 000000000000..8c5adf7362cc
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/bwa-mem-tool.cwl
@@ -0,0 +1,61 @@
+#!/usr/bin/env cwl-runner
+
+cwlVersion: v1.0
+
+class: CommandLineTool
+
+hints:
+ - class: ResourceRequirement
+ coresMin: 2
+ - class: DockerRequirement
+ dockerPull: python:2-slim
+
+inputs:
+ - id: reference
+ type: File
+ inputBinding: { position: 2 }
+
+ - id: reads
+ type:
+ type: array
+ items: File
+ inputBinding: { position: 3 }
+
+ - id: minimum_seed_length
+ type: int
+ inputBinding: { position: 1, prefix: -m }
+
+ - id: min_std_max_min
+ type: { type: array, items: int }
+ inputBinding:
+ position: 1
+ prefix: -I
+ itemSeparator: ","
+
+ - id: args.py
+ type: File
+ default:
+ class: File
+ location: args.py
+ inputBinding:
+ position: -1
+
+outputs:
+ - id: sam
+ type: ["null", File]
+ outputBinding: { glob: output.sam }
+ - id: args
+ type:
+ type: array
+ items: string
+
+baseCommand: python
+
+arguments:
+ - bwa
+ - mem
+ - valueFrom: $(runtime.cores)
+ position: 1
+ prefix: -t
+
+stdout: output.sam
diff --git a/test/unit/tools/cwl_tools/v1.0/cat-job.json b/test/unit/tools/cwl_tools/v1.0/cat-job.json
new file mode 100644
index 000000000000..837875d924f6
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/cat-job.json
@@ -0,0 +1,6 @@
+{
+ "file1": {
+ "class": "File",
+ "location": "hello.txt"
+ }
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/cat-n-job.json b/test/unit/tools/cwl_tools/v1.0/cat-n-job.json
new file mode 100644
index 000000000000..1b93b815a72f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/cat-n-job.json
@@ -0,0 +1,7 @@
+{
+ "file1": {
+ "class": "File",
+ "location": "hello.txt"
+ },
+ "numbering": true
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/cat-tool.cwl b/test/unit/tools/cwl_tools/v1.0/cat-tool.cwl
new file mode 100644
index 000000000000..3d92a5fc4a22
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/cat-tool.cwl
@@ -0,0 +1,17 @@
+#!/usr/bin/env cwl-runner
+
+class: CommandLineTool
+cwlVersion: v1.0
+
+inputs:
+ file1: File
+
+outputs:
+ output:
+ type: File
+ outputBinding: { glob: output }
+
+baseCommand: [cat]
+
+stdin: $(inputs.file1.path)
+stdout: output
diff --git a/test/unit/tools/cwl_tools/v1.0/cat1-testcli.cwl b/test/unit/tools/cwl_tools/v1.0/cat1-testcli.cwl
new file mode 100755
index 000000000000..26927b749333
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/cat1-testcli.cwl
@@ -0,0 +1,41 @@
+#!/usr/bin/env cwl-runner
+{
+ "class": "CommandLineTool",
+ "cwlVersion": "v1.0",
+ "doc": "Print the contents of a file to stdout using 'cat' running in a docker container.",
+ "hints": [
+ {
+ "class": "DockerRequirement",
+ "dockerPull": "python:2-slim"
+ }
+ ],
+ "inputs": [
+ {
+ "id": "file1",
+ "type": "File",
+ "inputBinding": {"position": 1}
+ },
+ {
+ "id": "numbering",
+ "type": ["null", "boolean"],
+ "inputBinding": {
+ "position": 0,
+ "prefix": "-n"
+ }
+ },
+ {
+ id: "args.py",
+ type: File,
+ default: {
+ class: File,
+ location: args.py
+ },
+ inputBinding: {
+ position: -1
+ }
+ }
+ ],
+ "outputs": [{"id": "args", "type": "string[]"}],
+ "baseCommand": "python",
+ "arguments": ["cat"]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/cat3-nodocker.cwl b/test/unit/tools/cwl_tools/v1.0/cat3-nodocker.cwl
new file mode 100755
index 000000000000..7007aa153ea9
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/cat3-nodocker.cwl
@@ -0,0 +1,16 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+doc: "Print the contents of a file to stdout using 'cat'."
+inputs:
+ file1:
+ type: File
+ label: Input File
+ doc: "The file that will be copied using 'cat'"
+ inputBinding: {position: 1}
+outputs:
+ output_file:
+ type: File
+ outputBinding: {glob: output.txt}
+baseCommand: cat
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/cat3-tool-mediumcut.cwl b/test/unit/tools/cwl_tools/v1.0/cat3-tool-mediumcut.cwl
new file mode 100755
index 000000000000..9a41321269e0
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/cat3-tool-mediumcut.cwl
@@ -0,0 +1,18 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+doc: "Print the contents of a file to stdout using 'cat' running in a docker container."
+hints:
+ DockerRequirement:
+ dockerPull: debian:stretch-slim
+inputs:
+ file1:
+ type: File
+ label: Input File
+ doc: "The file that will be copied using 'cat'"
+ inputBinding: {position: 1}
+outputs:
+ output_file:
+ type: stdout
+baseCommand: cat
+stdout: cat-out
diff --git a/test/unit/tools/cwl_tools/v1.0/cat3-tool-shortcut.cwl b/test/unit/tools/cwl_tools/v1.0/cat3-tool-shortcut.cwl
new file mode 100755
index 000000000000..9d6244636871
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/cat3-tool-shortcut.cwl
@@ -0,0 +1,17 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+doc: "Print the contents of a file to stdout using 'cat' running in a docker container."
+hints:
+ DockerRequirement:
+ dockerPull: debian:stretch-slim
+inputs:
+ file1:
+ type: File
+ label: Input File
+ doc: "The file that will be copied using 'cat'"
+ inputBinding: {position: 1}
+outputs:
+ output_file:
+ type: stdout
+baseCommand: cat
diff --git a/test/unit/tools/cwl_tools/v1.0/cat3-tool.cwl b/test/unit/tools/cwl_tools/v1.0/cat3-tool.cwl
new file mode 100755
index 000000000000..2b7e755252b8
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/cat3-tool.cwl
@@ -0,0 +1,19 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+doc: "Print the contents of a file to stdout using 'cat' running in a docker container."
+hints:
+ DockerRequirement:
+ dockerPull: debian:stretch-slim
+inputs:
+ file1:
+ type: File
+ label: Input File
+ doc: "The file that will be copied using 'cat'"
+ inputBinding: {position: 1}
+outputs:
+ output_file:
+ type: File
+ outputBinding: {glob: output.txt}
+baseCommand: cat
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/cat4-tool.cwl b/test/unit/tools/cwl_tools/v1.0/cat4-tool.cwl
new file mode 100755
index 000000000000..d05c0ae3b6b2
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/cat4-tool.cwl
@@ -0,0 +1,17 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+doc: "Print the contents of a file to stdout using 'cat' running in a docker container."
+hints:
+ DockerRequirement:
+ dockerPull: debian:stretch-slim
+inputs:
+ file1: File
+outputs:
+ output_txt:
+ type: File
+ outputBinding:
+ glob: output.txt
+baseCommand: cat
+stdout: output.txt
+stdin: $(inputs.file1.path)
diff --git a/test/unit/tools/cwl_tools/v1.0/cat5-tool.cwl b/test/unit/tools/cwl_tools/v1.0/cat5-tool.cwl
new file mode 100755
index 000000000000..46c041068e43
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/cat5-tool.cwl
@@ -0,0 +1,23 @@
+#!/usr/bin/env cwl-runner
+$namespaces:
+ ex: http://example.com/
+cwlVersion: v1.0
+class: CommandLineTool
+doc: "Print the contents of a file to stdout using 'cat' running in a docker container."
+hints:
+ DockerRequirement:
+ dockerPull: "debian:stretch-slim"
+ ex:BlibberBlubberFakeRequirement:
+ fakeField: fraggleFroogle
+inputs:
+ file1:
+ type: File
+ label: "Input File"
+ doc: "The file that will be copied using 'cat'"
+ inputBinding: {position: 1}
+outputs:
+ output_file:
+ type: File
+ outputBinding: {glob: output.txt}
+baseCommand: cat
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/chr20.fa b/test/unit/tools/cwl_tools/v1.0/chr20.fa
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/tools/cwl_tools/v1.0/conflict-job.json b/test/unit/tools/cwl_tools/v1.0/conflict-job.json
new file mode 100644
index 000000000000..33aa6713e86f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/conflict-job.json
@@ -0,0 +1,4 @@
+{
+ "input_1" : "Hello World",
+ "input_2" : "How are you?"
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/conflict-wf.cwl b/test/unit/tools/cwl_tools/v1.0/conflict-wf.cwl
new file mode 100644
index 000000000000..2047e96cbc96
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/conflict-wf.cwl
@@ -0,0 +1,72 @@
+cwlVersion: v1.0
+$graph:
+- id: echo
+ class: CommandLineTool
+ inputs:
+ text:
+ type: string
+ inputBinding: {}
+
+ outputs:
+ fileout:
+ type: File
+ outputBinding:
+ glob: out.txt
+
+ baseCommand: echo
+ stdout: out.txt
+
+- id: cat
+ class: CommandLineTool
+ inputs:
+ file1:
+ type: File
+ inputBinding:
+ position: 1
+ file2:
+ type: File
+ inputBinding:
+ position: 2
+
+ outputs:
+ fileout:
+ type: File
+ outputBinding:
+ glob: out.txt
+
+ baseCommand: cat
+ stdout: out.txt
+
+- class: Workflow
+ id: collision
+
+ inputs:
+ input_1: string
+ input_2: string
+
+ outputs:
+ fileout:
+ type: File
+ outputSource: cat_step/fileout
+
+ steps:
+ echo_1:
+ run: "#echo"
+ in:
+ text: input_1
+ out: [fileout]
+
+ echo_2:
+ run: "#echo"
+ in:
+ text: input_2
+ out: [fileout]
+
+ cat_step:
+ run: "#cat"
+ in:
+ file1:
+ source: echo_1/fileout
+ file2:
+ source: echo_2/fileout
+ out: [fileout]
diff --git a/test/unit/tools/cwl_tools/v1.0/conformance_tests.yaml b/test/unit/tools/cwl_tools/v1.0/conformance_tests.yaml
new file mode 100644
index 000000000000..15d550f971f0
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/conformance_tests.yaml
@@ -0,0 +1,2076 @@
+- job: v1.0/bwa-mem-job.json
+ tool: v1.0/bwa-mem-tool.cwl
+ output:
+ args: [bwa, mem, -t, '2', -I, '1,2,3,4', -m, '3',
+ chr20.fa,
+ example_human_Illumina.pe_1.fastq,
+ example_human_Illumina.pe_2.fastq]
+ label: cl_basic_generation
+ doc: General test of command line generation
+ tags: [ required, command_line_tool ]
+
+- output:
+ args: [bwa, mem, chr20.fa,
+ "-XXX",
+ "-YYY", example_human_Illumina.pe_1.fastq,
+ "-YYY", example_human_Illumina.pe_2.fastq]
+ job: v1.0/bwa-mem-job.json
+ tool: v1.0/binding-test.cwl
+ label: nested_prefixes_arrays
+ doc: Test nested prefixes with arrays
+ tags: [ required, command_line_tool ]
+
+- output:
+ args: [tmap, mapall, stage1, map1, --min-seq-length, '20', map2, --min-seq-length,
+ '20', stage2, map1, --max-seq-length, '20', --min-seq-length, '10', --seed-length,
+ '16', map2, --max-seed-hits, '-1', --max-seq-length, '20', --min-seq-length, '10']
+ job: v1.0/tmap-job.json
+ tool: v1.0/tmap-tool.cwl
+ label: nested_cl_bindings
+ doc: Test nested command line bindings
+ tags: [ schema_def, command_line_tool ]
+
+- output:
+ args: [cat, hello.txt]
+ job: v1.0/cat-job.json
+ tool: v1.0/cat1-testcli.cwl
+ label: cl_optional_inputs_missing
+ doc: Test command line with optional input (missing)
+ tags: [ required, command_line_tool ]
+
+- output:
+ args: [cat, -n, hello.txt]
+ job: v1.0/cat-n-job.json
+ tool: v1.0/cat1-testcli.cwl
+ label: cl_optional_bindings_provided
+ doc: Test command line with optional input (provided)
+ tags: [ required, command_line_tool ]
+
+- output:
+ "foo": {
+ "checksum": "sha1$63da67422622fbf9251a046d7a34b7ea0fd4fead",
+ "class": "File",
+ "location": "foo.txt",
+ "size": 22
+ }
+ job: v1.0/cat-job.json
+ tool: v1.0/template-tool.cwl
+ label: initworkdir_expreng_requirements
+ doc: Test InitialWorkDirRequirement ExpressionEngineRequirement.engineConfig feature
+ tags: [ initial_work_dir, inline_javascript, command_line_tool ]
+
+- job: v1.0/cat-job.json
+ output:
+ output_file:
+ class: File
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ location: output.txt
+ size: 13
+ tool: v1.0/cat3-tool.cwl
+ label: stdout_redirect_docker
+ doc: Test command execution in Docker with stdout redirection
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/cat-job.json
+ tool: v1.0/cat3-tool-shortcut.cwl
+ output:
+ output_file:
+ class: File
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ location: Any
+ size: 13
+ label: stdout_redirect_shortcut_docker
+ doc: Test command execution in Docker with shortcut stdout redirection
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/cat-job.json
+ output:
+ output_file:
+ class: File
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ location: cat-out
+ size: 13
+ tool: v1.0/cat3-tool-mediumcut.cwl
+ label: stdout_redirect_mediumcut_docker
+ doc: Test command execution in Docker with mediumcut stdout redirection
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/empty.json
+ tool: v1.0/stderr.cwl
+ label: stderr_redirect
+ doc: Test command line with stderr redirection
+ output:
+ output_file:
+ class: File
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ size: 4
+ location: error.txt
+ tags: [ shell_command, command_line_tool ]
+
+- job: v1.0/empty.json
+ tool: v1.0/stderr-shortcut.cwl
+ label: stderr_redirect_shortcut
+ doc: Test command line with stderr redirection, brief syntax
+ output:
+ output_file:
+ class: File
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ size: 4
+ location: Any
+ tags: [ shell_command, command_line_tool ]
+
+- output:
+ output_file:
+ class: File
+ size: 4
+ checksum: sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
+ location: std.err
+ job: v1.0/empty.json
+ tool: v1.0/stderr-mediumcut.cwl
+ label: stderr_redirect_mediumcut
+ doc: Test command line with stderr redirection, named brief syntax
+ tags: [ shell_command, command_line_tool ]
+
+- job: v1.0/cat-job.json
+ output:
+ output_txt:
+ class: File
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ location: output.txt
+ size: 13
+ tool: v1.0/cat4-tool.cwl
+ label: stdinout_redirect_docker
+ doc: Test command execution in Docker with stdin and stdout redirection
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/empty.json
+ tool: v1.0/null-expression1-tool.cwl
+ output:
+ output: 1
+ label: expression_any
+ doc: Test default usage of Any in expressions.
+ tags: [ inline_javascript, expression_tool ]
+
+- job: v1.0/null-expression1-job.json
+ tool: v1.0/null-expression1-tool.cwl
+ output:
+ output: 1
+ label: expression_any_null
+ doc: Test explicitly passing null to Any type inputs with default values.
+ tags: [ inline_javascript, expression_tool ]
+
+- job: v1.0/null-expression2-job.json
+ tool: v1.0/null-expression1-tool.cwl
+ output:
+ output: 2
+ label: expression_any_string
+ doc: Testing the string 'null' does not trip up an Any with a default value.
+ tags: [ inline_javascript, expression_tool ]
+
+- job: v1.0/empty.json
+ tool: v1.0/null-expression2-tool.cwl
+ should_fail: true
+ label: expression_any_nodefaultany
+ doc: Test Any without defaults cannot be unspecified.
+ tags: [ inline_javascript, expression_tool ]
+
+- job: v1.0/null-expression1-job.json
+ tool: v1.0/null-expression2-tool.cwl
+ should_fail: true
+ label: expression_any_null_nodefaultany
+ doc: Test explicitly passing null to Any type without a default value.
+ tags: [ inline_javascript, expression_tool ]
+
+- job: v1.0/null-expression2-job.json
+ tool: v1.0/null-expression2-tool.cwl
+ output:
+ output: 2
+ label: expression_any_nullstring_nodefaultany
+ doc: Testing the string 'null' does not trip up an Any without a default value.
+ tags: [ inline_javascript, expression_tool ]
+
+- job: v1.0/any-type-job.json
+ tool: v1.0/any-type-compat.cwl
+ output:
+ output1: ["hello", "world"]
+ output2: ["foo", "bar"]
+ output3: hello
+ label: any_outputSource_compatibility
+ doc: Testing Any type compatibility in outputSource
+ tags: [ required, workflow ]
+
+- job: v1.0/cat-job.json
+ output:
+ output:
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ class: File
+ location: output
+ size: 13
+ tool: v1.0/cat-tool.cwl
+ label: stdinout_redirect
+ doc: Test command execution in with stdin and stdout redirection
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/parseInt-job.json
+ output: {output: 42}
+ tool: v1.0/parseInt-tool.cwl
+ label: expression_parseint
+ doc: Test ExpressionTool with Docker-based expression engine
+ tags: [ inline_javascript, expression_tool ]
+
+- job: v1.0/wc-job.json
+ output: {output: 16}
+ tool: v1.0/wc2-tool.cwl
+ label: expression_outputEval
+ doc: Test outputEval to transform output
+ tags: [ inline_javascript, command_line_tool ]
+
+- job: v1.0/wc-job.json
+ output: {count_output: 16}
+ tool: v1.0/count-lines1-wf.cwl
+ label: wf_wc_parseInt
+ doc: Test two step workflow with imported tools
+ tags: [ inline_javascript, workflow ]
+
+- job: v1.0/wc-job.json
+ output: {count_output: 16}
+ tool: v1.0/count-lines2-wf.cwl
+ label: wf_wc_expressiontool
+ doc: Test two step workflow with inline tools
+ tags: [ inline_javascript, workflow ]
+
+- job: v1.0/count-lines3-job.json
+ output:
+ count_output: [16, 1]
+ tool: v1.0/count-lines3-wf.cwl
+ label: wf_wc_scatter
+ doc: Test single step workflow with Scatter step
+ tags: [ scatter, inline_javascript, workflow ]
+
+- job: v1.0/count-lines4-job.json
+ output:
+ count_output: [16, 1]
+ tool: v1.0/count-lines4-wf.cwl
+ label: wf_wc_scatter_multiple_merge
+ doc: |
+ Test single step workflow with Scatter step and two data links connected to
+ same input, default merge behavior
+ tags: [ scatter, multiple_input, inline_javascript, workflow ]
+
+- job: v1.0/count-lines6-job.json
+ output:
+ count_output: [32, 2]
+ tool: v1.0/count-lines6-wf.cwl
+ label: wf_wc_scatter_multiple_nested
+ doc: |
+ Test single step workflow with Scatter step and two data links connected to
+ same input, nested merge behavior
+ tags: [ scatter, multiple_input, inline_javascript, workflow ]
+
+- job: v1.0/count-lines6-job.json
+ output:
+ count_output: 34
+ tool: v1.0/count-lines7-wf.cwl
+ label: wf_wc_scatter_multiple_flattened
+ doc: |
+ Test single step workflow with Scatter step and two data links connected to
+ same input, flattened merge behavior
+ tags: [ multiple_input, inline_javascript, workflow ]
+
+- job: v1.0/count-lines6-job.json
+ output:
+ count_output: 32
+ tool: v1.0/count-lines13-wf.cwl
+ label: wf_wc_nomultiple
+ doc: |
+ Test that no MultipleInputFeatureRequirement is necessary when
+ workflow step source is a single-item list
+ tags: [ inline_javascript, workflow ]
+
+- job: v1.0/empty.json
+ output: {count_output: 1}
+ tool: v1.0/count-lines5-wf.cwl
+ label: wf_input_default_missing
+ doc: Test workflow with default value for input parameter (missing)
+ tags: [ inline_javascript, workflow ]
+
+- job: v1.0/wc-job.json
+ output: {count_output: 16}
+ tool: v1.0/count-lines5-wf.cwl
+ label: wf_input_default_provided
+ doc: Test workflow with default value for input parameter (provided)
+ tags: [ inline_javascript, workflow ]
+
+- job: v1.0/empty.json
+ output: {default_output: workflow_default}
+ tool: v1.0/echo-wf-default.cwl
+ label: wf_default_tool_default
+ doc: Test that workflow defaults override tool defaults
+ tags: [ required, workflow ]
+
+- job: v1.0/env-job.json
+ output:
+ out:
+ class: File
+ checksum: sha1$b3ec4ed1749c207e52b3a6d08c59f31d83bff519
+ location: out
+ size: 15
+ tool: v1.0/env-tool1.cwl
+ label: envvar_req
+ doc: Test EnvVarRequirement
+ tags: [ env_var, command_line_tool ]
+
+- job: v1.0/scatter-job1.json
+ output:
+ out: ["foo one", "foo two", "foo three", "foo four"]
+ tool: v1.0/scatter-wf1.cwl
+ label: wf_scatter_single_param
+ doc: Test workflow scatter with single scatter parameter
+ tags: [ scatter, workflow ]
+
+- job: v1.0/scatter-job2.json
+ output:
+ out: [["foo one three", "foo one four"], ["foo two three", "foo two four"]]
+ tool: v1.0/scatter-wf2.cwl
+ label: wf_scatter_two_nested_crossproduct
+ doc: Test workflow scatter with two scatter parameters and nested_crossproduct join method
+ tags: [ scatter, workflow ]
+
+- job: v1.0/scatter-job2.json
+ output:
+ out: ["foo one three", "foo one four", "foo two three", "foo two four"]
+ tool: "v1.0/scatter-wf3.cwl#main"
+ label: wf_scatter_two_flat_crossproduct
+ doc: Test workflow scatter with two scatter parameters and flat_crossproduct join method
+ tags: [ scatter, workflow ]
+
+- job: v1.0/scatter-job2.json
+ output:
+ out: ["foo one three", "foo two four"]
+ tool: "v1.0/scatter-wf4.cwl#main"
+ label: wf_scatter_two_dotproduct
+ doc: Test workflow scatter with two scatter parameters and dotproduct join method
+ tags: [ scatter, workflow ]
+
+- job: v1.0/scatter-empty-job1.json
+ output:
+ out: []
+ tool: v1.0/scatter-wf1.cwl
+ label: wf_scatter_emptylist
+ doc: Test workflow scatter with single empty list parameter
+ tags: [ scatter, workflow ]
+
+- job: v1.0/scatter-empty-job2.json
+ output:
+ out: [[], []]
+ tool: v1.0/scatter-wf2.cwl
+ label: wf_scatter_nested_crossproduct_secondempty
+ doc: Test workflow scatter with two scatter parameters and nested_crossproduct join method with second list empty
+ tags: [ scatter, workflow ]
+
+- job: v1.0/scatter-empty-job3.json
+ output:
+ out: []
+ tool: "v1.0/scatter-wf3.cwl#main"
+ label: wf_scatter_nested_crossproduct_firstempty
+ doc: Test workflow scatter with two scatter parameters and nested_crossproduct join method with first list empty
+ tags: [ scatter, workflow ]
+
+- job: v1.0/scatter-empty-job2.json
+ output:
+ out: []
+ tool: "v1.0/scatter-wf3.cwl#main"
+ label: wf_scatter_flat_crossproduct_oneempty
+ doc: Test workflow scatter with two scatter parameters, one of which is empty and flat_crossproduct join method
+ tags: [ scatter, workflow ]
+
+- job: v1.0/scatter-empty-job4.json
+ output:
+ out: []
+ tool: "v1.0/scatter-wf4.cwl#main"
+ label: wf_scatter_dotproduct_twoempty
+ doc: Test workflow scatter with two empty scatter parameters and dotproduct join method
+ tags: [ scatter, workflow ]
+
+- tool: v1.0/echo-tool.cwl
+ job: v1.0/env-job.json
+ output:
+ {"out": "hello test env\n"}
+ label: any_input_param
+ doc: Test Any type input parameter
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/wc-job.json
+ output: {count_output: 16}
+ tool: v1.0/count-lines8-wf.cwl
+ label: nested_workflow
+ doc: Test nested workflow
+ tags: [ subworkflow, workflow ]
+
+- job: v1.0/env-job.json
+ output:
+ out:
+ class: File
+ checksum: sha1$b3ec4ed1749c207e52b3a6d08c59f31d83bff519
+ location: out
+ size: 15
+ tool: v1.0/env-wf1.cwl
+ label: requirement_priority
+ doc: Test requirement priority
+ tags: [ env_var, workflow ]
+
+- job: v1.0/env-job.json
+ output:
+ out:
+ class: File
+ checksum: sha1$cdc1e84968261d6a7575b5305945471f8be199b6
+ location: out
+ size: 9
+ tool: v1.0/env-wf2.cwl
+ label: requirement_override_hints
+ doc: Test requirements override hints
+ tags: [ env_var, workflow ]
+
+- job: v1.0/env-job.json
+ output:
+ out:
+ class: File
+ checksum: sha1$cdc1e84968261d6a7575b5305945471f8be199b6
+ location: out
+ size: 9
+ tool: v1.0/env-wf3.cwl
+ label: requirement_workflow_steps
+ doc: Test requirements on workflow steps
+ tags: [ env_var, workflow ]
+
+- job: v1.0/empty.json
+ output: {count_output: 16}
+ tool: v1.0/count-lines9-wf.cwl
+ label: step_input_default_value
+ doc: Test default value on step input parameter
+ tags: [ inline_javascript, workflow ]
+
+- job: v1.0/empty.json
+ output: {count_output: 16}
+ tool: v1.0/count-lines11-wf.cwl
+ label: step_input_default_value_nosource
+ doc: Test use default value on step input parameter with empty source
+ tags: [ inline_javascript, workflow ]
+
+- job: v1.0/file1-null.json
+ output: {count_output: 16}
+ tool: v1.0/count-lines11-wf.cwl
+ label: step_input_default_value_nullsource
+ doc: Test use default value on step input parameter with null source
+ tags: [ inline_javascript, workflow ]
+
+- job: v1.0/cat-job.json
+ output: {count_output: 1}
+ tool: v1.0/count-lines11-wf.cwl
+ label: step_input_default_value_overriden
+ doc: Test default value on step input parameter overridden by provided source
+ tags: [ inline_javascript, workflow ]
+
+- job: v1.0/revsort-job.json
+ output:
+ output:
+ class: File
+ checksum: sha1$b9214658cc453331b62c2282b772a5c063dbd284
+ location: output.txt
+ size: 1111
+ tool: v1.0/revsort.cwl
+ label: wf_simple
+ doc: Test simple workflow
+ tags: [ required, workflow ]
+
+- job: v1.0/cat-job.json
+ output:
+ output_file:
+ class: File
+ checksum: sha1$47a013e660d408619d894b20806b1d5086aab03b
+ location: output.txt
+ size: 13
+ tool: v1.0/cat5-tool.cwl
+ label: hints_unknown_ignored
+ doc: Test unknown hints are ignored.
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/search-job.json
+ output:
+ outfile:
+ class: File
+ checksum: sha1$e2dc9daaef945ac15f01c238ed2f1660f60909a0
+ location: result.txt
+ size: 142
+ indexedfile: {
+ "location": "input.txt",
+ "class": "File",
+ "checksum": "sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376",
+ "secondaryFiles": [
+ {
+ "location": "input.txt.idx1",
+ "class": "File",
+ "checksum": "sha1$553f3a09003a9f69623f03bec13c0b078d706023",
+ "size": 1500
+ },
+ {
+ "location": "input.idx2",
+ "class": "File",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "size": 0
+ },
+ {
+ "location": "input.txt.idx3",
+ "class": "File",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "size": 0
+ },
+ {
+ "location": "input.txt.idx4",
+ "class": "File",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "size": 0
+ },
+ {
+ "location": "input.txt.idx5",
+ "class": "File",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "size": 0
+ },
+ {
+ "location": "input.idx6.txt",
+ "class": "File",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "size": 0
+ },
+ {
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "class": "File",
+ "location": "input.txt.idx7",
+ "size": 0
+ },
+ {
+ "checksum": "sha1$47a013e660d408619d894b20806b1d5086aab03b",
+ "class": "File",
+ "location": "hello.txt",
+ "size": 13
+ },
+ {
+ "class": "Directory",
+ "listing": [{
+ "basename": "index",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "class": "File",
+ "location": "index",
+ "size": 0
+ }],
+ "location": "input.txt_idx8"
+ }
+ ],
+ "size": 1111
+ }
+ tool: "v1.0/search.cwl#main"
+ label: initial_workdir_secondary_files_expr
+ doc: |
+ Test InitialWorkDirRequirement linking input files and capturing secondaryFiles
+ on input and output. Also tests the use of a variety of parameter references
+ and expressions in the secondaryFiles field.
+ tags: [ initial_work_dir, inline_javascript, command_line_tool ]
+
+- job: v1.0/rename-job.json
+ output:
+ outfile:
+ class: File
+ checksum: sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376
+ location: fish.txt
+ size: 1111
+ tool: v1.0/rename.cwl
+ label: rename
+ doc: |
+ Test InitialWorkDirRequirement with expression in filename.
+ tags: [ initial_work_dir, command_line_tool ]
+
+- job: v1.0/string-job.json
+ output:
+ out:
+ class: File
+ checksum: sha1$6a47aa22b2a9d13a66a24b3ee5eaed95ce4753cf
+ location: example.conf
+ size: 16
+ tool: v1.0/iwdr-entry.cwl
+ label: initial_workdir_trailingnl
+ doc: Test if trailing newline is present in file entry in InitialWorkDir
+ tags: [ initial_work_dir, command_line_tool ]
+
+- job: v1.0/wc-job.json
+ output:
+ output: 16
+ tool: v1.0/wc4-tool.cwl
+ label: inline_expressions
+ doc: |
+ Test inline expressions
+ tags: [ inline_javascript, command_line_tool ]
+
+- job: v1.0/schemadef-job.json
+ output:
+ output:
+ location: output.txt
+ size: 12
+ class: File
+ checksum: "sha1$f12e6cfe70f3253f70b0dbde17c692e7fb0f1e5e"
+ tool: v1.0/schemadef-tool.cwl
+ label: schemadef_req_tool_param
+ doc: |
+ Test SchemaDefRequirement definition used in tool parameter
+ tags: [ schema_def, command_line_tool ]
+
+- job: v1.0/schemadef-job.json
+ output:
+ output:
+ location: output.txt
+ size: 12
+ class: File
+ checksum: "sha1$f12e6cfe70f3253f70b0dbde17c692e7fb0f1e5e"
+ tool: v1.0/schemadef-wf.cwl
+ label: schemadef_req_wf_param
+ doc: |
+ Test SchemaDefRequirement definition used in workflow parameter
+ tags: [ schema_def, workflow ]
+
+- job: v1.0/empty.json
+ output: {
+ "t1": {
+ "bar": {
+ "b az": 2,
+ "b\"az": null,
+ "b'az": true,
+ "baz": "zab1",
+ "buz": [
+ "a",
+ "b",
+ "c"
+ ]
+ }
+ },
+ "t10": true,
+ "t11": true,
+ "t12": null,
+ "t13": "-zab1",
+ "t14": "-zab1",
+ "t15": "-zab1",
+ "t16": "-zab1",
+ "t17": "zab1 zab1",
+ "t18": "zab1 zab1",
+ "t19": "zab1 zab1",
+ "t2": {
+ "b az": 2,
+ "b\"az": null,
+ "b'az": true,
+ "baz": "zab1",
+ "buz": [
+ "a",
+ "b",
+ "c"
+ ]
+ },
+ "t20": "zab1 zab1",
+ "t21": "2 2",
+ "t22": "true true",
+ "t23": "true true",
+ "t24": "null null",
+ "t25": "b",
+ "t26": "b b",
+ "t3": {
+ "b az": 2,
+ "b\"az": null,
+ "b'az": true,
+ "baz": "zab1",
+ "buz": [
+ "a",
+ "b",
+ "c"
+ ]
+ },
+ "t4": {
+ "b az": 2,
+ "b\"az": null,
+ "b'az": true,
+ "baz": "zab1",
+ "buz": [
+ "a",
+ "b",
+ "c"
+ ]
+ },
+ "t5": "zab1",
+ "t6": "zab1",
+ "t7": "zab1",
+ "t8": "zab1",
+ "t9": 2,
+ "t27": null,
+ "t28": 3
+ }
+ tool: v1.0/params.cwl
+ label: param_evaluation_noexpr
+ doc: |
+ Test parameter evaluation, no support for JS expressions
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/empty.json
+ output: {
+ "t1": {
+ "bar": {
+ "b az": 2,
+ "b\"az": null,
+ "b'az": true,
+ "baz": "zab1",
+ "buz": [
+ "a",
+ "b",
+ "c"
+ ]
+ }
+ },
+ "t10": true,
+ "t11": true,
+ "t12": null,
+ "t13": "-zab1",
+ "t14": "-zab1",
+ "t15": "-zab1",
+ "t16": "-zab1",
+ "t17": "zab1 zab1",
+ "t18": "zab1 zab1",
+ "t19": "zab1 zab1",
+ "t2": {
+ "b az": 2,
+ "b\"az": null,
+ "b'az": true,
+ "baz": "zab1",
+ "buz": [
+ "a",
+ "b",
+ "c"
+ ]
+ },
+ "t20": "zab1 zab1",
+ "t21": "2 2",
+ "t22": "true true",
+ "t23": "true true",
+ "t24": "null null",
+ "t25": "b",
+ "t26": "b b",
+ "t3": {
+ "b az": 2,
+ "b\"az": null,
+ "b'az": true,
+ "baz": "zab1",
+ "buz": [
+ "a",
+ "b",
+ "c"
+ ]
+ },
+ "t4": {
+ "b az": 2,
+ "b\"az": null,
+ "b'az": true,
+ "baz": "zab1",
+ "buz": [
+ "a",
+ "b",
+ "c"
+ ]
+ },
+ "t5": "zab1",
+ "t6": "zab1",
+ "t7": "zab1",
+ "t8": "zab1",
+ "t9": 2,
+ "t27": null,
+ "t28": 3
+ }
+ tool: v1.0/params2.cwl
+ label: param_evaluation_expr
+ doc: |
+ Test parameter evaluation, with support for JS expressions
+ tags: [ inline_javascript, command_line_tool ]
+
+- output: {}
+ job: v1.0/cat-job.json
+ tool: v1.0/metadata.cwl
+ label: metadata
+ doc: Test metadata
+ tags: [ required ]
+
+- job: v1.0/formattest-job.json
+ output:
+ output:
+ "location": "output.txt"
+ "format": "http://edamontology.org/format_2330"
+ "size": 1111
+ "class": "File"
+ "checksum": "sha1$97fe1b50b4582cebc7d853796ebd62e3e163aa3f"
+ tool: v1.0/formattest.cwl
+ label: format_checking
+ doc: |
+ Test simple format checking.
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/formattest2-job.json
+ output:
+ output:
+ "location": "output.txt"
+ "format": "http://edamontology.org/format_1929"
+ "size": 12010
+ "class": "File"
+ "checksum": "sha1$971d88faeda85a796752ecf752b7e2e34f1337ce"
+ tool: v1.0/formattest2.cwl
+ label: format_checking_subclass
+ doc: |
+ Test format checking against ontology using subclassOf.
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/formattest2-job.json
+ output:
+ output:
+ "location": "output.txt"
+ "format": "http://edamontology.org/format_1929"
+ "size": 12010
+ "class": "File"
+ "checksum": "sha1$971d88faeda85a796752ecf752b7e2e34f1337ce"
+ tool: v1.0/formattest3.cwl
+ label: format_checking_equivalentclass
+ doc: |
+ Test format checking against ontology using equivalentClass.
+ tags: [ required, command_line_tool ]
+
+- tool: v1.0/optional-output.cwl
+ job: v1.0/cat-job.json
+ output:
+ optional_file: null
+ output_file:
+ location: output.txt
+ size: 13
+ class: "File"
+ checksum: "sha1$47a013e660d408619d894b20806b1d5086aab03b"
+ label: output_secondaryfile_optional
+ doc: |
+ Test optional output file and optional secondaryFile on output.
+ tags: [ docker, command_line_tool ]
+
+
+- job: v1.0/empty.json
+ output:
+ out: "\n"
+ tool: v1.0/vf-concat.cwl
+ label: valuefrom_ignored_null
+ doc: Test that valueFrom is ignored when the parameter is null
+ tags: [ inline_javascript, command_line_tool ]
+
+- job: v1.0/cat-job.json
+ output:
+ out: "a string\n"
+ tool: v1.0/vf-concat.cwl
+ label: valuefrom_secondexpr_ignored
+ doc: Test that second expression in concatenated valueFrom is not ignored
+ tags: [ inline_javascript, command_line_tool ]
+
+- job: v1.0/step-valuefrom-wf.json
+ output: {count_output: 16}
+ tool: v1.0/step-valuefrom-wf.cwl
+ label: valuefrom_wf_step
+ doc: Test valueFrom on workflow step.
+ tags: [ step_input, inline_javascript, workflow ]
+
+- job: v1.0/step-valuefrom-job.json
+ output: {val: "3\n"}
+ tool: v1.0/step-valuefrom2-wf.cwl
+ label: valuefrom_wf_step_multiple
+ doc: Test valueFrom on workflow step with multiple sources
+ tags: [ step_input, inline_javascript, multiple_input, workflow ]
+
+- job: v1.0/step-valuefrom-job.json
+ output: {val: "3\n"}
+ tool: v1.0/step-valuefrom3-wf.cwl
+ label: valuefrom_wf_step_other
+ doc: Test valueFrom on workflow step referencing other inputs
+ tags: [ step_input, inline_javascript, workflow ]
+
+- job: v1.0/record-output-job.json
+ output:
+ "orec": {
+ "ofoo": {
+ "location": "foo",
+ "size": 1111,
+ "class": "File",
+ "checksum": "sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376"
+ },
+ "obar": {
+ "location": "bar",
+ "size": 12010,
+ "class": "File",
+ "checksum": "sha1$aeb3d11bdf536511649129f4077d5cda6a324118"
+ }
+ }
+ tool: v1.0/record-output.cwl
+ label: record_output_binding
+ doc: Test record type output binding.
+ tags: [ shell_command, command_line_tool ]
+
+- job: v1.0/empty.json
+ output: {
+ "foo": {
+ "location": "foo",
+ "class": "File",
+ "checksum": "sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15",
+ "size": 4
+ }
+ }
+ tool: v1.0/test-cwl-out.cwl
+ label: docker_json_output_path
+ doc: |
+ Test support for reading cwl.output.json when running in a Docker container
+ and just 'path' is provided.
+ tags: [ shell_command, command_line_tool ]
+
+- job: v1.0/empty.json
+ output: {
+ "foo": {
+ "location": "foo",
+ "class": "File",
+ "checksum": "sha1$f1d2d2f924e986ac86fdf7b36c94bcdf32beec15",
+ "size": 4
+ }
+ }
+ tool: v1.0/test-cwl-out2.cwl
+ label: docker_json_output_location
+ doc: |
+ Test support for reading cwl.output.json when running in a Docker container
+ and just 'location' is provided.
+ tags: [ shell_command, command_line_tool ]
+
+- job: v1.0/abc.json
+ output:
+ files: [{
+ "location": "a",
+ "size": 0,
+ "class": "File",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709"
+ },
+ {
+ "location": "b",
+ "size": 0,
+ "class": "File",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709"
+ },
+ {
+ "location": "c",
+ "size": 0,
+ "class": "File",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709"
+ }]
+ tool: v1.0/glob-expr-list.cwl
+ label: multiple_glob_expr_list
+ doc: Test support for returning multiple glob patterns from expression
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/scatter-valuefrom-job1.json
+ output:
+ out: ["foo one one", "foo one two", "foo one three", "foo one four"]
+ tool: v1.0/scatter-valuefrom-wf1.cwl
+ label: wf_scatter_oneparam_valuefrom
+ doc: Test workflow scatter with single scatter parameter and two valueFrom on step input (first and current el)
+ tags: [ scatter, step_input, workflow ]
+
+- job: v1.0/scatter-valuefrom-job2.json
+ output:
+ out: [["foo one one three", "foo one one four"], ["foo one two three", "foo one two four"]]
+ tool: v1.0/scatter-valuefrom-wf2.cwl
+ label: wf_scatter_twoparam_nested_crossproduct_valuefrom
+ doc: Test workflow scatter with two scatter parameters and nested_crossproduct join method and valueFrom on step input
+ tags: [ scatter, step_input, workflow ]
+
+- job: v1.0/scatter-valuefrom-job2.json
+ output:
+ out: ["foo one one three", "foo one one four", "foo one two three", "foo one two four"]
+ tool: "v1.0/scatter-valuefrom-wf3.cwl#main"
+ label: wf_scatter_twoparam_flat_crossproduct_valuefrom
+ doc: Test workflow scatter with two scatter parameters and flat_crossproduct join method and valueFrom on step input
+ tags: [ scatter, step_input, workflow ]
+
+- job: v1.0/scatter-valuefrom-job2.json
+ output:
+ out: ["foo one one three", "foo one two four"]
+ tool: "v1.0/scatter-valuefrom-wf4.cwl#main"
+ label: wf_scatter_twoparam_dotproduct_valuefrom
+ doc: Test workflow scatter with two scatter parameters and dotproduct join method and valueFrom on step input
+ tags: [ scatter, step_input, workflow ]
+
+- job: v1.0/scatter-valuefrom-job1.json
+ output:
+ out: ["foo one one", "foo two two", "foo three three", "foo four four"]
+ tool: v1.0/scatter-valuefrom-wf5.cwl
+ label: wf_scatter_oneparam_valuefrom_twice_current_el
+ doc: Test workflow scatter with single scatter parameter and two valueFrom on step input (current el twice)
+ tags: [ scatter, step_input, workflow ]
+
+- job: v1.0/scatter-valuefrom-job3.json
+ tool: v1.0/scatter-valuefrom-wf6.cwl
+ label: wf_scatter_oneparam_valueFrom
+ doc: Test valueFrom eval on scattered input parameter
+ output:
+ out_message: [
+ {
+ "checksum": "sha1$98030575f6fc40e5021be5a8803a6bef94aee11f",
+ "location": Any,
+ "class": "File",
+ "size": 16
+ },
+ {
+ "checksum": "sha1$edcacd50778d98ae113015406b3195c165059dd8",
+ "location": Any,
+ "class": "File",
+ "size": 16
+ }
+ ]
+ tags: [ scatter, step_input, workflow ]
+
+- job: v1.0/conflict-job.json
+ output: {
+ "fileout": {
+ "location": "out.txt",
+ "checksum": "sha1$a2d8d6e7b28295dc9977dc3bdb652ddd480995f0",
+ "class": "File",
+ "size": 25
+ }
+ }
+ tool: "v1.0/conflict-wf.cwl#collision"
+ label: wf_two_inputfiles_namecollision
+ doc: Test workflow two input files with same name.
+ tags: [ required, workflow ]
+
+- job: v1.0/dir-job.yml
+ output:
+ "outlist": {
+ "size": 20,
+ "location": "output.txt",
+ "checksum": "sha1$13cda8661796ae241da3a18668fb552161a72592",
+ "class": "File"
+ }
+ tool: v1.0/dir.cwl
+ label: directory_input_param_ref
+ doc: Test directory input with parameter reference
+ tags: [ shell_command, command_line_tool ]
+
+- job: v1.0/dir-job.yml
+ output:
+ "outlist": {
+ "size": 20,
+ "location": "output.txt",
+ "checksum": "sha1$13cda8661796ae241da3a18668fb552161a72592",
+ "class": "File"
+ }
+ tool: v1.0/dir2.cwl
+ label: directory_input_docker
+ doc: Test directory input in Docker
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/dir3-job.yml
+ output:
+ "outdir": {
+ "class": "Directory",
+ "listing": [
+ {
+ "class": "File",
+ "location": "goodbye.txt",
+ "checksum": "sha1$dd0a4c4c49ba43004d6611771972b6cf969c1c01",
+ "size": 24
+ },
+ {
+ "class": "File",
+ "location": "hello.txt",
+ "checksum": "sha1$47a013e660d408619d894b20806b1d5086aab03b",
+ "size": 13
+ }
+ ],
+ }
+ tool: v1.0/dir3.cwl
+ label: directory_output
+ doc: Test directory output
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/dir4-job.yml
+ output: {
+ "outlist": {
+ "checksum": "sha1$13cda8661796ae241da3a18668fb552161a72592",
+ "size": 20,
+ "location": "output.txt",
+ "class": "File"
+ }
+ }
+ tool: v1.0/dir4.cwl
+ label: directory_secondaryfiles
+ doc: Test directories in secondaryFiles
+ tags: [ shell_command, command_line_tool ]
+
+- job: v1.0/dir4-subdir-1-job.yml
+ output: {
+ "outlist": {
+ "checksum": "sha1$9d9bc8f5252d39274b5dfbac64216c6e888f5dfc",
+ "size": 14,
+ "location": "output.txt",
+ "class": "File"
+ }
+ }
+ tool: v1.0/dir4.cwl
+ doc: Test specifying secondaryFiles in subdirectories of the job input document.
+
+- job: v1.0/dir4-subdir-2-job.yml
+ output: {
+ "outlist": {
+ "checksum": "sha1$9d9bc8f5252d39274b5dfbac64216c6e888f5dfc",
+ "size": 14,
+ "location": "output.txt",
+ "class": "File"
+ }
+ }
+ tool: v1.0/dir4.cwl
+ doc: Test specifying secondaryFiles in same subdirectory of the job input as the primary input file.
+
+- job: v1.0/dir-job.yml
+ output: {
+ "outlist": {
+ "checksum": "sha1$13cda8661796ae241da3a18668fb552161a72592",
+ "size": 20,
+ "location": "output.txt",
+ "class": "File"
+ }
+ }
+ tool: v1.0/dir5.cwl
+ label: dynamic_initial_workdir
+ doc: Test dynamic initial work dir
+ tags: [ shell_command, initial_work_dir, command_line_tool ]
+
+- job: v1.0/stagefile-job.yml
+ output: {
+ "outfile": {
+ "checksum": "sha1$b769c7b2e316edd4b5eb2d24799b2c1f9d8c86e6",
+ "size": 1111,
+ "location": "bob.txt",
+ "class": "File"
+ }
+ }
+ tool: v1.0/stagefile.cwl
+ label: writable_stagedfiles
+ doc: Test writable staged files.
+ tags: [ initial_work_dir, command_line_tool ]
+
+- job: v1.0/file-literal.yml
+ output:
+ output_file:
+ class: File
+ checksum: sha1$d0e04ff6c413c7d57f9a0ca0a33cd3ab52e2dd9c
+ location: output.txt
+ size: 18
+ tool: v1.0/cat3-tool.cwl
+ label: input_file_literal
+ doc: Test file literal as input
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/arguments-job.yml
+ output:
+ classfile:
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ location: Hello.class
+ class: File
+ size: 0
+ tool: v1.0/linkfile.cwl
+ label: initial_workdir_expr
+ doc: Test expression in InitialWorkDir listing
+ tags: [ initial_work_dir, command_line_tool ]
+
+- job: v1.0/wc-job.json
+ output:
+ b:
+ checksum: sha1$c4cfd130e7578714e3eef91c1d6d90e0e0b9db3e
+ location: whale.xtx
+ class: File
+ size: 21
+ tool: v1.0/nameroot.cwl
+ label: nameroot_nameext_stdout_expr
+ doc: Test nameroot/nameext expression in arguments, stdout
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/dir-job.yml
+ output:
+ "outlist": {
+ "size": 20,
+ "location": "output.txt",
+ "checksum": "sha1$13cda8661796ae241da3a18668fb552161a72592",
+ "class": "File"
+ }
+ tool: v1.0/dir6.cwl
+ label: input_dir_inputbinding
+ doc: Test directory input with inputBinding
+ tags: [ shell_command, command_line_tool ]
+
+- job: v1.0/nested-array-job.yml
+ output:
+ echo:
+ checksum: sha1$3f786850e387550fdab836ed7e6dc881de23001b
+ location: echo.txt
+ class: File
+ size: 2
+ tool: v1.0/nested-array.cwl
+ label: cl_gen_arrayofarrays
+ doc: Test command line generation of array-of-arrays
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/empty.json
+ output: {}
+ tool: v1.0/envvar.cwl
+ label: env_home_tmpdir
+ doc: Test $HOME and $TMPDIR are set correctly
+ tags: [ shell_command, command_line_tool ]
+
+- job: v1.0/empty.json
+ output: {}
+ tool: v1.0/envvar2.cwl
+ label: env_home_tmpdir_docker
+ doc: Test $HOME and $TMPDIR are set correctly in Docker
+ tags: [ shell_command, command_line_tool ]
+
+- job: v1.0/empty.json
+ output:
+ "out": {
+ "checksum": "sha1$7448d8798a4380162d4b56f9b452e2f6f9e24e7a",
+ "location": "whatever.txt",
+ "class": "File",
+ "size": 2
+ }
+ tool: "v1.0/js-expr-req-wf.cwl#wf"
+ label: expressionlib_tool_wf_override
+ doc: Test that expressionLib requirement of individual tool step overrides expressionLib of workflow.
+ tags: [ inline_javascript, workflow ]
+
+- job: v1.0/initialworkdirrequirement-docker-out-job.json
+ output:
+ OUTPUT:
+ "checksum": "sha1$aeb3d11bdf536511649129f4077d5cda6a324118"
+ "location": "ref.fasta"
+ "secondaryFiles": [{
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "location": "ref.fasta.fai",
+ "class": "File",
+ "size": 0
+ }]
+ "class": "File"
+ "size": 12010
+ tool: v1.0/initialworkdirrequirement-docker-out.cwl
+ label: initial_workdir_output
+ doc: Test output of InitialWorkDir
+ tags: [ docker, initial_work_dir, command_line_tool ]
+
+- job: v1.0/wc-job.json
+ output: {count_output: 16}
+ tool: v1.0/count-lines10-wf.cwl
+ label: embedded_subworkflow
+ doc: Test embedded subworkflow
+ tags: [ subworkflow, workflow ]
+
+- job: v1.0/docker-array-secondaryfiles-job.json
+ output: {
+ "bai_list": {
+ "checksum": "sha1$081fc0e57d6efa5f75eeb237aab1d04031132be6",
+ "location": "fai.list",
+ "class": "File",
+ "size": 386
+ }
+ }
+ tool: v1.0/docker-array-secondaryfiles.cwl
+ label: filesarray_secondaryfiles
+ doc: Test secondaryFiles on array of files.
+ tags: [ docker, inline_javascript, shell_command, command_line_tool ]
+
+- job: v1.0/dir7.yml
+ output: {
+ "dir": {
+ "location": "a_directory",
+ "class": "Directory",
+ "listing": [
+ {
+ "class": "File",
+ "location": "whale.txt",
+ "checksum": "sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376",
+ "size": 1111
+ },
+ {
+ "class": "File",
+ "location": "hello.txt",
+ "checksum": "sha1$47a013e660d408619d894b20806b1d5086aab03b",
+ "size": 13
+ }
+ ]
+ }
+ }
+ tool: v1.0/dir7.cwl
+ label: exprtool_directory_literal
+ doc: Test directory literal output created by ExpressionTool
+ tags: [ inline_javascript, expression_tool ]
+
+- job: v1.0/empty.json
+ output:
+ lit:
+ location: "a_file"
+ class: "File"
+ checksum: "sha1$fea23663b9c8ed71968f86415b5ec091bb111448"
+ size: 19
+ tool: v1.0/file-literal-ex.cwl
+ label: exprtool_file_literal
+ doc: Test file literal output created by ExpressionTool
+ tags: [ inline_javascript, expression_tool ]
+
+- job: v1.0/empty.json
+ output:
+ "thing": {
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "location": "thing",
+ "class": "File",
+ "size": 0
+ }
+ tool: v1.0/docker-output-dir.cwl
+ label: dockeroutputdir
+ doc: Test dockerOutputDirectory
+ tags: [ docker, command_line_tool ]
+
+- job: v1.0/empty.json
+ output:
+ out:
+ class: File
+ checksum: sha1$b3ec4ed1749c207e52b3a6d08c59f31d83bff519
+ location: out
+ size: 15
+ tool: v1.0/imported-hint.cwl
+ label: hints_import
+ doc: Test hints with $import
+ tags: [ required, command_line_tool ]
+
+- output: {}
+ job: v1.0/default_path_job.yml
+ tool: v1.0/default_path.cwl
+ label: default_path_notfound_warning
+ doc: Test warning instead of error when default path is not found
+ tags: [ required, command_line_tool ]
+
+- output:
+ args: [-A,'2',-B,baz,-C,'10','9','8','7','6','5','4','3','2','1',-D]
+ job: v1.0/empty.json
+ tool: v1.0/inline-js.cwl
+ label: inlinejs_req_expressions
+ doc: Test InlineJavascriptRequirement with multiple expressions in the same tool
+ tags: [ inline_javascript, command_line_tool ]
+
+- job: v1.0/recursive-input-directory.yml
+ output:
+ output_dir: {
+ "basename": "work_dir",
+ "class": "Directory",
+ "listing": [
+ {
+ "basename": "a",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "class": "File",
+ "location": "work_dir/a",
+ "size": 0
+ },
+ {
+ "basename": "b",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "class": "File",
+ "location": "work_dir/b",
+ "size": 0
+ },
+ {
+ "basename": "c",
+ "class": "Directory",
+ "listing": [
+ {
+ "basename": "d",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "class": "File",
+ "location": "work_dir/c/d",
+ "size": 0
+ }
+ ],
+ "location": "work_dir/c",
+ },
+ {
+ "basename": "e",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "class": "File",
+ "location": "work_dir/e",
+ "size": 0
+ },
+ ],
+ "location": "work_dir",
+ }
+ test_result: {
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "class": "File",
+ "location": "output.txt",
+ "size": 0
+ }
+ tool: v1.0/recursive-input-directory.cwl
+ label: input_dir_recurs_copy_writable
+ doc: Test if a writable input directory is recursively copied and writable
+ tags: [ initial_work_dir, shell_command, command_line_tool ]
+
+- output:
+ out: "t\n"
+ job: v1.0/empty.json
+ tool: v1.0/null-defined.cwl
+ label: null_missing_params
+ doc: Test that missing parameters are null (not undefined) in expression
+ tags: [ inline_javascript, command_line_tool ]
+
+- output:
+ out: "f\n"
+ job: v1.0/cat-job.json
+ tool: v1.0/null-defined.cwl
+ label: param_notnull_expr
+ doc: Test that provided parameter is not null in expression
+ tags: [ inline_javascript, command_line_tool ]
+
+- job: v1.0/revsort-job.json
+ output:
+ output:
+ class: File
+ checksum: sha1$b9214658cc453331b62c2282b772a5c063dbd284
+ location: output.txt
+ size: 1111
+ tool: v1.0/revsort-packed.cwl#main
+ label: wf_compound_doc
+ doc: Test compound workflow document
+ tags: [ required, workflow ]
+
+- job: v1.0/basename-fields-job.yml
+ output:
+ extFile:
+ checksum: sha1$301a72c82a835e1737caf30f94d0eec210c4d9f1
+ class: File
+ size: 5
+ location: Any
+ path: Any
+ rootFile:
+ checksum: sha1$b4a583c391e234cf210e1d576f68f674c8ad7ecd
+ class: File
+ size: 10
+ location: Any
+ path: Any
+ tool: v1.0/basename-fields-test.cwl
+ label: nameroot_nameext_generated
+ doc: Test that nameroot and nameext are generated from basename at execution time by the runner
+ tags: [ step_input_expression, workflow ]
+
+- job: v1.0/wc-job.json
+ output: {}
+ tool: v1.0/initialwork-path.cwl
+ label: initialworkpath_output
+ doc: Test that file path in $(inputs) for initialworkdir is in $(outdir).
+ tags: [ initial_work_dir, command_line_tool ]
+
+- job: v1.0/count-lines6-job.json
+ output:
+ count_output: 34
+ tool: v1.0/count-lines12-wf.cwl
+ label: wf_scatter_twopar_oneinput_flattenedmerge
+ doc: |
+ Test single step workflow with Scatter step and two data links connected to
+ same input, flattened merge behavior. Workflow inputs are set as list
+ tags: [ multiple_input, inline_javascript, workflow ]
+
+- job: v1.0/sum-job.json
+ output:
+ result: 12
+ tool: v1.0/sum-wf.cwl
+ label: wf_multiplesources_multipletypes
+ doc: Test step input with multiple sources with multiple types
+ tags: [ step_input, inline_javascript, multiple_input, workflow ]
+
+- job: v1.0/empty.json
+ output: {
+ "stderr_file": {
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "location": Any,
+ "class": "File",
+ "size": 0
+ },
+ "stdout_file": {
+ "checksum": "sha1$1555252d52d4ec3262538a4426a83a99cfff4402",
+ "location": Any,
+ "class": "File",
+ "size": 9
+ }
+ }
+ tool: v1.0/shellchar.cwl
+ label: shelldir_notinterpreted
+ doc: "Test that shell directives are not interpreted."
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/empty.json
+ output: {
+ "stderr_file": {
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "location": Any,
+ "class": "File",
+ "size": 0
+ },
+ "stdout_file": {
+ "checksum": "sha1$1555252d52d4ec3262538a4426a83a99cfff4402",
+ "location": Any,
+ "class": "File",
+ "size": 9
+ }
+ }
+ tool: v1.0/shellchar2.cwl
+ label: shelldir_quoted
+ doc: "Test that shell directives are quoted."
+ tags: [ shell_command, command_line_tool ]
+
+- job: v1.0/empty.json
+ output:
+ out: {
+ "basename": "emptyWritableDir",
+ "listing": [
+ {
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "basename": "blurg",
+ "location": "blurg",
+ "class": "File",
+ "size": 0
+ }
+ ],
+ "location": "emptyWritableDir",
+ "class": "Directory"
+ }
+ tool: v1.0/writable-dir.cwl
+ label: initial_workdir_empty_writable
+ doc: Test empty writable dir with InitialWorkDirRequirement
+ tags: [ inline_javascript, initial_work_dir, command_line_tool ]
+
+- job: v1.0/empty.json
+ output:
+ out: {
+ "basename": "emptyWritableDir",
+ "listing": [
+ {
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "basename": "blurg",
+ "location": "blurg",
+ "class": "File",
+ "size": 0
+ }
+ ],
+ "location": "emptyWritableDir",
+ "class": "Directory"
+ }
+ tool: v1.0/writable-dir-docker.cwl
+ label: initial_workdir_empty_writable_docker
+ doc: Test empty writable dir with InitialWorkDirRequirement inside Docker
+ tags: [ inline_javascript, initial_work_dir, command_line_tool ]
+
+- job: v1.0/dynresreq-job.yaml
+ tool: v1.0/dynresreq.cwl
+ label: dynamic_resreq_inputs
+ doc: Test dynamic resource reqs referencing inputs
+ output:
+ output: {
+ "location": "cores.txt",
+ "size": 2,
+ "class": "File",
+ "checksum": "sha1$7448d8798a4380162d4b56f9b452e2f6f9e24e7a"
+ }
+ tags: [ resource, command_line_tool ]
+
+- job: v1.0/file-literal.yml
+ output:
+ output_file:
+ class: File
+ checksum: sha1$d0e04ff6c413c7d57f9a0ca0a33cd3ab52e2dd9c
+ location: output.txt
+ size: 18
+ tool: v1.0/cat3-nodocker.cwl
+ label: fileliteral_input_docker
+ doc: Test file literal as input without Docker
+ tags: [ required, command_line_tool ]
+
+- doc: Test that OutputBinding.glob is sorted as specified by POSIX
+ job: v1.0/empty.json
+ label: outputbinding_glob_sorted
+ tool: v1.0/glob_test.cwl
+ output:
+ letters:
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ location: a
+ class: File
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ location: b
+ class: File
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ location: c
+ class: File
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ location: w
+ class: File
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ location: x
+ class: File
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ location: y
+ class: File
+ size: 0
+ - checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ location: z
+ class: File
+ size: 0
+ tags: [ required, command_line_tool ]
+
+- doc: Test InitialWorkDirRequirement with a nested directory structure from another step
+ job: v1.0/empty.json
+ output:
+ ya_empty:
+ class: File
+ checksum: sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709
+ location: ya
+ size: 0
+ tool: v1.0/iwdr_with_nested_dirs.cwl
+ label: initialworkdir_nesteddir
+ tags: [ initial_work_dir, workflow ]
+
+- job: v1.0/bool-empty-inputbinding-job.json
+ output: {
+ "args": [
+ ]
+ }
+ tool: v1.0/bool-empty-inputbinding.cwl
+ label: booleanflags_cl_noinputbinding
+ doc: "Test that boolean flags do not appear on command line if inputBinding is empty and not null"
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/empty.json
+ output: {
+ "args": []
+ }
+ tool: v1.0/stage-unprovided-file.cwl
+ label: expr_reference_self_noinput
+ doc: Test that expression engine does not fail to evaluate reference to self
+ with unprovided input
+ tags: [ required, command_line_tool ]
+
+- tool: v1.0/exit-success.cwl
+ label: success_codes
+ doc: Test successCodes
+ job: v1.0/empty.json
+ output: {}
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/dynresreq-job.yaml
+ doc: Test simple workflow with a dynamic resource requirement
+ tool: v1.0/dynresreq-workflow.cwl
+ label: dynamic_resreq_wf
+ output:
+ cores: {
+ "location": "output",
+ "size": 2,
+ "class": "File",
+ "checksum": "sha1$7448d8798a4380162d4b56f9b452e2f6f9e24e7a"
+ }
+ tags: [ resource, workflow ]
+
+- job: v1.0/empty-array-job.json
+ output: {
+ "args": []
+ }
+ tool: v1.0/empty-array-input.cwl
+ label: cl_empty_array_input
+ doc: "Test that empty array input does not add anything to command line"
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/empty.json
+ tool: v1.0/steplevel-resreq.cwl
+ label: resreq_step_overrides_wf
+ doc: Test that ResourceRequirement on a step level redefines requirement on the workflow level
+ output:
+ out: {
+ "location": "cores.txt",
+ "size": 2,
+ "class": "File",
+ "checksum": "sha1$e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e"
+ }
+ tags: [ resource, workflow ]
+
+- job: v1.0/array-of-strings-job.yml
+ output: {
+ "args": ["replacementValue"]
+ }
+ tool: v1.0/valueFrom-constant.cwl
+ label: valuefrom_constant_overrides_inputs
+ doc: Test valueFrom with constant value overriding provided array inputs
+ tags: [ required, command_line_tool ]
+
+- job: v1.0/dynresreq-dir-job.yaml
+ tool: v1.0/dynresreq-dir.cwl
+ label: dynamic_resreq_filesizes
+ doc: Test dynamic resource reqs referencing the size of Files inside a Directory
+ output:
+ output: {
+ "location": "cores.txt",
+ "size": 2,
+ "class": "File",
+ "checksum": "sha1$7448d8798a4380162d4b56f9b452e2f6f9e24e7a"
+ }
+ tags: [ resource, command_line_tool ]
+
+- job: v1.0/empty.json
+ tool: v1.0/pass-unconnected.cwl
+ label: wf_step_connect_undeclared_param
+ doc: |
+ Test that it is not an error to connect a parameter to a workflow
+ step, even if the parameter doesn't appear in the `run` process
+ inputs.
+ output: { "out": "hello inp1\n" }
+ tags: [ required, workflow ]
+
+- job: v1.0/empty.json
+ tool: v1.0/fail-unconnected.cwl
+ label: wf_step_access_undeclared_param
+ doc: |
+ Test that parameters that don't appear in the `run` process
+ inputs are not present in the input object used to run the tool.
+ should_fail: true
+ tags: [ required, workflow ]
+
+- job: v1.0/count-lines3-job.json
+ output:
+ count_output: [16, 1]
+ tool: v1.0/count-lines13-wf.cwl
+ label: wf_scatter_embedded_subwf
+ doc: Test simple scatter over an embedded subworkflow
+
+- job: v1.0/count-lines4-job.json
+ output:
+ count_output: [16, 1]
+ tool: v1.0/count-lines14-wf.cwl
+ label: wf_multiple_param_embedded_subwf
+ doc: Test simple multiple input scatter over an embedded subworkflow
+
+- job: v1.0/wc-job.json
+ output: {count_output: 16}
+ tool: v1.0/count-lines15-wf.cwl
+ label: wf_double_nested_subwf
+ doc: Test twice nested subworkflow
+
+- job: v1.0/wc-job.json
+ output: {count_output: 16}
+ tool: v1.0/count-lines16-wf.cwl
+ label: wf_subwf_tool_then_wf
+ doc: Test subworkflow of mixed depth with tool first
+
+- job: v1.0/wc-job.json
+ output: {count_output: 16}
+ tool: v1.0/count-lines17-wf.cwl
+ label: wf_subwf_wf_then_tool
+ doc: Test subworkflow of mixed depth with tool after
+
+- job: v1.0/record-output-job.json
+ output:
+ "orec": {
+ "ofoo": {
+ "location": "foo",
+ "size": 1111,
+ "class": "File",
+ "checksum": "sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376"
+ },
+ "obar": {
+ "location": "bar",
+ "size": 12010,
+ "class": "File",
+ "checksum": "sha1$aeb3d11bdf536511649129f4077d5cda6a324118"
+ }
+ }
+ tool: v1.0/record-output-wf.cwl
+ label: wf_output_record
+ doc: Test record type inputs to and outputs from workflows.
+
+- job: v1.0/io-int.json
+ output: {"o": 10}
+ tool: v1.0/io-int-wf.cwl
+ label: wf_input_output_int
+ doc: Test integer workflow input and outputs
+
+- job: v1.0/io-int.json
+ output: {"o": 10}
+ tool: v1.0/io-int-optional-wf.cwl
+ label: wf_input_int_opt_spec
+ doc: Test optional integer workflow inputs (specified)
+
+- job: v1.0/empty.json
+ output: {"o": 4}
+ tool: v1.0/io-int-optional-wf.cwl
+ label: wf_input_int_opt_unspec
+ doc: Test optional integer workflow inputs (unspecified)
+
+- job: v1.0/io-int.json
+ output: {"o": 10}
+ tool: v1.0/io-int-default-wf.cwl
+ label: wf_input_int_default_spec
+ doc: Test default integer workflow inputs (specified)
+
+- job: v1.0/empty.json
+ output: {"o": 8}
+ tool: v1.0/io-int-default-wf.cwl
+ label: wf_input_int_default_unspec
+ doc: Test default integer workflow inputs (unspecified)
+
+- job: v1.0/empty.json
+ output: {"o": 13}
+ tool: v1.0/io-int-default-tool-and-wf.cwl
+ label: wf_input_int_default_tool_wf_unspec
+ doc: Test default integer tool and workflow inputs (unspecified)
+
+- job: v1.0/empty.json
+ output: {"o": {
+ "class": "File",
+ "basename": "output",
+ "size": 1111,
+ "location": Any,
+ "checksum": "sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376"
+ }}
+ tool: v1.0/io-file-default-wf.cwl
+ label: wf_input_file_default_unspec
+ doc: Test File input with default unspecified to workflow
+
+- job: v1.0/default_path_job.yml
+ output: {"o": {
+ "class": "File",
+ "basename": "output",
+ "location": Any,
+ "checksum": "sha1$47a013e660d408619d894b20806b1d5086aab03b",
+ "size": 13
+ }}
+ tool: v1.0/io-file-default-wf.cwl
+ label: wf_input_file_default_spec
+ doc: Test File input with default specified to workflow
+
+- job: v1.0/job-input-array-one-empty-file.json
+ output: {"output_file":
+ {
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "basename": "output.txt",
+ "location": Any,
+ "class": "File",
+ "size": 0
+ }}
+ tool: v1.0/io-file-or-files.cwl
+ label: wf_input_union_file_filearray_onefilearray
+  doc: Test input union type of File or File array to a tool with one file in array specified.
+
+- job: v1.0/job-input-array-few-files.json
+ output: {"output_file":
+ {
+ "checksum": "sha1$6d1723861ad5a1260f1c3c07c93076c5a215f646",
+ "basename": "output.txt",
+ "location": Any,
+ "class": "File",
+ "size": 1114
+ }}
+ tool: v1.0/io-file-or-files.cwl
+ label: wf_input_union_file_filearray_fewfilesarray
+  doc: Test input union type of File or File array to a tool with a few files in array specified.
+
+- job: v1.0/job-input-one-file.json
+ output: {"output_file":
+ {
+ "checksum": "sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376",
+ "basename": "output.txt",
+ "location": Any,
+ "class": "File",
+ "size": 1111
+ }}
+ tool: v1.0/io-file-or-files.cwl
+ label: wf_input_union_file_filearray_onefile
+  doc: Test input union type of File or File array to a tool with one file specified.
+
+- job: v1.0/job-input-null.json
+ output: {"output_file":
+ {
+ "checksum": "sha1$503458abf7614be3fb26d85ff5d8f3e17aa0a552",
+ "basename": "output.txt",
+ "location": Any,
+ "class": "File",
+ "size": 10
+ }}
+ tool: v1.0/io-file-or-files.cwl
+ label: wf_input_union_file_filearray_null
+  doc: Test input union type of File or File array to a tool with null specified.
+
+- job: v1.0/io-any-int.json
+ output: {"t1": 7}
+ tool: v1.0/io-any-1.cwl
+ label: wf_input_any_integer_tool
+ doc: Test Any parameter with integer input to a tool
+
+- job: v1.0/io-any-string.json
+ output: {"t1": "7"}
+ tool: v1.0/io-any-1.cwl
+ label: wf_input_any_string_tool
+ doc: Test Any parameter with string input to a tool
+
+- job: v1.0/io-any-file.json
+ output: {"t1": "File"}
+ tool: v1.0/io-any-1.cwl
+ label: wf_input_any_file_tool
+ doc: Test Any parameter with file input to a tool
+
+- job: v1.0/io-any-array.json
+ output: {"t1": [1, "moocow"]}
+ tool: v1.0/io-any-1.cwl
+ label: wf_input_any_array_tool
+ doc: Test Any parameter with array input to a tool
+
+- job: v1.0/io-any-record.json
+ output: {"t1": {"moo": 1, "cow": 5}}
+ tool: v1.0/io-any-1.cwl
+ label: wf_input_any_record_tool
+ doc: Test Any parameter with record input to a tool
+
+- job: v1.0/io-any-int.json
+ output: {"t1": 7}
+ tool: v1.0/io-any-wf-1.cwl
+ label: wf_input_any_integer_wf
+ doc: Test Any parameter with integer input to a workflow
+
+- job: v1.0/io-any-string.json
+ output: {"t1": "7"}
+ tool: v1.0/io-any-wf-1.cwl
+ label: wf_input_any_string_wf
+ doc: Test Any parameter with string input to a workflow
+
+- job: v1.0/io-any-file.json
+ output: {"t1": "File"}
+ tool: v1.0/io-any-wf-1.cwl
+ label: wf_input_any_file_wf
+ doc: Test Any parameter with file input to a workflow
+
+- job: v1.0/io-any-array.json
+ output: {"t1": [1, "moocow"]}
+ tool: v1.0/io-any-wf-1.cwl
+ label: wf_input_any_array_wf
+ doc: Test Any parameter with array input to a workflow
+
+- job: v1.0/io-any-record.json
+ output: {"t1": {"moo": 1, "cow": 5}}
+ tool: v1.0/io-any-wf-1.cwl
+ label: wf_input_any_record_wf
+  doc: Test Any parameter with record input to a workflow
+
+- job: v1.0/empty.json
+ output: {"o": "the default value"}
+ tool: v1.0/io-union-input-default-wf.cwl
+ label: wf_input_union_default_unspec
+ doc: Test union type input to workflow with default unspecified
+
+- job: v1.0/io-any-file.json
+ output: {"o": "File"}
+ tool: v1.0/io-union-input-default-wf.cwl
+ label: wf_input_union_default_file
+ doc: Test union type input to workflow with default specified as file
+
+- job: v1.0/empty.json
+ output: {"val": "moocow\n"}
+ tool: v1.0/step-valuefrom4-wf.cwl
+ label: wf_step_valuefrom_literal
+ doc: Test valueFrom on workflow step from literal (string).
+
+- job: v1.0/wc-job.json
+ output: {"val1": "whale.txt\n", "val2": "step1_out\n"}
+ tool: v1.0/step-valuefrom5-wf.cwl
+ label: wf_step_valuefrom_basename
+ doc: Test valueFrom on workflow step using basename.
+
+- job: v1.0/output-arrays-int-job.json
+ output: {"o": [0, 1, 2]}
+ tool: v1.0/output-arrays-int.cwl
+ label: tool_output_arrays_ints
+ doc: Test output arrays in a tool (with ints).
+
+- job: v1.0/output-arrays-int-job.json
+ output: {"o": 12}
+ tool: v1.0/output-arrays-int-wf.cwl
+ label: wf_output_arrays_ints
+ doc: Test output arrays in a workflow (with ints).
+
+- job: v1.0/output-arrays-file-job.json
+ output: {"o": [
+ {
+ "basename": "moo",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "class": "File",
+ "location": Any,
+ "size": 0
+ },
+ {
+ "basename": "cow",
+ "checksum": "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709",
+ "class": "File",
+ "location": Any,
+ "size": 0
+ }
+ ]}
+ tool: v1.0/output-arrays-file-wf.cwl
+ label: wf_output_arrays_files
+ doc: Test output arrays in a workflow (with Files).
+
+- job: "v1.0/empty.json"
+ output: {"cow":
+ {
+ "checksum": "sha1$7a788f56fa49ae0ba5ebde780efe4d6a89b5db47",
+ "basename": "cow",
+ "class": "File",
+ "size": 4,
+ "location": Any
+ }}
+ tool: "v1.0/docker-run-cmd.cwl"
+ label: tool_docker_entrypoint
+ doc: Test Docker ENTRYPOINT usage
+
+- job: "v1.0/job-input-array-one-empty-file.json"
+ output: {"output_file":
+ {
+ "checksum": "sha1$dad5a8472b87f6c5ef87d8fc6ef1458defc57250",
+ "basename": "output.txt",
+ "location": Any,
+ "class": "File",
+ "size": 11
+ }}
+ tool: "v1.0/size-expression-tool.cwl"
+ label: tool_expressions_size_emptyfile
+ doc: Test use of size in expressions for an empty file
+
+- job: "v1.0/job-input-array-few-files.json"
+ output: {"output_file":
+ {
+ "checksum": "sha1$9def39730e8012bd09bf8387648982728501737d",
+ "basename": "output.txt",
+ "location": Any,
+ "class": "File",
+ "size": 31
+ }}
+ tool: "v1.0/size-expression-tool.cwl"
+ label: tool_expressions_size_fewfiles
+ doc: Test use of size in expressions for a few files
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines1-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines1-wf.cwl
new file mode 100755
index 000000000000..77cbf3aa04a2
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines1-wf.cwl
@@ -0,0 +1,25 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ file1:
+ type: File
+
+outputs:
+ count_output:
+ type: int
+ outputSource: step2/output
+
+steps:
+ step1:
+ run: wc-tool.cwl
+ in:
+ file1: file1
+ out: [output]
+
+ step2:
+ run: parseInt-tool.cwl
+ in:
+ file1: step1/output
+ out: [output]
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines10-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines10-wf.cwl
new file mode 100755
index 000000000000..781127154c47
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines10-wf.cwl
@@ -0,0 +1,22 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+inputs:
+ file1: File
+outputs:
+ count_output: {type: int, outputSource: step0/count_output}
+requirements:
+ SubworkflowFeatureRequirement: {}
+steps:
+ step0:
+ in: {file1: file1}
+ out: [count_output]
+ run:
+ class: Workflow
+ inputs:
+ file1: File
+ outputs:
+ count_output: {type: int, outputSource: step2/output}
+ steps:
+ step1: {run: wc-tool.cwl, in: {file1: file1}, out: [output]}
+ step2: {run: parseInt-tool.cwl, in: {file1: step1/output}, out: [output]}
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines11-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines11-wf.cwl
new file mode 100755
index 000000000000..17e1af955bdd
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines11-wf.cwl
@@ -0,0 +1,28 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ file1: File?
+
+outputs:
+ count_output:
+ type: int
+ outputSource: step2/output
+
+steps:
+ step1:
+ run: wc-tool.cwl
+ in:
+ file1:
+ source: file1
+ default:
+ class: File
+ location: whale.txt
+ out: [output]
+
+ step2:
+ run: parseInt-tool.cwl
+ in:
+ file1: step1/output
+ out: [output]
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines12-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines12-wf.cwl
new file mode 100755
index 000000000000..533d9ed5e1f8
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines12-wf.cwl
@@ -0,0 +1,27 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+requirements:
+ - class: MultipleInputFeatureRequirement
+
+inputs:
+ file1:
+ - type: array
+ items: File
+ file2:
+ - type: array
+ items: File
+
+outputs:
+ count_output:
+ type: int
+ outputSource: step1/output
+
+steps:
+ step1:
+ run: wc3-tool.cwl
+ in:
+ file1:
+ source: [file1, file2]
+ linkMerge: merge_flattened
+ out: [output]
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines13-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines13-wf.cwl
new file mode 100644
index 000000000000..474aa03235f4
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines13-wf.cwl
@@ -0,0 +1,20 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ file1: File[]
+ file2: File[]
+
+outputs:
+ count_output:
+ type: int
+ outputSource: step1/output
+
+steps:
+ step1:
+ run: wc3-tool.cwl
+ in:
+ file1:
+ source: [file1]
+ out: [output]
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines14-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines14-wf.cwl
new file mode 100644
index 000000000000..90809d619f65
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines14-wf.cwl
@@ -0,0 +1,36 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ file1:
+ type: File
+ file2:
+ type: File
+
+outputs:
+ count_output:
+ type: int[]
+ outputSource: step1/count_output
+
+requirements:
+ ScatterFeatureRequirement: {}
+ SubworkflowFeatureRequirement: {}
+ MultipleInputFeatureRequirement: {}
+
+steps:
+ step1:
+ in: {file1: file1}
+ out: [count_output]
+ scatter: file1
+ in:
+ file1: [file1, file2]
+ run:
+ class: Workflow
+ inputs:
+ file1: File
+ outputs:
+ count_output: {type: int, outputSource: step2/output}
+ steps:
+ step1: {run: wc-tool.cwl, in: {file1: file1}, out: [output]}
+ step2: {run: parseInt-tool.cwl, in: {file1: step1/output}, out: [output]}
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines15-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines15-wf.cwl
new file mode 100644
index 000000000000..1e6ddd3faf3f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines15-wf.cwl
@@ -0,0 +1,32 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+inputs:
+ file1: File
+outputs:
+ count_output: {type: int, outputSource: step1/count_output}
+requirements:
+ SubworkflowFeatureRequirement: {}
+steps:
+ step1:
+ in: {file1: file1}
+ out: [count_output]
+ run:
+ class: Workflow
+ inputs:
+ file1: File
+ outputs:
+ count_output: {type: int, outputSource: step1/count_output}
+ steps:
+ step1:
+ in: {file1: file1}
+ out: [count_output]
+ run:
+ class: Workflow
+ inputs:
+ file1: File
+ outputs:
+ count_output: {type: int, outputSource: step2/output}
+ steps:
+ step1: {run: wc-tool.cwl, in: {file1: file1}, out: [output]}
+ step2: {run: parseInt-tool.cwl, in: {file1: step1/output}, out: [output]}
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines16-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines16-wf.cwl
new file mode 100644
index 000000000000..aac8d982f32e
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines16-wf.cwl
@@ -0,0 +1,32 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+inputs:
+ file1: File
+outputs:
+ count_output: {type: int, outputSource: step1/count_output}
+requirements:
+ SubworkflowFeatureRequirement: {}
+steps:
+ step1:
+ in: {file1: file1}
+ out: [count_output]
+ run:
+ class: Workflow
+ inputs:
+ file1: File
+ outputs:
+ count_output: {type: int, outputSource: step2/count_output}
+ steps:
+ step1: {run: wc-tool.cwl, in: {file1: file1}, out: [output]}
+ step2:
+ in: {file1: step1/output}
+ out: [count_output]
+ run:
+ class: Workflow
+ inputs:
+ file1: File
+ outputs:
+ count_output: {type: int, outputSource: step1/output}
+ steps:
+ step1: {run: parseInt-tool.cwl, in: {file1: file1}, out: [output]}
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines17-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines17-wf.cwl
new file mode 100644
index 000000000000..6752510b16cd
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines17-wf.cwl
@@ -0,0 +1,33 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+inputs:
+ file1: File
+outputs:
+ count_output: {type: int, outputSource: step1/count_output}
+requirements:
+ SubworkflowFeatureRequirement: {}
+steps:
+ step1:
+ in: {file1: file1}
+ out: [count_output]
+ run:
+ class: Workflow
+ inputs:
+ file1: File
+ outputs:
+ count_output: {type: int, outputSource: step2/output}
+ steps:
+ step1:
+ in: {file1: file1}
+ out: [wc_output]
+ run:
+ class: Workflow
+ inputs:
+ file1: File
+ outputs:
+ wc_output: {type: File, outputSource: step1/output}
+ steps:
+ step1: {run: wc-tool.cwl, in: {file1: file1}, out: [output]}
+
+ step2: {run: parseInt-tool.cwl, in: {file1: step1/wc_output}, out: [output]}
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines2-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines2-wf.cwl
new file mode 100755
index 000000000000..be564beaf94c
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines2-wf.cwl
@@ -0,0 +1,50 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+requirements:
+ InlineJavascriptRequirement: {}
+
+inputs:
+ file1:
+ type: File
+
+outputs:
+ count_output:
+ type: int
+ outputSource: step2/parseInt_output
+
+steps:
+ step1:
+ in:
+ wc_file1: file1
+ out: [wc_output]
+ run:
+ id: wc
+ class: CommandLineTool
+ inputs:
+ wc_file1:
+ type: File
+ inputBinding: {}
+ outputs:
+ wc_output:
+ type: File
+ outputBinding:
+ glob: output.txt
+ stdout: output.txt
+ baseCommand: wc
+
+ step2:
+ in:
+ parseInt_file1: step1/wc_output
+ out: [parseInt_output]
+ run:
+ class: ExpressionTool
+ inputs:
+ parseInt_file1:
+ type: File
+ inputBinding: { loadContents: true }
+ outputs:
+ parseInt_output:
+ type: int
+ expression: >
+ ${return {'parseInt_output': parseInt(inputs.parseInt_file1.contents)};}
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines3-job.json b/test/unit/tools/cwl_tools/v1.0/count-lines3-job.json
new file mode 100644
index 000000000000..3a93e32d4479
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines3-job.json
@@ -0,0 +1,12 @@
+{
+ "file1": [
+ {
+ "class": "File",
+ "location": "whale.txt"
+ },
+ {
+ "class": "File",
+ "location": "hello.txt"
+ }
+ ]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines3-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines3-wf.cwl
new file mode 100755
index 000000000000..0d89a9ff560c
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines3-wf.cwl
@@ -0,0 +1,23 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ file1:
+ type: File[]
+
+outputs:
+ count_output:
+ type: int[]
+ outputSource: step1/output
+
+requirements:
+ ScatterFeatureRequirement: {}
+
+steps:
+ step1:
+ run: wc2-tool.cwl
+ scatter: file1
+ in:
+ file1: file1
+ out: [output]
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines4-job.json b/test/unit/tools/cwl_tools/v1.0/count-lines4-job.json
new file mode 100644
index 000000000000..1c85bea0195b
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines4-job.json
@@ -0,0 +1,10 @@
+{
+ "file1": {
+ "class": "File",
+ "location": "whale.txt"
+ },
+ "file2": {
+ "class": "File",
+ "location": "hello.txt"
+ }
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines4-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines4-wf.cwl
new file mode 100755
index 000000000000..cad8367ac43f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines4-wf.cwl
@@ -0,0 +1,26 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ file1:
+ type: File
+ file2:
+ type: File
+
+outputs:
+ count_output:
+ type: int[]
+ outputSource: step1/output
+
+requirements:
+ - class: ScatterFeatureRequirement
+ - class: MultipleInputFeatureRequirement
+
+steps:
+ step1:
+ run: wc2-tool.cwl
+ scatter: file1
+ in:
+ file1: [file1, file2]
+ out: [output]
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines5-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines5-wf.cwl
new file mode 100755
index 000000000000..cdc85a3613ee
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines5-wf.cwl
@@ -0,0 +1,18 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ file1:
+ type: File
+ default: {class: File, location: hello.txt}
+outputs:
+ count_output:
+ type: int
+ outputSource: step1/output
+steps:
+ step1:
+ run: wc2-tool.cwl
+ in:
+ file1: file1
+ out: [output]
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines6-job.json b/test/unit/tools/cwl_tools/v1.0/count-lines6-job.json
new file mode 100644
index 000000000000..3652ded9d097
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines6-job.json
@@ -0,0 +1,22 @@
+{
+ "file1": [
+ {
+ "class": "File",
+ "location": "whale.txt"
+ },
+ {
+ "class": "File",
+ "location": "whale.txt"
+ }
+ ],
+ "file2": [
+ {
+ "class": "File",
+ "location": "hello.txt"
+ },
+ {
+ "class": "File",
+ "location": "hello.txt"
+ }
+ ]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines6-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines6-wf.cwl
new file mode 100755
index 000000000000..50d71ef125a4
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines6-wf.cwl
@@ -0,0 +1,26 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ file1: File[]
+ file2: File[]
+
+outputs:
+ count_output:
+ type: int[]
+ outputSource: step1/output
+
+requirements:
+ - class: ScatterFeatureRequirement
+ - class: MultipleInputFeatureRequirement
+
+steps:
+ step1:
+ run: wc3-tool.cwl
+ scatter: file1
+ in:
+ file1:
+ source: [file1, file2]
+ linkMerge: merge_nested
+ out: [output]
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines7-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines7-wf.cwl
new file mode 100755
index 000000000000..8cae70883d2a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines7-wf.cwl
@@ -0,0 +1,26 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: MultipleInputFeatureRequirement
+
+inputs:
+ file1:
+ type: File[]
+ file2:
+ type: File[]
+
+outputs:
+ count_output:
+ type: int
+ outputSource: step1/output
+
+steps:
+ step1:
+ run: wc3-tool.cwl
+ in:
+ file1:
+ source: [file1, file2]
+ linkMerge: merge_flattened
+ out: [output]
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines8-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines8-wf.cwl
new file mode 100755
index 000000000000..3211e3b1753e
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines8-wf.cwl
@@ -0,0 +1,21 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ file1: File
+
+outputs:
+ count_output:
+ type: int
+ outputSource: step1/count_output
+
+requirements:
+ - class: SubworkflowFeatureRequirement
+
+steps:
+ step1:
+ run: count-lines1-wf.cwl
+ in:
+ file1: file1
+ out: [count_output]
diff --git a/test/unit/tools/cwl_tools/v1.0/count-lines9-wf.cwl b/test/unit/tools/cwl_tools/v1.0/count-lines9-wf.cwl
new file mode 100755
index 000000000000..ea751deb7c88
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/count-lines9-wf.cwl
@@ -0,0 +1,26 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs: []
+
+outputs:
+ count_output:
+ type: int
+ outputSource: step2/output
+
+steps:
+ step1:
+ run: wc-tool.cwl
+ in:
+ file1:
+ default:
+ class: File
+ location: whale.txt
+ out: [output]
+
+ step2:
+ run: parseInt-tool.cwl
+ in:
+ file1: step1/output
+ out: [output]
diff --git a/test/unit/tools/cwl_tools/v1.0/dcterms.rdf b/test/unit/tools/cwl_tools/v1.0/dcterms.rdf
new file mode 100644
index 000000000000..3873ac54dd8d
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dcterms.rdf
@@ -0,0 +1,1077 @@
+
+
+
+
+
+
+
+
+
+]>
+
+
+DCMI Metadata Terms - other
+
+2012-06-14
+
+
+Title
+A name given to the resource.
+
+2008-01-14
+2010-10-11
+
+
+
+
+
+
+Creator
+An entity primarily responsible for making the resource.
+Examples of a Creator include a person, an organization, or a service.
+
+2008-01-14
+2010-10-11
+
+
+
+
+
+
+
+
+Subject
+The topic of the resource.
+Typically, the subject will be represented using keywords, key phrases, or classification codes. Recommended best practice is to use a controlled vocabulary.
+
+2008-01-14
+2012-06-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+Description
+An account of the resource.
+Description may include but is not limited to: an abstract, a table of contents, a graphical representation, or a free-text account of the resource.
+
+2008-01-14
+2008-01-14
+
+
+
+
+
+Publisher
+An entity responsible for making the resource available.
+Examples of a Publisher include a person, an organization, or a service.
+
+2008-01-14
+2010-10-11
+
+
+
+
+
+
+Contributor
+An entity responsible for making contributions to the resource.
+Examples of a Contributor include a person, an organization, or a service.
+
+2008-01-14
+2010-10-11
+
+
+
+
+
+
+Date
+A point or period of time associated with an event in the lifecycle of the resource.
+Date may be used to express temporal information at any level of granularity. Recommended best practice is to use an encoding scheme, such as the W3CDTF profile of ISO 8601 [W3CDTF].
+
+2008-01-14
+2008-01-14
+
+
+
+
+
+
+Type
+The nature or genre of the resource.
+Recommended best practice is to use a controlled vocabulary such as the DCMI Type Vocabulary [DCMITYPE]. To describe the file format, physical medium, or dimensions of the resource, use the Format element.
+
+2008-01-14
+2008-01-14
+
+
+
+
+
+
+Format
+The file format, physical medium, or dimensions of the resource.
+Examples of dimensions include size and duration. Recommended best practice is to use a controlled vocabulary such as the list of Internet Media Types [MIME].
+
+2008-01-14
+2008-01-14
+
+
+
+
+
+
+Identifier
+An unambiguous reference to the resource within a given context.
+Recommended best practice is to identify the resource by means of a string conforming to a formal identification system.
+
+2008-01-14
+2008-01-14
+
+
+
+
+
+
+Source
+A related resource from which the described resource is derived.
+The described resource may be derived from the related resource in whole or in part. Recommended best practice is to identify the related resource by means of a string conforming to a formal identification system.
+
+2008-01-14
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Language
+A language of the resource.
+Recommended best practice is to use a controlled vocabulary such as RFC 4646 [RFC4646].
+
+2008-01-14
+2008-01-14
+
+
+
+
+
+
+Relation
+A related resource.
+Recommended best practice is to identify the related resource by means of a string conforming to a formal identification system.
+
+2008-01-14
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+Coverage
+The spatial or temporal topic of the resource, the spatial applicability of the resource, or the jurisdiction under which the resource is relevant.
+Spatial topic and spatial applicability may be a named place or a location specified by its geographic coordinates. Temporal topic may be a named period, date, or date range. A jurisdiction may be a named administrative entity or a geographic place to which the resource applies. Recommended best practice is to use a controlled vocabulary such as the Thesaurus of Geographic Names [TGN]. Where appropriate, named places or time periods can be used in preference to numeric identifiers such as sets of coordinates or date ranges.
+
+2008-01-14
+2008-01-14
+
+
+
+
+
+
+Rights
+Information about rights held in and over the resource.
+Typically, rights information includes a statement about various property rights associated with the resource, including intellectual property rights.
+
+2008-01-14
+2008-01-14
+
+
+
+
+
+
+Audience
+A class of entity for whom the resource is intended or useful.
+
+2001-05-21
+2008-01-14
+
+
+
+
+
+Alternative Title
+An alternative name for the resource.
+The distinction between titles and alternative titles is application-specific.
+
+2000-07-11
+2010-10-11
+
+
+
+
+
+
+
+Table Of Contents
+A list of subunits of the resource.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+
+Abstract
+A summary of the resource.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+
+Date Created
+Date of creation of the resource.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+
+
+Date Valid
+Date (often a range) of validity of a resource.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+
+
+Date Available
+Date (often a range) that the resource became or will become available.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+
+
+Date Issued
+Date of formal issuance (e.g., publication) of the resource.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+
+
+Date Modified
+Date on which the resource was changed.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+
+
+Extent
+The size or duration of the resource.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+
+
+Medium
+The material or physical carrier of the resource.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+
+
+
+Is Version Of
+A related resource of which the described resource is a version, edition, or adaptation.
+Changes in version imply substantive changes in content rather than differences in format.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Has Version
+A related resource that is a version, edition, or adaptation of the described resource.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Is Replaced By
+A related resource that supplants, displaces, or supersedes the described resource.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Replaces
+A related resource that is supplanted, displaced, or superseded by the described resource.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Is Required By
+A related resource that requires the described resource to support its function, delivery, or coherence.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Requires
+A related resource that is required by the described resource to support its function, delivery, or coherence.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Is Part Of
+A related resource in which the described resource is physically or logically included.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Has Part
+A related resource that is included either physically or logically in the described resource.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Is Referenced By
+A related resource that references, cites, or otherwise points to the described resource.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+References
+A related resource that is referenced, cited, or otherwise pointed to by the described resource.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Is Format Of
+A related resource that is substantially the same as the described resource, but in another format.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Has Format
+A related resource that is substantially the same as the pre-existing described resource, but in another format.
+
+2000-07-11
+2008-01-14
+
+
+This term is intended to be used with non-literal values as defined in the DCMI Abstract Model (http://dublincore.org/documents/abstract-model/). As of December 2007, the DCMI Usage Board is seeking a way to express this intention with a formal range declaration.
+
+
+
+
+Conforms To
+An established standard to which the described resource conforms.
+
+2001-05-21
+2008-01-14
+
+
+
+
+
+
+
+Spatial Coverage
+Spatial characteristics of the resource.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+
+
+Temporal Coverage
+Temporal characteristics of the resource.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+
+
+Mediator
+An entity that mediates access to the resource and for whom the resource is intended or useful.
+In an educational context, a mediator might be a parent, teacher, teaching assistant, or care-giver.
+
+2001-05-21
+2008-01-14
+
+
+
+
+
+
+Date Accepted
+Date of acceptance of the resource.
+Examples of resources to which a Date Accepted may be relevant are a thesis (accepted by a university department) or an article (accepted by a journal).
+
+2002-07-13
+2008-01-14
+
+
+
+
+
+
+
+Date Copyrighted
+Date of copyright.
+
+2002-07-13
+2008-01-14
+
+
+
+
+
+
+
+Date Submitted
+Date of submission of the resource.
+Examples of resources to which a Date Submitted may be relevant are a thesis (submitted to a university department) or an article (submitted to a journal).
+
+2002-07-13
+2008-01-14
+
+
+
+
+
+
+
+Audience Education Level
+A class of entity, defined in terms of progression through an educational or training context, for which the described resource is intended.
+
+2002-07-13
+2008-01-14
+
+
+
+
+
+
+Access Rights
+Information about who can access the resource or an indication of its security status.
+Access Rights may include information regarding access or restrictions based on privacy, security, or other policies.
+
+2003-02-15
+2008-01-14
+
+
+
+
+
+
+
+Bibliographic Citation
+A bibliographic reference for the resource.
+Recommended practice is to include sufficient bibliographic detail to identify the resource as unambiguously as possible.
+
+2003-02-15
+2008-01-14
+
+
+
+
+
+
+
+
+License
+A legal document giving official permission to do something with the resource.
+
+2004-06-14
+2008-01-14
+
+
+
+
+
+
+
+Rights Holder
+A person or organization owning or managing rights over the resource.
+
+2004-06-14
+2008-01-14
+
+
+
+
+
+Provenance
+A statement of any changes in ownership and custody of the resource since its creation that are significant for its authenticity, integrity, and interpretation.
+The statement may include a description of any changes successive custodians made to the resource.
+
+2004-09-20
+2008-01-14
+
+
+
+
+
+Instructional Method
+A process, used to engender knowledge, attitudes and skills, that the described resource is designed to support.
+Instructional Method will typically include ways of presenting instructional materials or conducting instructional activities, patterns of learner-to-learner and learner-to-instructor interactions, and mechanisms by which group and individual levels of learning are measured. Instructional methods include all aspects of the instruction and learning processes from planning and implementation through evaluation and feedback.
+
+2005-06-13
+2008-01-14
+
+
+
+
+
+Accrual Method
+The method by which items are added to a collection.
+
+2005-06-13
+2010-10-11
+
+
+
+
+
+
+Accrual Periodicity
+The frequency with which items are added to a collection.
+
+2005-06-13
+2010-10-11
+
+
+
+
+
+
+Accrual Policy
+The policy governing the addition of items to a collection.
+
+2005-06-13
+2010-10-11
+
+
+
+
+
+
+Agent
+A resource that acts or has the power to act.
+Examples of Agent include person, organization, and software agent.
+
+2008-01-14
+
+
+
+
+
+Agent Class
+A group of agents.
+Examples of Agent Class include groups seen as classes, such as students, women, charities, lecturers.
+
+2008-01-14
+2012-06-14
+
+
+
+
+
+Bibliographic Resource
+A book, article, or other documentary resource.
+
+2008-01-14
+
+
+
+
+File Format
+A digital resource format.
+Examples include the formats defined by the list of Internet Media Types.
+
+2008-01-14
+
+
+
+
+
+Frequency
+A rate at which something recurs.
+
+2008-01-14
+
+
+
+
+Jurisdiction
+The extent or range of judicial, law enforcement, or other authority.
+
+2008-01-14
+
+
+
+
+
+License Document
+A legal document giving official permission to do something with a Resource.
+
+2008-01-14
+
+
+
+
+
+Linguistic System
+A system of signs, symbols, sounds, gestures, or rules used in communication.
+Examples include written, spoken, sign, and computer languages.
+
+2008-01-14
+
+
+
+
+Location
+A spatial region or named place.
+
+2008-01-14
+
+
+
+
+
+Location, Period, or Jurisdiction
+A location, period of time, or jurisdiction.
+
+2008-01-14
+
+
+
+
+Media Type
+A file format or physical medium.
+
+2008-01-14
+
+
+
+
+
+Media Type or Extent
+A media type or extent.
+
+2008-01-14
+
+
+
+
+Method of Instruction
+A process that is used to engender knowledge, attitudes, and skills.
+
+2008-01-14
+
+
+
+
+Method of Accrual
+A method by which resources are added to a collection.
+
+2008-01-14
+
+
+
+
+Period of Time
+An interval of time that is named or defined by its start and end dates.
+
+2008-01-14
+
+
+
+
+
+Physical Medium
+A physical material or carrier.
+Examples include paper, canvas, or DVD.
+
+2008-01-14
+
+
+
+
+
+Physical Resource
+A material thing.
+
+2008-01-14
+
+
+
+
+Policy
+A plan or course of action by an authority, intended to influence and determine decisions, actions, and other matters.
+
+2008-01-14
+
+
+
+
+Provenance Statement
+A statement of any changes in ownership and custody of a resource since its creation that are significant for its authenticity, integrity, and interpretation.
+
+2008-01-14
+
+
+
+
+Rights Statement
+A statement about the intellectual property rights (IPR) held in or over a Resource, a legal document giving official permission to do something with a resource, or a statement about access rights.
+
+2008-01-14
+
+
+
+
+Size or Duration
+A dimension or extent, or a time taken to play or execute.
+Examples include a number of pages, a specification of length, width, and breadth, or a period in hours, minutes, and seconds.
+
+2008-01-14
+
+
+
+
+
+Standard
+A basis for comparison; a reference point against which other things can be evaluated.
+
+2008-01-14
+
+
+
+
+ISO 639-2
+The three-letter alphabetic codes listed in ISO639-2 for the representation of names of languages.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+RFC 1766
+The set of tags, constructed according to RFC 1766, for the identification of languages.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+URI
+The set of identifiers constructed according to the generic syntax for Uniform Resource Identifiers as specified by the Internet Engineering Task Force.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+DCMI Point
+The set of points in space defined by their geographic coordinates according to the DCMI Point Encoding Scheme.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+ISO 3166
+The set of codes listed in ISO 3166-1 for the representation of names of countries.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+DCMI Box
+The set of regions in space defined by their geographic coordinates according to the DCMI Box Encoding Scheme.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+DCMI Period
+The set of time intervals defined by their limits according to the DCMI Period Encoding Scheme.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+W3C-DTF
+The set of dates and times constructed according to the W3C Date and Time Formats Specification.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+RFC 3066
+The set of tags constructed according to RFC 3066 for the identification of languages.
+RFC 3066 has been obsoleted by RFC 4646.
+
+2002-07-13
+2008-01-14
+
+
+
+
+
+RFC 5646
+The set of tags constructed according to RFC 5646 for the identification of languages.
+RFC 5646 obsoletes RFC 4646.
+
+2010-10-11
+
+
+
+
+
+RFC 4646
+The set of tags constructed according to RFC 4646 for the identification of languages.
+RFC 4646 obsoletes RFC 3066.
+
+2008-01-14
+
+
+
+
+
+ISO 639-3
+The set of three-letter codes listed in ISO 639-3 for the representation of names of languages.
+
+2008-01-14
+
+
+
+
+
+LCSH
+The set of labeled concepts specified by the Library of Congress Subject Headings.
+
+2000-07-11
+2008-01-14
+
+
+
+
+MeSH
+The set of labeled concepts specified by the Medical Subject Headings.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+DDC
+The set of conceptual resources specified by the Dewey Decimal Classification.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+LCC
+The set of conceptual resources specified by the Library of Congress Classification.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+UDC
+The set of conceptual resources specified by the Universal Decimal Classification.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+DCMI Type Vocabulary
+The set of classes specified by the DCMI Type Vocabulary, used to categorize the nature or genre of the resource.
+
+2000-07-11
+2012-06-14
+
+
+
+
+
+IMT
+The set of media types specified by the Internet Assigned Numbers Authority.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+TGN
+The set of places specified by the Getty Thesaurus of Geographic Names.
+
+2000-07-11
+2008-01-14
+
+
+
+
+
+NLM
+The set of conceptual resources specified by the National Library of Medicine Classification.
+
+2005-06-13
+2008-01-14
+
+
+
+
+
diff --git a/test/unit/tools/cwl_tools/v1.0/default_path.cwl b/test/unit/tools/cwl_tools/v1.0/default_path.cwl
new file mode 100644
index 000000000000..58d6dced3c4d
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/default_path.cwl
@@ -0,0 +1,10 @@
+cwlVersion: v1.0
+class: CommandLineTool
+inputs:
+ - id: "file1"
+ type: File
+ default:
+ class: File
+ path: default.txt
+outputs: []
+arguments: [cat,$(inputs.file1.path)]
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/default_path_job.yml b/test/unit/tools/cwl_tools/v1.0/default_path_job.yml
new file mode 100644
index 000000000000..43471c40cc85
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/default_path_job.yml
@@ -0,0 +1,3 @@
+file1:
+ class: File
+ path: hello.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/dir-job.yml b/test/unit/tools/cwl_tools/v1.0/dir-job.yml
new file mode 100644
index 000000000000..30392cfc0f77
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir-job.yml
@@ -0,0 +1,3 @@
+indir:
+ class: Directory
+ location: testdir
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/dir.cwl b/test/unit/tools/cwl_tools/v1.0/dir.cwl
new file mode 100644
index 000000000000..4f39f550da93
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir.cwl
@@ -0,0 +1,17 @@
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: ShellCommandRequirement
+inputs:
+ indir: Directory
+outputs:
+ outlist:
+ type: File
+ outputBinding:
+ glob: output.txt
+arguments: ["cd", "$(inputs.indir.path)",
+ {shellQuote: false, valueFrom: "&&"},
+ "find", ".",
+ {shellQuote: false, valueFrom: "|"},
+ "sort"]
+stdout: output.txt
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/dir2.cwl b/test/unit/tools/cwl_tools/v1.0/dir2.cwl
new file mode 100644
index 000000000000..7136f1b14f97
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir2.cwl
@@ -0,0 +1,19 @@
+class: CommandLineTool
+cwlVersion: v1.0
+hints:
+ DockerRequirement:
+ dockerPull: debian:stretch-slim
+ ShellCommandRequirement: {}
+inputs:
+ indir: Directory
+outputs:
+ outlist:
+ type: File
+ outputBinding:
+ glob: output.txt
+arguments: ["cd", "$(inputs.indir.path)",
+ {shellQuote: false, valueFrom: "&&"},
+ "find", ".",
+ {shellQuote: false, valueFrom: "|"},
+ "sort"]
+stdout: output.txt
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/dir3-job.yml b/test/unit/tools/cwl_tools/v1.0/dir3-job.yml
new file mode 100644
index 000000000000..aff0e8036e9e
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir3-job.yml
@@ -0,0 +1,3 @@
+inf:
+ class: File
+ location: hello.tar
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/dir3.cwl b/test/unit/tools/cwl_tools/v1.0/dir3.cwl
new file mode 100644
index 000000000000..36b56cfc7b99
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir3.cwl
@@ -0,0 +1,13 @@
+class: CommandLineTool
+cwlVersion: v1.0
+baseCommand: [tar, xvf]
+inputs:
+ inf:
+ type: File
+ inputBinding:
+ position: 1
+outputs:
+ outdir:
+ type: Directory
+ outputBinding:
+ glob: .
diff --git a/test/unit/tools/cwl_tools/v1.0/dir4-job.yml b/test/unit/tools/cwl_tools/v1.0/dir4-job.yml
new file mode 100644
index 000000000000..f4ef8c7d5cee
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir4-job.yml
@@ -0,0 +1,9 @@
+inf:
+ class: File
+ location: hello.tar
+ secondaryFiles:
+ - class: File
+ location: index.py
+ - class: Directory
+ basename: xtestdir
+ location: testdir
diff --git a/test/unit/tools/cwl_tools/v1.0/dir4-subdir-1-job.yml b/test/unit/tools/cwl_tools/v1.0/dir4-subdir-1-job.yml
new file mode 100644
index 000000000000..5cfe5660aee6
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir4-subdir-1-job.yml
@@ -0,0 +1,9 @@
+inf:
+ class: File
+ location: hello.tar
+ secondaryFiles:
+ - class: File
+ location: subdirsecondaries/hello.py
+ - class: Directory
+ basename: xtestdir
+ location: subdirsecondaries/testdir
diff --git a/test/unit/tools/cwl_tools/v1.0/dir4-subdir-2-job.yml b/test/unit/tools/cwl_tools/v1.0/dir4-subdir-2-job.yml
new file mode 100644
index 000000000000..e676054febe5
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir4-subdir-2-job.yml
@@ -0,0 +1,9 @@
+inf:
+ class: File
+ location: subdirsecondaries/hello.py
+ secondaryFiles:
+ - class: File
+ location: hello.tar
+ - class: Directory
+ basename: xtestdir
+ location: subdirsecondaries/testdir
diff --git a/test/unit/tools/cwl_tools/v1.0/dir4.cwl b/test/unit/tools/cwl_tools/v1.0/dir4.cwl
new file mode 100644
index 000000000000..a585d98ad869
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir4.cwl
@@ -0,0 +1,17 @@
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: ShellCommandRequirement
+inputs:
+ inf: File
+outputs:
+ outlist:
+ type: File
+ outputBinding:
+ glob: output.txt
+arguments: ["cd", "$(inputs.inf.dirname)/xtestdir",
+ {shellQuote: false, valueFrom: "&&"},
+ "find", ".",
+ {shellQuote: false, valueFrom: "|"},
+ "sort"]
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/dir5.cwl b/test/unit/tools/cwl_tools/v1.0/dir5.cwl
new file mode 100644
index 000000000000..f81a9083acfd
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir5.cwl
@@ -0,0 +1,17 @@
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: ShellCommandRequirement
+ - class: InitialWorkDirRequirement
+ listing: $(inputs.indir.listing)
+inputs:
+ indir: Directory
+outputs:
+ outlist:
+ type: File
+ outputBinding:
+ glob: output.txt
+arguments: ["find", "-L", ".", "!", "-path", "*.txt",
+ {shellQuote: false, valueFrom: "|"},
+ "sort"]
+stdout: output.txt
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/dir6.cwl b/test/unit/tools/cwl_tools/v1.0/dir6.cwl
new file mode 100644
index 000000000000..93362b5b6f64
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir6.cwl
@@ -0,0 +1,21 @@
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: ShellCommandRequirement
+inputs:
+ indir:
+ type: Directory
+ inputBinding:
+ prefix: cd
+ position: -1
+outputs:
+ outlist:
+ type: File
+ outputBinding:
+ glob: output.txt
+arguments: [
+ {shellQuote: false, valueFrom: "&&"},
+ "find", ".",
+ {shellQuote: false, valueFrom: "|"},
+ "sort"]
+stdout: output.txt
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/dir7.cwl b/test/unit/tools/cwl_tools/v1.0/dir7.cwl
new file mode 100644
index 000000000000..88171e6ddc15
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir7.cwl
@@ -0,0 +1,12 @@
+class: ExpressionTool
+cwlVersion: v1.0
+requirements:
+ InlineJavascriptRequirement: {}
+inputs:
+ files: File[]
+outputs:
+ dir: Directory
+expression: |
+ ${
+ return {"dir": {"class": "Directory", "basename": "a_directory", "listing": inputs.files}};
+ }
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/dir7.yml b/test/unit/tools/cwl_tools/v1.0/dir7.yml
new file mode 100644
index 000000000000..7ad068585572
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dir7.yml
@@ -0,0 +1,5 @@
+files:
+ - class: File
+ location: hello.txt
+ - class: File
+ location: whale.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/docker-array-secondaryfiles-job.json b/test/unit/tools/cwl_tools/v1.0/docker-array-secondaryfiles-job.json
new file mode 100644
index 000000000000..5d178fc63e98
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/docker-array-secondaryfiles-job.json
@@ -0,0 +1,12 @@
+{
+ "fasta_path": [
+ {
+ "class": "File",
+ "location": "ref.fasta"
+ },
+ {
+ "class": "File",
+ "location": "ref2.fasta"
+ }
+ ]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/docker-array-secondaryfiles.cwl b/test/unit/tools/cwl_tools/v1.0/docker-array-secondaryfiles.cwl
new file mode 100644
index 000000000000..7ed1468cf933
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/docker-array-secondaryfiles.cwl
@@ -0,0 +1,38 @@
+#!/usr/bin/env cwl-runner
+
+cwlVersion: v1.0
+
+requirements:
+ - class: DockerRequirement
+ dockerPull: debian:stretch-slim
+ - class: InlineJavascriptRequirement
+ - class: ShellCommandRequirement
+
+class: CommandLineTool
+
+inputs:
+ fasta_path:
+ type:
+ type: array
+ items: File
+ secondaryFiles:
+ - .fai
+
+outputs:
+ bai_list:
+ type: File
+ outputBinding:
+ glob: "fai.list"
+
+arguments:
+ - valueFrom: ${
+ var fai_list = "";
+ for (var i = 0; i < inputs.fasta_path.length; i ++) {
+ fai_list += " cat " + inputs.fasta_path[i].path +".fai" + " >> fai.list && "
+ }
+ return fai_list.slice(0,-3)
+ }
+ position: 1
+ shellQuote: false
+
+baseCommand: []
diff --git a/test/unit/tools/cwl_tools/v1.0/docker-output-dir.cwl b/test/unit/tools/cwl_tools/v1.0/docker-output-dir.cwl
new file mode 100644
index 000000000000..7f392f09575a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/docker-output-dir.cwl
@@ -0,0 +1,13 @@
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ DockerRequirement:
+ dockerPull: debian:stretch-slim
+ dockerOutputDirectory: /other
+inputs: []
+outputs:
+ thing:
+ type: File
+ outputBinding:
+ glob: thing
+baseCommand: ["touch", "/other/thing"]
diff --git a/test/unit/tools/cwl_tools/v1.0/docker-run-cmd.cwl b/test/unit/tools/cwl_tools/v1.0/docker-run-cmd.cwl
new file mode 100644
index 000000000000..8f0ed68bac4f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/docker-run-cmd.cwl
@@ -0,0 +1,12 @@
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ DockerRequirement:
+ dockerPull: bash:4.4.12
+inputs: []
+outputs:
+ cow:
+ type: File
+ outputBinding:
+ glob: cow
+baseCommand: ["-c", "echo 'moo' > cow"]
diff --git a/test/unit/tools/cwl_tools/v1.0/dynresreq-dir-job.yaml b/test/unit/tools/cwl_tools/v1.0/dynresreq-dir-job.yaml
new file mode 100644
index 000000000000..a0f3d2d51e6f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dynresreq-dir-job.yaml
@@ -0,0 +1,6 @@
+dir:
+ class: Directory
+ basename: test
+ listing:
+ - class: File
+ path: special_file
diff --git a/test/unit/tools/cwl_tools/v1.0/dynresreq-dir.cwl b/test/unit/tools/cwl_tools/v1.0/dynresreq-dir.cwl
new file mode 100644
index 000000000000..c90abcf4f1f0
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dynresreq-dir.cwl
@@ -0,0 +1,22 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+
+requirements:
+ InlineJavascriptRequirement: {}
+ ResourceRequirement:
+ coresMin: $(inputs.dir.listing[0].size)
+ coresMax: $(inputs.dir.listing[0].size)
+
+inputs:
+ dir: Directory
+
+outputs:
+ output:
+ type: stdout
+
+baseCommand: echo
+
+stdout: cores.txt
+
+arguments: [ $(runtime.cores) ]
diff --git a/test/unit/tools/cwl_tools/v1.0/dynresreq-job.yaml b/test/unit/tools/cwl_tools/v1.0/dynresreq-job.yaml
new file mode 100644
index 000000000000..9f8c018cae62
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dynresreq-job.yaml
@@ -0,0 +1,3 @@
+special_file:
+ class: File
+ path: special_file
diff --git a/test/unit/tools/cwl_tools/v1.0/dynresreq-workflow.cwl b/test/unit/tools/cwl_tools/v1.0/dynresreq-workflow.cwl
new file mode 100644
index 000000000000..e38ba17c5129
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dynresreq-workflow.cwl
@@ -0,0 +1,24 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ special_file: File
+
+outputs:
+ cores:
+ type: File
+ outputSource: report/output
+
+steps:
+ count:
+ in:
+ special_file: special_file
+ out: [output]
+ run: dynresreq.cwl
+
+ report:
+ in:
+ file1: count/output
+ out: [output]
+ run: cat-tool.cwl
diff --git a/test/unit/tools/cwl_tools/v1.0/dynresreq.cwl b/test/unit/tools/cwl_tools/v1.0/dynresreq.cwl
new file mode 100644
index 000000000000..9af95330f330
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/dynresreq.cwl
@@ -0,0 +1,21 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+
+requirements:
+ ResourceRequirement:
+ coresMin: $(inputs.special_file.size)
+ coresMax: $(inputs.special_file.size)
+
+inputs:
+ special_file: File
+
+outputs:
+ output:
+ type: stdout
+
+baseCommand: echo
+
+stdout: cores.txt
+
+arguments: [ $(runtime.cores) ]
diff --git a/test/unit/tools/cwl_tools/v1.0/echo-file-tool.cwl b/test/unit/tools/cwl_tools/v1.0/echo-file-tool.cwl
new file mode 100644
index 000000000000..189a4c8f4821
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/echo-file-tool.cwl
@@ -0,0 +1,12 @@
+cwlVersion: v1.0
+class: CommandLineTool
+baseCommand: [echo]
+inputs:
+ in:
+ type: string
+ inputBinding:
+ position: 1
+outputs:
+ out:
+ type: stdout
+
diff --git a/test/unit/tools/cwl_tools/v1.0/echo-tool-default.cwl b/test/unit/tools/cwl_tools/v1.0/echo-tool-default.cwl
new file mode 100644
index 000000000000..f88cd7672bbb
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/echo-tool-default.cwl
@@ -0,0 +1,19 @@
+#!/usr/bin/env cwl-runner
+
+class: CommandLineTool
+cwlVersion: v1.0
+inputs:
+ in:
+ type: string
+ default: tool_default
+ inputBinding:
+ position: 1
+outputs:
+ out:
+ type: string
+ outputBinding:
+ glob: out.txt
+ loadContents: true
+ outputEval: $(self[0].contents)
+baseCommand: [echo, -n]
+stdout: out.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/echo-tool.cwl b/test/unit/tools/cwl_tools/v1.0/echo-tool.cwl
new file mode 100644
index 000000000000..0233693267da
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/echo-tool.cwl
@@ -0,0 +1,17 @@
+#!/usr/bin/env cwl-runner
+
+class: CommandLineTool
+cwlVersion: v1.0
+inputs:
+ in:
+ type: Any
+ inputBinding: {}
+outputs:
+ out:
+ type: string
+ outputBinding:
+ glob: out.txt
+ loadContents: true
+ outputEval: $(self[0].contents)
+baseCommand: echo
+stdout: out.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/echo-wf-default.cwl b/test/unit/tools/cwl_tools/v1.0/echo-wf-default.cwl
new file mode 100644
index 000000000000..0dfedd24d8e3
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/echo-wf-default.cwl
@@ -0,0 +1,19 @@
+
+class: Workflow
+cwlVersion: v1.0
+
+inputs: []
+
+steps:
+ step1:
+ run: echo-tool-default.cwl
+ in:
+ in:
+ default: workflow_default
+ out: [out]
+
+outputs:
+ default_output:
+ type: string
+ outputSource: step1/out
+
diff --git a/test/unit/tools/cwl_tools/v1.0/empty-array-input.cwl b/test/unit/tools/cwl_tools/v1.0/empty-array-input.cwl
new file mode 100644
index 000000000000..d2a7f98974b9
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/empty-array-input.cwl
@@ -0,0 +1,32 @@
+#!/usr/bin/env cwl-runner
+
+cwlVersion: v1.0
+class: CommandLineTool
+
+hints:
+ - class: DockerRequirement
+ dockerPull: python:2-slim
+
+inputs:
+ - id: array
+ type: { type: array, items: int }
+ inputBinding:
+ position: 1
+ prefix: -I
+ itemSeparator: ","
+
+ - id: args.py
+ type: File
+ default:
+ class: File
+ location: args.py
+ inputBinding:
+ position: -1
+
+outputs:
+ - id: args
+ type:
+ type: array
+ items: string
+
+baseCommand: python
diff --git a/test/unit/tools/cwl_tools/v1.0/empty-array-job.json b/test/unit/tools/cwl_tools/v1.0/empty-array-job.json
new file mode 100644
index 000000000000..038229f2bfde
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/empty-array-job.json
@@ -0,0 +1,3 @@
+{
+ "array": []
+}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/empty.json b/test/unit/tools/cwl_tools/v1.0/empty.json
new file mode 100644
index 000000000000..0967ef424bce
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/empty.json
@@ -0,0 +1 @@
+{}
diff --git a/test/unit/tools/cwl_tools/v1.0/empty.txt b/test/unit/tools/cwl_tools/v1.0/empty.txt
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/tools/cwl_tools/v1.0/env-job.json b/test/unit/tools/cwl_tools/v1.0/env-job.json
new file mode 100644
index 000000000000..dabf28850927
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/env-job.json
@@ -0,0 +1,3 @@
+{
+ "in": "hello test env"
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/env-tool1.cwl b/test/unit/tools/cwl_tools/v1.0/env-tool1.cwl
new file mode 100644
index 000000000000..b22ba7854817
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/env-tool1.cwl
@@ -0,0 +1,18 @@
+class: CommandLineTool
+cwlVersion: v1.0
+inputs:
+ in: string
+outputs:
+ out:
+ type: File
+ outputBinding:
+ glob: out
+
+requirements:
+ EnvVarRequirement:
+ envDef:
+ TEST_ENV: $(inputs.in)
+
+baseCommand: ["/bin/sh", "-c", "echo $TEST_ENV"]
+
+stdout: out
diff --git a/test/unit/tools/cwl_tools/v1.0/env-tool2.cwl b/test/unit/tools/cwl_tools/v1.0/env-tool2.cwl
new file mode 100644
index 000000000000..6408ce4cf73a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/env-tool2.cwl
@@ -0,0 +1,18 @@
+class: CommandLineTool
+cwlVersion: v1.0
+inputs:
+ in: string
+outputs:
+ out:
+ type: File
+ outputBinding:
+ glob: out
+
+hints:
+ EnvVarRequirement:
+ envDef:
+ TEST_ENV: $(inputs.in)
+
+baseCommand: ["/bin/sh", "-c", "echo $TEST_ENV"]
+
+stdout: out
diff --git a/test/unit/tools/cwl_tools/v1.0/env-wf1.cwl b/test/unit/tools/cwl_tools/v1.0/env-wf1.cwl
new file mode 100644
index 000000000000..9a8487f2a4d8
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/env-wf1.cwl
@@ -0,0 +1,23 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ in: string
+
+outputs:
+ out:
+ type: File
+ outputSource: step1/out
+
+requirements:
+ EnvVarRequirement:
+ envDef:
+ TEST_ENV: override
+
+steps:
+ step1:
+ run: env-tool1.cwl
+ in:
+ in: in
+ out: [out]
diff --git a/test/unit/tools/cwl_tools/v1.0/env-wf2.cwl b/test/unit/tools/cwl_tools/v1.0/env-wf2.cwl
new file mode 100644
index 000000000000..2c43e5b65458
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/env-wf2.cwl
@@ -0,0 +1,23 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ in: string
+
+outputs:
+ out:
+ type: File
+ outputSource: step1/out
+
+requirements:
+ EnvVarRequirement:
+ envDef:
+ TEST_ENV: override
+
+steps:
+ step1:
+ run: env-tool2.cwl
+ in:
+ in: in
+ out: [out]
diff --git a/test/unit/tools/cwl_tools/v1.0/env-wf3.cwl b/test/unit/tools/cwl_tools/v1.0/env-wf3.cwl
new file mode 100644
index 000000000000..4fd2dc3b5d86
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/env-wf3.cwl
@@ -0,0 +1,22 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ in: string
+
+outputs:
+ out:
+ type: File
+ outputSource: step1/out
+
+steps:
+ step1:
+ run: env-tool2.cwl
+ requirements:
+ EnvVarRequirement:
+ envDef:
+ TEST_ENV: override
+ in:
+ in: in
+ out: [out]
diff --git a/test/unit/tools/cwl_tools/v1.0/envvar.cwl b/test/unit/tools/cwl_tools/v1.0/envvar.cwl
new file mode 100644
index 000000000000..a2b13ae0f703
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/envvar.cwl
@@ -0,0 +1,11 @@
+class: CommandLineTool
+cwlVersion: v1.0
+inputs: []
+outputs: []
+requirements:
+ ShellCommandRequirement: {}
+arguments: [
+ echo, {valueFrom: '"HOME=$HOME"', shellQuote: false}, {valueFrom: '"TMPDIR=$TMPDIR"', shellQuote: false},
+ {valueFrom: '&&', shellQuote: false},
+ test, {valueFrom: '"$HOME"', shellQuote: false}, "=", $(runtime.outdir),
+ "-a", {valueFrom: '"$TMPDIR"', shellQuote: false}, "=", $(runtime.tmpdir)]
diff --git a/test/unit/tools/cwl_tools/v1.0/envvar.yml b/test/unit/tools/cwl_tools/v1.0/envvar.yml
new file mode 100644
index 000000000000..097c3f27d675
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/envvar.yml
@@ -0,0 +1,4 @@
+class: EnvVarRequirement
+envDef:
+ - envName: "TEST_ENV"
+ envValue: "hello test env"
diff --git a/test/unit/tools/cwl_tools/v1.0/envvar2.cwl b/test/unit/tools/cwl_tools/v1.0/envvar2.cwl
new file mode 100644
index 000000000000..d688174e222c
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/envvar2.cwl
@@ -0,0 +1,14 @@
+class: CommandLineTool
+cwlVersion: v1.0
+inputs: []
+outputs: []
+requirements:
+ ShellCommandRequirement: {}
+hints:
+ DockerRequirement:
+ dockerPull: debian:stretch-slim
+arguments: [
+ echo, {valueFrom: '"HOME=$HOME"', shellQuote: false}, {valueFrom: '"TMPDIR=$TMPDIR"', shellQuote: false},
+ {valueFrom: '&&', shellQuote: false},
+ test, {valueFrom: '"$HOME"', shellQuote: false}, "=", $(runtime.outdir),
+ "-a", {valueFrom: '"$TMPDIR"', shellQuote: false}, "=", $(runtime.tmpdir)]
diff --git a/test/unit/tools/cwl_tools/v1.0/example_human_Illumina.pe_1.fastq b/test/unit/tools/cwl_tools/v1.0/example_human_Illumina.pe_1.fastq
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/tools/cwl_tools/v1.0/example_human_Illumina.pe_2.fastq b/test/unit/tools/cwl_tools/v1.0/example_human_Illumina.pe_2.fastq
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/tools/cwl_tools/v1.0/exit-success.cwl b/test/unit/tools/cwl_tools/v1.0/exit-success.cwl
new file mode 100644
index 000000000000..9e66483eb6de
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/exit-success.cwl
@@ -0,0 +1,11 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+class: CommandLineTool
+
+inputs: []
+baseCommand: "false"
+outputs: []
+
+successCodes: [ 1 ]
+permanentFailCodes: [ 0 ]
+temporaryFailCodes: [ 42 ]
diff --git a/test/unit/tools/cwl_tools/v1.0/fail-unconnected.cwl b/test/unit/tools/cwl_tools/v1.0/fail-unconnected.cwl
new file mode 100644
index 000000000000..3d5445579536
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/fail-unconnected.cwl
@@ -0,0 +1,20 @@
+class: Workflow
+cwlVersion: v1.0
+inputs:
+ inp1:
+ type: string
+ default: hello inp1
+ inp2:
+ type: string
+ default: hello inp2
+outputs:
+ out:
+ type: string
+ outputSource: step1/out
+steps:
+ step1:
+ in:
+ in: inp1
+ in2: inp2
+ out: [out]
+ run: fail-unspecified-input.cwl
diff --git a/test/unit/tools/cwl_tools/v1.0/fail-unspecified-input.cwl b/test/unit/tools/cwl_tools/v1.0/fail-unspecified-input.cwl
new file mode 100644
index 000000000000..95a46b5fb4f8
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/fail-unspecified-input.cwl
@@ -0,0 +1,13 @@
+class: CommandLineTool
+cwlVersion: v1.0
+inputs:
+ in: string
+outputs:
+ out:
+ type: string
+ outputBinding:
+ glob: out.txt
+ loadContents: true
+ outputEval: $(self[0].contents)
+stdout: out.txt
+arguments: [echo, $(inputs.in), $(inputs.in2)]
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/file-literal-ex.cwl b/test/unit/tools/cwl_tools/v1.0/file-literal-ex.cwl
new file mode 100644
index 000000000000..9f809f34f8eb
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/file-literal-ex.cwl
@@ -0,0 +1,11 @@
+class: ExpressionTool
+cwlVersion: v1.0
+requirements:
+ InlineJavascriptRequirement: {}
+inputs: []
+outputs:
+ lit: File
+expression: |
+ ${
+ return {"lit": {"class": "File", "basename": "a_file", "contents": "Hello file literal."}};
+ }
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/file-literal.yml b/test/unit/tools/cwl_tools/v1.0/file-literal.yml
new file mode 100644
index 000000000000..604f9ece93d7
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/file-literal.yml
@@ -0,0 +1,3 @@
+file1:
+ class: File
+ contents: "Hello file literal"
diff --git a/test/unit/tools/cwl_tools/v1.0/file1-null.json b/test/unit/tools/cwl_tools/v1.0/file1-null.json
new file mode 100644
index 000000000000..cc6b361935e2
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/file1-null.json
@@ -0,0 +1,3 @@
+{
+ "file1": null
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/foaf.rdf b/test/unit/tools/cwl_tools/v1.0/foaf.rdf
new file mode 100644
index 000000000000..68d0700caebe
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/foaf.rdf
@@ -0,0 +1,609 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Label Property
+ A foaf:LabelProperty is any RDF property with texual values that serve as labels.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/unit/tools/cwl_tools/v1.0/formattest-job.json b/test/unit/tools/cwl_tools/v1.0/formattest-job.json
new file mode 100644
index 000000000000..0ff02409634a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/formattest-job.json
@@ -0,0 +1,7 @@
+{
+ "input": {
+ "class": "File",
+ "location": "whale.txt",
+ "format": "edam:format_2330"
+ }
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/formattest.cwl b/test/unit/tools/cwl_tools/v1.0/formattest.cwl
new file mode 100644
index 000000000000..19168e8750e1
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/formattest.cwl
@@ -0,0 +1,20 @@
+$namespaces:
+ edam: "http://edamontology.org/"
+cwlVersion: v1.0
+class: CommandLineTool
+doc: "Reverse each line using the `rev` command"
+inputs:
+ input:
+ type: File
+ inputBinding: {}
+ format: edam:format_2330
+
+outputs:
+ output:
+ type: File
+ outputBinding:
+ glob: output.txt
+ format: edam:format_2330
+
+baseCommand: rev
+stdout: output.txt
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/formattest2-job.json b/test/unit/tools/cwl_tools/v1.0/formattest2-job.json
new file mode 100644
index 000000000000..f706f6ed1d88
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/formattest2-job.json
@@ -0,0 +1,7 @@
+{
+ "input": {
+ "class": "File",
+ "location": "ref.fasta",
+ "format": "edam:format_1929"
+ }
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/formattest2.cwl b/test/unit/tools/cwl_tools/v1.0/formattest2.cwl
new file mode 100644
index 000000000000..9e40cd5ad75c
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/formattest2.cwl
@@ -0,0 +1,26 @@
+$namespaces:
+ edam: http://edamontology.org/
+$schemas:
+ - EDAM.owl
+class: CommandLineTool
+cwlVersion: v1.0
+doc: "Reverse each line using the `rev` command"
+hints:
+ DockerRequirement:
+ dockerPull: "debian:stretch-slim"
+
+inputs:
+ input:
+ type: File
+ inputBinding: {}
+ format: edam:format_2330
+
+outputs:
+ output:
+ type: File
+ outputBinding:
+ glob: output.txt
+ format: $(inputs.input.format)
+
+baseCommand: rev
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/formattest3.cwl b/test/unit/tools/cwl_tools/v1.0/formattest3.cwl
new file mode 100644
index 000000000000..2e4a1fca5933
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/formattest3.cwl
@@ -0,0 +1,28 @@
+$namespaces:
+ edam: http://edamontology.org/
+ gx: http://galaxyproject.org/formats/
+$schemas:
+ - EDAM.owl
+ - gx_edam.ttl
+class: CommandLineTool
+cwlVersion: v1.0
+doc: "Reverse each line using the `rev` command"
+hints:
+ DockerRequirement:
+ dockerPull: "debian:stretch-slim"
+
+inputs:
+ input:
+ type: File
+ inputBinding: {}
+ format: gx:fasta
+
+outputs:
+ output:
+ type: File
+ outputBinding:
+ glob: output.txt
+ format: $(inputs.input.format)
+
+baseCommand: rev
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/glob-expr-list.cwl b/test/unit/tools/cwl_tools/v1.0/glob-expr-list.cwl
new file mode 100644
index 000000000000..a70bccf4a965
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/glob-expr-list.cwl
@@ -0,0 +1,16 @@
+class: CommandLineTool
+cwlVersion: v1.0
+
+inputs:
+ ids:
+ type: string[]
+ inputBinding:
+ position: 1
+
+outputs:
+ files:
+ type: File[]
+ outputBinding:
+ glob: $(inputs.ids)
+
+baseCommand: touch
diff --git a/test/unit/tools/cwl_tools/v1.0/glob_test.cwl b/test/unit/tools/cwl_tools/v1.0/glob_test.cwl
new file mode 100644
index 000000000000..59f5af8a361b
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/glob_test.cwl
@@ -0,0 +1,10 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+class: CommandLineTool
+
+inputs: []
+baseCommand: [touch, z, y, x, w, c, b, a]
+outputs:
+ letters:
+ type: File[]
+ outputBinding: { glob: '*' }
diff --git a/test/unit/tools/cwl_tools/v1.0/gx_edam.ttl b/test/unit/tools/cwl_tools/v1.0/gx_edam.ttl
new file mode 100644
index 000000000000..8182f719d216
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/gx_edam.ttl
@@ -0,0 +1,7 @@
+@prefix gx: .
+@prefix owl: .
+@prefix rdfs: .
+@prefix edam: .
+
+gx:fasta a owl:Class .
+gx:fasta owl:equivalentClass edam:format_1929 .
diff --git a/test/unit/tools/cwl_tools/v1.0/hello.2.txt b/test/unit/tools/cwl_tools/v1.0/hello.2.txt
new file mode 100644
index 000000000000..cd0875583aab
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/hello.2.txt
@@ -0,0 +1 @@
+Hello world!
diff --git a/test/unit/tools/cwl_tools/v1.0/hello.tar b/test/unit/tools/cwl_tools/v1.0/hello.tar
new file mode 100644
index 000000000000..58eb35332733
Binary files /dev/null and b/test/unit/tools/cwl_tools/v1.0/hello.tar differ
diff --git a/test/unit/tools/cwl_tools/v1.0/hello.txt b/test/unit/tools/cwl_tools/v1.0/hello.txt
new file mode 100644
index 000000000000..cd0875583aab
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/hello.txt
@@ -0,0 +1 @@
+Hello world!
diff --git a/test/unit/tools/cwl_tools/v1.0/imported-hint.cwl b/test/unit/tools/cwl_tools/v1.0/imported-hint.cwl
new file mode 100755
index 000000000000..ac016fd50afe
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/imported-hint.cwl
@@ -0,0 +1,13 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+class: CommandLineTool
+inputs: []
+outputs:
+ out: stdout
+
+hints:
+- $import: envvar.yml
+
+baseCommand: ["/bin/sh", "-c", "echo $TEST_ENV"]
+
+stdout: out
diff --git a/test/unit/tools/cwl_tools/v1.0/index.py b/test/unit/tools/cwl_tools/v1.0/index.py
new file mode 100755
index 000000000000..77918b5e5ec1
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/index.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python2
+
+# Toy program to generate inverted index of word to line.
+# Takes input text file on stdin and prints output index on stdout.
+
+import sys
+import os
+
+words = {}
+
+mainfile = sys.argv[1]
+indexfile = sys.argv[1] + ".idx1"
+
+main = open(mainfile)
+index = open(indexfile, "w")
+
+linenum = 0
+for l in main:
+ linenum += 1
+ l = l.rstrip().lower().replace(".", "").replace(",", "").replace(";", "").replace("-", " ")
+ for w in l.split(" "):
+ if w:
+ if w not in words:
+ words[w] = set()
+ words[w].add(linenum)
+
+for w in sorted(words.keys()):
+ index.write("%s: %s" % (w, ", ".join((str(i) for i in words[w]))) + "\n")
+
+open(os.path.splitext(sys.argv[1])[0] + ".idx2", "w")
+open(sys.argv[1] + ".idx3", "w")
+open(sys.argv[1] + ".idx4", "w")
+open(sys.argv[1] + ".idx5", "w")
+open(os.path.splitext(sys.argv[1])[0] + ".idx6" + os.path.splitext(sys.argv[1])[1], "w")
+open(sys.argv[1] + ".idx7", "w")
+os.mkdir(sys.argv[1] + "_idx8")
+open(sys.argv[1] + "_idx8/index", "w")
diff --git a/test/unit/tools/cwl_tools/v1.0/initialwork-path.cwl b/test/unit/tools/cwl_tools/v1.0/initialwork-path.cwl
new file mode 100644
index 000000000000..d01b30ad102b
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/initialwork-path.cwl
@@ -0,0 +1,15 @@
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ InitialWorkDirRequirement:
+ listing:
+ - entry: $(inputs.file1)
+ entryname: bob.txt
+ ShellCommandRequirement: {}
+inputs:
+ file1: File
+outputs: []
+arguments:
+ - shellQuote: false
+ valueFrom: |
+ test "$(inputs.file1.path)" = "$(runtime.outdir)/bob.txt"
diff --git a/test/unit/tools/cwl_tools/v1.0/initialworkdirrequirement-docker-out-job.json b/test/unit/tools/cwl_tools/v1.0/initialworkdirrequirement-docker-out-job.json
new file mode 100644
index 000000000000..58f80c4af112
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/initialworkdirrequirement-docker-out-job.json
@@ -0,0 +1,6 @@
+{
+ "INPUT": {
+ "class": "File",
+ "location": "ref.fasta"
+ }
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/initialworkdirrequirement-docker-out.cwl b/test/unit/tools/cwl_tools/v1.0/initialworkdirrequirement-docker-out.cwl
new file mode 100644
index 000000000000..366c18b48e47
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/initialworkdirrequirement-docker-out.cwl
@@ -0,0 +1,30 @@
+#!/usr/bin/env cwl-runner
+
+cwlVersion: v1.0
+
+requirements:
+ - class: DockerRequirement
+ dockerPull: debian:stretch-slim
+ - class: InitialWorkDirRequirement
+ listing:
+ - $(inputs.INPUT)
+
+class: CommandLineTool
+
+inputs:
+ - id: INPUT
+ type: File
+
+outputs:
+ - id: OUTPUT
+ type: File
+ outputBinding:
+ glob: $(inputs.INPUT.basename)
+ secondaryFiles:
+ - .fai
+
+arguments:
+ - valueFrom: $(inputs.INPUT.basename).fai
+ position: 0
+
+baseCommand: [touch]
diff --git a/test/unit/tools/cwl_tools/v1.0/inline-js.cwl b/test/unit/tools/cwl_tools/v1.0/inline-js.cwl
new file mode 100755
index 000000000000..585a3dc222d0
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/inline-js.cwl
@@ -0,0 +1,45 @@
+cwlVersion: v1.0
+class: CommandLineTool
+
+requirements:
+ - class: InlineJavascriptRequirement
+hints:
+ - class: DockerRequirement
+ dockerPull: python:2-slim
+
+inputs:
+ - id: args.py
+ type: File
+ default:
+ class: File
+ location: args.py
+ inputBinding:
+ position: -1
+
+outputs:
+ - id: args
+ type:
+ type: array
+ items: string
+
+baseCommand: python
+
+arguments:
+ - prefix: -A
+ valueFrom: $(1+1)
+ - prefix: -B
+ valueFrom: $("/foo/bar/baz".split('/').slice(-1)[0])
+ - prefix: -C
+ valueFrom: |
+ ${
+ var r = [];
+ for (var i = 10; i >= 1; i--) {
+ r.push(i);
+ }
+ return r;
+ }
+ # Test errors similar to https://github.com/common-workflow-language/cwltool/issues/648 are fixed
+ - prefix: -D
+ valueFrom: $(true)
+ - prefix: -E
+ valueFrom: $(false)
diff --git a/test/unit/tools/cwl_tools/v1.0/io-any-1.cwl b/test/unit/tools/cwl_tools/v1.0/io-any-1.cwl
new file mode 100644
index 000000000000..0a1cca295273
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-any-1.cwl
@@ -0,0 +1,17 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ - id: bar
+ type: Any
+
+outputs:
+ - id: t1
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar.class || inputs.bar)
+
+baseCommand: "true"
diff --git a/test/unit/tools/cwl_tools/v1.0/io-any-array.json b/test/unit/tools/cwl_tools/v1.0/io-any-array.json
new file mode 100644
index 000000000000..8150fc370a5d
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-any-array.json
@@ -0,0 +1 @@
+{"bar": [1, "moocow"]}
diff --git a/test/unit/tools/cwl_tools/v1.0/io-any-file.json b/test/unit/tools/cwl_tools/v1.0/io-any-file.json
new file mode 100644
index 000000000000..c2c5f2e84a2a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-any-file.json
@@ -0,0 +1 @@
+{"bar": {"location": "whale.txt", "class": "File"}}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/io-any-int.json b/test/unit/tools/cwl_tools/v1.0/io-any-int.json
new file mode 100644
index 000000000000..1ee628a21b9a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-any-int.json
@@ -0,0 +1 @@
+{"bar": 7}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/io-any-record.json b/test/unit/tools/cwl_tools/v1.0/io-any-record.json
new file mode 100644
index 000000000000..d585efa71fb1
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-any-record.json
@@ -0,0 +1 @@
+{"bar": {"moo": 1, "cow": 5}}
diff --git a/test/unit/tools/cwl_tools/v1.0/io-any-string.json b/test/unit/tools/cwl_tools/v1.0/io-any-string.json
new file mode 100644
index 000000000000..c5ec93f50c61
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-any-string.json
@@ -0,0 +1 @@
+{"bar": "7"}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/io-any-wf-1.cwl b/test/unit/tools/cwl_tools/v1.0/io-any-wf-1.cwl
new file mode 100644
index 000000000000..447dedfc11fa
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-any-wf-1.cwl
@@ -0,0 +1,19 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ bar:
+ type: Any
+
+outputs:
+ t1:
+ type: Any
+ outputSource: step1/t1
+
+steps:
+ step1:
+ in:
+ bar: bar
+ out: [t1]
+ run: io-any-1.cwl
diff --git a/test/unit/tools/cwl_tools/v1.0/io-file-default-wf.cwl b/test/unit/tools/cwl_tools/v1.0/io-file-default-wf.cwl
new file mode 100644
index 000000000000..8f7ef557359f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-file-default-wf.cwl
@@ -0,0 +1,34 @@
+cwlVersion: v1.0
+class: Workflow
+
+inputs:
+ file1:
+ type: File
+ default:
+ class: File
+ path: whale.txt
+
+outputs:
+ o:
+ type: File
+ outputSource: step1/o
+
+steps:
+ step1:
+ in:
+ catfile1: file1
+ out: [o]
+ run:
+ class: CommandLineTool
+
+ inputs:
+ catfile1:
+ type: File
+
+ outputs:
+ o:
+ type: File
+ outputBinding: { glob: output }
+
+ arguments: [cat,$(inputs.catfile1.path)]
+ stdout: output
diff --git a/test/unit/tools/cwl_tools/v1.0/io-file-or-files.cwl b/test/unit/tools/cwl_tools/v1.0/io-file-or-files.cwl
new file mode 100644
index 000000000000..f42b2809b39f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-file-or-files.cwl
@@ -0,0 +1,43 @@
+#!/usr/bin/env cwl-runner
+
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+class: CommandLineTool
+
+inputs:
+ - id: input
+ type:
+ - "null"
+ - File
+ - type: array
+ items: File
+
+outputs:
+ output_file:
+ type: File
+ outputBinding: {glob: output.txt}
+
+arguments:
+ - valueFrom: |
+ ${
+ var cmd = [];
+ if (inputs.input === null) {
+ cmd.push('echo');
+ cmd.push('no_inputs');
+ } else {
+ cmd.push('cat');
+ if (Array.isArray(inputs.input)) {
+ for (var i = 0; i < inputs.input.length; i++) {
+ cmd.push(inputs.input[i].path);
+ }
+ } else {
+ cmd.push(inputs.input.path);
+ }
+ }
+ return cmd;
+ }
+baseCommand: []
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/io-int-default-tool-and-wf.cwl b/test/unit/tools/cwl_tools/v1.0/io-int-default-tool-and-wf.cwl
new file mode 100644
index 000000000000..f05740c90197
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-int-default-tool-and-wf.cwl
@@ -0,0 +1,49 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ i:
+ type: int
+ default: 4
+
+outputs:
+ o:
+ type: int
+ outputSource: step2/o
+
+steps:
+ step1:
+ in:
+ i: i
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: int
+ outputs:
+ o:
+ type: int
+ expression: >
+ ${return {'o': (inputs.i || 2)};}
+ step2:
+ in:
+ i: step1/o
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: int
+ i2:
+ type: int
+ default: 5
+ outputs:
+ o:
+ type: int
+ expression: >
+ ${return {'o': inputs.i * 2 + inputs.i2};}
diff --git a/test/unit/tools/cwl_tools/v1.0/io-int-default-wf.cwl b/test/unit/tools/cwl_tools/v1.0/io-int-default-wf.cwl
new file mode 100644
index 000000000000..2b46b2accd5f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-int-default-wf.cwl
@@ -0,0 +1,32 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ i:
+ type: int
+ default: 4
+
+outputs:
+ o:
+ type: int
+ outputSource: step1/o
+
+steps:
+ step1:
+ in:
+ i: i
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: int
+ outputs:
+ o:
+ type: int
+ expression: >
+ ${return {'o': (inputs.i || 2) * 2};}
diff --git a/test/unit/tools/cwl_tools/v1.0/io-int-optional-wf.cwl b/test/unit/tools/cwl_tools/v1.0/io-int-optional-wf.cwl
new file mode 100644
index 000000000000..e9b3187c0fb1
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-int-optional-wf.cwl
@@ -0,0 +1,31 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ i:
+ type: int?
+
+outputs:
+ o:
+ type: int
+ outputSource: step1/o
+
+steps:
+ step1:
+ in:
+ i: i
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: int?
+ outputs:
+ o:
+ type: int
+ expression: >
+ ${return {'o': (inputs.i || 2) * 2};}
diff --git a/test/unit/tools/cwl_tools/v1.0/io-int-wf.cwl b/test/unit/tools/cwl_tools/v1.0/io-int-wf.cwl
new file mode 100644
index 000000000000..ad1b39882989
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-int-wf.cwl
@@ -0,0 +1,31 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ i:
+ type: int
+
+outputs:
+ o:
+ type: int
+ outputSource: step1/o
+
+steps:
+ step1:
+ in:
+ i: i
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: int
+ outputs:
+ o:
+ type: int
+ expression: >
+ ${return {'o': inputs.i * 2};}
diff --git a/test/unit/tools/cwl_tools/v1.0/io-int.json b/test/unit/tools/cwl_tools/v1.0/io-int.json
new file mode 100644
index 000000000000..6e0326aa9aab
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-int.json
@@ -0,0 +1 @@
+{"i": 5}
diff --git a/test/unit/tools/cwl_tools/v1.0/io-union-input-default-wf.cwl b/test/unit/tools/cwl_tools/v1.0/io-union-input-default-wf.cwl
new file mode 100644
index 000000000000..a2f734e2c432
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/io-union-input-default-wf.cwl
@@ -0,0 +1,38 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ bar:
+ type:
+ - File
+ - 'null'
+ - string
+ default: the default value
+
+outputs:
+ o:
+ type: string
+ outputSource: step1/o
+
+steps:
+ step1:
+ in:
+ i: bar
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type:
+ - File
+ - 'null'
+ - string
+ outputs:
+ o:
+ type: string
+ expression: >
+ ${return {'o': (inputs.i.class || inputs.i)};}
diff --git a/test/unit/tools/cwl_tools/v1.0/iwdr-entry.cwl b/test/unit/tools/cwl_tools/v1.0/iwdr-entry.cwl
new file mode 100644
index 000000000000..e7553a253335
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/iwdr-entry.cwl
@@ -0,0 +1,20 @@
+#!/usr/bin/env cwl-runner
+
+class: CommandLineTool
+cwlVersion: v1.0
+baseCommand: ["cat", "example.conf"]
+
+requirements:
+ InitialWorkDirRequirement:
+ listing:
+ - entryname: example.conf
+ entry: |
+ CONFIGVAR=$(inputs.message)
+
+inputs:
+ message: string
+outputs:
+ out:
+ type: File
+ outputBinding:
+ glob: example.conf
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/iwdr_with_nested_dirs.cwl b/test/unit/tools/cwl_tools/v1.0/iwdr_with_nested_dirs.cwl
new file mode 100644
index 000000000000..2ff899ce3afe
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/iwdr_with_nested_dirs.cwl
@@ -0,0 +1,41 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+class: Workflow
+
+inputs: []
+outputs:
+ ya_empty:
+ type: File
+ outputSource: second/ya
+
+steps:
+ first:
+ run:
+ class: CommandLineTool
+ baseCommand: [ mkdir, -p, deeply/nested/dir/structure ]
+ inputs: []
+ outputs:
+ deep_dir:
+ type: Directory
+ outputBinding: { glob: deeply }
+ in: {}
+ out: [ deep_dir ]
+
+ second:
+ run:
+ class: CommandLineTool
+ baseCommand: [ touch, deeply/nested/dir/structure/ya ]
+ requirements:
+ InitialWorkDirRequirement:
+ listing:
+ - entry: $(inputs.dir)
+ writable: true
+ inputs:
+ dir: Directory
+ outputs:
+ ya:
+ type: File
+ outputBinding: { glob: deeply/nested/dir/structure/ya }
+
+ in: { dir: first/deep_dir }
+ out: [ ya ]
diff --git a/test/unit/tools/cwl_tools/v1.0/job-input-array-few-files.json b/test/unit/tools/cwl_tools/v1.0/job-input-array-few-files.json
new file mode 100644
index 000000000000..58dfbe1b6268
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/job-input-array-few-files.json
@@ -0,0 +1 @@
+{"input": [{"class": "File", "path": "empty.txt"}, {"class": "File", "path": "whale.txt"}, {"class": "File", "path": "number.txt"}]}
diff --git a/test/unit/tools/cwl_tools/v1.0/job-input-array-one-empty-file.json b/test/unit/tools/cwl_tools/v1.0/job-input-array-one-empty-file.json
new file mode 100644
index 000000000000..ef6f39b61a74
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/job-input-array-one-empty-file.json
@@ -0,0 +1 @@
+{"input": [{"class": "File", "path": "empty.txt"}]}
diff --git a/test/unit/tools/cwl_tools/v1.0/job-input-null.json b/test/unit/tools/cwl_tools/v1.0/job-input-null.json
new file mode 100644
index 000000000000..c6c1fc3c2357
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/job-input-null.json
@@ -0,0 +1 @@
+{"input": null}
diff --git a/test/unit/tools/cwl_tools/v1.0/job-input-one-file.json b/test/unit/tools/cwl_tools/v1.0/job-input-one-file.json
new file mode 100644
index 000000000000..ab85840b9f27
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/job-input-one-file.json
@@ -0,0 +1 @@
+{"input": {"class": "File", "path": "whale.txt"}}
diff --git a/test/unit/tools/cwl_tools/v1.0/js-expr-req-wf.cwl b/test/unit/tools/cwl_tools/v1.0/js-expr-req-wf.cwl
new file mode 100644
index 000000000000..7ed60b0083fd
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/js-expr-req-wf.cwl
@@ -0,0 +1,30 @@
+cwlVersion: v1.0
+$graph:
+ - id: tool
+ class: CommandLineTool
+ requirements:
+ InlineJavascriptRequirement:
+ expressionLib:
+ - "function foo() { return 2; }"
+ inputs: []
+ outputs:
+ out: stdout
+ arguments: [echo, $(foo())]
+ stdout: whatever.txt
+
+ - id: wf
+ class: Workflow
+ requirements:
+ InlineJavascriptRequirement:
+ expressionLib:
+ - "function bar() { return 1; }"
+ inputs: []
+ outputs:
+ out:
+ type: File
+ outputSource: tool/out
+ steps:
+ tool:
+ run: "#tool"
+ in: {}
+ out: [out]
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/linkfile.cwl b/test/unit/tools/cwl_tools/v1.0/linkfile.cwl
new file mode 100644
index 000000000000..60f7996176c7
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/linkfile.cwl
@@ -0,0 +1,22 @@
+cwlVersion: v1.0
+class: CommandLineTool
+
+requirements:
+ InitialWorkDirRequirement:
+ listing:
+ - $(inputs.src)
+
+inputs:
+ src:
+ type: File
+ inputBinding:
+ position: 1
+ valueFrom: $(self.nameroot).class
+
+baseCommand: touch
+
+outputs:
+ classfile:
+ type: File
+ outputBinding:
+ glob: "*.class"
diff --git a/test/unit/tools/cwl_tools/v1.0/metadata.cwl b/test/unit/tools/cwl_tools/v1.0/metadata.cwl
new file mode 100644
index 000000000000..f0dd01c1d41a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/metadata.cwl
@@ -0,0 +1,32 @@
+#!/usr/bin/env cwl-runner
+$namespaces:
+ dct: http://purl.org/dc/terms/
+ foaf: http://xmlns.com/foaf/0.1/
+$schemas:
+ - foaf.rdf
+ - dcterms.rdf
+
+cwlVersion: v1.0
+class: CommandLineTool
+doc: "Print the contents of a file to stdout using 'cat' running in a docker container."
+
+dct:creator:
+ id: "http://orcid.org/0000-0003-3566-7705"
+ class: foaf:Person
+ foaf:name: Peter Amstutz
+ foaf:mbox: "mailto:peter.amstutz@curoverse.com"
+
+hints:
+ DockerRequirement:
+ dockerPull: debian:stretch-slim
+inputs:
+ file1:
+ type: File
+ inputBinding: {position: 1}
+ numbering:
+ type: boolean?
+ inputBinding:
+ position: 0
+ prefix: -n
+outputs: []
+baseCommand: cat
diff --git a/test/unit/tools/cwl_tools/v1.0/moocow.txt b/test/unit/tools/cwl_tools/v1.0/moocow.txt
new file mode 100644
index 000000000000..11198bfa2c5c
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/moocow.txt
@@ -0,0 +1 @@
+moo cow
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/nameroot.cwl b/test/unit/tools/cwl_tools/v1.0/nameroot.cwl
new file mode 100644
index 000000000000..a028c62ecc65
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/nameroot.cwl
@@ -0,0 +1,9 @@
+cwlVersion: v1.0
+class: CommandLineTool
+inputs:
+ file1: File
+outputs:
+ b: stdout
+stdout: $(inputs.file1.nameroot).xtx
+baseCommand: []
+arguments: [echo, $(inputs.file1.basename), $(inputs.file1.nameroot), $(inputs.file1.nameext)]
diff --git a/test/unit/tools/cwl_tools/v1.0/nested-array-job.yml b/test/unit/tools/cwl_tools/v1.0/nested-array-job.yml
new file mode 100644
index 000000000000..5b82ec4a2fa3
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/nested-array-job.yml
@@ -0,0 +1 @@
+letters: [[a]]
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/nested-array.cwl b/test/unit/tools/cwl_tools/v1.0/nested-array.cwl
new file mode 100644
index 000000000000..62f316f28f92
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/nested-array.cwl
@@ -0,0 +1,15 @@
+cwlVersion: v1.0
+class: CommandLineTool
+baseCommand: echo
+inputs:
+ letters:
+ type:
+ type: array
+ items:
+ type: array
+ items: string
+ inputBinding:
+ position: 1
+stdout: echo.txt
+outputs:
+ echo: stdout
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/null-defined.cwl b/test/unit/tools/cwl_tools/v1.0/null-defined.cwl
new file mode 100644
index 000000000000..097d7822f65b
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/null-defined.cwl
@@ -0,0 +1,15 @@
+cwlVersion: v1.0
+class: CommandLineTool
+requirements:
+ InlineJavascriptRequirement: {}
+inputs:
+ file1: File?
+outputs:
+ out:
+ type: string
+ outputBinding:
+ glob: out.txt
+ loadContents: true
+ outputEval: $(self[0].contents)
+stdout: out.txt
+arguments: [echo, '$(inputs.file1 === null ? "t" : "f")']
diff --git a/test/unit/tools/cwl_tools/v1.0/null-expression1-job.json b/test/unit/tools/cwl_tools/v1.0/null-expression1-job.json
new file mode 100644
index 000000000000..b8ffcd56b138
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/null-expression1-job.json
@@ -0,0 +1,3 @@
+{
+ "i1": null
+}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/null-expression1-tool.cwl b/test/unit/tools/cwl_tools/v1.0/null-expression1-tool.cwl
new file mode 100644
index 000000000000..828fcfe89e72
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/null-expression1-tool.cwl
@@ -0,0 +1,16 @@
+#!/usr/bin/env cwl-runner
+
+class: ExpressionTool
+requirements:
+ - class: InlineJavascriptRequirement
+cwlVersion: v1.0
+
+inputs:
+ i1:
+ type: Any
+ default: "the-default"
+
+outputs:
+ output: int
+
+expression: "$({'output': (inputs.i1 == 'the-default' ? 1 : 2)})"
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/null-expression2-job.json b/test/unit/tools/cwl_tools/v1.0/null-expression2-job.json
new file mode 100644
index 000000000000..f8f690e3dd25
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/null-expression2-job.json
@@ -0,0 +1,3 @@
+{
+ "i1": "null"
+}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/null-expression2-tool.cwl b/test/unit/tools/cwl_tools/v1.0/null-expression2-tool.cwl
new file mode 100644
index 000000000000..9c9e8c1a673c
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/null-expression2-tool.cwl
@@ -0,0 +1,14 @@
+#!/usr/bin/env cwl-runner
+
+class: ExpressionTool
+requirements:
+ - class: InlineJavascriptRequirement
+cwlVersion: v1.0
+
+inputs:
+ i1: Any
+
+outputs:
+ output: int
+
+expression: "$({'output': (inputs.i1 == 'the-default' ? 1 : 2)})"
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/number.txt b/test/unit/tools/cwl_tools/v1.0/number.txt
new file mode 100644
index 000000000000..d81cc0710eb6
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/number.txt
@@ -0,0 +1 @@
+42
diff --git a/test/unit/tools/cwl_tools/v1.0/optional-output.cwl b/test/unit/tools/cwl_tools/v1.0/optional-output.cwl
new file mode 100644
index 000000000000..16cf36a1ad24
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/optional-output.cwl
@@ -0,0 +1,26 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: "v1.0"
+doc: "Print the contents of a file to stdout using 'cat' running in a docker container."
+hints:
+ DockerRequirement:
+ dockerPull: debian:stretch-slim
+inputs:
+ file1:
+ type: File
+ label: Input File
+ doc: "The file that will be copied using 'cat'"
+ inputBinding: {position: 1}
+outputs:
+ output_file:
+ type: File
+ outputBinding:
+ glob: output.txt
+ secondaryFiles:
+ - .idx
+ optional_file:
+ type: File?
+ outputBinding:
+ glob: bumble.txt
+baseCommand: cat
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/output-arrays-file-job.json b/test/unit/tools/cwl_tools/v1.0/output-arrays-file-job.json
new file mode 100644
index 000000000000..4519397b6dad
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/output-arrays-file-job.json
@@ -0,0 +1 @@
+{"i": {"class": "File", "location": "moocow.txt"}}
diff --git a/test/unit/tools/cwl_tools/v1.0/output-arrays-file-wf.cwl b/test/unit/tools/cwl_tools/v1.0/output-arrays-file-wf.cwl
new file mode 100644
index 000000000000..30b315f7dc3d
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/output-arrays-file-wf.cwl
@@ -0,0 +1,53 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ i: File
+
+outputs:
+ o:
+ type: File[]
+ outputSource: step2/o
+
+steps:
+ step1:
+ in:
+ i: i
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: File
+ inputBinding: { loadContents: true }
+
+ outputs:
+ o:
+ type: string[]
+ expression: >
+ ${return {'o': inputs.i.contents.split(" ")};}
+ step2:
+ in:
+ i:
+ source: step1/o
+ out: [o]
+ run:
+ class: CommandLineTool
+
+ inputs:
+ i:
+ type: string[]
+ inputBinding:
+ position: 1
+
+ outputs:
+ o:
+ type: File[]
+ outputBinding:
+ glob: $(inputs.i)
+
+ baseCommand: touch
diff --git a/test/unit/tools/cwl_tools/v1.0/output-arrays-int-job.json b/test/unit/tools/cwl_tools/v1.0/output-arrays-int-job.json
new file mode 100644
index 000000000000..1a74992cf1b0
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/output-arrays-int-job.json
@@ -0,0 +1 @@
+{"i": 3}
diff --git a/test/unit/tools/cwl_tools/v1.0/output-arrays-int-wf.cwl b/test/unit/tools/cwl_tools/v1.0/output-arrays-int-wf.cwl
new file mode 100644
index 000000000000..58641846ac02
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/output-arrays-int-wf.cwl
@@ -0,0 +1,60 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ i: int
+
+outputs:
+ o:
+ type: int
+ outputSource: step3/o
+
+steps:
+ step1:
+ in:
+ i: i
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: int
+ outputs:
+ o:
+ type: int[]
+ expression: >
+ ${return {'o': Array.apply(null, {length: inputs.i}).map(Number.call, Number)};}
+ step2:
+ in:
+ i:
+ source: step1/o
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: int[]
+ outputs:
+ o:
+ type: int[]
+ expression: >
+ ${return {'o': inputs.i.map(function(x) { return (x + 1) * 2; })};}
+ step3:
+ in:
+ i:
+ source: step2/o
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: int[]
+ outputs:
+ o:
+ type: int
+ expression: >
+ ${return {'o': inputs.i.reduce(function(a, b) { return a + b; })};}
diff --git a/test/unit/tools/cwl_tools/v1.0/output-arrays-int.cwl b/test/unit/tools/cwl_tools/v1.0/output-arrays-int.cwl
new file mode 100644
index 000000000000..e66dc7c502f6
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/output-arrays-int.cwl
@@ -0,0 +1,18 @@
+#!/usr/bin/env cwl-runner
+
+cwlVersion: v1.0
+class: ExpressionTool
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ i:
+ type: int
+
+outputs:
+ o:
+ type: int[]
+
+expression: >
+ ${return {'o': Array.apply(null, {length: inputs.i}).map(Number.call, Number)};}
diff --git a/test/unit/tools/cwl_tools/v1.0/params.cwl b/test/unit/tools/cwl_tools/v1.0/params.cwl
new file mode 100644
index 000000000000..7b52f83b9d93
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/params.cwl
@@ -0,0 +1,16 @@
+class: CommandLineTool
+cwlVersion: v1.0
+inputs:
+ bar:
+ type: Any
+ default: {
+ "baz": "zab1",
+ "b az": 2,
+ "b'az": true,
+ 'b"az': null,
+ "buz": ['a', 'b', 'c']
+ }
+
+outputs: {"$import": params_inc.yml}
+
+baseCommand: "true"
diff --git a/test/unit/tools/cwl_tools/v1.0/params2.cwl b/test/unit/tools/cwl_tools/v1.0/params2.cwl
new file mode 100644
index 000000000000..5a0e34565477
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/params2.cwl
@@ -0,0 +1,19 @@
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: InlineJavascriptRequirement # needed by params_inc.yml
+
+inputs:
+ bar:
+ type: Any
+ default: {
+ "baz": "zab1",
+ "b az": 2,
+ "b'az": true,
+ 'b"az': null,
+ "buz": ['a', 'b', 'c']
+ }
+
+outputs: {"$import": params_inc.yml}
+
+baseCommand: "true"
diff --git a/test/unit/tools/cwl_tools/v1.0/params_inc.yml b/test/unit/tools/cwl_tools/v1.0/params_inc.yml
new file mode 100644
index 000000000000..930d2e89cc54
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/params_inc.yml
@@ -0,0 +1,120 @@
+ - id: t1
+ type: Any
+ outputBinding:
+ outputEval: $(inputs)
+ - id: t2
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar)
+ - id: t3
+ type: Any
+ outputBinding:
+ outputEval: $(inputs['bar'])
+ - id: t4
+ type: Any
+ outputBinding:
+ outputEval: $(inputs["bar"])
+
+ - id: t5
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar.baz)
+ - id: t6
+ type: Any
+ outputBinding:
+ outputEval: $(inputs['bar'].baz)
+ - id: t7
+ type: Any
+ outputBinding:
+ outputEval: $(inputs['bar']["baz"])
+ - id: t8
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar['baz'])
+
+ - id: t9
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar['b az'])
+ - id: t10
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar['b\'az'])
+ - id: t11
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar["b'az"])
+ - id: t12
+ type: "null"
+ outputBinding:
+ outputEval: $(inputs.bar['b"az'])
+
+ - id: t13
+ type: Any
+ outputBinding:
+ outputEval: -$(inputs.bar.baz)
+ - id: t14
+ type: Any
+ outputBinding:
+ outputEval: -$(inputs['bar'].baz)
+ - id: t15
+ type: Any
+ outputBinding:
+ outputEval: -$(inputs['bar']["baz"])
+ - id: t16
+ type: Any
+ outputBinding:
+ outputEval: -$(inputs.bar['baz'])
+
+ - id: t17
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar.baz) $(inputs.bar.baz)
+ - id: t18
+ type: Any
+ outputBinding:
+ outputEval: $(inputs['bar'].baz) $(inputs['bar'].baz)
+ - id: t19
+ type: Any
+ outputBinding:
+ outputEval: $(inputs['bar']["baz"]) $(inputs['bar']["baz"])
+ - id: t20
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar['baz']) $(inputs.bar['baz'])
+
+ - id: t21
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar['b az']) $(inputs.bar['b az'])
+ - id: t22
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar['b\'az']) $(inputs.bar['b\'az'])
+ - id: t23
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar["b'az"]) $(inputs.bar["b'az"])
+ - id: t24
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar['b"az']) $(inputs.bar['b"az'])
+
+ - id: t25
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar.buz[1])
+ - id: t26
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar.buz[1]) $(inputs.bar.buz[1])
+
+ - id: t27
+ type: "null"
+ outputBinding:
+ outputEval: $(null)
+
+ - id: t28
+ type: int
+ outputBinding:
+ outputEval: $(inputs.bar.buz.length)
diff --git a/test/unit/tools/cwl_tools/v1.0/parseInt-job.json b/test/unit/tools/cwl_tools/v1.0/parseInt-job.json
new file mode 100644
index 000000000000..b584ea23ba0b
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/parseInt-job.json
@@ -0,0 +1,6 @@
+{
+ "file1": {
+ "class": "File",
+ "location": "number.txt"
+ }
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/parseInt-tool.cwl b/test/unit/tools/cwl_tools/v1.0/parseInt-tool.cwl
new file mode 100755
index 000000000000..42f166bde21f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/parseInt-tool.cwl
@@ -0,0 +1,16 @@
+#!/usr/bin/env cwl-runner
+
+class: ExpressionTool
+requirements:
+ - class: InlineJavascriptRequirement
+cwlVersion: v1.0
+
+inputs:
+ file1:
+ type: File
+ inputBinding: { loadContents: true }
+
+outputs:
+ output: int
+
+expression: "$({'output': parseInt(inputs.file1.contents)})"
diff --git a/test/unit/tools/cwl_tools/v1.0/pass-unconnected.cwl b/test/unit/tools/cwl_tools/v1.0/pass-unconnected.cwl
new file mode 100644
index 000000000000..349c7eb417d6
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/pass-unconnected.cwl
@@ -0,0 +1,20 @@
+class: Workflow
+cwlVersion: v1.0
+inputs:
+ inp1:
+ type: string
+ default: hello inp1
+ inp2:
+ type: string
+ default: hello inp2
+outputs:
+ out:
+ type: string
+ outputSource: step1/out
+steps:
+ step1:
+ in:
+ in: inp1
+ in2: inp2
+ out: [out]
+ run: echo-tool.cwl
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/reads.fastq b/test/unit/tools/cwl_tools/v1.0/reads.fastq
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/tools/cwl_tools/v1.0/record-output-job.json b/test/unit/tools/cwl_tools/v1.0/record-output-job.json
new file mode 100644
index 000000000000..df29550c2a3f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/record-output-job.json
@@ -0,0 +1,6 @@
+{
+ "irec": {
+ "ifoo": {"location": "whale.txt", "class": "File"},
+ "ibar": {"location": "ref.fasta", "class": "File"}
+ }
+}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/record-output-wf.cwl b/test/unit/tools/cwl_tools/v1.0/record-output-wf.cwl
new file mode 100644
index 000000000000..e45b8dc237d8
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/record-output-wf.cwl
@@ -0,0 +1,33 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ irec:
+ type:
+ name: irec
+ type: record
+ fields:
+ - name: ifoo
+ type: File
+ - name: ibar
+ type: File
+
+outputs:
+ orec:
+ type:
+ name: orec
+ type: record
+ fields:
+ - name: ofoo
+ type: File
+ - name: obar
+ type: File
+ outputSource: step1/orec
+
+steps:
+ step1:
+ run: record-output.cwl
+ in:
+ irec: irec
+ out: [orec]
diff --git a/test/unit/tools/cwl_tools/v1.0/record-output.cwl b/test/unit/tools/cwl_tools/v1.0/record-output.cwl
new file mode 100644
index 000000000000..c6be952a89bf
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/record-output.cwl
@@ -0,0 +1,38 @@
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: ShellCommandRequirement
+inputs:
+ irec:
+ type:
+ name: irec
+ type: record
+ fields:
+ - name: ifoo
+ type: File
+ inputBinding:
+ position: 2
+ - name: ibar
+ type: File
+ inputBinding:
+ position: 6
+outputs:
+ orec:
+ type:
+ name: orec
+ type: record
+ fields:
+ - name: ofoo
+ type: File
+ outputBinding:
+ glob: foo
+ - name: obar
+ type: File
+ outputBinding:
+ glob: bar
+arguments:
+ - {valueFrom: "cat", position: 1}
+ - {valueFrom: "> foo", position: 3, shellQuote: false}
+ - {valueFrom: "&&", position: 4, shellQuote: false}
+ - {valueFrom: "cat", position: 5}
+ - {valueFrom: "> bar", position: 7, shellQuote: false}
diff --git a/test/unit/tools/cwl_tools/v1.0/recursive-input-directory.cwl b/test/unit/tools/cwl_tools/v1.0/recursive-input-directory.cwl
new file mode 100644
index 000000000000..0353a3b29a6e
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/recursive-input-directory.cwl
@@ -0,0 +1,33 @@
+cwlVersion: v1.0
+class: CommandLineTool
+requirements:
+ InitialWorkDirRequirement:
+ listing:
+ - entry: $(inputs.input_dir)
+ entryname: work_dir
+ writable: true
+ ShellCommandRequirement: {}
+stdout: output.txt
+arguments:
+ - shellQuote: false
+ valueFrom: |
+ touch work_dir/e;
+ if [ ! -w work_dir ]; then echo work_dir not writable; fi;
+ if [ -L work_dir ]; then echo work_dir is a symlink; fi;
+ if [ ! -w work_dir/a ]; then echo work_dir/a not writable; fi;
+ if [ -L work_dir/a ]; then echo work_dir/a is a symlink; fi;
+ if [ ! -w work_dir/c ]; then echo work_dir/c not writable; fi;
+ if [ -L work_dir/c ]; then echo work_dir/c is a symlink; fi;
+ if [ ! -w work_dir/c/d ]; then echo work_dir/c/d not writable; fi;
+ if [ -L work_dir/c/d ]; then echo work_dir/c/d is a symlink; fi;
+ if [ ! -w work_dir/e ]; then echo work_dir/e not writable; fi;
+ if [ -L work_dir/e ]; then echo work_dir/e is a symlink ; fi;
+inputs:
+ input_dir: Directory
+outputs:
+ output_dir:
+ type: Directory
+ outputBinding:
+ glob: work_dir
+ test_result:
+ type: stdout
diff --git a/test/unit/tools/cwl_tools/v1.0/recursive-input-directory.yml b/test/unit/tools/cwl_tools/v1.0/recursive-input-directory.yml
new file mode 100644
index 000000000000..7c83f3f26914
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/recursive-input-directory.yml
@@ -0,0 +1,3 @@
+input_dir:
+ class: Directory
+ location: testdir
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/ref.fasta b/test/unit/tools/cwl_tools/v1.0/ref.fasta
new file mode 100644
index 000000000000..bad272557772
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/ref.fasta
@@ -0,0 +1,10 @@
+>0$chr1$9001$11468
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCCTAACCCTAACCCTAACCCTAACCCTAACCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCCTAACCCTAACCCTAAACCCTAAACCCTAACCCTAACCCTAACCCTAACCCTAACCCCAACCCCAACCCCAACCCCAACCCCAACCCCAACCCTAACCCCTAACCCTAACCCTAACCCTACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCCTAACCCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCCTAACCCTAACCCTAACCCTAACCCTCGCGGTACCCTCAGCCGGCCCGCCCGCCCGGGTCTGACCTGAGGAGAACTGTGCTCCGCCTTCAGAGTACCACCGAAATCTGTGCAGAGGACAACGCAGCTCCGCCCTCGCGGTGCTCTCCGGGTCTGTGCTGAGGAGAACGCAACTCCGCCGTTGCAAAGGCGCGCCGCGCCGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGACACATGCTAGCGCGTCGGGGTGGAGGCGTGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGAGACACATGCTACCGCGTCCAGGGGTGGAGGCGTGGCGCAGGCGCAGAGAGGCGCACCGCGCCGGCGCAGGCGCAGAGA
CACATGCTAGCGCGTCCAGGGGTGGAGGCGTGGCGCAGGCGCAGAGACGCAAGCCTACGGGCGGGGGTTGGGGGGGCGTGTGTTGCAGGAGCAAAGTCGCACGGCGCCGGGCTGGGGCGGGGGGAGGGTGGCGCCGTGCACGCGCAGAAACTCACGTCACGGTGGCGCGGCGCAGAGACGGGTAGAACCTCAGTAATCCGAAAAGCCGGGATCGACCGCCCCTTGCTTGCAGCCGGGCACTACAGGACCCGCTTGCTCACGGTGCTGTGCCAGGGCGCCCCCTGCTGGCGACTAGGGCAACTGCAGGGCTCTCTTGCTTAGAGTGGTGGCCAGCGCCCCCTGCTGGCGCCGGGGCACTGCAGGGCCCTCTTGCTTACTGTATAGTGGTGGCACGCCGCCTGCTGGCAGCTAGGGACATTGCAGGGTCCTCTTGCTCAAGGTGTAGTGGCAGCACGCCCACCTGCTGGCAGCTGGGGACACTGCCGGGCCCTCTTGCTCCAACAGTACTGGCGGATTATNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+>1$chr1$53713$55817
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNATAGATGGAATAAATAAAATGTGAACTTAGGTAAATTATAAATTAATAAAGTATATTTTTAAAATTTCCATTTTAATTTCTGTTTAAATTAGAATAAGAAACAAAAACAACTATGTAATACGTGTGCAAAGCCCTGAACTGAGATTTGACTTTACCTTGAGCTTTGTCAGTTTACGATGCTATTTCAGTTTTGTGCTCAGATTTGAGTGATTGCAGGAAGAGAATAAATTTCTTTAATGCTGTCAAGACTTTAAATAGATACAGACAGAGCATTTTCACTTTTTCCTACATCTCTATTATTCTAAAAATGAGAACATTCCAAAAGTCAACCATCCAAGTTTATTCTAAATAGATGTGTAGAAATAACAGTTGTTTCACAGGAGACTAATCGCCCAAGGATATGTGTTTAGAGGTACTGGTTTCTTAAATAAGGTTTTCTAGTCAGGCAAAAGATTCCCTGGAGCTTATGCATCTGTGGTTGATATTTTGGGATAAGAATAAAGCTAGAAATGGTGAGGCATATTCAATTTCATTGAAGATTTCTGCATTCAAAATAAAAACTCTATTGAAGTTACACATACTTTTTTCATGTATTTGTTTCTACTGCTTTGTAAATTATAACAGCTCAATTAAGAGAAACCGTACCTATGCTATTTTGTCCTGTGATTCTCCAAGAACCTTCCTAAGTTATTCTACTTAATTGCTTTATCACTCATATGAATGGGAATTTCTTCTCTTAATTGCTGCTAATCTCCCCCATCTTCAAATACTCTACCGGGCTTCTGGAACACCACAGCTTCCTGGCTTTTTCTCCTACCTCCTGGGCAAGTCCTTCCCTGTGTCTTTTGTTGAGTGTTCCTCATCTGCTTAACTACCAATCAACCTATTGCCCCTAATTTGATCTTTGGCCTGTTTTCACTTAGATTCTATCCCTACGTATCACCCATTCCCACAGCTTTAATCACCATCTAAACACTAGGGGCTCTCAAACCTTGTATTTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTCCTCCTTTTCTTTCCTTTTCTTTCTTTCATTCTTTCTTTCTTTTTTAAGGGGCAGGGTCTCACTATGTTGCTGAGGCTGGTCTCAAACTCCTGACCTCAAGCAATCTGTCTGCTTCAGCCTCCCAAGTAGCTGAGAATACAGGGACAAGCCATTGCACCTGACCCTGGTACTATTTCTTGAGTTCCTGATCCACAGATCTAACCTCCTACTTTCCTGGATGCCACACAAGATCTTCCACTCAACAAGTCTGCAACTAAACTAGCCTTCCTCTTTTCAAACCTACTCTTCTTTCAGTGTTCTCAGTCACAATAATTTGTACCAACTAGTTACCTAGTTGCACAACCCAAAATCTGGGAAAAATAATAGATTTCTTTCTCCATAGTACCCCCAAATCAATAAATCATCAAGTCTTATTCTACCTTCCAAAGAGCCTTACATATGTTCCTTTATTTTCATCTGTAACACCACTATTCCTGTCTAAGCCTACCTATGTCATTTTTGGAAGAGAATATAGTCACCTATGCGACCTTCCCACTTAAAATCCTACTATTTACGCTTCAGTAAAAGAAAAAAAATTTTTAATCTAAGTATGTAATTCTTTTGCTGAAGACACTTCACTTGCTTCTGTGCCCTTAAACTGGTATGTTATCATGGTATAGTAGGCCATCCAAGACCTGGCTTCCTTCCTTTTTTTCAGTCTCAGAGAATAACATACTCTTTCCCTGCAACTCCAGATCCAATTTGGTTTTCTTTTACTTGCCTGGAAACTCCAAAATCTATCAACTCTGGGGCTTTCCACTAGCTAATCATTTTGTATACAATATTTGTCCTTCATGTTTTGCCTCTTAACATCTCAGCTTTCAGTTTCATCATTTTACCAGGGAGGC
CTCCCAGAACCTGAGTCCAGAAGAGTTCCTTCCATTGTATATTCCTCTAGCACTACCTATTACCTCTTTTGTAAGACTAACAGCCCTCAAAATTTTTCATTCAGTGATGTCTTCCTCATTGCATTTTAAGTTCAACATGAGCAGGACTTTGTCGTNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+>2$chr1$65161$67630
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAATCGAAGCATTATTACTTACTCTCTTGTTAACCTATCTGGATTTTAATTTTGTAACTTTATTATATTTGTTTTGCTGTGATTCTTTAAAAAGCACCTTTAGACTCAGTGAGATAGCAAAAATATCCAAATAGGCCAAAAAATTGTGGCAATGTCCTCTCACTCAGGAAAATTCTGTGTGTTTTCTCTAATGGCCAAGGGAAAACTTGTGAGACTATAAAAGTTAGTCTCAGTACACAAAGCTCAGACTGGCTATTCCCAGATCTCTTCAGGTACATCTAGTCCATTCATAAAGGGCTTTTAATTAACCAAGTGGTTTACTAAAAAGGACAATTCACTACATATTATTCTCTTACAGTTTTTATGCCTCATTCTGTGAAAATTGCTGTAGTCTCTTCCAGTTATGAAGAAGGTAGGTGGAAACAAAGACAAAACACATATATTAGAAGAATGAATGAAATTGTAGCATTTTATTGACAATGAGATGGTTCTATTAGTAGGAATCTATTCTGCATAATTCCATTTTGTGTTTACCTTCTGGAAAAATGAAAGGATTCTGTATGGTTAACTTAAATACTTAGAGAAATTAATATGAATAATGTTAGCAAGAATAACCCTTGTTATAAGTATTATGCTGGCAACAATTGTCGAGTCCTCCTCCTCACTCTTCTGGGCTAATTTGTTCTTTTCTCCCCATTTAATAGTCCTTTTCCCCATCTTTCCCCAGGTCCGGTGTTTTCTTACCCACCTCCTTCCCTCCTTTTTATAATACCAGTGAAACTTGGTTTGGAGCATTTCTTTCACATAAAGGTACAAATCATACTGCTAGAGTTGTGAGGATTTTTACAGCTTTTGAAAGAATAAACTCATTTTAAAAACAGGAAAGCTAAGGCCCAGAGATTTTTAAATGATATTCCCATGATCACACTGTGAATTTGTGCCAGAACCCAAATGCCTACTCCCATCTCACTGAGACTTACTATAAGGACATAAGGCATTTATATATATATATATTATATATACTATATATTTATATATATTACATATTATATATATAATATATATTATATAATATATATTATATTATATAATATATAATATAAATATAATATAAATTATATTATATAATATATAATATAAATATAATATAAATTATATAAATATAATATATATTTTATTATATAATATAATATATATTATATAAATATAATATATAAATTATATAATATAATATATATTATATAATATAATATATTTTATTATATAAATATATATTATATTATATAATATATATTTTATTATATAATATATATTATATATTTATAGAATATAATATATATTTTATTATATAATATATATTATATAATATATATTATATTTATATATAACATATATTATTATATAAAATATGTATAATATATATTATATAAATATATTTATATATTATATAAATATATATATTATATATAATTCTAATGGTTGAATTCCAAGAATAATCTATGGCATGAAAGATTTTACCTGTCAACAGTGGCTGGCTCTTCATGGTTGCTACAATGAGTGTGTAAGATTCTGAAGGACTCCTTTAATAAGCCTAAACTTAATGTTCAACTTAGAATAAATACAATTCTTCTAATTTTTTTTGAATAATTTTTAAAAAGTCAGAAATGAGCTTTGAAAGAATTATGGTGGTGAAGGATCCCCTCAGCAGCACAAATTCAGGAGAGAGATGTCTTAACTACGTTAGCAAGAAATTCCTTTTGCTAAAGAATAGCATTCCTGAATTCTTACTAACAGCCATGATAGAAAGTCTTTTGCTACAGATGAGAACCCTCGGGTCAACCTCATCCTTGGCATATTTCATGTGAAGATATAACTTCAAGATTGTCCTTGCCTATCAATGAAATGAATTAATTTTATGTCAATGCATATTTAAGGTCTATTCTAAATTG
CACACTTTGATTCAAAAGAAACAGTCCAACCAACCAGTCAGGACAGAAATTATCTCACAATAAAAATCCTATCGTTTGTACTGTCAATGATTAGTATGATTATATTTATTACCGTGCTAAGCAGAAGAGAAATGAAGTGAATGTTCATGATTTATTCCACTATTAGACTTCTCTTTATTCTTAAAAATATTTAAGATCACTAAATTTTTATAGGACTTTAAAAACAGTAATGTGCTGCTTTGAGTGTGTAGGACTAAGAAATGGGATTCAGAGTAGTAAAGAGAAAAGTGGAATTTCCAAGCACTATGAATTACTGTTCTTTAAAAAACAGCAAAAATCAAATAACAGTATTCCTCCAAAAAAGATGGCAAGTGTAAACTCTATACCTTCATGTCTCCCGTGGAATGTTAGTGATCAATTTCCACTTCTCTCTTTTACATCTTACTTGCCCATTAACTCTTATACCTAATCCAAAGATTGTTAATATGGCTATGTCTCACTTTCAGGACACCTTTTATTTNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+>3$chr1$82792$85041
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNACTACATCCCACAGTGGGTAACAAAAATAACCTTGAAGAAGGGAAAAATTTGGTTTCCAGAATAAACACATTATAATATCCAAAATGTCCAGTTTTCAACAAAAATTAAGAAGCATGCAAATAAACACAAAACTATGGCCCATTTACAGAAGAAATAAATGAGACTCTCCCTGAGTAAGCAGATATTGAAAATATTAGACAAAAACTTTATATAACTGTCTTAAATAAACTTAAAGAGCTAAAGAAACCCAAGAGAATGACATATAAATAAATAAGAAATATGAATTTTTTTAAAGGTACAAAAAAATTCTGAGGCTGAAAAGTACAATAAGTAAAAAGTTACTTTTTACTTAGGGTTCCAATAGAAGATTTGAGCAGCTGGAAAAAAGAATCAGTGAACTTGATAGATCAAATGAAATGATTCAGTCTGAAGAGCAGGAAAATGAAAGAATGACAACAAAAAAGAATAGAGCCTAAAGACCTGTGTAACAACATCAAGAATGCCTACATACAGAATCCTGGTGGGGAGTGAGGGGCAGGAAGACTATTTGAAGAAATGTGTTTGAAAGCTTCCCAAATTTCACTAAAAACAAATATATACATTCAAAAAGCTCAGTGAACTTCATCAAGGAAATATACAAAGATATTCACACCAAGACACACTATGTTTCAAATTGTCAAAAGGCAAAGCGAATGTTTGAAAGCAGCAAGAGAAAGGCAACGCGTCATTTACAAAGGATCCTCAATAAGTTTGACAGCAGATAGTGCATTATAAGCCATGGATGCCAGAAGAGCTTAGGAAAAAGGCAACGCGTCATTTACAAAGGATCCTCAGTAAGTTTGACAGCAGAGAGCTCATTATAAACCATGGGTGCCAGAAGAGCTTAGGATGACATTTTAAAGTTCTGAAAGAAAAAAACACTGTCAACCAAAAATTCTATAACTTGGAAGATGCCCCTTCAAGTATTAAGGATAAATTACACATTCCCAGATTAAAAAAAAGAAAGAGAGAGAGAGAGAAAGAGAAAGAAAGAAAGAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAGAGAAAGAAAGAAAGAAGAAAGAGAAAGAAAGAAAGAAAGAGAGAGAGAAAGAGAGAGAAAGAAAAAGAAGGAAAGAAAGAAAGAAAGAAAAAAGAAAGAAAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAAGCAAGCAAGCTTTAAAAGTTCATGTTTGGTAGGCTGTACTTCAAGATACACTTTTAAAAAAAAGACTCCTTCAGATACAAACTAAAAAACACTAGAAAGTAACTCAAAACCACATAAAGAAATAACTCCAGTAAAGATAACTACATAGGTAAATATAAAAGCAATTATCACATTTTTTGTAAGTCTTTTTTAATATTCTATATGTTTTAAAACAAATGTGTAAAATAATGACTATAAATCTATGTTAATGAAGCATGATGTATACAGATGTGGTTTGTGAAATTACCAACATAAAGAAATTCATAGGAAACTAAATAATAATAGAGATTTTGTATACTATTGAAGTTGTTTCAATTTACTCTAAATTGTTCCAAATTAAGAATGTTAATTGTAAATCCCCATGGTAACCACTAAGTTAATATCTTTTGAAAATACAGAAAAGGAAAGCACAGGGTAAACACAGTGATATGCTACAAAATAGCAACTAAACACAAAAGAAGGCGATAATTGAGGAAATTAGGAACAAAGGAGGTATAAGACATACAGAAAACAAAAGCAAAATGGTAGGAGTAAGCCCCTCTTTATCAGTAATTACATTAAATACAAATGAATTAAACTCTCCAATCCAAAGAAAGAGATTAACAGAATGGATTTTTTAAAAATGATCCAACTATATTGTCCACAAGATACTCACTTTAG
ATCAAAATACACAATGAGTTGAAATGAAAGGATGGGAGAAAATATTCCATGTAAGTAATAACCAAAGGAGATCTGAGGCAAATATACTTATATCAGACAAAATAGACTTTAAGTCAAAAACTGTTACAAAATACAAAGAACAGTATATATTGATTTCAAAATTAATTAAGAAGATATAACAATTATAAATATATGTACACCAACTAACAGGGCTCCAAAATATATAATGTAACCATTGAGAGAATTAAAGGGAGAGACAGACAATTCCACGAAAATTGTTGGGCATTTGAAAACCCAACTNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+>4$chr1$98000$100116
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNCCATGACAGGCAGAGCTCCCTGTTGAGCCACAGAGATTTAGAGAATGGCTGTTAACACAGCATAATCCAGCCCATCCTGACTAATCTGATATTAACATGTATAATAAAGAATTCTATCAATGCTGAGGGAAGATGACTAGTTAAGGTCCTAGGTTGCAAGTCTCAAAACCTCTTCTAAGGATTGTAGACAGGAAATTAAATGACTTCTAGTCCCTAGAGTTCCCAATCTCCTACCATCCCATCCTAATATGACAGAAGTAATTCCTGAGTTGCTTCTGAAACCAGAGCTTCCCTCAGAACCCTTAGCCTGCCAGATGGCTTCTTGGAGAGCCCTCACTCACTTTTCTCCTTCTGCTATTGCTGCTCATTCATTCCAGTTTTTAAAAATTCATCTTTATCCAGGAACCTCGCTTCTAGAAAAGTCATACAGGTGCTTCCAGGAGGCTACATGGGCACCCATATTTTTCTAGCCACTTTCATTAGACCAATGCAGCAGAGAAGAAAAGCCTCAATAATTATTATGACATGGCATGTTAGGATACCAAGTAAATTGCATTTGTAAAATGTGATTTTCTGTTGGTGTTCACTTCAGCTCTACTGACATTTGGTAAGTATTATTGACTGACTGACTAACTAATGTGGTCATTAGTCTTCATAAAGAAAGGCTCTCTACAAAAACGGAGGGATGCCCTTTTTCTGGCATTTAATACGTAAGAAATTGCCTCCAATAGAAACCAGAGTTGCCTGATTACTATCAGCACAGGAGAAATGTATTAATGTGCCTTTCTAGTAACAGGTTTTTAGAAAGTCAAATATAAACAAATCTGTCTATTTGTGTGTGTGCATGTGGTAGTGGGGAGGGAAGAAAAAAGGAGGGGGAGAGAAAGAGAAATAAGAACCAAGTTTATTATACTGTATTCAGGGGGAAAAAATTTTCCCAAGGTCCTAACAGAAGAGCAAAGTGCCACTGTCAATAGCCTCAGTAGTGTTAGGGTTGCTTTTATTTATTTATTTATTTATTTATTTATTTATTTATTTATTTTTCCTTTTTTTTCTTTCTCTTTTTTTCTTCTTTTTTTTTTCTTTTCTTTCTTTTTTTTTTTTTTTTTTTTTTTTGGACAGAGTCTCACACTGTCACCTGGGCTGGAGTGCATTGGTGCAATCTCGACTCACTGCAACTTCTGCCTCCCAGGTTCAAGTGATTCTCCTGCCTCAGCCGCCCAAGTAGCTGGGATTACAGGTGTCTGCCACCGTGCCTAGCTAATTTTTTTGTATTTTTAGTAGAGATGAGGTTTCACTATGTTGGCCAGGCTGGTCTCAAACTCCTGACCTCATGATCCACCCACGTTGGCCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCACCGCCCCTGGCCAGGATTGCTTTTACAGCCAGTCTTCAGGTGCCCACTGTAGGAACAATGTCATTTAACCCTCGGGATTATTCTGTGCCAAATATGGATAATGACTAATATCCAACACAGATATTCTCAGCTCAGAAGAGCAATTAGCAAATTCATAAATTAAGTGCTTGCTTCCTCTTTAGTCAAATACAAACGTTTGTTAAAAGATATTATTTTGCTTTACACTTTTTCTCTCAGAAATAAGCAGATGCTTGAATTCCCACAGTGCTGCTTGAGCCTCACACCATGTCATCCTGCCAGGCACCCAGATCCAGTTCTAGAGTTTCACATGATCGTGAGTGTTGGTTAATAAGTCAATGTGAACTGGGAGGGGAGATTTTTCAGGAGTGCCACAGGGCTCTCCCTTTAATCACATACACTCCCTGCTTTCATTGGAAAGTGTATAATGATGTCAGAGTGCCCCAGAATGGAGCTAGTTGGAAGACTGCCGTCATAGGGATGCCTTAGTGAATTAATAAGGTTTTAATTTCTGGCTCTCAACTTTGTAGATGTAAAAGTTGATTT
ATCAATATGTGAGAAAGGATGAATCTTTCTGAAGGTTATGTCATCACACTCACTAAGCACACAGAGAATAATGTCTAGAATCTGAGTGCCATGTTATCAAATTGTACTGAGACTCTTGCAGTCACACAGGCTGACATGTAAGCATCGCCATGCCTAGTACAGACTCTNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
diff --git a/test/unit/tools/cwl_tools/v1.0/ref.fasta.fai b/test/unit/tools/cwl_tools/v1.0/ref.fasta.fai
new file mode 100644
index 000000000000..e5d66331d601
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/ref.fasta.fai
@@ -0,0 +1,5 @@
+0$chr1$9001$11468 2567 19 2567 2568
+1$chr1$53713$55817 2204 2607 2204 2205
+2$chr1$65161$67630 2569 4832 2569 2570
+3$chr1$82792$85041 2349 7422 2349 2350
+4$chr1$98000$100116 2216 9793 2216 2217
diff --git a/test/unit/tools/cwl_tools/v1.0/ref2.fasta b/test/unit/tools/cwl_tools/v1.0/ref2.fasta
new file mode 100644
index 000000000000..bad272557772
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/ref2.fasta
@@ -0,0 +1,10 @@
+>0$chr1$9001$11468
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCCTAACCCTAACCCTAACCCTAACCCTAACCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCCTAACCCTAACCCTAAACCCTAAACCCTAACCCTAACCCTAACCCTAACCCTAACCCCAACCCCAACCCCAACCCCAACCCCAACCCCAACCCTAACCCCTAACCCTAACCCTAACCCTACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCCTAACCCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCTAACCCCTAACCCTAACCCTAACCCTAACCCTCGCGGTACCCTCAGCCGGCCCGCCCGCCCGGGTCTGACCTGAGGAGAACTGTGCTCCGCCTTCAGAGTACCACCGAAATCTGTGCAGAGGACAACGCAGCTCCGCCCTCGCGGTGCTCTCCGGGTCTGTGCTGAGGAGAACGCAACTCCGCCGTTGCAAAGGCGCGCCGCGCCGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGACACATGCTAGCGCGTCGGGGTGGAGGCGTGGCGCAGGCGCAGAGAGGCGCGCCGCGCCGGCGCAGGCGCAGAGACACATGCTACCGCGTCCAGGGGTGGAGGCGTGGCGCAGGCGCAGAGAGGCGCACCGCGCCGGCGCAGGCGCAGAGA
CACATGCTAGCGCGTCCAGGGGTGGAGGCGTGGCGCAGGCGCAGAGACGCAAGCCTACGGGCGGGGGTTGGGGGGGCGTGTGTTGCAGGAGCAAAGTCGCACGGCGCCGGGCTGGGGCGGGGGGAGGGTGGCGCCGTGCACGCGCAGAAACTCACGTCACGGTGGCGCGGCGCAGAGACGGGTAGAACCTCAGTAATCCGAAAAGCCGGGATCGACCGCCCCTTGCTTGCAGCCGGGCACTACAGGACCCGCTTGCTCACGGTGCTGTGCCAGGGCGCCCCCTGCTGGCGACTAGGGCAACTGCAGGGCTCTCTTGCTTAGAGTGGTGGCCAGCGCCCCCTGCTGGCGCCGGGGCACTGCAGGGCCCTCTTGCTTACTGTATAGTGGTGGCACGCCGCCTGCTGGCAGCTAGGGACATTGCAGGGTCCTCTTGCTCAAGGTGTAGTGGCAGCACGCCCACCTGCTGGCAGCTGGGGACACTGCCGGGCCCTCTTGCTCCAACAGTACTGGCGGATTATNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+>1$chr1$53713$55817
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNATAGATGGAATAAATAAAATGTGAACTTAGGTAAATTATAAATTAATAAAGTATATTTTTAAAATTTCCATTTTAATTTCTGTTTAAATTAGAATAAGAAACAAAAACAACTATGTAATACGTGTGCAAAGCCCTGAACTGAGATTTGACTTTACCTTGAGCTTTGTCAGTTTACGATGCTATTTCAGTTTTGTGCTCAGATTTGAGTGATTGCAGGAAGAGAATAAATTTCTTTAATGCTGTCAAGACTTTAAATAGATACAGACAGAGCATTTTCACTTTTTCCTACATCTCTATTATTCTAAAAATGAGAACATTCCAAAAGTCAACCATCCAAGTTTATTCTAAATAGATGTGTAGAAATAACAGTTGTTTCACAGGAGACTAATCGCCCAAGGATATGTGTTTAGAGGTACTGGTTTCTTAAATAAGGTTTTCTAGTCAGGCAAAAGATTCCCTGGAGCTTATGCATCTGTGGTTGATATTTTGGGATAAGAATAAAGCTAGAAATGGTGAGGCATATTCAATTTCATTGAAGATTTCTGCATTCAAAATAAAAACTCTATTGAAGTTACACATACTTTTTTCATGTATTTGTTTCTACTGCTTTGTAAATTATAACAGCTCAATTAAGAGAAACCGTACCTATGCTATTTTGTCCTGTGATTCTCCAAGAACCTTCCTAAGTTATTCTACTTAATTGCTTTATCACTCATATGAATGGGAATTTCTTCTCTTAATTGCTGCTAATCTCCCCCATCTTCAAATACTCTACCGGGCTTCTGGAACACCACAGCTTCCTGGCTTTTTCTCCTACCTCCTGGGCAAGTCCTTCCCTGTGTCTTTTGTTGAGTGTTCCTCATCTGCTTAACTACCAATCAACCTATTGCCCCTAATTTGATCTTTGGCCTGTTTTCACTTAGATTCTATCCCTACGTATCACCCATTCCCACAGCTTTAATCACCATCTAAACACTAGGGGCTCTCAAACCTTGTATTTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTTCTTCCTCCTTTTCTTTCCTTTTCTTTCTTTCATTCTTTCTTTCTTTTTTAAGGGGCAGGGTCTCACTATGTTGCTGAGGCTGGTCTCAAACTCCTGACCTCAAGCAATCTGTCTGCTTCAGCCTCCCAAGTAGCTGAGAATACAGGGACAAGCCATTGCACCTGACCCTGGTACTATTTCTTGAGTTCCTGATCCACAGATCTAACCTCCTACTTTCCTGGATGCCACACAAGATCTTCCACTCAACAAGTCTGCAACTAAACTAGCCTTCCTCTTTTCAAACCTACTCTTCTTTCAGTGTTCTCAGTCACAATAATTTGTACCAACTAGTTACCTAGTTGCACAACCCAAAATCTGGGAAAAATAATAGATTTCTTTCTCCATAGTACCCCCAAATCAATAAATCATCAAGTCTTATTCTACCTTCCAAAGAGCCTTACATATGTTCCTTTATTTTCATCTGTAACACCACTATTCCTGTCTAAGCCTACCTATGTCATTTTTGGAAGAGAATATAGTCACCTATGCGACCTTCCCACTTAAAATCCTACTATTTACGCTTCAGTAAAAGAAAAAAAATTTTTAATCTAAGTATGTAATTCTTTTGCTGAAGACACTTCACTTGCTTCTGTGCCCTTAAACTGGTATGTTATCATGGTATAGTAGGCCATCCAAGACCTGGCTTCCTTCCTTTTTTTCAGTCTCAGAGAATAACATACTCTTTCCCTGCAACTCCAGATCCAATTTGGTTTTCTTTTACTTGCCTGGAAACTCCAAAATCTATCAACTCTGGGGCTTTCCACTAGCTAATCATTTTGTATACAATATTTGTCCTTCATGTTTTGCCTCTTAACATCTCAGCTTTCAGTTTCATCATTTTACCAGGGAGGC
CTCCCAGAACCTGAGTCCAGAAGAGTTCCTTCCATTGTATATTCCTCTAGCACTACCTATTACCTCTTTTGTAAGACTAACAGCCCTCAAAATTTTTCATTCAGTGATGTCTTCCTCATTGCATTTTAAGTTCAACATGAGCAGGACTTTGTCGTNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+>2$chr1$65161$67630
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNAAATCGAAGCATTATTACTTACTCTCTTGTTAACCTATCTGGATTTTAATTTTGTAACTTTATTATATTTGTTTTGCTGTGATTCTTTAAAAAGCACCTTTAGACTCAGTGAGATAGCAAAAATATCCAAATAGGCCAAAAAATTGTGGCAATGTCCTCTCACTCAGGAAAATTCTGTGTGTTTTCTCTAATGGCCAAGGGAAAACTTGTGAGACTATAAAAGTTAGTCTCAGTACACAAAGCTCAGACTGGCTATTCCCAGATCTCTTCAGGTACATCTAGTCCATTCATAAAGGGCTTTTAATTAACCAAGTGGTTTACTAAAAAGGACAATTCACTACATATTATTCTCTTACAGTTTTTATGCCTCATTCTGTGAAAATTGCTGTAGTCTCTTCCAGTTATGAAGAAGGTAGGTGGAAACAAAGACAAAACACATATATTAGAAGAATGAATGAAATTGTAGCATTTTATTGACAATGAGATGGTTCTATTAGTAGGAATCTATTCTGCATAATTCCATTTTGTGTTTACCTTCTGGAAAAATGAAAGGATTCTGTATGGTTAACTTAAATACTTAGAGAAATTAATATGAATAATGTTAGCAAGAATAACCCTTGTTATAAGTATTATGCTGGCAACAATTGTCGAGTCCTCCTCCTCACTCTTCTGGGCTAATTTGTTCTTTTCTCCCCATTTAATAGTCCTTTTCCCCATCTTTCCCCAGGTCCGGTGTTTTCTTACCCACCTCCTTCCCTCCTTTTTATAATACCAGTGAAACTTGGTTTGGAGCATTTCTTTCACATAAAGGTACAAATCATACTGCTAGAGTTGTGAGGATTTTTACAGCTTTTGAAAGAATAAACTCATTTTAAAAACAGGAAAGCTAAGGCCCAGAGATTTTTAAATGATATTCCCATGATCACACTGTGAATTTGTGCCAGAACCCAAATGCCTACTCCCATCTCACTGAGACTTACTATAAGGACATAAGGCATTTATATATATATATATTATATATACTATATATTTATATATATTACATATTATATATATAATATATATTATATAATATATATTATATTATATAATATATAATATAAATATAATATAAATTATATTATATAATATATAATATAAATATAATATAAATTATATAAATATAATATATATTTTATTATATAATATAATATATATTATATAAATATAATATATAAATTATATAATATAATATATATTATATAATATAATATATTTTATTATATAAATATATATTATATTATATAATATATATTTTATTATATAATATATATTATATATTTATAGAATATAATATATATTTTATTATATAATATATATTATATAATATATATTATATTTATATATAACATATATTATTATATAAAATATGTATAATATATATTATATAAATATATTTATATATTATATAAATATATATATTATATATAATTCTAATGGTTGAATTCCAAGAATAATCTATGGCATGAAAGATTTTACCTGTCAACAGTGGCTGGCTCTTCATGGTTGCTACAATGAGTGTGTAAGATTCTGAAGGACTCCTTTAATAAGCCTAAACTTAATGTTCAACTTAGAATAAATACAATTCTTCTAATTTTTTTTGAATAATTTTTAAAAAGTCAGAAATGAGCTTTGAAAGAATTATGGTGGTGAAGGATCCCCTCAGCAGCACAAATTCAGGAGAGAGATGTCTTAACTACGTTAGCAAGAAATTCCTTTTGCTAAAGAATAGCATTCCTGAATTCTTACTAACAGCCATGATAGAAAGTCTTTTGCTACAGATGAGAACCCTCGGGTCAACCTCATCCTTGGCATATTTCATGTGAAGATATAACTTCAAGATTGTCCTTGCCTATCAATGAAATGAATTAATTTTATGTCAATGCATATTTAAGGTCTATTCTAAATTG
CACACTTTGATTCAAAAGAAACAGTCCAACCAACCAGTCAGGACAGAAATTATCTCACAATAAAAATCCTATCGTTTGTACTGTCAATGATTAGTATGATTATATTTATTACCGTGCTAAGCAGAAGAGAAATGAAGTGAATGTTCATGATTTATTCCACTATTAGACTTCTCTTTATTCTTAAAAATATTTAAGATCACTAAATTTTTATAGGACTTTAAAAACAGTAATGTGCTGCTTTGAGTGTGTAGGACTAAGAAATGGGATTCAGAGTAGTAAAGAGAAAAGTGGAATTTCCAAGCACTATGAATTACTGTTCTTTAAAAAACAGCAAAAATCAAATAACAGTATTCCTCCAAAAAAGATGGCAAGTGTAAACTCTATACCTTCATGTCTCCCGTGGAATGTTAGTGATCAATTTCCACTTCTCTCTTTTACATCTTACTTGCCCATTAACTCTTATACCTAATCCAAAGATTGTTAATATGGCTATGTCTCACTTTCAGGACACCTTTTATTTNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+>3$chr1$82792$85041
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNACTACATCCCACAGTGGGTAACAAAAATAACCTTGAAGAAGGGAAAAATTTGGTTTCCAGAATAAACACATTATAATATCCAAAATGTCCAGTTTTCAACAAAAATTAAGAAGCATGCAAATAAACACAAAACTATGGCCCATTTACAGAAGAAATAAATGAGACTCTCCCTGAGTAAGCAGATATTGAAAATATTAGACAAAAACTTTATATAACTGTCTTAAATAAACTTAAAGAGCTAAAGAAACCCAAGAGAATGACATATAAATAAATAAGAAATATGAATTTTTTTAAAGGTACAAAAAAATTCTGAGGCTGAAAAGTACAATAAGTAAAAAGTTACTTTTTACTTAGGGTTCCAATAGAAGATTTGAGCAGCTGGAAAAAAGAATCAGTGAACTTGATAGATCAAATGAAATGATTCAGTCTGAAGAGCAGGAAAATGAAAGAATGACAACAAAAAAGAATAGAGCCTAAAGACCTGTGTAACAACATCAAGAATGCCTACATACAGAATCCTGGTGGGGAGTGAGGGGCAGGAAGACTATTTGAAGAAATGTGTTTGAAAGCTTCCCAAATTTCACTAAAAACAAATATATACATTCAAAAAGCTCAGTGAACTTCATCAAGGAAATATACAAAGATATTCACACCAAGACACACTATGTTTCAAATTGTCAAAAGGCAAAGCGAATGTTTGAAAGCAGCAAGAGAAAGGCAACGCGTCATTTACAAAGGATCCTCAATAAGTTTGACAGCAGATAGTGCATTATAAGCCATGGATGCCAGAAGAGCTTAGGAAAAAGGCAACGCGTCATTTACAAAGGATCCTCAGTAAGTTTGACAGCAGAGAGCTCATTATAAACCATGGGTGCCAGAAGAGCTTAGGATGACATTTTAAAGTTCTGAAAGAAAAAAACACTGTCAACCAAAAATTCTATAACTTGGAAGATGCCCCTTCAAGTATTAAGGATAAATTACACATTCCCAGATTAAAAAAAAGAAAGAGAGAGAGAGAGAAAGAGAAAGAAAGAAAGAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAGAGAAAGAAAGAAAGAAGAAAGAGAAAGAAAGAAAGAAAGAGAGAGAGAAAGAGAGAGAAAGAAAAAGAAGGAAAGAAAGAAAGAAAGAAAAAAGAAAGAAAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAGAAAAGCAAGCAAGCTTTAAAAGTTCATGTTTGGTAGGCTGTACTTCAAGATACACTTTTAAAAAAAAGACTCCTTCAGATACAAACTAAAAAACACTAGAAAGTAACTCAAAACCACATAAAGAAATAACTCCAGTAAAGATAACTACATAGGTAAATATAAAAGCAATTATCACATTTTTTGTAAGTCTTTTTTAATATTCTATATGTTTTAAAACAAATGTGTAAAATAATGACTATAAATCTATGTTAATGAAGCATGATGTATACAGATGTGGTTTGTGAAATTACCAACATAAAGAAATTCATAGGAAACTAAATAATAATAGAGATTTTGTATACTATTGAAGTTGTTTCAATTTACTCTAAATTGTTCCAAATTAAGAATGTTAATTGTAAATCCCCATGGTAACCACTAAGTTAATATCTTTTGAAAATACAGAAAAGGAAAGCACAGGGTAAACACAGTGATATGCTACAAAATAGCAACTAAACACAAAAGAAGGCGATAATTGAGGAAATTAGGAACAAAGGAGGTATAAGACATACAGAAAACAAAAGCAAAATGGTAGGAGTAAGCCCCTCTTTATCAGTAATTACATTAAATACAAATGAATTAAACTCTCCAATCCAAAGAAAGAGATTAACAGAATGGATTTTTTAAAAATGATCCAACTATATTGTCCACAAGATACTCACTTTAG
ATCAAAATACACAATGAGTTGAAATGAAAGGATGGGAGAAAATATTCCATGTAAGTAATAACCAAAGGAGATCTGAGGCAAATATACTTATATCAGACAAAATAGACTTTAAGTCAAAAACTGTTACAAAATACAAAGAACAGTATATATTGATTTCAAAATTAATTAAGAAGATATAACAATTATAAATATATGTACACCAACTAACAGGGCTCCAAAATATATAATGTAACCATTGAGAGAATTAAAGGGAGAGACAGACAATTCCACGAAAATTGTTGGGCATTTGAAAACCCAACTNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
+>4$chr1$98000$100116
+NNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNCCATGACAGGCAGAGCTCCCTGTTGAGCCACAGAGATTTAGAGAATGGCTGTTAACACAGCATAATCCAGCCCATCCTGACTAATCTGATATTAACATGTATAATAAAGAATTCTATCAATGCTGAGGGAAGATGACTAGTTAAGGTCCTAGGTTGCAAGTCTCAAAACCTCTTCTAAGGATTGTAGACAGGAAATTAAATGACTTCTAGTCCCTAGAGTTCCCAATCTCCTACCATCCCATCCTAATATGACAGAAGTAATTCCTGAGTTGCTTCTGAAACCAGAGCTTCCCTCAGAACCCTTAGCCTGCCAGATGGCTTCTTGGAGAGCCCTCACTCACTTTTCTCCTTCTGCTATTGCTGCTCATTCATTCCAGTTTTTAAAAATTCATCTTTATCCAGGAACCTCGCTTCTAGAAAAGTCATACAGGTGCTTCCAGGAGGCTACATGGGCACCCATATTTTTCTAGCCACTTTCATTAGACCAATGCAGCAGAGAAGAAAAGCCTCAATAATTATTATGACATGGCATGTTAGGATACCAAGTAAATTGCATTTGTAAAATGTGATTTTCTGTTGGTGTTCACTTCAGCTCTACTGACATTTGGTAAGTATTATTGACTGACTGACTAACTAATGTGGTCATTAGTCTTCATAAAGAAAGGCTCTCTACAAAAACGGAGGGATGCCCTTTTTCTGGCATTTAATACGTAAGAAATTGCCTCCAATAGAAACCAGAGTTGCCTGATTACTATCAGCACAGGAGAAATGTATTAATGTGCCTTTCTAGTAACAGGTTTTTAGAAAGTCAAATATAAACAAATCTGTCTATTTGTGTGTGTGCATGTGGTAGTGGGGAGGGAAGAAAAAAGGAGGGGGAGAGAAAGAGAAATAAGAACCAAGTTTATTATACTGTATTCAGGGGGAAAAAATTTTCCCAAGGTCCTAACAGAAGAGCAAAGTGCCACTGTCAATAGCCTCAGTAGTGTTAGGGTTGCTTTTATTTATTTATTTATTTATTTATTTATTTATTTATTTATTTTTCCTTTTTTTTCTTTCTCTTTTTTTCTTCTTTTTTTTTTCTTTTCTTTCTTTTTTTTTTTTTTTTTTTTTTTTGGACAGAGTCTCACACTGTCACCTGGGCTGGAGTGCATTGGTGCAATCTCGACTCACTGCAACTTCTGCCTCCCAGGTTCAAGTGATTCTCCTGCCTCAGCCGCCCAAGTAGCTGGGATTACAGGTGTCTGCCACCGTGCCTAGCTAATTTTTTTGTATTTTTAGTAGAGATGAGGTTTCACTATGTTGGCCAGGCTGGTCTCAAACTCCTGACCTCATGATCCACCCACGTTGGCCTCCCAAAGTGCTGGGATTACAGGCGTGAGCCACCGCCCCTGGCCAGGATTGCTTTTACAGCCAGTCTTCAGGTGCCCACTGTAGGAACAATGTCATTTAACCCTCGGGATTATTCTGTGCCAAATATGGATAATGACTAATATCCAACACAGATATTCTCAGCTCAGAAGAGCAATTAGCAAATTCATAAATTAAGTGCTTGCTTCCTCTTTAGTCAAATACAAACGTTTGTTAAAAGATATTATTTTGCTTTACACTTTTTCTCTCAGAAATAAGCAGATGCTTGAATTCCCACAGTGCTGCTTGAGCCTCACACCATGTCATCCTGCCAGGCACCCAGATCCAGTTCTAGAGTTTCACATGATCGTGAGTGTTGGTTAATAAGTCAATGTGAACTGGGAGGGGAGATTTTTCAGGAGTGCCACAGGGCTCTCCCTTTAATCACATACACTCCCTGCTTTCATTGGAAAGTGTATAATGATGTCAGAGTGCCCCAGAATGGAGCTAGTTGGAAGACTGCCGTCATAGGGATGCCTTAGTGAATTAATAAGGTTTTAATTTCTGGCTCTCAACTTTGTAGATGTAAAAGTTGATTT
ATCAATATGTGAGAAAGGATGAATCTTTCTGAAGGTTATGTCATCACACTCACTAAGCACACAGAGAATAATGTCTAGAATCTGAGTGCCATGTTATCAAATTGTACTGAGACTCTTGCAGTCACACAGGCTGACATGTAAGCATCGCCATGCCTAGTACAGACTCTNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN
diff --git a/test/unit/tools/cwl_tools/v1.0/ref2.fasta.fai b/test/unit/tools/cwl_tools/v1.0/ref2.fasta.fai
new file mode 100644
index 000000000000..e5d66331d601
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/ref2.fasta.fai
@@ -0,0 +1,5 @@
+0$chr1$9001$11468 2567 19 2567 2568
+1$chr1$53713$55817 2204 2607 2204 2205
+2$chr1$65161$67630 2569 4832 2569 2570
+3$chr1$82792$85041 2349 7422 2349 2350
+4$chr1$98000$100116 2216 9793 2216 2217
diff --git a/test/unit/tools/cwl_tools/v1.0/rename-job.json b/test/unit/tools/cwl_tools/v1.0/rename-job.json
new file mode 100644
index 000000000000..c8ff9606966b
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/rename-job.json
@@ -0,0 +1,7 @@
+{
+ "srcfile": {
+ "location": "whale.txt",
+ "class": "File"
+ },
+ "newname": "fish.txt"
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/rename.cwl b/test/unit/tools/cwl_tools/v1.0/rename.cwl
new file mode 100644
index 000000000000..2326e7fada75
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/rename.cwl
@@ -0,0 +1,16 @@
+class: CommandLineTool
+cwlVersion: v1.0
+baseCommand: "true"
+requirements:
+ InitialWorkDirRequirement:
+ listing:
+ - entryname: $(inputs.newname)
+ entry: $(inputs.srcfile)
+inputs:
+ srcfile: File
+ newname: string
+outputs:
+ outfile:
+ type: File
+ outputBinding:
+ glob: $(inputs.newname)
diff --git a/test/unit/tools/cwl_tools/v1.0/revsort-job.json b/test/unit/tools/cwl_tools/v1.0/revsort-job.json
new file mode 100644
index 000000000000..f5671aab2a56
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/revsort-job.json
@@ -0,0 +1,6 @@
+{
+ "input": {
+ "class": "File",
+ "location": "whale.txt"
+ }
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/revsort-packed.cwl b/test/unit/tools/cwl_tools/v1.0/revsort-packed.cwl
new file mode 100644
index 000000000000..05fcb667e7e4
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/revsort-packed.cwl
@@ -0,0 +1,125 @@
+{
+ "cwlVersion": "v1.0",
+ "$graph": [
+ {
+ "class": "Workflow",
+ "doc": "Reverse the lines in a document, then sort those lines.",
+ "hints": [
+ {
+ "class": "DockerRequirement",
+ "dockerPull": "debian:stretch-slim"
+ }
+ ],
+ "inputs": [
+ {
+ "type": "File",
+ "doc": "The input file to be processed.",
+ "id": "#main/input"
+ },
+ {
+ "type": "boolean",
+ "default": true,
+ "doc": "If true, reverse (decending) sort",
+ "id": "#main/reverse_sort"
+ }
+ ],
+ "outputs": [
+ {
+ "type": "File",
+ "outputSource": "#main/sorted/output",
+ "doc": "The output with the lines reversed and sorted.",
+ "id": "#main/output"
+ }
+ ],
+ "steps": [
+ {
+ "in": [
+ {
+ "source": "#main/input",
+ "id": "#main/rev/input"
+ }
+ ],
+ "out": [
+ "#main/rev/output"
+ ],
+ "run": "#revtool.cwl",
+ "id": "#main/rev"
+ },
+ {
+ "in": [
+ {
+ "source": "#main/rev/output",
+ "id": "#main/sorted/input"
+ },
+ {
+ "source": "#main/reverse_sort",
+ "id": "#main/sorted/reverse"
+ }
+ ],
+ "out": [
+ "#main/sorted/output"
+ ],
+ "run": "#sorttool.cwl",
+ "id": "#main/sorted"
+ }
+ ],
+ "id": "#main"
+ },
+ {
+ "class": "CommandLineTool",
+ "doc": "Reverse each line using the `rev` command",
+ "inputs": [
+ {
+ "type": "File",
+ "inputBinding": {},
+ "id": "#revtool.cwl/input"
+ }
+ ],
+ "outputs": [
+ {
+ "type": "File",
+ "outputBinding": {
+ "glob": "output.txt"
+ },
+ "id": "#revtool.cwl/output"
+ }
+ ],
+ "baseCommand": "rev",
+ "stdout": "output.txt",
+ "id": "#revtool.cwl"
+ },
+ {
+ "class": "CommandLineTool",
+ "doc": "Sort lines using the `sort` command",
+ "inputs": [
+ {
+ "id": "#sorttool.cwl/reverse",
+ "type": "boolean",
+ "inputBinding": {
+ "position": 1,
+ "prefix": "--reverse"
+ }
+ },
+ {
+ "id": "#sorttool.cwl/input",
+ "type": "File",
+ "inputBinding": {
+ "position": 2
+ }
+ }
+ ],
+ "outputs": [
+ {
+ "id": "#sorttool.cwl/output",
+ "type": "File",
+ "outputBinding": {
+ "glob": "output.txt"
+ }
+ }
+ ],
+ "baseCommand": "sort",
+ "stdout": "output.txt",
+ "id": "#sorttool.cwl"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/revsort.cwl b/test/unit/tools/cwl_tools/v1.0/revsort.cwl
new file mode 100644
index 000000000000..ba8fe5958c11
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/revsort.cwl
@@ -0,0 +1,65 @@
+#
+# This is a two-step workflow which uses "revtool" and "sorttool" defined above.
+#
+class: Workflow
+doc: "Reverse the lines in a document, then sort those lines."
+cwlVersion: v1.0
+
+# Requirements & hints specify prerequisites and extensions to the workflow.
+# In this example, DockerRequirement specifies a default Docker container
+# in which the command line tools will execute.
+hints:
+ - class: DockerRequirement
+ dockerPull: debian:stretch-slim
+
+
+# The inputs array defines the structure of the input object that describes
+# the inputs to the workflow.
+#
+# The "reverse_sort" input parameter demonstrates the "default" field. If the
+# field "reverse_sort" is not provided in the input object, the default value will
+# be used.
+inputs:
+ input:
+ type: File
+ doc: "The input file to be processed."
+ reverse_sort:
+ type: boolean
+ default: true
+ doc: "If true, reverse (decending) sort"
+
+# The "outputs" array defines the structure of the output object that describes
+# the outputs of the workflow.
+#
+# Each output field must be connected to the output of one of the workflow
+# steps using the "connect" field. Here, the parameter "#output" of the
+# workflow comes from the "#sorted" output of the "sort" step.
+outputs:
+ output:
+ type: File
+ outputSource: sorted/output
+ doc: "The output with the lines reversed and sorted."
+
+# The "steps" array lists the executable steps that make up the workflow.
+# The tool to execute each step is listed in the "run" field.
+#
+# In the first step, the "inputs" field of the step connects the upstream
+# parameter "#input" of the workflow to the input parameter of the tool
+# "revtool.cwl#input"
+#
+# In the second step, the "inputs" field of the step connects the output
+# parameter "#reversed" from the first step to the input parameter of the
+# tool "sorttool.cwl#input".
+steps:
+ rev:
+ in:
+ input: input
+ out: [output]
+ run: revtool.cwl
+
+ sorted:
+ in:
+ input: rev/output
+ reverse: reverse_sort
+ out: [output]
+ run: sorttool.cwl
diff --git a/test/unit/tools/cwl_tools/v1.0/revtool.cwl b/test/unit/tools/cwl_tools/v1.0/revtool.cwl
new file mode 100644
index 000000000000..7f279643a860
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/revtool.cwl
@@ -0,0 +1,37 @@
+#
+# Simplest example command line program wrapper for the Unix tool "rev".
+#
+class: CommandLineTool
+cwlVersion: v1.0
+doc: "Reverse each line using the `rev` command"
+
+# The "inputs" array defines the structure of the input object that describes
+# the inputs to the underlying program. Here, there is one input field
+# defined that will be called "input" and will contain a "File" object.
+#
+# The input binding indicates that the input value should be turned into a
+# command line argument. In this example inputBinding is an empty object,
+# which indicates that the file name should be added to the command line at
+# a default location.
+inputs:
+ input:
+ type: File
+ inputBinding: {}
+
+# The "outputs" array defines the structure of the output object that
+# describes the outputs of the underlying program. Here, there is one
+# output field defined that will be called "output", must be a "File" type,
+# and after the program executes, the output value will be the file
+# output.txt in the designated output directory.
+outputs:
+ output:
+ type: File
+ outputBinding:
+ glob: output.txt
+
+# The actual program to execute.
+baseCommand: rev
+
+# Specify that the standard output stream must be redirected to a file called
+# output.txt in the designated output directory.
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-empty-job1.json b/test/unit/tools/cwl_tools/v1.0/scatter-empty-job1.json
new file mode 100644
index 000000000000..dfd8a4904fc1
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-empty-job1.json
@@ -0,0 +1,3 @@
+{
+ "inp": []
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-empty-job2.json b/test/unit/tools/cwl_tools/v1.0/scatter-empty-job2.json
new file mode 100644
index 000000000000..a73a3785f04f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-empty-job2.json
@@ -0,0 +1,4 @@
+{
+ "inp1": ["one", "two"],
+ "inp2": []
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-empty-job3.json b/test/unit/tools/cwl_tools/v1.0/scatter-empty-job3.json
new file mode 100644
index 000000000000..6aa7b6d5e375
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-empty-job3.json
@@ -0,0 +1,4 @@
+{
+ "inp1": [],
+ "inp2": ["one", "two"]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-empty-job4.json b/test/unit/tools/cwl_tools/v1.0/scatter-empty-job4.json
new file mode 100644
index 000000000000..e90c2665af61
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-empty-job4.json
@@ -0,0 +1,4 @@
+{
+ "inp1": [],
+ "inp2": []
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-job1.json b/test/unit/tools/cwl_tools/v1.0/scatter-job1.json
new file mode 100644
index 000000000000..426785083ea3
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-job1.json
@@ -0,0 +1,3 @@
+{
+ "inp": ["one", "two", "three", "four"]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-job2.json b/test/unit/tools/cwl_tools/v1.0/scatter-job2.json
new file mode 100644
index 000000000000..a07d40abab94
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-job2.json
@@ -0,0 +1,4 @@
+{
+ "inp1": ["one", "two"],
+ "inp2": ["three", "four"]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-valueFrom-tool.cwl b/test/unit/tools/cwl_tools/v1.0/scatter-valueFrom-tool.cwl
new file mode 100644
index 000000000000..a406b8c6bd86
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-valueFrom-tool.cwl
@@ -0,0 +1,15 @@
+cwlVersion: v1.0
+class: CommandLineTool
+inputs:
+ scattered_message:
+ type: string
+ inputBinding:
+ position: 2
+ message:
+ type: string
+ inputBinding:
+ position: 1
+outputs:
+ out_message:
+ type: stdout
+baseCommand: echo
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-job1.json b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-job1.json
new file mode 100644
index 000000000000..02fc3f2f238e
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-job1.json
@@ -0,0 +1,7 @@
+{
+ "inp": [{"instr": "one"},
+ {"instr": "two"},
+ {"instr": "three"},
+ {"instr": "four"}
+ ]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-job2.json b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-job2.json
new file mode 100644
index 000000000000..874ba038e737
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-job2.json
@@ -0,0 +1,4 @@
+{
+ "inp1": [{"instr": "one"}, {"instr": "two"}],
+ "inp2": ["three", "four"]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-job3.json b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-job3.json
new file mode 100644
index 000000000000..23b6e38c4fc8
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-job3.json
@@ -0,0 +1 @@
+{"scattered_messages": ["message a", "message b"]}
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf1.cwl b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf1.cwl
new file mode 100644
index 000000000000..f38d1ed15880
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf1.cwl
@@ -0,0 +1,58 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+class: Workflow
+inputs:
+ inp:
+ type:
+ type: array
+ items:
+ type: record
+ name: instr
+ fields:
+ - name: instr
+ type: string
+outputs:
+ out:
+ type:
+ type: array
+ items: string
+ outputSource: step1/echo_out
+
+requirements:
+ - class: ScatterFeatureRequirement
+ - class: StepInputExpressionRequirement
+
+steps:
+ step1:
+ in:
+ echo_in:
+ source: inp
+ valueFrom: $(self.instr)
+ first:
+ source: inp
+ valueFrom: "$(self[0].instr)"
+ out: [echo_out]
+ scatter: echo_in
+ run:
+ class: CommandLineTool
+ inputs:
+ first:
+ type: string
+ inputBinding:
+ position: 1
+ echo_in:
+ type: string
+ inputBinding:
+ position: 2
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ arguments:
+ - "-n"
+ - "foo"
+ stdout: "step1_out"
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf2.cwl b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf2.cwl
new file mode 100644
index 000000000000..aa12c4cf7600
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf2.cwl
@@ -0,0 +1,73 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+class: Workflow
+
+inputs:
+ inp1:
+ type:
+ type: array
+ items:
+ type: record
+ name: instr
+ fields:
+ - name: instr
+ type: string
+ inp2:
+ type:
+ type: array
+ items: string
+outputs:
+ out:
+ outputSource: step1/echo_out
+ type:
+ type: array
+ items:
+ type: array
+ items: string
+
+requirements:
+ - class: ScatterFeatureRequirement
+ - class: StepInputExpressionRequirement
+
+steps:
+ step1:
+ in:
+ echo_in1:
+ source: inp1
+ valueFrom: $(self.instr)
+ echo_in2: inp2
+ first:
+ source: inp1
+ valueFrom: "$(self[0].instr)"
+ out: [echo_out]
+
+ scatter: [echo_in1, echo_in2]
+ scatterMethod: nested_crossproduct
+ run:
+ class: CommandLineTool
+ id: step1command
+ inputs:
+ first:
+ type: string
+ inputBinding:
+ position: 1
+ echo_in1:
+ type: string
+ inputBinding:
+ position: 2
+ echo_in2:
+ type: string
+ inputBinding:
+ position: 3
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ arguments:
+ - "-n"
+ - "foo"
+ stdout: step1_out
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf3.cwl b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf3.cwl
new file mode 100644
index 000000000000..5d0e0d688f81
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf3.cwl
@@ -0,0 +1,69 @@
+#!/usr/bin/env cwl-runner
+
+cwlVersion: v1.0
+$graph:
+
+- id: echo
+ class: CommandLineTool
+ inputs:
+ first:
+ type: string
+ inputBinding:
+ position: 1
+ echo_in1:
+ type: string
+ inputBinding:
+ position: 2
+ echo_in2:
+ type: string
+ inputBinding:
+ position: 3
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ arguments: ["-n", "foo"]
+ stdout: step1_out
+
+- id: main
+ class: Workflow
+ inputs:
+ inp1:
+ type:
+ type: array
+ items:
+ type: record
+ name: instr
+ fields:
+ - name: instr
+ type: string
+ inp2:
+ type: { type: array, items: string }
+ requirements:
+ - class: ScatterFeatureRequirement
+ - class: StepInputExpressionRequirement
+ steps:
+ step1:
+ scatter: [echo_in1, echo_in2]
+ scatterMethod: flat_crossproduct
+ in:
+ echo_in1:
+ source: inp1
+ valueFrom: $(self.instr)
+ echo_in2: inp2
+ first:
+ source: inp1
+ valueFrom: "$(self[0].instr)"
+ out: [echo_out]
+ run: "#echo"
+
+ outputs:
+ out:
+ outputSource: step1/echo_out
+ type:
+ type: array
+ items: string
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf4.cwl b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf4.cwl
new file mode 100644
index 000000000000..1c8c078d6b74
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf4.cwl
@@ -0,0 +1,67 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+$graph:
+- id: echo
+ class: CommandLineTool
+ inputs:
+ first:
+ type: string
+ inputBinding:
+ position: 1
+ echo_in1:
+ type: string
+ inputBinding:
+ position: 2
+ echo_in2:
+ type: string
+ inputBinding:
+ position: 3
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ arguments: ["-n", "foo"]
+ stdout: step1_out
+
+- id: main
+ class: Workflow
+ inputs:
+ inp1:
+ type:
+ type: array
+ items:
+ type: record
+ name: instr
+ fields:
+ - name: instr
+ type: string
+ inp2:
+ type: { type: array, items: string }
+ requirements:
+ - class: ScatterFeatureRequirement
+ - class: StepInputExpressionRequirement
+ steps:
+ step1:
+ scatter: [echo_in1, echo_in2]
+ scatterMethod: dotproduct
+ in:
+ echo_in1:
+ source: inp1
+ valueFrom: $(self.instr)
+ echo_in2: inp2
+ first:
+ source: inp1
+ valueFrom: "$(self[0].instr)"
+ out: [echo_out]
+ run: "#echo"
+
+ outputs:
+ out:
+ outputSource: step1/echo_out
+ type:
+ type: array
+ items: string
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf5.cwl b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf5.cwl
new file mode 100644
index 000000000000..016eebfd4e8a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf5.cwl
@@ -0,0 +1,58 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+class: Workflow
+inputs:
+ inp:
+ type:
+ type: array
+ items:
+ type: record
+ name: instr
+ fields:
+ - name: instr
+ type: string
+outputs:
+ out:
+ type:
+ type: array
+ items: string
+ outputSource: step1/echo_out
+
+requirements:
+ - class: ScatterFeatureRequirement
+ - class: StepInputExpressionRequirement
+
+steps:
+ step1:
+ in:
+ echo_in:
+ source: inp
+ valueFrom: $(self.instr)
+ first:
+ source: inp
+ valueFrom: $(inputs.echo_in.instr)
+ out: [echo_out]
+ scatter: echo_in
+ run:
+ class: CommandLineTool
+ inputs:
+ first:
+ type: string
+ inputBinding:
+ position: 1
+ echo_in:
+ type: string
+ inputBinding:
+ position: 2
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ arguments:
+ - "-n"
+ - "foo"
+ stdout: "step1_out"
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf6.cwl b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf6.cwl
new file mode 100644
index 000000000000..ed60c76846b9
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-valuefrom-wf6.cwl
@@ -0,0 +1,21 @@
+cwlVersion: v1.0
+class: Workflow
+requirements:
+ - class: ScatterFeatureRequirement
+ - class: StepInputExpressionRequirement
+inputs:
+ scattered_messages: string[]
+outputs:
+ out_message:
+ type: File[]
+ outputSource: step1/out_message
+steps:
+ step1:
+ run: scatter-valueFrom-tool.cwl
+ scatter: [scattered_message]
+ scatterMethod: dotproduct
+ in:
+ scattered_message: scattered_messages
+ message:
+ valueFrom: "Hello"
+ out: [out_message]
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-wf1.cwl b/test/unit/tools/cwl_tools/v1.0/scatter-wf1.cwl
new file mode 100644
index 000000000000..d470658605d4
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-wf1.cwl
@@ -0,0 +1,37 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+class: Workflow
+inputs:
+ inp: string[]
+outputs:
+ out:
+ type: string[]
+ outputSource: step1/echo_out
+
+requirements:
+ - class: ScatterFeatureRequirement
+
+steps:
+ step1:
+ in:
+ echo_in: inp
+ out: [echo_out]
+ scatter: echo_in
+ run:
+ class: CommandLineTool
+ inputs:
+ echo_in:
+ type: string
+ inputBinding: {}
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ arguments:
+ - "-n"
+ - "foo"
+ stdout: "step1_out"
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-wf2.cwl b/test/unit/tools/cwl_tools/v1.0/scatter-wf2.cwl
new file mode 100644
index 000000000000..e46b829c101b
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-wf2.cwl
@@ -0,0 +1,51 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+class: Workflow
+
+inputs:
+ inp1: string[]
+ inp2: string[]
+
+outputs:
+ out:
+ outputSource: step1/echo_out
+ type:
+ type: array
+ items:
+ type: array
+ items: string
+
+requirements:
+ - class: ScatterFeatureRequirement
+
+steps:
+ step1:
+ in:
+ echo_in1: inp1
+ echo_in2: inp2
+ out: [echo_out]
+
+ scatter: [echo_in1, echo_in2]
+ scatterMethod: nested_crossproduct
+ run:
+ class: CommandLineTool
+ id: step1command
+ inputs:
+ echo_in1:
+ type: string
+ inputBinding: {}
+ echo_in2:
+ type: string
+ inputBinding: {}
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ arguments:
+ - "-n"
+ - "foo"
+ stdout: step1_out
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-wf3.cwl b/test/unit/tools/cwl_tools/v1.0/scatter-wf3.cwl
new file mode 100644
index 000000000000..358a99fa80e0
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-wf3.cwl
@@ -0,0 +1,48 @@
+#!/usr/bin/env cwl-runner
+
+cwlVersion: v1.0
+$graph:
+
+- id: echo
+ class: CommandLineTool
+ inputs:
+ echo_in1:
+ type: string
+ inputBinding: {}
+ echo_in2:
+ type: string
+ inputBinding: {}
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ arguments: ["-n", "foo"]
+ stdout: step1_out
+
+- id: main
+ class: Workflow
+ inputs:
+ inp1: string[]
+ inp2: string[]
+ requirements:
+ - class: ScatterFeatureRequirement
+ steps:
+ step1:
+ scatter: [echo_in1, echo_in2]
+ scatterMethod: flat_crossproduct
+ in:
+ echo_in1: inp1
+ echo_in2: inp2
+ out: [echo_out]
+ run: "#echo"
+
+ outputs:
+ out:
+ outputSource: step1/echo_out
+ type:
+ type: array
+ items: string
diff --git a/test/unit/tools/cwl_tools/v1.0/scatter-wf4.cwl b/test/unit/tools/cwl_tools/v1.0/scatter-wf4.cwl
new file mode 100644
index 000000000000..e51db2f7a7e0
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/scatter-wf4.cwl
@@ -0,0 +1,46 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+$graph:
+- id: echo
+ class: CommandLineTool
+ inputs:
+ echo_in1:
+ type: string
+ inputBinding: {}
+ echo_in2:
+ type: string
+ inputBinding: {}
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ arguments: ["-n", "foo"]
+ stdout: step1_out
+
+- id: main
+ class: Workflow
+ inputs:
+ inp1: string[]
+ inp2: string[]
+ requirements:
+ - class: ScatterFeatureRequirement
+ steps:
+ step1:
+ scatter: [echo_in1, echo_in2]
+ scatterMethod: dotproduct
+ in:
+ echo_in1: inp1
+ echo_in2: inp2
+ out: [echo_out]
+ run: "#echo"
+
+ outputs:
+ - id: out
+ outputSource: step1/echo_out
+ type:
+ type: array
+ items: string
diff --git a/test/unit/tools/cwl_tools/v1.0/schemadef-job.json b/test/unit/tools/cwl_tools/v1.0/schemadef-job.json
new file mode 100644
index 000000000000..520730a9b453
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/schemadef-job.json
@@ -0,0 +1,6 @@
+{
+ "hello": {
+ "a": "hello",
+ "b": "world"
+ }
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/schemadef-tool.cwl b/test/unit/tools/cwl_tools/v1.0/schemadef-tool.cwl
new file mode 100644
index 000000000000..f773b9a521a8
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/schemadef-tool.cwl
@@ -0,0 +1,21 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+
+requirements:
+ - $import: schemadef-type.yml
+
+inputs:
+ - id: hello
+ type: "schemadef-type.yml#HelloType"
+ inputBinding:
+ valueFrom: $(self.a)/$(self.b)
+
+outputs:
+ - id: output
+ type: File
+ outputBinding:
+ glob: output.txt
+
+stdout: output.txt
+baseCommand: echo
diff --git a/test/unit/tools/cwl_tools/v1.0/schemadef-type.yml b/test/unit/tools/cwl_tools/v1.0/schemadef-type.yml
new file mode 100644
index 000000000000..45a82a75d073
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/schemadef-type.yml
@@ -0,0 +1,9 @@
+class: SchemaDefRequirement
+types:
+ - name: HelloType
+ type: record
+ fields:
+ - name: a
+ type: string
+ - name: b
+ type: string
diff --git a/test/unit/tools/cwl_tools/v1.0/schemadef-wf.cwl b/test/unit/tools/cwl_tools/v1.0/schemadef-wf.cwl
new file mode 100644
index 000000000000..d3e38ebd8986
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/schemadef-wf.cwl
@@ -0,0 +1,22 @@
+#!/usr/bin/env cwl-runner
+
+cwlVersion: v1.0
+class: Workflow
+
+requirements:
+ - $import: schemadef-type.yml
+
+inputs:
+ hello: "schemadef-type.yml#HelloType"
+
+outputs:
+ output:
+ type: File
+ outputSource: step1/output
+
+steps:
+ step1:
+ in:
+ hello: hello
+ out: [output]
+ run: schemadef-tool.cwl
diff --git a/test/unit/tools/cwl_tools/v1.0/search-job.json b/test/unit/tools/cwl_tools/v1.0/search-job.json
new file mode 100644
index 000000000000..a5f9d98057cc
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/search-job.json
@@ -0,0 +1,11 @@
+{
+ "infile": {
+ "class": "File",
+ "location": "whale.txt"
+ },
+ "secondfile": {
+ "class": "File",
+ "location": "hello.txt"
+ },
+ "term": "find"
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/search.cwl b/test/unit/tools/cwl_tools/v1.0/search.cwl
new file mode 100644
index 000000000000..e65ab69ee32d
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/search.cwl
@@ -0,0 +1,110 @@
+cwlVersion: v1.0
+$graph:
+- id: index
+ class: CommandLineTool
+ baseCommand: python
+ arguments:
+ - valueFrom: input.txt
+ position: 1
+ requirements:
+ - class: InitialWorkDirRequirement
+ listing:
+ - entryname: input.txt
+ entry: $(inputs.file)
+ - class: InlineJavascriptRequirement
+ hints:
+ - class: DockerRequirement
+ dockerPull: python:2-slim
+
+ inputs:
+ file: File
+ secondfile: File
+ index.py:
+ type: File
+ default:
+ class: File
+ location: index.py
+ inputBinding:
+ position: 0
+ outputs:
+ result:
+ type: File
+ outputBinding:
+ glob: input.txt
+ secondaryFiles:
+ - ".idx1"
+ - "^.idx2"
+ - '$(self.basename).idx3'
+ - '${ return self.basename+".idx4"; }'
+ - '$({"path": self.path+".idx5", "class": "File"})'
+ - '$(self.nameroot).idx6$(self.nameext)'
+ - '${ return [self.basename+".idx7", inputs.secondfile]; }'
+ - "_idx8"
+
+- id: search
+ class: CommandLineTool
+ baseCommand: python
+ requirements:
+ - class: InlineJavascriptRequirement
+ hints:
+ - class: DockerRequirement
+ dockerPull: python:2-slim
+ inputs:
+ file:
+ type: File
+ inputBinding:
+ position: 1
+ secondaryFiles:
+ - ".idx1"
+ - "^.idx2"
+ - '$(self.basename).idx3'
+ - '${ return self.basename+".idx4"; }'
+ - '$(self.nameroot).idx6$(self.nameext)'
+ - '${ return [self.basename+".idx7"]; }'
+ - "_idx8"
+ search.py:
+ type: File
+ default:
+ class: File
+ location: search.py
+ inputBinding:
+ position: 0
+ term:
+ type: string
+ inputBinding:
+ position: 2
+ outputs:
+ result:
+ type: File
+ outputBinding:
+ glob: result.txt
+ stdout: result.txt
+
+- id: main
+ class: Workflow
+ inputs:
+ infile: File
+ secondfile: File
+ term: string
+ outputs:
+ outfile:
+ type: File
+ outputSource: search/result
+ indexedfile:
+ type: File
+ outputSource: index/result
+
+ steps:
+ index:
+ run: "#index"
+ in:
+ file: infile
+ secondfile: secondfile
+ out: [result]
+
+ search:
+ run: "#search"
+ in:
+ file: index/result
+ term: term
+ out: [result]
diff --git a/test/unit/tools/cwl_tools/v1.0/search.py b/test/unit/tools/cwl_tools/v1.0/search.py
new file mode 100755
index 000000000000..ffd34e5df772
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/search.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python2
+
+# Toy program to search an inverted index and print out each line where
+# the term appears.
+
+import sys
+
+mainfile = sys.argv[1]
+indexfile = sys.argv[1] + ".idx1"
+term = sys.argv[2]
+
+main = open(mainfile)
+index = open(indexfile)
+
+st = term + ": "
+
+for a in index:
+ if a.startswith(st):
+ n = [int(i) for i in a[len(st):].split(", ") if i]
+ linenum = 0
+ for l in main:
+ linenum += 1
+ if linenum in n:
+ print linenum, l.rstrip()
+ break
diff --git a/test/unit/tools/cwl_tools/v1.0/shellchar.cwl b/test/unit/tools/cwl_tools/v1.0/shellchar.cwl
new file mode 100644
index 000000000000..fc32afd1d992
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/shellchar.cwl
@@ -0,0 +1,13 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+doc: |
+ Ensure that arguments containing shell directives are not interpreted and
+ that `shellQuote: false` has no effect when ShellCommandRequirement is not in
+ effect.
+inputs: []
+outputs:
+ stdout_file: stdout
+ stderr_file: stderr
+baseCommand: echo
+arguments: [{valueFrom: "foo 1>&2", shellQuote: false}]
diff --git a/test/unit/tools/cwl_tools/v1.0/shellchar2.cwl b/test/unit/tools/cwl_tools/v1.0/shellchar2.cwl
new file mode 100644
index 000000000000..cb0b1ce7855c
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/shellchar2.cwl
@@ -0,0 +1,14 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+doc: |
+ Ensure that `shellQuote: true` is the default behavior when
+ ShellCommandRequirement is in effect.
+requirements:
+ ShellCommandRequirement: {}
+inputs: []
+outputs:
+ stdout_file: stdout
+ stderr_file: stderr
+baseCommand: echo
+arguments: ["foo 1>&2"]
diff --git a/test/unit/tools/cwl_tools/v1.0/size-expression-tool.cwl b/test/unit/tools/cwl_tools/v1.0/size-expression-tool.cwl
new file mode 100644
index 000000000000..73693190ab0e
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/size-expression-tool.cwl
@@ -0,0 +1,43 @@
+#!/usr/bin/env cwl-runner
+
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+class: CommandLineTool
+
+inputs:
+ - id: input
+ type:
+ type: array
+ items: File
+
+outputs:
+ output_file:
+ type: File
+ outputBinding: {glob: output.txt}
+
+arguments:
+ - valueFrom: |
+ ${
+ var cmd = ["echo"];
+ if (inputs.input.length == 0) {
+ cmd.push('no_inputs');
+ }
+ else {
+ for (var i = 0; i < inputs.input.length; i++) {
+ var filesize = inputs.input[i].size;
+ if (filesize == 0) {
+ cmd.push("empty_file");
+ } else if (filesize <= 16) {
+ cmd.push("small_file");
+ } else {
+ cmd.push("big_file")
+ }
+ }
+ }
+ return cmd;
+ }
+baseCommand: []
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/sorttool.cwl b/test/unit/tools/cwl_tools/v1.0/sorttool.cwl
new file mode 100644
index 000000000000..a4853217baa8
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/sorttool.cwl
@@ -0,0 +1,35 @@
+# Example command line program wrapper for the Unix tool "sort"
+# demonstrating command line flags.
+class: CommandLineTool
+doc: "Sort lines using the `sort` command"
+cwlVersion: v1.0
+
+# This example is similar to the previous one, with an additional input
+# parameter called "reverse". It is a boolean parameter, which is
+# interpreted as a command line flag. The value of "prefix" is used as the
+# flag to put on the command line if "reverse" is true; if "reverse" is
+# false, no flag is added.
+#
+# This example also introduces the "position" field. This indicates the
+# sorting order of items on the command line. Lower numbers are placed
+# before higher numbers. Here, the "--reverse" flag (if present) will be
+# added to the command line before the input file path.
+inputs:
+ - id: reverse
+ type: boolean
+ inputBinding:
+ position: 1
+ prefix: "--reverse"
+ - id: input
+ type: File
+ inputBinding:
+ position: 2
+
+outputs:
+ - id: output
+ type: File
+ outputBinding:
+ glob: output.txt
+
+baseCommand: sort
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/special_file b/test/unit/tools/cwl_tools/v1.0/special_file
new file mode 100644
index 000000000000..f70d7bba4ae1
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/special_file
@@ -0,0 +1 @@
+42
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/stage-unprovided-file.cwl b/test/unit/tools/cwl_tools/v1.0/stage-unprovided-file.cwl
new file mode 100644
index 000000000000..1cbebcbc91e1
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/stage-unprovided-file.cwl
@@ -0,0 +1,24 @@
+cwlVersion: v1.0
+class: CommandLineTool
+hints:
+ - class: DockerRequirement
+ dockerPull: python:2-slim
+inputs:
+ - id: infile
+ type: File?
+ inputBinding:
+ prefix: -cfg
+ valueFrom: $(self.basename)
+ - id: "args.py"
+ type: File
+ default:
+ class: File
+ location: args.py
+ inputBinding:
+ position: -1
+
+baseCommand: python
+
+outputs:
+- id: args
+ type: string[]
diff --git a/test/unit/tools/cwl_tools/v1.0/stagefile-job.yml b/test/unit/tools/cwl_tools/v1.0/stagefile-job.yml
new file mode 100644
index 000000000000..b9852bce0339
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/stagefile-job.yml
@@ -0,0 +1,3 @@
+infile:
+ class: File
+ location: whale.txt
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/stagefile.cwl b/test/unit/tools/cwl_tools/v1.0/stagefile.cwl
new file mode 100644
index 000000000000..ec0825a3f01d
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/stagefile.cwl
@@ -0,0 +1,26 @@
+class: CommandLineTool
+cwlVersion: v1.0
+hints:
+ - class: DockerRequirement
+ dockerPull: python:2-slim
+requirements:
+ InitialWorkDirRequirement:
+ listing:
+ - entry: $(inputs.infile)
+ entryname: bob.txt
+ writable: true
+inputs:
+ infile: File
+outputs:
+ outfile:
+ type: File
+ outputBinding:
+ glob: bob.txt
+baseCommand: "python2"
+arguments:
+ - "-c"
+ - |
+ f = open("bob.txt", "r+")
+ f.seek(8)
+ f.write("Bob. ")
+ f.close()
diff --git a/test/unit/tools/cwl_tools/v1.0/stderr-mediumcut.cwl b/test/unit/tools/cwl_tools/v1.0/stderr-mediumcut.cwl
new file mode 100644
index 000000000000..f04138efc020
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/stderr-mediumcut.cwl
@@ -0,0 +1,13 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+doc: "Test of capturing stderr output in a docker container."
+requirements:
+ ShellCommandRequirement: {}
+inputs: []
+outputs:
+ output_file:
+ type: stderr
+arguments:
+ - { valueFrom: "echo foo 1>&2", shellQuote: False }
+stderr: std.err
diff --git a/test/unit/tools/cwl_tools/v1.0/stderr-shortcut.cwl b/test/unit/tools/cwl_tools/v1.0/stderr-shortcut.cwl
new file mode 100644
index 000000000000..df69badd3af3
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/stderr-shortcut.cwl
@@ -0,0 +1,12 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+doc: "Test of capturing stderr output in a docker container."
+requirements:
+ ShellCommandRequirement: {}
+inputs: []
+outputs:
+ output_file:
+ type: stderr
+arguments:
+ - { valueFrom: "echo foo 1>&2", shellQuote: False }
diff --git a/test/unit/tools/cwl_tools/v1.0/stderr.cwl b/test/unit/tools/cwl_tools/v1.0/stderr.cwl
new file mode 100644
index 000000000000..577b770c1c23
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/stderr.cwl
@@ -0,0 +1,14 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+doc: "Test of capturing stderr output."
+requirements:
+ ShellCommandRequirement: {}
+inputs: []
+outputs:
+ output_file:
+ type: File
+ outputBinding: {glob: error.txt}
+arguments:
+ - { valueFrom: "echo foo 1>&2", shellQuote: False }
+stderr: error.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/step-valuefrom-job.json b/test/unit/tools/cwl_tools/v1.0/step-valuefrom-job.json
new file mode 100644
index 000000000000..062e54fb49e8
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/step-valuefrom-job.json
@@ -0,0 +1 @@
+{"a": 1, "b": 2}
diff --git a/test/unit/tools/cwl_tools/v1.0/step-valuefrom-wf.cwl b/test/unit/tools/cwl_tools/v1.0/step-valuefrom-wf.cwl
new file mode 100644
index 000000000000..1b6d65e692ad
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/step-valuefrom-wf.cwl
@@ -0,0 +1,34 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+requirements:
+ - class: StepInputExpressionRequirement
+
+inputs:
+ in:
+ type:
+ name: in
+ type: record
+ fields:
+ - name: file1
+ type: File
+
+outputs:
+ count_output:
+ type: int
+ outputSource: step2/output
+
+steps:
+ step1:
+ run: wc-tool.cwl
+ in:
+ file1:
+ source: in
+ valueFrom: $(self.file1)
+ out: [output]
+
+ step2:
+ run: parseInt-tool.cwl
+ in:
+ file1: step1/output
+ out: [output]
diff --git a/test/unit/tools/cwl_tools/v1.0/step-valuefrom-wf.json b/test/unit/tools/cwl_tools/v1.0/step-valuefrom-wf.json
new file mode 100644
index 000000000000..b133241741fb
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/step-valuefrom-wf.json
@@ -0,0 +1,8 @@
+{
+ "in": {
+ "file1": {
+ "class": "File",
+ "location": "whale.txt"
+ }
+ }
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/step-valuefrom2-wf.cwl b/test/unit/tools/cwl_tools/v1.0/step-valuefrom2-wf.cwl
new file mode 100644
index 000000000000..2edb3f65fa1e
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/step-valuefrom2-wf.cwl
@@ -0,0 +1,41 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+requirements:
+ - class: StepInputExpressionRequirement
+ - class: InlineJavascriptRequirement
+ - class: MultipleInputFeatureRequirement
+
+inputs:
+ a: int
+ b: int
+
+outputs:
+ val:
+ type: string
+ outputSource: step1/echo_out
+
+steps:
+ step1:
+ run:
+ id: echo
+ class: CommandLineTool
+ inputs:
+ c:
+ type: int
+ inputBinding: {}
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ stdout: step1_out
+
+ in:
+ c:
+ source: [a, b]
+ valueFrom: "$(self[0] + self[1])"
+ out: [echo_out]
diff --git a/test/unit/tools/cwl_tools/v1.0/step-valuefrom3-wf.cwl b/test/unit/tools/cwl_tools/v1.0/step-valuefrom3-wf.cwl
new file mode 100644
index 000000000000..f1974ddfe104
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/step-valuefrom3-wf.cwl
@@ -0,0 +1,41 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+requirements:
+ - class: StepInputExpressionRequirement
+ - class: InlineJavascriptRequirement
+
+inputs:
+ a: int
+ b: int
+
+outputs:
+ val:
+ type: string
+ outputSource: step1/echo_out
+
+steps:
+ step1:
+ run:
+ id: echo
+ class: CommandLineTool
+ inputs:
+ c:
+ type: int
+ inputBinding: {}
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ stdout: step1_out
+
+ in:
+ a: a
+ b: b
+ c:
+ valueFrom: "$(inputs.a + inputs.b)"
+ out: [echo_out]
diff --git a/test/unit/tools/cwl_tools/v1.0/step-valuefrom4-wf.cwl b/test/unit/tools/cwl_tools/v1.0/step-valuefrom4-wf.cwl
new file mode 100644
index 000000000000..67573a6c630d
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/step-valuefrom4-wf.cwl
@@ -0,0 +1,37 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+requirements:
+ - class: StepInputExpressionRequirement
+ - class: InlineJavascriptRequirement
+
+inputs: []
+
+outputs:
+ val:
+ type: string
+ outputSource: step1/echo_out
+
+steps:
+ step1:
+ run:
+ id: echo
+ class: CommandLineTool
+ inputs:
+ a:
+ type: string
+ inputBinding: {}
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ baseCommand: "echo"
+ stdout: step1_out
+
+ in:
+ a:
+ valueFrom: "moocow"
+ out: [echo_out]
diff --git a/test/unit/tools/cwl_tools/v1.0/step-valuefrom5-wf.cwl b/test/unit/tools/cwl_tools/v1.0/step-valuefrom5-wf.cwl
new file mode 100644
index 000000000000..15808d13aa44
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/step-valuefrom5-wf.cwl
@@ -0,0 +1,76 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+requirements:
+ - class: StepInputExpressionRequirement
+ - class: InlineJavascriptRequirement
+ - class: MultipleInputFeatureRequirement
+
+inputs:
+ file1: File
+
+outputs:
+ val1:
+ type: string
+ outputSource: step1/echo_out
+ val2:
+ type: string
+ outputSource: step2/echo_out
+
+steps:
+ step1:
+ run:
+ class: CommandLineTool
+
+ inputs:
+ name:
+ type: string
+ inputBinding: {}
+
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+ echo_out_file:
+ type: File
+ outputBinding:
+ glob: "step1_out"
+
+ baseCommand: "echo"
+ stdout: step1_out
+
+ in:
+ name:
+ source: file1
+ valueFrom: "$(self.basename)"
+ out: [echo_out, echo_out_file]
+
+
+ step2:
+ run:
+ class: CommandLineTool
+
+ inputs:
+ name:
+ type: string
+ inputBinding: {}
+
+ outputs:
+ echo_out:
+ type: string
+ outputBinding:
+ glob: "step1_out"
+ loadContents: true
+ outputEval: $(self[0].contents)
+
+ baseCommand: "echo"
+ stdout: step1_out
+
+ in:
+ name:
+ source: step1/echo_out_file
+ valueFrom: "$(self.basename)"
+ out: [echo_out]
diff --git a/test/unit/tools/cwl_tools/v1.0/steplevel-resreq.cwl b/test/unit/tools/cwl_tools/v1.0/steplevel-resreq.cwl
new file mode 100644
index 000000000000..b2b24c5fe5fb
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/steplevel-resreq.cwl
@@ -0,0 +1,33 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ ResourceRequirement:
+ coresMin: 4
+ coresMax: 4
+
+inputs: []
+
+steps:
+ step1:
+ requirements:
+ ResourceRequirement:
+ coresMin: 1
+ coresMax: 1
+ run:
+ class: CommandLineTool
+ inputs: []
+ outputs:
+ output:
+ type: stdout
+ baseCommand: echo
+ stdout: cores.txt
+ arguments: [ $(runtime.cores) ]
+ in: []
+ out: [output]
+
+outputs:
+ out:
+ type: File
+ outputSource: step1/output
diff --git a/test/unit/tools/cwl_tools/v1.0/string-job.json b/test/unit/tools/cwl_tools/v1.0/string-job.json
new file mode 100644
index 000000000000..ff89222db782
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/string-job.json
@@ -0,0 +1,3 @@
+{
+ "message": "hello"
+}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/hello.py b/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/hello.py
new file mode 100644
index 000000000000..f301245e2429
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/hello.py
@@ -0,0 +1 @@
+print("Hello World!")
diff --git a/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/index.py b/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/index.py
new file mode 100755
index 000000000000..77918b5e5ec1
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/index.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python2
+
+# Toy program to generate inverted index of word to line.
+# Takes input text file on stdin and prints output index on stdout.
+
+import sys
+import os
+
+words = {}
+
+mainfile = sys.argv[1]
+indexfile = sys.argv[1] + ".idx1"
+
+main = open(mainfile)
+index = open(indexfile, "w")
+
+linenum = 0
+for l in main:
+ linenum += 1
+ l = l.rstrip().lower().replace(".", "").replace(",", "").replace(";", "").replace("-", " ")
+ for w in l.split(" "):
+ if w:
+ if w not in words:
+ words[w] = set()
+ words[w].add(linenum)
+
+for w in sorted(words.keys()):
+ index.write("%s: %s" % (w, ", ".join((str(i) for i in words[w]))) + "\n")
+
+open(os.path.splitext(sys.argv[1])[0] + ".idx2", "w")
+open(sys.argv[1] + ".idx3", "w")
+open(sys.argv[1] + ".idx4", "w")
+open(sys.argv[1] + ".idx5", "w")
+open(os.path.splitext(sys.argv[1])[0] + ".idx6" + os.path.splitext(sys.argv[1])[1], "w")
+open(sys.argv[1] + ".idx7", "w")
+os.mkdir(sys.argv[1] + "_idx8")
+open(sys.argv[1] + "_idx8/index", "w")
diff --git a/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/testdir/p b/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/testdir/p
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/testdir/q b/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/testdir/q
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/testdir/r b/test/unit/tools/cwl_tools/v1.0/subdirsecondaries/testdir/r
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/tools/cwl_tools/v1.0/sum-job.json b/test/unit/tools/cwl_tools/v1.0/sum-job.json
new file mode 100644
index 000000000000..64e29bfa105a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/sum-job.json
@@ -0,0 +1,4 @@
+{
+ "int_1": 5,
+ "int_2": 7
+}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0/sum-wf.cwl b/test/unit/tools/cwl_tools/v1.0/sum-wf.cwl
new file mode 100644
index 000000000000..566957b643f7
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/sum-wf.cwl
@@ -0,0 +1,49 @@
+cwlVersion: v1.0
+class: Workflow
+
+requirements:
+ - class: StepInputExpressionRequirement
+ - class: MultipleInputFeatureRequirement
+ - class: InlineJavascriptRequirement
+
+inputs:
+ int_1:
+ type:
+ - int
+ - string
+ int_2:
+ type:
+ - int
+ - string
+
+outputs:
+ result:
+ type: int
+ outputSource: sum/result
+
+steps:
+ sum:
+ in:
+ data:
+ source: [int_1, int_2]
+ valueFrom: |
+ ${
+ var sum = 0;
+ for (var i = 0; i < self.length; i++){
+ sum += self[i];
+ };
+ return sum;
+ }
+ out:
+ - result
+ run:
+ class: ExpressionTool
+ inputs:
+ data:
+ type: int
+ outputs:
+ result: int
+ expression: |
+ ${
+ return {"result": inputs.data};
+ }
diff --git a/test/unit/tools/cwl_tools/v1.0/template-tool.cwl b/test/unit/tools/cwl_tools/v1.0/template-tool.cwl
new file mode 100755
index 000000000000..d70085fd328a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/template-tool.cwl
@@ -0,0 +1,24 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.0
+class: CommandLineTool
+requirements:
+ - class: InlineJavascriptRequirement
+ expressionLib:
+ - { $include: underscore.js }
+ - "var t = function(s) { return _.template(s, {variable: 'data'})({'inputs': inputs}); };"
+ - class: InitialWorkDirRequirement
+ listing:
+ - entryname: foo.txt
+ entry: $(t("The file is <%= data.inputs.file1.path.split('/').slice(-1)[0] %>\n"))
+hints:
+ DockerRequirement:
+ dockerPull: "debian:stretch-slim"
+inputs:
+ - id: file1
+ type: File
+outputs:
+ - id: foo
+ type: File
+ outputBinding:
+ glob: foo.txt
+baseCommand: [cat, foo.txt]
diff --git a/test/unit/tools/cwl_tools/v1.0/test-cwl-out.cwl b/test/unit/tools/cwl_tools/v1.0/test-cwl-out.cwl
new file mode 100644
index 000000000000..c84adb771045
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/test-cwl-out.cwl
@@ -0,0 +1,18 @@
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: ShellCommandRequirement
+hints:
+ DockerRequirement:
+ dockerPull: "debian:stretch-slim"
+
+inputs: []
+
+outputs:
+ - id: foo
+ type: File
+
+arguments:
+ - valueFrom: >
+ echo foo > foo && echo '{"foo": {"path": "$(runtime.outdir)/foo", "class": "File"} }' > cwl.output.json
+ shellQuote: false
diff --git a/test/unit/tools/cwl_tools/v1.0/test-cwl-out2.cwl b/test/unit/tools/cwl_tools/v1.0/test-cwl-out2.cwl
new file mode 100644
index 000000000000..23724a3cfb77
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/test-cwl-out2.cwl
@@ -0,0 +1,18 @@
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: ShellCommandRequirement
+hints:
+ DockerRequirement:
+ dockerPull: "debian:stretch-slim"
+
+inputs: []
+
+outputs:
+ - id: foo
+ type: File
+
+arguments:
+ - valueFrom: >
+ echo foo > foo && echo '{"foo": {"location": "file://$(runtime.outdir)/foo", "class": "File"} }' > cwl.output.json
+ shellQuote: false
diff --git a/test/unit/tools/cwl_tools/v1.0/testdir/a b/test/unit/tools/cwl_tools/v1.0/testdir/a
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/tools/cwl_tools/v1.0/testdir/b b/test/unit/tools/cwl_tools/v1.0/testdir/b
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/tools/cwl_tools/v1.0/testdir/c/d b/test/unit/tools/cwl_tools/v1.0/testdir/c/d
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/test/unit/tools/cwl_tools/v1.0/tmap-job.json b/test/unit/tools/cwl_tools/v1.0/tmap-job.json
new file mode 100644
index 000000000000..24cdda68a196
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/tmap-job.json
@@ -0,0 +1,38 @@
+{
+ "reads": {
+ "class": "File",
+ "location": "reads.fastq"
+ },
+ "stages": [
+ {
+ "algos": [
+ {
+ "algo": "map1",
+ "minSeqLen": 20
+ },
+ {
+ "algo": "map2",
+ "minSeqLen": 20
+ }
+ ],
+ "stageId": 1
+ },
+ {
+ "algos": [
+ {
+ "minSeqLen": 10,
+ "maxSeqLen": 20,
+ "seedLength": 16,
+ "algo": "map1"
+ },
+ {
+ "maxSeedHits": -1,
+ "minSeqLen": 10,
+ "maxSeqLen": 20,
+ "algo": "map2"
+ }
+ ],
+ "stageId": 2
+ }
+ ]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/tmap-tool.cwl b/test/unit/tools/cwl_tools/v1.0/tmap-tool.cwl
new file mode 100755
index 000000000000..734487a761d0
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/tmap-tool.cwl
@@ -0,0 +1,257 @@
+#!/usr/bin/env cwl-runner
+{
+ "cwlVersion": "v1.0",
+
+ "class": "CommandLineTool",
+ "hints": [
+ {
+ "class": "DockerRequirement",
+ "dockerPull": "python:2-slim"
+ }
+ ],
+ "inputs": [
+ {
+ "id": "reads",
+ "type": "File"
+ },
+ {
+ "id": "stages",
+ "inputBinding": {
+ "position": 1
+ },
+ "type": {
+ "type": "array",
+ "items": "#Stage"
+ }
+ },
+ {
+ id: "#args.py",
+ type: File,
+ default: {
+ class: File,
+ location: args.py
+ },
+ inputBinding: {
+ position: -1
+ }
+ }
+ ],
+ "outputs": [
+ {
+ "id": "sam",
+ "outputBinding": {
+ "glob": "output.sam"
+ },
+ "type": ["null", "File"]
+ },
+ {"id": "args", "type": "string[]"}
+ ],
+ "requirements": [
+ {"class": "SchemaDefRequirement",
+ "types": [
+ {
+ "fields": [
+ {
+ "inputBinding": {
+ "position": 0
+ },
+ "name": "algo",
+ "type": {
+ "type": "enum",
+ "name": "JustMap1",
+ "symbols": ["map1"]
+ }
+ },
+ {
+ "name": "maxSeqLen",
+ "type": ["null", "int"],
+ "inputBinding": {
+ "prefix": "--max-seq-length",
+ "position": 2
+ }
+ },
+ {
+ "name": "minSeqLen",
+ "type": ["null", "int"],
+ "inputBinding": {
+ "prefix": "--min-seq-length",
+ "position": 2
+ }
+ },
+ {
+ "inputBinding": {
+ "position": 2,
+ "prefix": "--seed-length"
+ },
+ "name": "seedLength",
+ "type": ["null", "int"]
+ }
+ ],
+ "name": "Map1",
+ "type": "record"
+ },
+ {
+ "fields": [
+ {
+ "inputBinding": {
+ "position": 0
+ },
+ "name": "algo",
+ "type": {
+ "type": "enum",
+ "name": "JustMap2",
+ "symbols": ["map2"]
+ }
+ },
+ {
+ "name": "maxSeqLen",
+ "type": ["null", "int"],
+ "inputBinding": {
+ "prefix": "--max-seq-length",
+ "position": 2
+ }
+ },
+ {
+ "name": "minSeqLen",
+ "type": ["null", "int"],
+ "inputBinding": {
+ "prefix": "--min-seq-length",
+ "position": 2
+ }
+ },
+ {
+ "inputBinding": {
+ "position": 2,
+ "prefix": "--max-seed-hits"
+ },
+ "name": "maxSeedHits",
+ "type": ["null", "int"]
+ }
+ ],
+ "name": "Map2",
+ "type": "record"
+ },
+ {
+ "fields": [
+ {
+ "inputBinding": {
+ "position": 0
+ },
+ "name": "algo",
+ "type": {
+ "type": "enum",
+ "name": "JustMap3",
+ "symbols": ["map3"]
+ }
+ },
+ {
+ "name": "maxSeqLen",
+ "type": ["null", "int"],
+ "inputBinding": {
+ "prefix": "--max-seq-length",
+ "position": 2
+ }
+ },
+ {
+ "name": "minSeqLen",
+ "type": ["null", "int"],
+ "inputBinding": {
+ "prefix": "--min-seq-length",
+ "position": 2
+ }
+ },
+ {
+ "inputBinding": {
+ "position": 2,
+ "prefix": "--fwd-search"
+ },
+ "name": "fwdSearch",
+ "type": ["null", "boolean"]
+ }
+ ],
+ "name": "Map3",
+ "type": "record"
+ },
+ {
+ "fields": [
+ {
+ "inputBinding": {
+ "position": 0
+ },
+ "name": "algo",
+ "type": {
+ "type": "enum",
+ "name": "JustMap4",
+ "symbols": ["map4"]
+ }
+ },
+ {
+ "name": "maxSeqLen",
+ "type": ["null", "int"],
+ "inputBinding": {
+ "prefix": "--max-seq-length",
+ "position": 2
+ }
+ },
+ {
+ "name": "minSeqLen",
+ "type": ["null", "int"],
+ "inputBinding": {
+ "prefix": "--min-seq-length",
+ "position": 2
+ }
+ },
+ {
+ "inputBinding": {
+ "position": 2,
+ "prefix": "--seed-step"
+ },
+ "name": "seedStep",
+ "type": ["null", "int"]
+ }
+ ],
+ "name": "Map4",
+ "type": "record"
+ },
+ {
+ "type": "record",
+ "name": "Stage",
+ "fields": [
+ {
+ "inputBinding": {
+ "position": 0,
+ "prefix": "stage",
+ "separate": false
+ },
+ "name": "stageId",
+ "type": ["null", "int"]
+ },
+ {
+ "inputBinding": {
+ "position": 1,
+ "prefix": "-n"
+ },
+ "name": "stageOption1",
+ "type": ["null", "boolean"]
+ },
+ {
+ "inputBinding": {
+ "position": 2
+ },
+ "name": "algos",
+ "type": {
+ "type": "array",
+ "items": [
+ "#Map1",
+ "#Map2",
+ "#Map3",
+ "#Map4"
+ ]
+ }
+ }
+ ]
+ }
+ ]}],
+ "baseCommand": "python",
+ "arguments": ["tmap", "mapall"]
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/underscore.js b/test/unit/tools/cwl_tools/v1.0/underscore.js
new file mode 100644
index 000000000000..05bfdc0b637f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/underscore.js
@@ -0,0 +1,1416 @@
+// Underscore.js 1.7.0
+// http://underscorejs.org
+// (c) 2009-2014 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
+// Underscore may be freely distributed under the MIT license.
+
+(function() {
+
+ // Baseline setup
+ // --------------
+
+ // Establish the root object, `window` in the browser, or `exports` on the server.
+ var root = this;
+
+ // Save the previous value of the `_` variable.
+ //var previousUnderscore = root._;
+
+ // Save bytes in the minified (but not gzipped) version:
+ var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype;
+
+ // Create quick reference variables for speed access to core prototypes.
+ var
+ push = ArrayProto.push,
+ slice = ArrayProto.slice,
+ concat = ArrayProto.concat,
+ toString = ObjProto.toString,
+ hasOwnProperty = ObjProto.hasOwnProperty;
+
+ // All **ECMAScript 5** native function implementations that we hope to use
+ // are declared here.
+ var
+ nativeIsArray = Array.isArray,
+ nativeKeys = Object.keys,
+ nativeBind = FuncProto.bind;
+
+ // Create a safe reference to the Underscore object for use below.
+ var _ = function(obj) {
+ if (obj instanceof _) return obj;
+ if (!(this instanceof _)) return new _(obj);
+ this._wrapped = obj;
+ };
+
+ // Export the Underscore object for **Node.js**, with
+ // backwards-compatibility for the old `require()` API. If we're in
+ // the browser, add `_` as a global object.
+ if (typeof exports !== 'undefined') {
+ if (typeof module !== 'undefined' && module.exports) {
+ exports = module.exports = _;
+ }
+ exports._ = _;
+ } else {
+ root._ = _;
+ }
+
+ // Current version.
+ _.VERSION = '1.7.0';
+
+ // Internal function that returns an efficient (for current engines) version
+ // of the passed-in callback, to be repeatedly applied in other Underscore
+ // functions.
+ var createCallback = function(func, context, argCount) {
+ if (context === void 0) return func;
+ switch (argCount == null ? 3 : argCount) {
+ case 1: return function(value) {
+ return func.call(context, value);
+ };
+ case 2: return function(value, other) {
+ return func.call(context, value, other);
+ };
+ case 3: return function(value, index, collection) {
+ return func.call(context, value, index, collection);
+ };
+ case 4: return function(accumulator, value, index, collection) {
+ return func.call(context, accumulator, value, index, collection);
+ };
+ }
+ return function() {
+ return func.apply(context, arguments);
+ };
+ };
+
+ // A mostly-internal function to generate callbacks that can be applied
+ // to each element in a collection, returning the desired result — either
+ // identity, an arbitrary callback, a property matcher, or a property accessor.
+ _.iteratee = function(value, context, argCount) {
+ if (value == null) return _.identity;
+ if (_.isFunction(value)) return createCallback(value, context, argCount);
+ if (_.isObject(value)) return _.matches(value);
+ return _.property(value);
+ };
+
+ // Collection Functions
+ // --------------------
+
+ // The cornerstone, an `each` implementation, aka `forEach`.
+ // Handles raw objects in addition to array-likes. Treats all
+ // sparse array-likes as if they were dense.
+ _.each = _.forEach = function(obj, iteratee, context) {
+ if (obj == null) return obj;
+ iteratee = createCallback(iteratee, context);
+ var i, length = obj.length;
+ if (length === +length) {
+ for (i = 0; i < length; i++) {
+ iteratee(obj[i], i, obj);
+ }
+ } else {
+ var keys = _.keys(obj);
+ for (i = 0, length = keys.length; i < length; i++) {
+ iteratee(obj[keys[i]], keys[i], obj);
+ }
+ }
+ return obj;
+ };
+
+ // Return the results of applying the iteratee to each element.
+ _.map = _.collect = function(obj, iteratee, context) {
+ if (obj == null) return [];
+ iteratee = _.iteratee(iteratee, context);
+ var keys = obj.length !== +obj.length && _.keys(obj),
+ length = (keys || obj).length,
+ results = Array(length),
+ currentKey;
+ for (var index = 0; index < length; index++) {
+ currentKey = keys ? keys[index] : index;
+ results[index] = iteratee(obj[currentKey], currentKey, obj);
+ }
+ return results;
+ };
+
+ var reduceError = 'Reduce of empty array with no initial value';
+
+ // **Reduce** builds up a single result from a list of values, aka `inject`,
+ // or `foldl`.
+ _.reduce = _.foldl = _.inject = function(obj, iteratee, memo, context) {
+ if (obj == null) obj = [];
+ iteratee = createCallback(iteratee, context, 4);
+ var keys = obj.length !== +obj.length && _.keys(obj),
+ length = (keys || obj).length,
+ index = 0, currentKey;
+ if (arguments.length < 3) {
+ if (!length) throw new TypeError(reduceError);
+ memo = obj[keys ? keys[index++] : index++];
+ }
+ for (; index < length; index++) {
+ currentKey = keys ? keys[index] : index;
+ memo = iteratee(memo, obj[currentKey], currentKey, obj);
+ }
+ return memo;
+ };
+
+ // The right-associative version of reduce, also known as `foldr`.
+ _.reduceRight = _.foldr = function(obj, iteratee, memo, context) {
+ if (obj == null) obj = [];
+ iteratee = createCallback(iteratee, context, 4);
+ var keys = obj.length !== + obj.length && _.keys(obj),
+ index = (keys || obj).length,
+ currentKey;
+ if (arguments.length < 3) {
+ if (!index) throw new TypeError(reduceError);
+ memo = obj[keys ? keys[--index] : --index];
+ }
+ while (index--) {
+ currentKey = keys ? keys[index] : index;
+ memo = iteratee(memo, obj[currentKey], currentKey, obj);
+ }
+ return memo;
+ };
+
+ // Return the first value which passes a truth test. Aliased as `detect`.
+ _.find = _.detect = function(obj, predicate, context) {
+ var result;
+ predicate = _.iteratee(predicate, context);
+ _.some(obj, function(value, index, list) {
+ if (predicate(value, index, list)) {
+ result = value;
+ return true;
+ }
+ });
+ return result;
+ };
+
+ // Return all the elements that pass a truth test.
+ // Aliased as `select`.
+ _.filter = _.select = function(obj, predicate, context) {
+ var results = [];
+ if (obj == null) return results;
+ predicate = _.iteratee(predicate, context);
+ _.each(obj, function(value, index, list) {
+ if (predicate(value, index, list)) results.push(value);
+ });
+ return results;
+ };
+
+ // Return all the elements for which a truth test fails.
+ _.reject = function(obj, predicate, context) {
+ return _.filter(obj, _.negate(_.iteratee(predicate)), context);
+ };
+
+ // Determine whether all of the elements match a truth test.
+ // Aliased as `all`.
+ _.every = _.all = function(obj, predicate, context) {
+ if (obj == null) return true;
+ predicate = _.iteratee(predicate, context);
+ var keys = obj.length !== +obj.length && _.keys(obj),
+ length = (keys || obj).length,
+ index, currentKey;
+ for (index = 0; index < length; index++) {
+ currentKey = keys ? keys[index] : index;
+ if (!predicate(obj[currentKey], currentKey, obj)) return false;
+ }
+ return true;
+ };
+
+ // Determine if at least one element in the object matches a truth test.
+ // Aliased as `any`.
+ _.some = _.any = function(obj, predicate, context) {
+ if (obj == null) return false;
+ predicate = _.iteratee(predicate, context);
+ var keys = obj.length !== +obj.length && _.keys(obj),
+ length = (keys || obj).length,
+ index, currentKey;
+ for (index = 0; index < length; index++) {
+ currentKey = keys ? keys[index] : index;
+ if (predicate(obj[currentKey], currentKey, obj)) return true;
+ }
+ return false;
+ };
+
+ // Determine if the array or object contains a given value (using `===`).
+ // Aliased as `include`.
+ _.contains = _.include = function(obj, target) {
+ if (obj == null) return false;
+ if (obj.length !== +obj.length) obj = _.values(obj);
+ return _.indexOf(obj, target) >= 0;
+ };
+
+ // Invoke a method (with arguments) on every item in a collection.
+ _.invoke = function(obj, method) {
+ var args = slice.call(arguments, 2);
+ var isFunc = _.isFunction(method);
+ return _.map(obj, function(value) {
+ return (isFunc ? method : value[method]).apply(value, args);
+ });
+ };
+
+ // Convenience version of a common use case of `map`: fetching a property.
+ _.pluck = function(obj, key) {
+ return _.map(obj, _.property(key));
+ };
+
+ // Convenience version of a common use case of `filter`: selecting only objects
+ // containing specific `key:value` pairs.
+ _.where = function(obj, attrs) {
+ return _.filter(obj, _.matches(attrs));
+ };
+
+ // Convenience version of a common use case of `find`: getting the first object
+ // containing specific `key:value` pairs.
+ _.findWhere = function(obj, attrs) {
+ return _.find(obj, _.matches(attrs));
+ };
+
+ // Return the maximum element (or element-based computation).
+ _.max = function(obj, iteratee, context) {
+ var result = -Infinity, lastComputed = -Infinity,
+ value, computed;
+ if (iteratee == null && obj != null) {
+ obj = obj.length === +obj.length ? obj : _.values(obj);
+ for (var i = 0, length = obj.length; i < length; i++) {
+ value = obj[i];
+ if (value > result) {
+ result = value;
+ }
+ }
+ } else {
+ iteratee = _.iteratee(iteratee, context);
+ _.each(obj, function(value, index, list) {
+ computed = iteratee(value, index, list);
+ if (computed > lastComputed || computed === -Infinity && result === -Infinity) {
+ result = value;
+ lastComputed = computed;
+ }
+ });
+ }
+ return result;
+ };
+
+ // Return the minimum element (or element-based computation).
+ _.min = function(obj, iteratee, context) {
+ var result = Infinity, lastComputed = Infinity,
+ value, computed;
+ if (iteratee == null && obj != null) {
+ obj = obj.length === +obj.length ? obj : _.values(obj);
+ for (var i = 0, length = obj.length; i < length; i++) {
+ value = obj[i];
+ if (value < result) {
+ result = value;
+ }
+ }
+ } else {
+ iteratee = _.iteratee(iteratee, context);
+ _.each(obj, function(value, index, list) {
+ computed = iteratee(value, index, list);
+ if (computed < lastComputed || computed === Infinity && result === Infinity) {
+ result = value;
+ lastComputed = computed;
+ }
+ });
+ }
+ return result;
+ };
+
+ // Shuffle a collection, using the modern version of the
+ // [Fisher-Yates shuffle](http://en.wikipedia.org/wiki/Fisher–Yates_shuffle).
+ _.shuffle = function(obj) {
+ var set = obj && obj.length === +obj.length ? obj : _.values(obj);
+ var length = set.length;
+ var shuffled = Array(length);
+ for (var index = 0, rand; index < length; index++) {
+ rand = _.random(0, index);
+ if (rand !== index) shuffled[index] = shuffled[rand];
+ shuffled[rand] = set[index];
+ }
+ return shuffled;
+ };
+
+ // Sample **n** random values from a collection.
+ // If **n** is not specified, returns a single random element.
+ // The internal `guard` argument allows it to work with `map`.
+ _.sample = function(obj, n, guard) {
+ if (n == null || guard) {
+ if (obj.length !== +obj.length) obj = _.values(obj);
+ return obj[_.random(obj.length - 1)];
+ }
+ return _.shuffle(obj).slice(0, Math.max(0, n));
+ };
+
+ // Sort the object's values by a criterion produced by an iteratee.
+ _.sortBy = function(obj, iteratee, context) {
+ iteratee = _.iteratee(iteratee, context);
+ return _.pluck(_.map(obj, function(value, index, list) {
+ return {
+ value: value,
+ index: index,
+ criteria: iteratee(value, index, list)
+ };
+ }).sort(function(left, right) {
+ var a = left.criteria;
+ var b = right.criteria;
+ if (a !== b) {
+ if (a > b || a === void 0) return 1;
+ if (a < b || b === void 0) return -1;
+ }
+ return left.index - right.index;
+ }), 'value');
+ };
+
+ // An internal function used for aggregate "group by" operations.
+ var group = function(behavior) {
+ return function(obj, iteratee, context) {
+ var result = {};
+ iteratee = _.iteratee(iteratee, context);
+ _.each(obj, function(value, index) {
+ var key = iteratee(value, index, obj);
+ behavior(result, value, key);
+ });
+ return result;
+ };
+ };
+
+ // Groups the object's values by a criterion. Pass either a string attribute
+ // to group by, or a function that returns the criterion.
+ _.groupBy = group(function(result, value, key) {
+ if (_.has(result, key)) result[key].push(value); else result[key] = [value];
+ });
+
+ // Indexes the object's values by a criterion, similar to `groupBy`, but for
+ // when you know that your index values will be unique.
+ _.indexBy = group(function(result, value, key) {
+ result[key] = value;
+ });
+
+ // Counts instances of an object that group by a certain criterion. Pass
+ // either a string attribute to count by, or a function that returns the
+ // criterion.
+ _.countBy = group(function(result, value, key) {
+ if (_.has(result, key)) result[key]++; else result[key] = 1;
+ });
+
+ // Use a comparator function to figure out the smallest index at which
+ // an object should be inserted so as to maintain order. Uses binary search.
+ _.sortedIndex = function(array, obj, iteratee, context) {
+ iteratee = _.iteratee(iteratee, context, 1);
+ var value = iteratee(obj);
+ var low = 0, high = array.length;
+ while (low < high) {
+ var mid = low + high >>> 1;
+ if (iteratee(array[mid]) < value) low = mid + 1; else high = mid;
+ }
+ return low;
+ };
+
+ // Safely create a real, live array from anything iterable.
+ _.toArray = function(obj) {
+ if (!obj) return [];
+ if (_.isArray(obj)) return slice.call(obj);
+ if (obj.length === +obj.length) return _.map(obj, _.identity);
+ return _.values(obj);
+ };
+
+ // Return the number of elements in an object.
+ _.size = function(obj) {
+ if (obj == null) return 0;
+ return obj.length === +obj.length ? obj.length : _.keys(obj).length;
+ };
+
+ // Split a collection into two arrays: one whose elements all satisfy the given
+ // predicate, and one whose elements all do not satisfy the predicate.
+ _.partition = function(obj, predicate, context) {
+ predicate = _.iteratee(predicate, context);
+ var pass = [], fail = [];
+ _.each(obj, function(value, key, obj) {
+ (predicate(value, key, obj) ? pass : fail).push(value);
+ });
+ return [pass, fail];
+ };
+
+ // Array Functions
+ // ---------------
+
+ // Get the first element of an array. Passing **n** will return the first N
+ // values in the array. Aliased as `head` and `take`. The **guard** check
+ // allows it to work with `_.map`.
+ _.first = _.head = _.take = function(array, n, guard) {
+ if (array == null) return void 0;
+ if (n == null || guard) return array[0];
+ if (n < 0) return [];
+ return slice.call(array, 0, n);
+ };
+
+ // Returns everything but the last entry of the array. Especially useful on
+ // the arguments object. Passing **n** will return all the values in
+ // the array, excluding the last N. The **guard** check allows it to work with
+ // `_.map`.
+ _.initial = function(array, n, guard) {
+ return slice.call(array, 0, Math.max(0, array.length - (n == null || guard ? 1 : n)));
+ };
+
+ // Get the last element of an array. Passing **n** will return the last N
+ // values in the array. The **guard** check allows it to work with `_.map`.
+ _.last = function(array, n, guard) {
+ if (array == null) return void 0;
+ if (n == null || guard) return array[array.length - 1];
+ return slice.call(array, Math.max(array.length - n, 0));
+ };
+
+ // Returns everything but the first entry of the array. Aliased as `tail` and `drop`.
+ // Especially useful on the arguments object. Passing an **n** will return
+ // the rest N values in the array. The **guard**
+ // check allows it to work with `_.map`.
+ _.rest = _.tail = _.drop = function(array, n, guard) {
+ return slice.call(array, n == null || guard ? 1 : n);
+ };
+
+ // Trim out all falsy values from an array.
+ _.compact = function(array) {
+ return _.filter(array, _.identity);
+ };
+
+ // Internal implementation of a recursive `flatten` function.
+ var flatten = function(input, shallow, strict, output) {
+ if (shallow && _.every(input, _.isArray)) {
+ return concat.apply(output, input);
+ }
+ for (var i = 0, length = input.length; i < length; i++) {
+ var value = input[i];
+ if (!_.isArray(value) && !_.isArguments(value)) {
+ if (!strict) output.push(value);
+ } else if (shallow) {
+ push.apply(output, value);
+ } else {
+ flatten(value, shallow, strict, output);
+ }
+ }
+ return output;
+ };
+
+ // Flatten out an array, either recursively (by default), or just one level.
+ _.flatten = function(array, shallow) {
+ return flatten(array, shallow, false, []);
+ };
+
+ // Return a version of the array that does not contain the specified value(s).
+ _.without = function(array) {
+ return _.difference(array, slice.call(arguments, 1));
+ };
+
+ // Produce a duplicate-free version of the array. If the array has already
+ // been sorted, you have the option of using a faster algorithm.
+ // Aliased as `unique`.
+ _.uniq = _.unique = function(array, isSorted, iteratee, context) {
+ if (array == null) return [];
+ if (!_.isBoolean(isSorted)) {
+ context = iteratee;
+ iteratee = isSorted;
+ isSorted = false;
+ }
+ if (iteratee != null) iteratee = _.iteratee(iteratee, context);
+ var result = [];
+ var seen = [];
+ for (var i = 0, length = array.length; i < length; i++) {
+ var value = array[i];
+ if (isSorted) {
+ if (!i || seen !== value) result.push(value);
+ seen = value;
+ } else if (iteratee) {
+ var computed = iteratee(value, i, array);
+ if (_.indexOf(seen, computed) < 0) {
+ seen.push(computed);
+ result.push(value);
+ }
+ } else if (_.indexOf(result, value) < 0) {
+ result.push(value);
+ }
+ }
+ return result;
+ };
+
+ // Produce an array that contains the union: each distinct element from all of
+ // the passed-in arrays.
+ _.union = function() {
+ return _.uniq(flatten(arguments, true, true, []));
+ };
+
+ // Produce an array that contains every item shared between all the
+ // passed-in arrays.
+ _.intersection = function(array) {
+ if (array == null) return [];
+ var result = [];
+ var argsLength = arguments.length;
+ for (var i = 0, length = array.length; i < length; i++) {
+ var item = array[i];
+ if (_.contains(result, item)) continue;
+ for (var j = 1; j < argsLength; j++) {
+ if (!_.contains(arguments[j], item)) break;
+ }
+ if (j === argsLength) result.push(item);
+ }
+ return result;
+ };
+
+ // Take the difference between one array and a number of other arrays.
+ // Only the elements present in just the first array will remain.
+ _.difference = function(array) {
+ var rest = flatten(slice.call(arguments, 1), true, true, []);
+ return _.filter(array, function(value){
+ return !_.contains(rest, value);
+ });
+ };
+
+ // Zip together multiple lists into a single array -- elements that share
+ // an index go together.
+ _.zip = function(array) {
+ if (array == null) return [];
+ var length = _.max(arguments, 'length').length;
+ var results = Array(length);
+ for (var i = 0; i < length; i++) {
+ results[i] = _.pluck(arguments, i);
+ }
+ return results;
+ };
+
+ // Converts lists into objects. Pass either a single array of `[key, value]`
+ // pairs, or two parallel arrays of the same length -- one of keys, and one of
+ // the corresponding values.
+ _.object = function(list, values) {
+ if (list == null) return {};
+ var result = {};
+ for (var i = 0, length = list.length; i < length; i++) {
+ if (values) {
+ result[list[i]] = values[i];
+ } else {
+ result[list[i][0]] = list[i][1];
+ }
+ }
+ return result;
+ };
+
+ // Return the position of the first occurrence of an item in an array,
+ // or -1 if the item is not included in the array.
+ // If the array is large and already in sort order, pass `true`
+ // for **isSorted** to use binary search.
+ _.indexOf = function(array, item, isSorted) {
+ if (array == null) return -1;
+ var i = 0, length = array.length;
+ if (isSorted) {
+ if (typeof isSorted == 'number') {
+ i = isSorted < 0 ? Math.max(0, length + isSorted) : isSorted;
+ } else {
+ i = _.sortedIndex(array, item);
+ return array[i] === item ? i : -1;
+ }
+ }
+ for (; i < length; i++) if (array[i] === item) return i;
+ return -1;
+ };
+
+ _.lastIndexOf = function(array, item, from) {
+ if (array == null) return -1;
+ var idx = array.length;
+ if (typeof from == 'number') {
+ idx = from < 0 ? idx + from + 1 : Math.min(idx, from + 1);
+ }
+ while (--idx >= 0) if (array[idx] === item) return idx;
+ return -1;
+ };
+
+ // Generate an integer Array containing an arithmetic progression. A port of
+ // the native Python `range()` function. See
+ // [the Python documentation](http://docs.python.org/library/functions.html#range).
+ _.range = function(start, stop, step) {
+ if (arguments.length <= 1) {
+ stop = start || 0;
+ start = 0;
+ }
+ step = step || 1;
+
+ var length = Math.max(Math.ceil((stop - start) / step), 0);
+ var range = Array(length);
+
+ for (var idx = 0; idx < length; idx++, start += step) {
+ range[idx] = start;
+ }
+
+ return range;
+ };
+
+ // Function (ahem) Functions
+ // ------------------
+
+ // Reusable constructor function for prototype setting.
+ var Ctor = function(){};
+
+ // Create a function bound to a given object (assigning `this`, and arguments,
+ // optionally). Delegates to **ECMAScript 5**'s native `Function.bind` if
+ // available.
+ _.bind = function(func, context) {
+ var args, bound;
+ if (nativeBind && func.bind === nativeBind) return nativeBind.apply(func, slice.call(arguments, 1));
+ if (!_.isFunction(func)) throw new TypeError('Bind must be called on a function');
+ args = slice.call(arguments, 2);
+ bound = function() {
+ if (!(this instanceof bound)) return func.apply(context, args.concat(slice.call(arguments)));
+ Ctor.prototype = func.prototype;
+ var self = new Ctor;
+ Ctor.prototype = null;
+ var result = func.apply(self, args.concat(slice.call(arguments)));
+ if (_.isObject(result)) return result;
+ return self;
+ };
+ return bound;
+ };
+
+ // Partially apply a function by creating a version that has had some of its
+ // arguments pre-filled, without changing its dynamic `this` context. _ acts
+ // as a placeholder, allowing any combination of arguments to be pre-filled.
+ _.partial = function(func) {
+ var boundArgs = slice.call(arguments, 1);
+ return function() {
+ var position = 0;
+ var args = boundArgs.slice();
+ for (var i = 0, length = args.length; i < length; i++) {
+ if (args[i] === _) args[i] = arguments[position++];
+ }
+ while (position < arguments.length) args.push(arguments[position++]);
+ return func.apply(this, args);
+ };
+ };
+
+ // Bind a number of an object's methods to that object. Remaining arguments
+ // are the method names to be bound. Useful for ensuring that all callbacks
+ // defined on an object belong to it.
+ _.bindAll = function(obj) {
+ var i, length = arguments.length, key;
+ if (length <= 1) throw new Error('bindAll must be passed function names');
+ for (i = 1; i < length; i++) {
+ key = arguments[i];
+ obj[key] = _.bind(obj[key], obj);
+ }
+ return obj;
+ };
+
+ // Memoize an expensive function by storing its results.
+ _.memoize = function(func, hasher) {
+ var memoize = function(key) {
+ var cache = memoize.cache;
+ var address = hasher ? hasher.apply(this, arguments) : key;
+ if (!_.has(cache, address)) cache[address] = func.apply(this, arguments);
+ return cache[address];
+ };
+ memoize.cache = {};
+ return memoize;
+ };
+
+ // Delays a function for the given number of milliseconds, and then calls
+ // it with the arguments supplied.
+ _.delay = function(func, wait) {
+ var args = slice.call(arguments, 2);
+ return setTimeout(function(){
+ return func.apply(null, args);
+ }, wait);
+ };
+
+ // Defers a function, scheduling it to run after the current call stack has
+ // cleared.
+ _.defer = function(func) {
+ return _.delay.apply(_, [func, 1].concat(slice.call(arguments, 1)));
+ };
+
+ // Returns a function, that, when invoked, will only be triggered at most once
+ // during a given window of time. Normally, the throttled function will run
+ // as much as it can, without ever going more than once per `wait` duration;
+ // but if you'd like to disable the execution on the leading edge, pass
+ // `{leading: false}`. To disable execution on the trailing edge, ditto.
+ _.throttle = function(func, wait, options) {
+ var context, args, result;
+ var timeout = null;
+ var previous = 0;
+ if (!options) options = {};
+ var later = function() {
+ previous = options.leading === false ? 0 : _.now();
+ timeout = null;
+ result = func.apply(context, args);
+ if (!timeout) context = args = null;
+ };
+ return function() {
+ var now = _.now();
+ if (!previous && options.leading === false) previous = now;
+ var remaining = wait - (now - previous);
+ context = this;
+ args = arguments;
+ if (remaining <= 0 || remaining > wait) {
+ clearTimeout(timeout);
+ timeout = null;
+ previous = now;
+ result = func.apply(context, args);
+ if (!timeout) context = args = null;
+ } else if (!timeout && options.trailing !== false) {
+ timeout = setTimeout(later, remaining);
+ }
+ return result;
+ };
+ };
+
+ // Returns a function, that, as long as it continues to be invoked, will not
+ // be triggered. The function will be called after it stops being called for
+ // N milliseconds. If `immediate` is passed, trigger the function on the
+ // leading edge, instead of the trailing.
+ _.debounce = function(func, wait, immediate) {
+ var timeout, args, context, timestamp, result;
+
+ var later = function() {
+ var last = _.now() - timestamp;
+
+ if (last < wait && last > 0) {
+ timeout = setTimeout(later, wait - last);
+ } else {
+ timeout = null;
+ if (!immediate) {
+ result = func.apply(context, args);
+ if (!timeout) context = args = null;
+ }
+ }
+ };
+
+ return function() {
+ context = this;
+ args = arguments;
+ timestamp = _.now();
+ var callNow = immediate && !timeout;
+ if (!timeout) timeout = setTimeout(later, wait);
+ if (callNow) {
+ result = func.apply(context, args);
+ context = args = null;
+ }
+
+ return result;
+ };
+ };
+
+ // Returns the first function passed as an argument to the second,
+ // allowing you to adjust arguments, run code before and after, and
+ // conditionally execute the original function.
+ _.wrap = function(func, wrapper) {
+ return _.partial(wrapper, func);
+ };
+
+ // Returns a negated version of the passed-in predicate.
+ _.negate = function(predicate) {
+ return function() {
+ return !predicate.apply(this, arguments);
+ };
+ };
+
+ // Returns a function that is the composition of a list of functions, each
+ // consuming the return value of the function that follows.
+ _.compose = function() {
+ var args = arguments;
+ var start = args.length - 1;
+ return function() {
+ var i = start;
+ var result = args[start].apply(this, arguments);
+ while (i--) result = args[i].call(this, result);
+ return result;
+ };
+ };
+
+ // Returns a function that will only be executed after being called N times.
+ _.after = function(times, func) {
+ return function() {
+ if (--times < 1) {
+ return func.apply(this, arguments);
+ }
+ };
+ };
+
+ // Returns a function that will only be executed before being called N times.
+ _.before = function(times, func) {
+ var memo;
+ return function() {
+ if (--times > 0) {
+ memo = func.apply(this, arguments);
+ } else {
+ func = null;
+ }
+ return memo;
+ };
+ };
+
+ // Returns a function that will be executed at most one time, no matter how
+ // often you call it. Useful for lazy initialization.
+ _.once = _.partial(_.before, 2);
+
+ // Object Functions
+ // ----------------
+
+ // Retrieve the names of an object's properties.
+ // Delegates to **ECMAScript 5**'s native `Object.keys`
+ _.keys = function(obj) {
+ if (!_.isObject(obj)) return [];
+ if (nativeKeys) return nativeKeys(obj);
+ var keys = [];
+ for (var key in obj) if (_.has(obj, key)) keys.push(key);
+ return keys;
+ };
+
+ // Retrieve the values of an object's properties.
+ _.values = function(obj) {
+ var keys = _.keys(obj);
+ var length = keys.length;
+ var values = Array(length);
+ for (var i = 0; i < length; i++) {
+ values[i] = obj[keys[i]];
+ }
+ return values;
+ };
+
+ // Convert an object into a list of `[key, value]` pairs.
+ _.pairs = function(obj) {
+ var keys = _.keys(obj);
+ var length = keys.length;
+ var pairs = Array(length);
+ for (var i = 0; i < length; i++) {
+ pairs[i] = [keys[i], obj[keys[i]]];
+ }
+ return pairs;
+ };
+
+ // Invert the keys and values of an object. The values must be serializable.
+ _.invert = function(obj) {
+ var result = {};
+ var keys = _.keys(obj);
+ for (var i = 0, length = keys.length; i < length; i++) {
+ result[obj[keys[i]]] = keys[i];
+ }
+ return result;
+ };
+
+ // Return a sorted list of the function names available on the object.
+ // Aliased as `methods`
+ _.functions = _.methods = function(obj) {
+ var names = [];
+ for (var key in obj) {
+ if (_.isFunction(obj[key])) names.push(key);
+ }
+ return names.sort();
+ };
+
+ // Extend a given object with all the properties in passed-in object(s).
+ _.extend = function(obj) {
+ if (!_.isObject(obj)) return obj;
+ var source, prop;
+ for (var i = 1, length = arguments.length; i < length; i++) {
+ source = arguments[i];
+ for (prop in source) {
+ if (hasOwnProperty.call(source, prop)) {
+ obj[prop] = source[prop];
+ }
+ }
+ }
+ return obj;
+ };
+
+ // Return a copy of the object only containing the whitelisted properties.
+ _.pick = function(obj, iteratee, context) {
+ var result = {}, key;
+ if (obj == null) return result;
+ if (_.isFunction(iteratee)) {
+ iteratee = createCallback(iteratee, context);
+ for (key in obj) {
+ var value = obj[key];
+ if (iteratee(value, key, obj)) result[key] = value;
+ }
+ } else {
+ var keys = concat.apply([], slice.call(arguments, 1));
+ obj = new Object(obj);
+ for (var i = 0, length = keys.length; i < length; i++) {
+ key = keys[i];
+ if (key in obj) result[key] = obj[key];
+ }
+ }
+ return result;
+ };
+
+ // Return a copy of the object without the blacklisted properties.
+ _.omit = function(obj, iteratee, context) {
+ if (_.isFunction(iteratee)) {
+ iteratee = _.negate(iteratee);
+ } else {
+ var keys = _.map(concat.apply([], slice.call(arguments, 1)), String);
+ iteratee = function(value, key) {
+ return !_.contains(keys, key);
+ };
+ }
+ return _.pick(obj, iteratee, context);
+ };
+
+ // Fill in a given object with default properties.
+ _.defaults = function(obj) {
+ if (!_.isObject(obj)) return obj;
+ for (var i = 1, length = arguments.length; i < length; i++) {
+ var source = arguments[i];
+ for (var prop in source) {
+ if (obj[prop] === void 0) obj[prop] = source[prop];
+ }
+ }
+ return obj;
+ };
+
+ // Create a (shallow-cloned) duplicate of an object.
+ _.clone = function(obj) {
+ if (!_.isObject(obj)) return obj;
+ return _.isArray(obj) ? obj.slice() : _.extend({}, obj);
+ };
+
+ // Invokes interceptor with the obj, and then returns obj.
+ // The primary purpose of this method is to "tap into" a method chain, in
+ // order to perform operations on intermediate results within the chain.
+ _.tap = function(obj, interceptor) {
+ interceptor(obj);
+ return obj;
+ };
+
+ // Internal recursive comparison function for `isEqual`.
+ var eq = function(a, b, aStack, bStack) {
+ // Identical objects are equal. `0 === -0`, but they aren't identical.
+ // See the [Harmony `egal` proposal](http://wiki.ecmascript.org/doku.php?id=harmony:egal).
+ if (a === b) return a !== 0 || 1 / a === 1 / b;
+ // A strict comparison is necessary because `null == undefined`.
+ if (a == null || b == null) return a === b;
+ // Unwrap any wrapped objects.
+ if (a instanceof _) a = a._wrapped;
+ if (b instanceof _) b = b._wrapped;
+ // Compare `[[Class]]` names.
+ var className = toString.call(a);
+ if (className !== toString.call(b)) return false;
+ switch (className) {
+ // Strings, numbers, regular expressions, dates, and booleans are compared by value.
+ case '[object RegExp]':
+ // RegExps are coerced to strings for comparison (Note: '' + /a/i === '/a/i')
+ case '[object String]':
+ // Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is
+ // equivalent to `new String("5")`.
+ return '' + a === '' + b;
+ case '[object Number]':
+ // `NaN`s are equivalent, but non-reflexive.
+ // Object(NaN) is equivalent to NaN
+ if (+a !== +a) return +b !== +b;
+ // An `egal` comparison is performed for other numeric values.
+ return +a === 0 ? 1 / +a === 1 / b : +a === +b;
+ case '[object Date]':
+ case '[object Boolean]':
+ // Coerce dates and booleans to numeric primitive values. Dates are compared by their
+ // millisecond representations. Note that invalid dates with millisecond representations
+ // of `NaN` are not equivalent.
+ return +a === +b;
+ }
+ if (typeof a != 'object' || typeof b != 'object') return false;
+ // Assume equality for cyclic structures. The algorithm for detecting cyclic
+ // structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`.
+ var length = aStack.length;
+ while (length--) {
+ // Linear search. Performance is inversely proportional to the number of
+ // unique nested structures.
+ if (aStack[length] === a) return bStack[length] === b;
+ }
+ // Objects with different constructors are not equivalent, but `Object`s
+ // from different frames are.
+ var aCtor = a.constructor, bCtor = b.constructor;
+ if (
+ aCtor !== bCtor &&
+ // Handle Object.create(x) cases
+ 'constructor' in a && 'constructor' in b &&
+ !(_.isFunction(aCtor) && aCtor instanceof aCtor &&
+ _.isFunction(bCtor) && bCtor instanceof bCtor)
+ ) {
+ return false;
+ }
+ // Add the first object to the stack of traversed objects.
+ aStack.push(a);
+ bStack.push(b);
+ var size, result;
+ // Recursively compare objects and arrays.
+ if (className === '[object Array]') {
+ // Compare array lengths to determine if a deep comparison is necessary.
+ size = a.length;
+ result = size === b.length;
+ if (result) {
+ // Deep compare the contents, ignoring non-numeric properties.
+ while (size--) {
+ if (!(result = eq(a[size], b[size], aStack, bStack))) break;
+ }
+ }
+ } else {
+ // Deep compare objects.
+ var keys = _.keys(a), key;
+ size = keys.length;
+ // Ensure that both objects contain the same number of properties before comparing deep equality.
+ result = _.keys(b).length === size;
+ if (result) {
+ while (size--) {
+ // Deep compare each member
+ key = keys[size];
+ if (!(result = _.has(b, key) && eq(a[key], b[key], aStack, bStack))) break;
+ }
+ }
+ }
+ // Remove the first object from the stack of traversed objects.
+ aStack.pop();
+ bStack.pop();
+ return result;
+ };
+
+ // Perform a deep comparison to check if two objects are equal.
+ _.isEqual = function(a, b) {
+ return eq(a, b, [], []);
+ };
+
+ // Is a given array, string, or object empty?
+ // An "empty" object has no enumerable own-properties.
+ _.isEmpty = function(obj) {
+ if (obj == null) return true;
+ if (_.isArray(obj) || _.isString(obj) || _.isArguments(obj)) return obj.length === 0;
+ for (var key in obj) if (_.has(obj, key)) return false;
+ return true;
+ };
+
+ // Is a given value a DOM element?
+ _.isElement = function(obj) {
+ return !!(obj && obj.nodeType === 1);
+ };
+
+ // Is a given value an array?
+ // Delegates to ECMA5's native Array.isArray
+ _.isArray = nativeIsArray || function(obj) {
+ return toString.call(obj) === '[object Array]';
+ };
+
+ // Is a given variable an object?
+ _.isObject = function(obj) {
+ var type = typeof obj;
+ return type === 'function' || type === 'object' && !!obj;
+ };
+
+ // Add some isType methods: isArguments, isFunction, isString, isNumber, isDate, isRegExp.
+ _.each(['Arguments', 'Function', 'String', 'Number', 'Date', 'RegExp'], function(name) {
+ _['is' + name] = function(obj) {
+ return toString.call(obj) === '[object ' + name + ']';
+ };
+ });
+
+ // Define a fallback version of the method in browsers (ahem, IE), where
+ // there isn't any inspectable "Arguments" type.
+ if (!_.isArguments(arguments)) {
+ _.isArguments = function(obj) {
+ return _.has(obj, 'callee');
+ };
+ }
+
+ // Optimize `isFunction` if appropriate. Work around an IE 11 bug.
+ if (typeof /./ !== 'function') {
+ _.isFunction = function(obj) {
+ return typeof obj == 'function' || false;
+ };
+ }
+
+ // Is a given object a finite number?
+ _.isFinite = function(obj) {
+ return isFinite(obj) && !isNaN(parseFloat(obj));
+ };
+
+ // Is the given value `NaN`? (NaN is the only number which does not equal itself).
+ _.isNaN = function(obj) {
+ return _.isNumber(obj) && obj !== +obj;
+ };
+
+ // Is a given value a boolean?
+ _.isBoolean = function(obj) {
+ return obj === true || obj === false || toString.call(obj) === '[object Boolean]';
+ };
+
+ // Is a given value equal to null?
+ _.isNull = function(obj) {
+ return obj === null;
+ };
+
+ // Is a given variable undefined?
+ _.isUndefined = function(obj) {
+ return obj === void 0;
+ };
+
+ // Shortcut function for checking if an object has a given property directly
+ // on itself (in other words, not on a prototype).
+ _.has = function(obj, key) {
+ return obj != null && hasOwnProperty.call(obj, key);
+ };
+
+ // Utility Functions
+ // -----------------
+
+ // Run Underscore.js in *noConflict* mode, returning the `_` variable to its
+ // previous owner. Returns a reference to the Underscore object.
+ _.noConflict = function() {
+ root._ = previousUnderscore;
+ return this;
+ };
+
+ // Keep the identity function around for default iteratees.
+ _.identity = function(value) {
+ return value;
+ };
+
+ // Predicate-generating functions. Often useful outside of Underscore.
+ _.constant = function(value) {
+ return function() {
+ return value;
+ };
+ };
+
+ _.noop = function(){};
+
+ _.property = function(key) {
+ return function(obj) {
+ return obj[key];
+ };
+ };
+
+ // Returns a predicate for checking whether an object has a given set of `key:value` pairs.
+ _.matches = function(attrs) {
+ var pairs = _.pairs(attrs), length = pairs.length;
+ return function(obj) {
+ if (obj == null) return !length;
+ obj = new Object(obj);
+ for (var i = 0; i < length; i++) {
+ var pair = pairs[i], key = pair[0];
+ if (pair[1] !== obj[key] || !(key in obj)) return false;
+ }
+ return true;
+ };
+ };
+
+ // Run a function **n** times.
+ _.times = function(n, iteratee, context) {
+ var accum = Array(Math.max(0, n));
+ iteratee = createCallback(iteratee, context, 1);
+ for (var i = 0; i < n; i++) accum[i] = iteratee(i);
+ return accum;
+ };
+
+ // Return a random integer between min and max (inclusive).
+ _.random = function(min, max) {
+ if (max == null) {
+ max = min;
+ min = 0;
+ }
+ return min + Math.floor(Math.random() * (max - min + 1));
+ };
+
+ // A (possibly faster) way to get the current timestamp as an integer.
+ _.now = Date.now || function() {
+ return new Date().getTime();
+ };
+
+ // List of HTML entities for escaping.
+ var escapeMap = {
+ '&': '&',
+ '<': '<',
+ '>': '>',
+ '"': '"',
+ "'": ''',
+ '`': '`'
+ };
+ var unescapeMap = _.invert(escapeMap);
+
+ // Functions for escaping and unescaping strings to/from HTML interpolation.
+ var createEscaper = function(map) {
+ var escaper = function(match) {
+ return map[match];
+ };
+ // Regexes for identifying a key that needs to be escaped
+ var source = '(?:' + _.keys(map).join('|') + ')';
+ var testRegexp = RegExp(source);
+ var replaceRegexp = RegExp(source, 'g');
+ return function(string) {
+ string = string == null ? '' : '' + string;
+ return testRegexp.test(string) ? string.replace(replaceRegexp, escaper) : string;
+ };
+ };
+ _.escape = createEscaper(escapeMap);
+ _.unescape = createEscaper(unescapeMap);
+
+ // If the value of the named `property` is a function then invoke it with the
+ // `object` as context; otherwise, return it.
+ _.result = function(object, property) {
+ if (object == null) return void 0;
+ var value = object[property];
+ return _.isFunction(value) ? object[property]() : value;
+ };
+
+ // Generate a unique integer id (unique within the entire client session).
+ // Useful for temporary DOM ids.
+ var idCounter = 0;
+ _.uniqueId = function(prefix) {
+ var id = ++idCounter + '';
+ return prefix ? prefix + id : id;
+ };
+
+ // By default, Underscore uses ERB-style template delimiters, change the
+ // following template settings to use alternative delimiters.
+ _.templateSettings = {
+ evaluate : /<%([\s\S]+?)%>/g,
+ interpolate : /<%=([\s\S]+?)%>/g,
+ escape : /<%-([\s\S]+?)%>/g
+ };
+
+ // When customizing `templateSettings`, if you don't want to define an
+ // interpolation, evaluation or escaping regex, we need one that is
+ // guaranteed not to match.
+ var noMatch = /(.)^/;
+
+ // Certain characters need to be escaped so that they can be put into a
+ // string literal.
+ var escapes = {
+ "'": "'",
+ '\\': '\\',
+ '\r': 'r',
+ '\n': 'n',
+ '\u2028': 'u2028',
+ '\u2029': 'u2029'
+ };
+
+ var escaper = /\\|'|\r|\n|\u2028|\u2029/g;
+
+ var escapeChar = function(match) {
+ return '\\' + escapes[match];
+ };
+
+ // JavaScript micro-templating, similar to John Resig's implementation.
+ // Underscore templating handles arbitrary delimiters, preserves whitespace,
+ // and correctly escapes quotes within interpolated code.
+ // NB: `oldSettings` only exists for backwards compatibility.
+ _.template = function(text, settings, oldSettings) {
+ if (!settings && oldSettings) settings = oldSettings;
+ settings = _.defaults({}, settings, _.templateSettings);
+
+ // Combine delimiters into one regular expression via alternation.
+ var matcher = RegExp([
+ (settings.escape || noMatch).source,
+ (settings.interpolate || noMatch).source,
+ (settings.evaluate || noMatch).source
+ ].join('|') + '|$', 'g');
+
+ // Compile the template source, escaping string literals appropriately.
+ var index = 0;
+ var source = "__p+='";
+ text.replace(matcher, function(match, escape, interpolate, evaluate, offset) {
+ source += text.slice(index, offset).replace(escaper, escapeChar);
+ index = offset + match.length;
+
+ if (escape) {
+ source += "'+\n((__t=(" + escape + "))==null?'':_.escape(__t))+\n'";
+ } else if (interpolate) {
+ source += "'+\n((__t=(" + interpolate + "))==null?'':__t)+\n'";
+ } else if (evaluate) {
+ source += "';\n" + evaluate + "\n__p+='";
+ }
+
+ // Adobe VMs need the match returned to produce the correct offest.
+ return match;
+ });
+ source += "';\n";
+
+ // If a variable is not specified, place data values in local scope.
+ if (!settings.variable) source = 'with(obj||{}){\n' + source + '}\n';
+
+ source = "var __t,__p='',__j=Array.prototype.join," +
+ "print=function(){__p+=__j.call(arguments,'');};\n" +
+ source + 'return __p;\n';
+
+ try {
+ var render = new Function(settings.variable || 'obj', '_', source);
+ } catch (e) {
+ e.source = source;
+ throw e;
+ }
+
+ var template = function(data) {
+ return render.call(this, data, _);
+ };
+
+ // Provide the compiled source as a convenience for precompilation.
+ var argument = settings.variable || 'obj';
+ template.source = 'function(' + argument + '){\n' + source + '}';
+
+ return template;
+ };
+
+ // Add a "chain" function. Start chaining a wrapped Underscore object.
+ _.chain = function(obj) {
+ var instance = _(obj);
+ instance._chain = true;
+ return instance;
+ };
+
+ // OOP
+ // ---------------
+ // If Underscore is called as a function, it returns a wrapped object that
+ // can be used OO-style. This wrapper holds altered versions of all the
+ // underscore functions. Wrapped objects may be chained.
+
+ // Helper function to continue chaining intermediate results.
+ var result = function(obj) {
+ return this._chain ? _(obj).chain() : obj;
+ };
+
+ // Add your own custom functions to the Underscore object.
+ _.mixin = function(obj) {
+ _.each(_.functions(obj), function(name) {
+ var func = _[name] = obj[name];
+ _.prototype[name] = function() {
+ var args = [this._wrapped];
+ push.apply(args, arguments);
+ return result.call(this, func.apply(_, args));
+ };
+ });
+ };
+
+ // Add all of the Underscore functions to the wrapper object.
+ _.mixin(_);
+
+ // Add all mutator Array functions to the wrapper.
+ _.each(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(name) {
+ var method = ArrayProto[name];
+ _.prototype[name] = function() {
+ var obj = this._wrapped;
+ method.apply(obj, arguments);
+ if ((name === 'shift' || name === 'splice') && obj.length === 0) delete obj[0];
+ return result.call(this, obj);
+ };
+ });
+
+ // Add all accessor Array functions to the wrapper.
+ _.each(['concat', 'join', 'slice'], function(name) {
+ var method = ArrayProto[name];
+ _.prototype[name] = function() {
+ return result.call(this, method.apply(this._wrapped, arguments));
+ };
+ });
+
+ // Extracts the result from a wrapped and chained object.
+ _.prototype.value = function() {
+ return this._wrapped;
+ };
+
+ // AMD registration happens at the end for compatibility with AMD loaders
+ // that may not enforce next-turn semantics on modules. Even though general
+ // practice for AMD registration is to be anonymous, underscore registers
+ // as a named module because, like jQuery, it is a base library that is
+ // popular enough to be bundled in a third party lib, but not be part of
+ // an AMD load request. Those cases could generate an error when an
+ // anonymous define() is called outside of a loader request.
+ if (typeof define === 'function' && define.amd) {
+ define('underscore', [], function() {
+ return _;
+ });
+ }
+}.call(this));
diff --git a/test/unit/tools/cwl_tools/v1.0/valueFrom-constant.cwl b/test/unit/tools/cwl_tools/v1.0/valueFrom-constant.cwl
new file mode 100644
index 000000000000..5caa866e41b9
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/valueFrom-constant.cwl
@@ -0,0 +1,28 @@
+class: CommandLineTool
+cwlVersion: v1.0
+
+hints:
+ - class: DockerRequirement
+ dockerPull: python:2-slim
+
+inputs:
+ - id: array_input
+ type:
+ - type: array
+ items: File
+ inputBinding:
+ valueFrom: replacementValue
+
+ - id: args.py
+ type: File
+ default:
+ class: File
+ location: args.py
+ inputBinding:
+ position: -1
+
+outputs:
+ - id: args
+ type: string[]
+
+baseCommand: python
diff --git a/test/unit/tools/cwl_tools/v1.0/vf-concat.cwl b/test/unit/tools/cwl_tools/v1.0/vf-concat.cwl
new file mode 100644
index 000000000000..b9a65a4e6667
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/vf-concat.cwl
@@ -0,0 +1,19 @@
+cwlVersion: v1.0
+class: CommandLineTool
+requirements:
+ - class: InlineJavascriptRequirement
+
+baseCommand: echo
+inputs:
+ file1:
+ type: File?
+ inputBinding:
+ valueFrom: $("a ")$("string")
+outputs:
+ out:
+ type: string
+ outputBinding:
+ glob: output.txt
+ loadContents: true
+ outputEval: $(self[0].contents)
+stdout: output.txt
diff --git a/test/unit/tools/cwl_tools/v1.0/wc-job.json b/test/unit/tools/cwl_tools/v1.0/wc-job.json
new file mode 100644
index 000000000000..598568d385f2
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/wc-job.json
@@ -0,0 +1,6 @@
+{
+ "file1": {
+ "class": "File",
+ "location": "whale.txt"
+ }
+}
diff --git a/test/unit/tools/cwl_tools/v1.0/wc-tool.cwl b/test/unit/tools/cwl_tools/v1.0/wc-tool.cwl
new file mode 100755
index 000000000000..165585494089
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/wc-tool.cwl
@@ -0,0 +1,17 @@
+#!/usr/bin/env cwl-runner
+
+class: CommandLineTool
+cwlVersion: v1.0
+
+inputs:
+ file1: File
+
+outputs:
+ output:
+ type: File
+ outputBinding: { glob: output }
+
+baseCommand: [wc, -l]
+
+stdin: $(inputs.file1.path)
+stdout: output
diff --git a/test/unit/tools/cwl_tools/v1.0/wc2-tool.cwl b/test/unit/tools/cwl_tools/v1.0/wc2-tool.cwl
new file mode 100755
index 000000000000..f1ad4587aaf9
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/wc2-tool.cwl
@@ -0,0 +1,17 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ - { id: file1, type: File, inputBinding: {} }
+outputs:
+ - id: output
+ type: int
+ outputBinding:
+ glob: output.txt
+ loadContents: true
+ outputEval: "$(parseInt(self[0].contents))"
+stdout: output.txt
+baseCommand: wc
diff --git a/test/unit/tools/cwl_tools/v1.0/wc3-tool.cwl b/test/unit/tools/cwl_tools/v1.0/wc3-tool.cwl
new file mode 100644
index 000000000000..2392eb7d958a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/wc3-tool.cwl
@@ -0,0 +1,24 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ file1:
+ type: File[]
+ inputBinding: {}
+outputs:
+ output:
+ type: int
+ outputBinding:
+ glob: output.txt
+ loadContents: true
+ outputEval: |
+ ${
+ var s = self[0].contents.split(/\r?\n/);
+ return parseInt(s[s.length-2]);
+ }
+stdout: output.txt
+baseCommand: wc
diff --git a/test/unit/tools/cwl_tools/v1.0/wc4-tool.cwl b/test/unit/tools/cwl_tools/v1.0/wc4-tool.cwl
new file mode 100644
index 000000000000..f85e93444894
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/wc4-tool.cwl
@@ -0,0 +1,23 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ file1:
+ type: File
+ inputBinding: {}
+outputs:
+ - id: output
+ type: int
+ outputBinding:
+ glob: output.txt
+ loadContents: true
+ outputEval: |
+ ${
+ var s = self[0].contents.split(/\r?\n/);
+ return parseInt(s[s.length-2]);
+ }
+stdout: output.txt
+baseCommand: wc
diff --git a/test/unit/tools/cwl_tools/v1.0/whale.txt b/test/unit/tools/cwl_tools/v1.0/whale.txt
new file mode 100644
index 000000000000..425d1ed02c8d
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/whale.txt
@@ -0,0 +1,16 @@
+Call me Ishmael. Some years ago--never mind how long precisely--having
+little or no money in my purse, and nothing particular to interest me on
+shore, I thought I would sail about a little and see the watery part of
+the world. It is a way I have of driving off the spleen and regulating
+the circulation. Whenever I find myself growing grim about the mouth;
+whenever it is a damp, drizzly November in my soul; whenever I find
+myself involuntarily pausing before coffin warehouses, and bringing up
+the rear of every funeral I meet; and especially whenever my hypos get
+such an upper hand of me, that it requires a strong moral principle to
+prevent me from deliberately stepping into the street, and methodically
+knocking people's hats off--then, I account it high time to get to sea
+as soon as I can. This is my substitute for pistol and ball. With a
+philosophical flourish Cato throws himself upon his sword; I quietly
+take to the ship. There is nothing surprising in this. If they but knew
+it, almost all men in their degree, some time or other, cherish very
+nearly the same feelings towards the ocean with me.
diff --git a/test/unit/tools/cwl_tools/v1.0/writable-dir-docker.cwl b/test/unit/tools/cwl_tools/v1.0/writable-dir-docker.cwl
new file mode 100644
index 000000000000..a60851c277fb
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/writable-dir-docker.cwl
@@ -0,0 +1,23 @@
+cwlVersion: v1.0
+
+class: CommandLineTool
+
+requirements:
+ - class: InlineJavascriptRequirement
+ - class: InitialWorkDirRequirement
+ listing:
+ - entryname: emptyWritableDir
+ entry: "$({class: 'Directory', listing: []})"
+ writable: true
+
+hints:
+ - class: DockerRequirement
+ dockerPull: alpine
+
+inputs: []
+outputs:
+ out:
+ type: Directory
+ outputBinding:
+ glob: emptyWritableDir
+arguments: [touch, emptyWritableDir/blurg]
diff --git a/test/unit/tools/cwl_tools/v1.0/writable-dir.cwl b/test/unit/tools/cwl_tools/v1.0/writable-dir.cwl
new file mode 100644
index 000000000000..52cf488a430b
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0/writable-dir.cwl
@@ -0,0 +1,16 @@
+cwlVersion: v1.0
+class: CommandLineTool
+requirements:
+ InlineJavascriptRequirement: {}
+ InitialWorkDirRequirement:
+ listing:
+ - entryname: emptyWritableDir
+ writable: true
+ entry: "$({class: 'Directory', listing: []})"
+inputs: []
+outputs:
+ out:
+ type: Directory
+ outputBinding:
+ glob: emptyWritableDir
+arguments: [touch, emptyWritableDir/blurg]
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/any1-file.json b/test/unit/tools/cwl_tools/v1.0_custom/any1-file.json
new file mode 100644
index 000000000000..c2c5f2e84a2a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/any1-file.json
@@ -0,0 +1 @@
+{"bar": {"location": "whale.txt", "class": "File"}}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/any1-int.json b/test/unit/tools/cwl_tools/v1.0_custom/any1-int.json
new file mode 100644
index 000000000000..1ee628a21b9a
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/any1-int.json
@@ -0,0 +1 @@
+{"bar": 7}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/any1-string.json b/test/unit/tools/cwl_tools/v1.0_custom/any1-string.json
new file mode 100644
index 000000000000..c5ec93f50c61
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/any1-string.json
@@ -0,0 +1 @@
+{"bar": "7"}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/any1-wf.cwl b/test/unit/tools/cwl_tools/v1.0_custom/any1-wf.cwl
new file mode 100644
index 000000000000..ef001c173b47
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/any1-wf.cwl
@@ -0,0 +1,19 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ bar:
+ type: Any
+
+outputs:
+ t1:
+ type: Any
+ outputSource: step1/t1
+
+steps:
+ step1:
+ in:
+ bar: bar
+ out: [t1]
+ run: any1.cwl
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/any1.cwl b/test/unit/tools/cwl_tools/v1.0_custom/any1.cwl
new file mode 100644
index 000000000000..0a1cca295273
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/any1.cwl
@@ -0,0 +1,17 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.0
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ - id: bar
+ type: Any
+
+outputs:
+ - id: t1
+ type: Any
+ outputBinding:
+ outputEval: $(inputs.bar.class || inputs.bar)
+
+baseCommand: "true"
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/conformance_tests.yaml b/test/unit/tools/cwl_tools/v1.0_custom/conformance_tests.yaml
new file mode 100644
index 000000000000..21e02a35a5cf
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/conformance_tests.yaml
@@ -0,0 +1,99 @@
+
+- job: v1.0/record-output-job.json
+ output:
+ "orec": {
+ "ofoo": {
+ "location": "foo",
+ "size": 1111,
+ "class": "File",
+ "checksum": "sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376"
+ },
+ "obar": {
+ "location": "bar",
+ "size": 12010,
+ "class": "File",
+ "checksum": "sha1$aeb3d11bdf536511649129f4077d5cda6a324118"
+ }
+ }
+ tool: v1.0_custom/record-output-wf.cwl
+ doc: Test record type inputs to and outputs from workflows.
+
+- job: v1.0_custom/int-io-job.json
+ output: {"o": 10}
+ tool: v1.0_custom/int-io-wf.cwl
+ doc: Test integer workflow input and outputs
+
+- job: v1.0_custom/int-io-job.json
+ output: {"o": 10}
+ tool: v1.0_custom/int-opt-io-wf.cwl
+ doc: Test optional integer workflow inputs (specified)
+
+- job: v1.0_custom/empty.json
+ output: {"o": 4}
+ tool: v1.0_custom/int-opt-io-wf.cwl
+ doc: Test optional integer workflow inputs (unspecified)
+
+- job: v1.0_custom/empty.json
+ output: {"o": 10}
+ tool: v1.0_custom/int-default-io-wf.cwl
+ doc: Test default integer workflow inputs (unspecified)
+
+- job: v1.0_custom/any1-int.json
+ output: {"t1": 7}
+ tool: v1.0_custom/any1.cwl
+ doc: Test any parameter with integer input to a tool
+
+- job: v1.0_custom/any1-string.json
+ output: {"t1": "7"}
+ tool: v1.0_custom/any1.cwl
+ doc: Test any parameter with string input to a tool
+
+- job: v1.0_custom/any1-file.json
+ output: {"t1": {
+ "location": "foo",
+ "size": 1111,
+ "class": "File",
+ "checksum": "sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376"
+ }}
+ tool: v1.0_custom/any1.cwl
+ doc: Test any parameter with file input to a tool
+
+- job: v1.0_custom/any1-int.json
+ output: {"t1": 7}
+ tool: v1.0_custom/any1-wf.cwl
+ doc: Test any parameter with integer input to a workflow
+
+- job: v1.0_custom/any1-string.json
+ output: {"t1": "7"}
+ tool: v1.0_custom/any1-wf.cwl
+ doc: Test any parameter with string input to a workflow
+
+- job: v1.0_custom/any1-file.json
+ output: {"t1": {
+ "location": "foo",
+ "size": 1111,
+ "class": "File",
+ "checksum": "sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376"
+ }}
+ tool: v1.0_custom/any1-wf.cwl
+ doc: Test any parameter with file input to a workflow
+
+- job: v1.0_custom/empty.json
+ output: {"o": {
+ "location": "foo",
+ "size": 1111,
+ "class": "File",
+ "checksum": "sha1$327fc7aedf4f6b69a42a7c8b808dc5a7aff61376"
+ }}
+ tool: v1.0_custom/default_path_custom_1_wf.cwl
+ doc: Test File input with default unspecified
+
+- job: v1.0_custom/empty.json
+ output: {"o": "the default value"}
+ tool: v1.0_custom/union-input-default-wf.cwl
+ doc: Test union type input to workflow with default unspecified
+
+- job: v1.0_custom/union-input-default-job-file.json
+ output: {"o": "File"}
+ tool: v1.0_custom/union-input-default-wf.cwl
+ doc: Test union type input to workflow with default specified as file
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/default_path_custom_1.cwl b/test/unit/tools/cwl_tools/v1.0_custom/default_path_custom_1.cwl
new file mode 100644
index 000000000000..68dbd9f4a96c
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/default_path_custom_1.cwl
@@ -0,0 +1,10 @@
+cwlVersion: v1.0
+class: CommandLineTool
+inputs:
+ - id: "file1"
+ type: File
+ default:
+ class: File
+ path: md5sum.input
+outputs: []
+arguments: [cat,$(inputs.file1.path)]
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/default_path_custom_1_wf.cwl b/test/unit/tools/cwl_tools/v1.0_custom/default_path_custom_1_wf.cwl
new file mode 100644
index 000000000000..8f7ef557359f
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/default_path_custom_1_wf.cwl
@@ -0,0 +1,34 @@
+cwlVersion: v1.0
+class: Workflow
+
+inputs:
+ file1:
+ type: File
+ default:
+ class: File
+ path: whale.txt
+
+outputs:
+ o:
+ type: File
+ outputSource: step1/o
+
+steps:
+ step1:
+ in:
+ catfile1: file1
+ out: [o]
+ run:
+ class: CommandLineTool
+
+ inputs:
+ catfile1:
+ type: File
+
+ outputs:
+ o:
+ type: File
+ outputBinding: { glob: output }
+
+ arguments: [cat,$(inputs.catfile1.path)]
+ stdout: output
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/empty.json b/test/unit/tools/cwl_tools/v1.0_custom/empty.json
new file mode 120000
index 000000000000..5bd735b8b255
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/empty.json
@@ -0,0 +1 @@
+../v1.0/empty.json
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/int-default-io-wf.cwl b/test/unit/tools/cwl_tools/v1.0_custom/int-default-io-wf.cwl
new file mode 100644
index 000000000000..a53e2b4f1255
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/int-default-io-wf.cwl
@@ -0,0 +1,32 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ i:
+ type: int
+ default: 5
+
+outputs:
+ o:
+ type: int
+ outputSource: step1/o
+
+steps:
+ step1:
+ in:
+ i: i
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: int
+ outputs:
+ o:
+ type: int
+ expression: >
+ ${return {'o': (inputs.i || 2) * 2};}
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/int-io-job.json b/test/unit/tools/cwl_tools/v1.0_custom/int-io-job.json
new file mode 100644
index 000000000000..6e0326aa9aab
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/int-io-job.json
@@ -0,0 +1 @@
+{"i": 5}
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/int-io-wf.cwl b/test/unit/tools/cwl_tools/v1.0_custom/int-io-wf.cwl
new file mode 100644
index 000000000000..ad1b39882989
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/int-io-wf.cwl
@@ -0,0 +1,31 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ i:
+ type: int
+
+outputs:
+ o:
+ type: int
+ outputSource: step1/o
+
+steps:
+ step1:
+ in:
+ i: i
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: int
+ outputs:
+ o:
+ type: int
+ expression: >
+ ${return {'o': inputs.i * 2};}
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/int-opt-io-wf.cwl b/test/unit/tools/cwl_tools/v1.0_custom/int-opt-io-wf.cwl
new file mode 100644
index 000000000000..e9b3187c0fb1
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/int-opt-io-wf.cwl
@@ -0,0 +1,31 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ i:
+ type: int?
+
+outputs:
+ o:
+ type: int
+ outputSource: step1/o
+
+steps:
+ step1:
+ in:
+ i: i
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type: int?
+ outputs:
+ o:
+ type: int
+ expression: >
+ ${return {'o': (inputs.i || 2) * 2};}
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/just-wc-wf.cwl b/test/unit/tools/cwl_tools/v1.0_custom/just-wc-wf.cwl
new file mode 100644
index 000000000000..537c4db9b014
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/just-wc-wf.cwl
@@ -0,0 +1,32 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ file1:
+ type: File
+
+outputs:
+ count_output:
+ type: File
+ outputSource: step1/wc_output
+
+steps:
+ step1:
+ in:
+ wc_file1: file1
+ out: [wc_output]
+ run:
+ id: wc
+ class: CommandLineTool
+ inputs:
+ wc_file1:
+ type: File
+ inputBinding: {}
+ outputs:
+ wc_output:
+ type: File
+ outputBinding:
+ glob: output.txt
+ stdout: output.txt
+ baseCommand: wc
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/md5sum.input b/test/unit/tools/cwl_tools/v1.0_custom/md5sum.input
new file mode 100644
index 000000000000..3c5e4cdbdcfd
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/md5sum.input
@@ -0,0 +1 @@
+this is the test file that will be used when calculating an md5sum
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/md5sum_job.json b/test/unit/tools/cwl_tools/v1.0_custom/md5sum_job.json
new file mode 100644
index 000000000000..4621073a3d11
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/md5sum_job.json
@@ -0,0 +1 @@
+{"input_file": {"class": "File", "location": "md5sum.input"}}
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/md5sum_non_strict.cwl b/test/unit/tools/cwl_tools/v1.0_custom/md5sum_non_strict.cwl
new file mode 100644
index 000000000000..3656fe1176c7
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/md5sum_non_strict.cwl
@@ -0,0 +1,50 @@
+#!/usr/bin/env cwl-runner
+
+class: CommandLineTool
+id: Md5sum
+label: Simple md5sum tool
+cwlVersion: v1.0
+
+$namespaces:
+ dct: http://purl.org/dc/terms/
+ foaf: http://xmlns.com/foaf/0.1/
+
+doc: |
+ [](https://quay.io/repository/briandoconnor/dockstore-tool-md5sum)
+ [](https://travis-ci.org/briandoconnor/dockstore-tool-md5sum)
+ A very, very simple Docker container for the md5sum command. See the [README](https://github.com/briandoconnor/dockstore-tool-md5sum/blob/master/README.md) for more information.
+
+
+dct:creator:
+ '@id': http://orcid.org/0000-0002-7681-6415
+ foaf:name: Brian O'Connor
+ foaf:mbox: briandoconnor@gmail.com
+
+requirements:
+- class: DockerRequirement
+ dockerPull: quay.io/briandoconnor/dockstore-tool-md5sum:1.0.2
+- class: InlineJavascriptRequirement
+
+hints:
+- class: ResourceRequirement
+ coresMin: 1
+ ramMin: 1024
+ outdirMin: 512000
+ description: the command really requires very little resources.
+
+inputs:
+ input_file:
+ type: File
+ inputBinding:
+ position: 1
+ doc: The file that will have its md5sum calculated.
+
+outputs:
+ output_file:
+ type: File
+ format: http://edamontology.org/data_3671
+ outputBinding:
+ glob: md5sum.txt
+ doc: A text file that contains a single line that is the md5sum of the input file.
+
+baseCommand: [/bin/my_md5sum]
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/record-output-job.json b/test/unit/tools/cwl_tools/v1.0_custom/record-output-job.json
new file mode 120000
index 000000000000..380e23dc8526
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/record-output-job.json
@@ -0,0 +1 @@
+../v1.0/record-output-job.json
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/record-output-wf.cwl b/test/unit/tools/cwl_tools/v1.0_custom/record-output-wf.cwl
new file mode 100644
index 000000000000..e45b8dc237d8
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/record-output-wf.cwl
@@ -0,0 +1,33 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+inputs:
+ irec:
+ type:
+ name: irec
+ type: record
+ fields:
+ - name: ifoo
+ type: File
+ - name: ibar
+ type: File
+
+outputs:
+ orec:
+ type:
+ name: orec
+ type: record
+ fields:
+ - name: ofoo
+ type: File
+ - name: obar
+ type: File
+ outputSource: step1/orec
+
+steps:
+ step1:
+ run: record-output.cwl
+ in:
+ irec: irec
+ out: [orec]
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/record-output.cwl b/test/unit/tools/cwl_tools/v1.0_custom/record-output.cwl
new file mode 120000
index 000000000000..7090d4834880
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/record-output.cwl
@@ -0,0 +1 @@
+../v1.0/record-output.cwl
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/ref.fasta b/test/unit/tools/cwl_tools/v1.0_custom/ref.fasta
new file mode 120000
index 000000000000..c155be32eae5
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/ref.fasta
@@ -0,0 +1 @@
+../v1.0/ref.fasta
\ No newline at end of file
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/union-input-default-job-file.json b/test/unit/tools/cwl_tools/v1.0_custom/union-input-default-job-file.json
new file mode 100644
index 000000000000..f5053f6e3081
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/union-input-default-job-file.json
@@ -0,0 +1 @@
+{"i": {"location": "whale.txt", "class": "File"}}
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/union-input-default-wf.cwl b/test/unit/tools/cwl_tools/v1.0_custom/union-input-default-wf.cwl
new file mode 100644
index 000000000000..2e07b828ce72
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/union-input-default-wf.cwl
@@ -0,0 +1,38 @@
+#!/usr/bin/env cwl-runner
+class: Workflow
+cwlVersion: v1.0
+
+requirements:
+ - class: InlineJavascriptRequirement
+
+inputs:
+ i:
+ type:
+ - File
+ - 'null'
+ - string
+ default: the default value
+
+outputs:
+ o:
+ type: string
+ outputSource: step1/o
+
+steps:
+ step1:
+ in:
+ i: i
+ out: [o]
+ run:
+ class: ExpressionTool
+ inputs:
+ i:
+ type:
+ - File
+ - 'null'
+ - string
+ outputs:
+ o:
+ type: string
+ expression: >
+ ${return {'o': (inputs.i.class || inputs.i)};}
diff --git a/test/unit/tools/cwl_tools/v1.0_custom/whale.txt b/test/unit/tools/cwl_tools/v1.0_custom/whale.txt
new file mode 120000
index 000000000000..2fa0d48aa7ef
--- /dev/null
+++ b/test/unit/tools/cwl_tools/v1.0_custom/whale.txt
@@ -0,0 +1 @@
+../v1.0/whale.txt
\ No newline at end of file
diff --git a/test/unit/tools/test_cwl.py b/test/unit/tools/test_cwl.py
new file mode 100644
index 000000000000..cc5fb7a9b113
--- /dev/null
+++ b/test/unit/tools/test_cwl.py
@@ -0,0 +1,608 @@
+
+import os
+import sys
+import tempfile
+import shutil
+
+from unittest import TestCase
+
+import galaxy.model
+
+from galaxy.tools.cwl import tool_proxy
+from galaxy.tools.cwl.parser import ToolProxy, tool_proxy_from_persistent_representation, to_cwl_tool_object
+from galaxy.tools.cwl import workflow_proxy
+from galaxy.tools.cwl.representation import USE_FIELD_TYPES
+
+from galaxy.tools.parser.cwl import CWL_DEFAULT_FILE_OUTPUT
+from galaxy.tools.parser.factory import get_tool_source
+
+from .. import tools_support
+
+unit_root = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
+sys.path.insert(1, unit_root)
+from unittest_utils import galaxy_mock
+
+TESTS_DIRECTORY = os.path.dirname(__file__)
+CWL_TOOLS_DIRECTORY = os.path.abspath(os.path.join(TESTS_DIRECTORY, "cwl_tools"))
+
+
+def test_tool_proxy():
+ """Test that tool proxies load some valid tools correctly."""
+ tool_proxy(_cwl_tool_path("v1.0/cat1-testcli.cwl"))
+ tool_proxy(_cwl_tool_path("v1.0/cat3-tool.cwl"))
+ tool_proxy(_cwl_tool_path("v1.0/env-tool1.cwl"))
+ tool_proxy(_cwl_tool_path("v1.0/sorttool.cwl"))
+ tool_proxy(_cwl_tool_path("v1.0/bwa-mem-tool.cwl"))
+
+ tool_proxy(_cwl_tool_path("v1.0/parseInt-tool.cwl"))
+
+
+def test_tool_source_records():
+ record_output_path = _cwl_tool_path("v1.0/record-output.cwl")
+ tool_source = get_tool_source(record_output_path)
+ inputs = _inputs(tool_source)
+ assert len(inputs) == 1, inputs
+
+ output_data, output_collections = _outputs(tool_source)
+ assert len(output_data) == 1
+ assert len(output_collections) == 1
+
+
+def test_serialize_deserialize():
+ path = _cwl_tool_path("v1.0/cat5-tool.cwl")
+ tool = tool_proxy(path)
+ print(tool._tool.tool)
+ rep = tool.to_persistent_representation()
+ tool = tool_proxy_from_persistent_representation(rep)
+ print(tool)
+ tool.job_proxy({"file1": "/moo"}, {})
+ print(tool._tool.tool)
+
+ with open(path, "r") as f:
+ import yaml
+ tool_object = yaml.load(f)
+ import json
+ tool_object = json.loads(json.dumps(tool_object))
+ tool = to_cwl_tool_object(tool_object=tool_object)
+
+
+def test_job_proxy():
+ bwa_parser = get_tool_source(_cwl_tool_path("v1.0/bwa-mem-tool.cwl"))
+ bwa_inputs = {
+ "reference": {
+ "class": "File",
+ "location": _cwl_tool_path("v1.0/chr20.fa"),
+ "size": 123,
+ "checksum": "sha1$hash"
+ },
+ "reads": [
+ {
+ "class": "File",
+ "location": _cwl_tool_path("v1.0/example_human_Illumina.pe_1.fastq")
+ },
+ {
+ "class": "File",
+ "location": _cwl_tool_path("v1.0/example_human_Illumina.pe_2.fastq")
+ }
+ ],
+ "min_std_max_min": [
+ 1,
+ 2,
+ 3,
+ 4
+ ],
+ "minimum_seed_length": 3
+ }
+ bwa_proxy = bwa_parser.tool_proxy
+ bwa_id = bwa_parser.parse_id()
+
+ job_proxy = bwa_proxy.job_proxy(
+ bwa_inputs,
+ {},
+ "/",
+ )
+
+ cmd = job_proxy.command_line
+ print(cmd)
+
+ bind_parser = get_tool_source(_cwl_tool_path("v1.0/binding-test.cwl"))
+ binding_proxy = bind_parser.tool_proxy
+ binding_id = bind_parser.parse_id()
+
+ job_proxy = binding_proxy.job_proxy(
+ bwa_inputs,
+ {},
+ "/",
+ )
+
+ cmd = job_proxy.command_line
+    assert bwa_id != binding_id, bwa_id
+
+
+def test_cores_min():
+ sort_parser = get_tool_source(_cwl_tool_path("v1.0/sorttool.cwl"))
+ bwa_parser = get_tool_source(_cwl_tool_path("v1.0/bwa-mem-tool.cwl"))
+
+ assert sort_parser.parse_cores_min() == 1
+ assert bwa_parser.parse_cores_min() == 2
+
+
+def test_success_codes():
+ exit_success_parser = get_tool_source(_cwl_tool_path("v1.0/exit-success.cwl"))
+
+ stdio, _ = exit_success_parser.parse_stdio()
+ assert len(stdio) == 2
+ stdio_0 = stdio[0]
+ assert stdio_0.range_start == float("-inf")
+ assert stdio_0.range_end == 0
+
+ stdio_1 = stdio[1]
+ assert stdio_1.range_start == 2
+ assert stdio_1.range_end == float("inf")
+
+ bwa_parser = get_tool_source(_cwl_tool_path("v1.0/bwa-mem-tool.cwl"))
+ stdio, _ = bwa_parser.parse_stdio()
+
+ assert len(stdio) == 2
+ stdio_0 = stdio[0]
+ assert stdio_0.range_start == float("-inf")
+ assert stdio_0.range_end == -1
+
+ stdio_1 = stdio[1]
+ assert stdio_1.range_start == 1
+ assert stdio_1.range_end == float("inf")
+
+
+
+def test_serialize_deserialize_workflow_embed():
+ # Test inherited hints and requirements from workflow -> tool
+ # work here.
+ versions = ["v1.0"]
+ for version in versions:
+ proxy = workflow_proxy(_cwl_tool_path("%s/count-lines2-wf.cwl" % version))
+ step_proxies = proxy.step_proxies()
+ tool_proxy = step_proxies[0].tool_proxy
+ assert tool_proxy.requirements, tool_proxy.requirements
+
+
+def test_reference_proxies():
+ versions = ["v1.0"]
+ for version in versions:
+ proxy = workflow_proxy(_cwl_tool_path("%s/count-lines1-wf.cwl" % version))
+ proxy.tool_reference_proxies()
+
+
+def test_subworkflow_parsing():
+ version = "v1.0"
+ proxy = workflow_proxy(_cwl_tool_path("%s/count-lines10-wf.cwl" % version))
+ assert len(proxy.tool_reference_proxies()) == 2
+
+ assert len(proxy.output_labels) == 1
+ assert "count_output" in proxy.output_labels, proxy.output_labels
+
+ galaxy_workflow_dict = proxy.to_dict()
+ steps = galaxy_workflow_dict["steps"]
+ assert len(steps) == 2 # One input, one subworkflow
+
+ subworkflow_step = steps[1]
+ assert subworkflow_step["type"] == "subworkflow"
+
+
+# No longer unmet - need a new test I think.
+# def test_checks_requirements():
+# """Test that tool proxy will not load unmet requirements."""
+# exception = None
+# try:
+# tool_proxy(_cwl_tool_path("draft3_custom/unmet-requirement.cwl"))
+# except Exception as e:
+# exception = e
+
+# assert exception is not None
+# assert "Unsupported requirement SubworkflowFeatureRequirement" in str(exception), str(exception)
+
+
+def test_checks_is_a_tool():
+ """Test that tool proxy cannot be created for a workflow."""
+ exception = None
+ try:
+ tool_proxy(_cwl_tool_path("v1.0/count-lines1-wf.cwl"))
+ except Exception as e:
+ exception = e
+
+ assert exception is not None
+ assert "CommandLineTool" in str(exception), str(exception)
+
+
+def test_checks_cwl_version():
+ """Test that tool proxy verifies supported version."""
+ exception = None
+ try:
+ tool_proxy(_cwl_tool_path("draft3_custom/version345.cwl"))
+ except Exception as e:
+ exception = e
+
+ assert exception is not None
+
+
+def test_workflow_of_files_proxy():
+ versions = ["v1.0"]
+ for version in versions:
+ proxy = workflow_proxy(_cwl_tool_path("%s/count-lines1-wf.cwl" % version))
+ step_proxies = proxy.step_proxies()
+ assert len(step_proxies) == 2
+
+ galaxy_workflow_dict = proxy.to_dict()
+
+ assert len(proxy.runnables) == 2
+
+ assert len(galaxy_workflow_dict["steps"]) == 3
+ wc_step = galaxy_workflow_dict["steps"][1]
+ exp_step = galaxy_workflow_dict["steps"][2]
+ assert wc_step["input_connections"]
+ assert exp_step["input_connections"]
+
+
+def test_workflow_embedded_tools_proxy():
+ versions = ["v1.0"]
+ for version in versions:
+ proxy = workflow_proxy(_cwl_tool_path("%s/count-lines2-wf.cwl" % version))
+ step_proxies = proxy.step_proxies()
+ assert len(step_proxies) == 2
+        print(step_proxies[1].requirements)
+        print(step_proxies[1]._step.embedded_tool.requirements)
+ galaxy_workflow_dict = proxy.to_dict()
+
+ assert len(proxy.runnables) == 2
+ print(proxy.runnables[1])
+
+ assert len(galaxy_workflow_dict["steps"]) == 3
+ wc_step = galaxy_workflow_dict["steps"][1]
+ exp_step = galaxy_workflow_dict["steps"][2]
+ assert wc_step["input_connections"]
+ assert exp_step["input_connections"]
+
+
+def test_workflow_scatter():
+ version = "v1.0"
+ proxy = workflow_proxy(_cwl_tool_path("%s/count-lines3-wf.cwl" % version))
+
+ step_proxies = proxy.step_proxies()
+ assert len(step_proxies) == 1
+
+ galaxy_workflow_dict = proxy.to_dict()
+ assert len(galaxy_workflow_dict["steps"]) == 2
+
+ # TODO: For CWL - deactivate implicit scattering Galaxy does
+ # and force annotation in the workflow of scattering? Maybe?
+ wc_step = galaxy_workflow_dict["steps"][1]
+ assert wc_step["input_connections"]
+
+ assert "inputs" in wc_step
+ wc_inputs = wc_step["inputs"]
+ assert len(wc_inputs) == 1
+ file_input = wc_inputs[0]
+ assert file_input["scatter_type"] == "dotproduct", wc_step
+
+ assert len(wc_step["workflow_outputs"]) == 1
+
+
+def test_workflow_outputs_of_inputs():
+ version = "v1.0"
+ proxy = workflow_proxy(_cwl_tool_path("%s/any-type-compat.cwl" % version))
+
+ galaxy_workflow_dict = proxy.to_dict()
+ assert len(galaxy_workflow_dict["steps"]) == 3
+ input_step = galaxy_workflow_dict["steps"][0]
+
+ assert len(input_step["workflow_outputs"]) == 1
+
+
+def test_workflow_scatter_multiple_input():
+ version = "v1.0"
+ proxy = workflow_proxy(_cwl_tool_path("%s/count-lines4-wf.cwl" % version))
+
+ step_proxies = proxy.step_proxies()
+ assert len(step_proxies) == 1
+
+ galaxy_workflow_dict = proxy.to_dict()
+ assert len(galaxy_workflow_dict["steps"]) == 3
+
+
+def test_workflow_multiple_input_merge_flattened():
+ version = "v1.0"
+ proxy = workflow_proxy(_cwl_tool_path("%s/count-lines7-wf.cwl" % version))
+
+ galaxy_workflow_dict = proxy.to_dict()
+ assert len(galaxy_workflow_dict["steps"]) == 3
+
+ tool_step = galaxy_workflow_dict["steps"][2]
+ assert "inputs" in tool_step
+ inputs = tool_step["inputs"]
+ assert len(inputs) == 1
+ input = inputs[0]
+ assert input["merge_type"] == "merge_flattened"
+
+
+def test_workflow_step_value_from():
+ version = "v1.0"
+ proxy = workflow_proxy(_cwl_tool_path("%s/step-valuefrom-wf.cwl" % version))
+
+ galaxy_workflow_dict = proxy.to_dict()
+ assert len(galaxy_workflow_dict["steps"]) == 3
+
+ print(galaxy_workflow_dict["steps"])
+ tool_step = [s for s in galaxy_workflow_dict["steps"].values() if s["label"] == "step1"][0]
+ assert "inputs" in tool_step
+ inputs = tool_step["inputs"]
+ assert len(inputs) == 1
+ assert "value_from" in inputs[0], inputs
+
+
+def test_workflow_input_without_source():
+ version = "v1.0"
+ proxy = workflow_proxy(_cwl_tool_path("%s/step-valuefrom3-wf.cwl" % version))
+
+ galaxy_workflow_dict = proxy.to_dict()
+ assert len(galaxy_workflow_dict["steps"]) == 3
+
+ tool_step = galaxy_workflow_dict["steps"][2]
+ assert "inputs" in tool_step
+ inputs = tool_step["inputs"]
+ assert len(inputs) == 3, inputs
+ assert inputs[2].get("value_from")
+
+
+def test_workflow_input_default():
+ version = "v1.0"
+ proxy = workflow_proxy(_cwl_tool_path("%s/pass-unconnected.cwl" % version))
+ galaxy_workflow_dict = proxy.to_dict()
+ assert len(galaxy_workflow_dict["steps"]) == 3
+
+ tool_step = galaxy_workflow_dict["steps"][2]
+
+ assert "inputs" in tool_step
+ inputs = tool_step["inputs"]
+ assert len(inputs) == 2, inputs
+ assert inputs[1]
+
+
+def test_search_workflow():
+ proxy = workflow_proxy(_cwl_tool_path("v1.0/search.cwl#main"))
+ galaxy_workflow_dict = proxy.to_dict()
+ assert len(galaxy_workflow_dict["steps"]) == 5
+
+
+def test_workflow_simple_optional_input():
+ proxy = workflow_proxy(_cwl_tool_path("v1.0_custom/int-opt-io-wf.cwl"))
+
+ galaxy_workflow_dict = proxy.to_dict()
+ assert len(galaxy_workflow_dict["steps"]) == 2
+
+ input_step = galaxy_workflow_dict["steps"][0]
+ assert input_step['type'] == "parameter_input", input_step
+ assert input_step['tool_state']['parameter_type'] == "field", input_step
+
+
+def test_load_proxy_simple():
+ cat3 = _cwl_tool_path("v1.0/cat3-tool.cwl")
+ tool_source = get_tool_source(cat3)
+
+ # Behavior was changed - too verbose?
+ # description = tool_source.parse_description()
+ # assert description == "Print the contents of a file to stdout using 'cat' running in a docker container.", description
+
+ input_sources = _inputs(tool_source)
+ assert len(input_sources) == 1
+
+ input_source = input_sources[0]
+ assert input_source.parse_help() == "The file that will be copied using 'cat'"
+ assert input_source.parse_label() == "Input File"
+
+ outputs, output_collections = tool_source.parse_outputs(None)
+ assert len(outputs) == 1
+
+ output1 = outputs['output_file']
+ assert output1.format == CWL_DEFAULT_FILE_OUTPUT, output1.format # Have Galaxy auto-detect
+
+ _, containers = tool_source.parse_requirements_and_containers()
+ assert len(containers) == 1
+
+
+def test_cwl_strict_parsing():
+ md5sum_non_strict_path = _cwl_tool_path("v1.0_custom/md5sum_non_strict.cwl")
+ threw_exception = False
+ try:
+ get_tool_source(md5sum_non_strict_path).tool_proxy
+ except Exception:
+ threw_exception = True
+
+ assert threw_exception
+ get_tool_source(md5sum_non_strict_path, strict_cwl_validation=False).tool_proxy
+
+
+def test_load_proxy_bwa_mem():
+ bwa_mem = _cwl_tool_path("v1.0/bwa-mem-tool.cwl")
+ tool_source = get_tool_source(bwa_mem)
+ tool_id = tool_source.parse_id()
+ assert tool_id == "bwa-mem-tool", tool_id
+ _inputs(tool_source)
+ # TODO: test repeat generated...
+
+
+def test_representation_id():
+ import yaml
+ cat3 = _cwl_tool_path("v1.0/cat3-tool.cwl")
+ with open(cat3, "r") as f:
+ representation = yaml.load(f)
+ representation["id"] = "my-cool-id"
+
+ proxy = tool_proxy(tool_object=representation, tool_directory="/")
+ tool_id = proxy.galaxy_id()
+ assert tool_id == "my-cool-id", tool_id
+ id_proxy = tool_proxy_from_persistent_representation(proxy.to_persistent_representation())
+ tool_id = id_proxy.galaxy_id()
+ # assert tool_id == "my-cool-id", tool_id
+
+
+def test_env_tool1():
+ env_tool1 = _cwl_tool_path("v1.0/env-tool1.cwl")
+ tool_source = get_tool_source(env_tool1)
+ _inputs(tool_source)
+
+
+def test_wc2_tool():
+ env_tool1 = _cwl_tool_path("v1.0/wc2-tool.cwl")
+ tool_source = get_tool_source(env_tool1)
+ _inputs(tool_source)
+ datasets, collections = _outputs(tool_source)
+ assert len(datasets) == 1, datasets
+ output = datasets["output"]
+ assert output.format == "expression.json", output.format
+
+
+def test_optional_output():
+ optional_output2_tool1 = _cwl_tool_path("v1.0/optional-output.cwl")
+ tool_source = get_tool_source(optional_output2_tool1)
+ datasets, collections = _outputs(tool_source)
+ assert len(datasets) == 2, datasets
+ output = datasets["optional_file"]
+ assert output.format == CWL_DEFAULT_FILE_OUTPUT, output.format
+
+
+def test_sorttool():
+ env_tool1 = _cwl_tool_path("v1.0/sorttool.cwl")
+ tool_source = get_tool_source(env_tool1)
+
+ assert tool_source.parse_id() == "sorttool"
+
+ inputs = _inputs(tool_source)
+ assert len(inputs) == 2
+ bool_input = inputs[0]
+ file_input = inputs[1]
+ assert bool_input.parse_input_type() == "param"
+ assert bool_input.get("type") == "boolean"
+
+ assert file_input.parse_input_type() == "param"
+ assert file_input.get("type") == "data", file_input.get("type")
+
+ output_data, output_collections = _outputs(tool_source)
+ assert len(output_data) == 1
+ assert len(output_collections) == 0
+
+
+def test_schemadef_tool():
+ tool_path = _cwl_tool_path("v1.0/schemadef-tool.cwl")
+ tool_source = get_tool_source(tool_path)
+ _inputs(tool_source)
+
+
+def test_params_tool():
+ tool_path = _cwl_tool_path("v1.0/params.cwl")
+ tool_source = get_tool_source(tool_path)
+ _inputs(tool_source)
+
+
+def test_cat1():
+ cat1_tool = _cwl_tool_path("v1.0/cat1-testcli.cwl")
+ tool_source = get_tool_source(cat1_tool)
+ inputs = _inputs(tool_source)
+
+ assert len(inputs) == 3, inputs
+ file_input = inputs[0]
+
+ assert file_input.parse_input_type() == "param"
+ assert file_input.get("type") == "data", file_input.get("type")
+
+ # User needs to specify if want to select boolean or not.
+ if not USE_FIELD_TYPES:
+ null_or_bool_input = inputs[1]
+ assert null_or_bool_input.parse_input_type() == "conditional"
+ else:
+ field_input = inputs[1]
+ assert field_input.parse_input_type() == "param"
+ assert field_input.get("type") == "field", field_input.get("type")
+
+ output_data, output_collections = _outputs(tool_source)
+ assert len(output_data) == 1
+ assert len(output_collections) == 1
+
+
+def test_tool_reload():
+ cat1_tool = _cwl_tool_path("v1.0/cat1-testcli.cwl")
+ tool_source = get_tool_source(cat1_tool)
+ _inputs(tool_source)
+
+ # Test reloading - had a regression where this broke down.
+ cat1_tool_again = _cwl_tool_path("v1.0/cat1-testcli.cwl")
+ tool_source = get_tool_source(cat1_tool_again)
+ _inputs(tool_source)
+
+
+class CwlToolObjectTestCase(TestCase, tools_support.UsesApp, tools_support.UsesTools):
+
+ def setUp(self):
+ self.test_directory = tempfile.mkdtemp()
+ self.app = galaxy_mock.MockApp()
+ self.history = galaxy.model.History()
+ self.trans = galaxy_mock.MockTrans(history=self.history)
+
+ def tearDown(self):
+ shutil.rmtree(self.test_directory)
+
+ def test_default_data_inputs(self):
+ self._init_tool(tool_path=_cwl_tool_path("v1.0/default_path.cwl"))
+ print("TOOL IS %s" % self.tool)
+ hda = self._new_hda()
+ from galaxy.tools.cwl import to_cwl_job
+ from galaxy.tools.parameters import populate_state
+ errors = {}
+ cwl_inputs = {
+ "file1": {"src": "hda", "id": self.app.security.encode_id(hda.id)}
+ }
+ inputs = self.tool.inputs_from_dict({"inputs": cwl_inputs, "inputs_representation": "cwl"})
+        print(inputs)
+ print("pre-populated state is %s" % inputs)
+ populated_state = {}
+ populate_state(self.trans, self.tool.inputs, inputs, populated_state, errors)
+ print("populated state is %s" % inputs)
+ from galaxy.tools.parameters.wrapped import WrappedParameters
+ wrapped_params = WrappedParameters(galaxy_mock.MockTrans(), self.tool, populated_state)
+ input_json = to_cwl_job(self.tool, wrapped_params.params, self.test_directory)
+        print(inputs)
+        print("to_cwl_job is %s" % input_json)
+ # assert False
+
+ def _new_hda(self):
+ hda = galaxy.model.HistoryDatasetAssociation(history=self.history)
+ hda.visible = True
+ hda.dataset = galaxy.model.Dataset()
+ self.app.model.context.add(hda)
+ self.app.model.context.flush()
+ return hda
+
+
+def _outputs(tool_source):
+ return tool_source.parse_outputs(object())
+
+
+def get_cwl_tool_source(path):
+ path = _cwl_tool_path(path)
+ return get_tool_source(path)
+
+
+def _inputs(tool_source=None, path=None):
+ if tool_source is None:
+ path = _cwl_tool_path(path)
+ tool_source = get_tool_source(path)
+
+ input_pages = tool_source.parse_input_pages()
+ assert input_pages.inputs_defined
+ page_sources = input_pages.page_sources
+ assert len(page_sources) == 1
+ page_source = page_sources[0]
+ input_sources = page_source.parse_input_sources()
+ return input_sources
+
+
+def _cwl_tool_path(path):
+ return os.path.join(CWL_TOOLS_DIRECTORY, path)
diff --git a/test/unit/tools/test_expression_basics.py b/test/unit/tools/test_expression_basics.py
new file mode 100644
index 000000000000..51c7293ac3f2
--- /dev/null
+++ b/test/unit/tools/test_expression_basics.py
@@ -0,0 +1,8 @@
+from galaxy.tools.expressions import evaluate
+
+
+def test_evaluate():
+ input = {
+ "script": "{return 5;}"
+ }
+ assert evaluate(None, input) == 5
diff --git a/test/unit/tools/test_parsing.py b/test/unit/tools/test_parsing.py
index b1563568562f..30f64bda3166 100644
--- a/test/unit/tools/test_parsing.py
+++ b/test/unit/tools/test_parsing.py
@@ -92,6 +92,39 @@
compare: sim_size
"""
+TOOL_EXPRESSION_XML_1 = """
+
+ Parse Int
+
+ {return {'output': parseInt($job.input1)};}
+
+
+
+
+
+
+
+ Parse an integer from text.
+
+"""
+
+
+TOOL_EXPRESSION_YAML_1 = """
+class: GalaxyExpressionTool
+name: "parse_int"
+id: parse_int
+version: 1.0.2
+expression: "{return {'output': parseInt($job.input1)};}"
+inputs:
+ - name: input1
+ label: Text to parse
+ type: text
+outputs:
+ out1:
+ type: integer
+ from: "#output"
+"""
+
class BaseLoaderTestCase(unittest.TestCase):
@@ -119,6 +152,22 @@ def _get_tool_source(self, source_file_name=None, source_contents=None):
return tool_source
+class XmlExpressionLoaderTestCase(BaseLoaderTestCase):
+ source_file_name = "expression.xml"
+ source_contents = TOOL_EXPRESSION_XML_1
+
+ def test_expression(self):
+ assert self._tool_source.parse_expression().strip() == "{return {'output': parseInt($job.input1)};}"
+
+ def test_tool_type(self):
+ assert self._tool_source.parse_tool_type() == "expression"
+
+
+class YamlExpressionLoaderTestCase(BaseLoaderTestCase):
+ source_file_name = "expression.yml"
+ source_contents = TOOL_EXPRESSION_YAML_1
+
+
class XmlLoaderTestCase(BaseLoaderTestCase):
source_file_name = "bwa.xml"
source_contents = TOOL_XML_1
@@ -376,6 +425,28 @@ def test_sanitize(self):
assert self._tool_source.parse_sanitize() is True
+class GalaxyFlavoredCwlLoaderTestCase(BaseLoaderTestCase):
+ source_file_name = os.path.join(os.getcwd(), "test/functional/tools/cwl_tools/galactic_flavored/record_input.cwl")
+ source_contents = None
+
+ def test_parsing(self):
+ tool_type = self._tool_source.parse_tool_type()
+ assert tool_type == "galactic_cwl", tool_type
+
+ def test_inputs_parsing(self):
+ input_pages = self._tool_source.parse_input_pages()
+ assert input_pages.inputs_defined
+ page_sources = input_pages.page_sources
+ assert len(page_sources) == 1
+ page_source = page_sources[0]
+ input_sources = page_source.parse_input_sources()
+ assert len(input_sources) == 1
+
+ def test_id(self):
+ tool_id = self._tool_source.parse_id()
+ assert tool_id == "galactic_record_input", tool_id
+
+
class DataSourceLoaderTestCase(BaseLoaderTestCase):
source_file_name = "ds.xml"
source_contents = """
diff --git a/test/unit/tools_support.py b/test/unit/tools_support.py
index 0f9e9d617fa7..8d8790c24ff2 100644
--- a/test/unit/tools_support.py
+++ b/test/unit/tools_support.py
@@ -74,14 +74,19 @@ def _init_tool(
tool_id="test_tool",
extra_file_contents=None,
extra_file_path=None,
+ tool_path=None,
):
+ if tool_path is None:
+ filename = filename or "tool.xml"
+ self.tool_file = os.path.join(self.test_directory, filename)
+ contents_template = string.Template(tool_contents or SIMPLE_TOOL_CONTENTS)
+ tool_contents = contents_template.safe_substitute(dict(version=version, profile=profile, tool_id=tool_id))
+ self.__write_tool(tool_contents)
+ if extra_file_contents and extra_file_path:
+ self.__write_tool(extra_file_contents, path=os.path.join(self.test_directory, extra_file_path))
+ else:
+ self.tool_file = tool_path
self._init_app_for_tools()
- self.tool_file = os.path.join(self.test_directory, filename)
- contents_template = string.Template(tool_contents)
- tool_contents = contents_template.safe_substitute(dict(version=version, profile=profile, tool_id=tool_id))
- self.__write_tool(tool_contents)
- if extra_file_contents and extra_file_path:
- self.__write_tool(extra_file_contents, path=os.path.join(self.test_directory, extra_file_path))
return self.__setup_tool()
def _init_app_for_tools(self):
diff --git a/test/unit/workflows/test_modules.py b/test/unit/workflows/test_modules.py
index 1394a39645e8..64d479b50be9 100644
--- a/test/unit/workflows/test_modules.py
+++ b/test/unit/workflows/test_modules.py
@@ -40,7 +40,7 @@ def test_data_input_step_modified_state():
def test_data_input_compute_runtime_state_default():
module = __from_step(type="data_input")
- state, errors = module.compute_runtime_state(module.trans)
+ state, errors = module.compute_runtime_state(module.trans, module.test_step)
assert not errors
assert "input" in state.inputs
assert state.inputs["input"] is None
@@ -52,7 +52,7 @@ def test_data_input_compute_runtime_state_args():
hda = model.HistoryDatasetAssociation()
with mock.patch("galaxy.workflow.modules.check_param") as check_method:
check_method.return_value = (hda, None)
- state, errors = module.compute_runtime_state(module.trans, {"input": 4, "tool_state": tool_state})
+ state, errors = module.compute_runtime_state(module.trans, module.test_step, {"input": 4, "tool_state": tool_state})
assert not errors
assert "input" in state.inputs
assert state.inputs["input"] is hda
@@ -166,25 +166,28 @@ def test_tool_version_same():
label: "input2"
- type: "tool"
tool_id: "cat1"
- input_connections:
- - input_name: "input1"
- "@output_step": 0
- output_name: "output"
+ inputs:
+ input1:
+ connections:
+ - "@output_step": 0
+ output_name: "output"
- type: "tool"
tool_id: "cat1"
- input_connections:
- - input_name: "input1"
- "@output_step": 0
- output_name: "output"
+ inputs:
+ input1:
+ connections:
+ - "@output_step": 0
+ output_name: "output"
workflow_outputs:
- output_name: "out_file1"
label: "out1"
- type: "tool"
tool_id: "cat1"
- input_connections:
- - input_name: "input1"
- "@output_step": 2
- output_name: "out_file1"
+ inputs:
+ input1:
+ connections:
+ - "@output_step": 2
+ output_name: "out_file1"
workflow_outputs:
- output_name: "out_file1"
"""
diff --git a/test/unit/workflows/test_render.py b/test/unit/workflows/test_render.py
index b7afcd28cd16..04ee10c791cf 100644
--- a/test/unit/workflows/test_render.py
+++ b/test/unit/workflows/test_render.py
@@ -6,28 +6,28 @@
- type: "data_input"
order_index: 0
tool_inputs: {"name": "input1"}
- input_connections: []
position: {"top": 3, "left": 3}
- type: "data_input"
order_index: 1
tool_inputs: {"name": "input2"}
- input_connections: []
position: {"top": 6, "left": 4}
- type: "tool"
tool_id: "cat1"
order_index: 2
- input_connections:
- - input_name: "input1"
- "@output_step": 0
- output_name: "di1"
+ inputs:
+ input1:
+ connections:
+ - "@output_step": 0
+ output_name: "di1"
position: {"top": 13, "left": 10}
- type: "tool"
tool_id: "cat1"
order_index: 3
- input_connections:
- - input_name: "input1"
- "@output_step": 0
- output_name: "di1"
+ inputs:
+ input1:
+ connections:
+ - "@output_step": 0
+ output_name: "di1"
position: {"top": 33, "left": 103}
"""
diff --git a/test/unit/workflows/test_workflow_progress.py b/test/unit/workflows/test_workflow_progress.py
index 1cede6d09aae..028483f1640c 100644
--- a/test/unit/workflows/test_workflow_progress.py
+++ b/test/unit/workflows/test_workflow_progress.py
@@ -12,22 +12,25 @@
tool_inputs: {"name": "input2"}
- type: "tool"
tool_id: "cat1"
- input_connections:
- - input_name: "input1"
- "@output_step": 0
- output_name: "output"
+ inputs:
+ "input1":
+ connections:
+ - "@output_step": 0
+ output_name: "output"
- type: "tool"
tool_id: "cat1"
- input_connections:
- - input_name: "input1"
- "@output_step": 0
- output_name: "output"
+ inputs:
+ input1:
+ connections:
+ - "@output_step": 0
+ output_name: "output"
- type: "tool"
tool_id: "cat1"
- input_connections:
- - input_name: "input1"
- "@output_step": 2
- output_name: "out_file1"
+ inputs:
+ "input1":
+ connections:
+ - "@output_step": 2
+ output_name: "out_file1"
"""
TEST_SUBWORKFLOW_YAML = """
@@ -41,15 +44,17 @@
tool_inputs: {"name": "inner_input"}
- type: "tool"
tool_id: "cat1"
- input_connections:
- - input_name: "input1"
- "@output_step": 0
- output_name: "output"
- input_connections:
- - input_name: "inner_input"
- "@output_step": 0
- output_name: "output"
- "@input_subworkflow_step": 0
+ inputs:
+ "input1":
+ connections:
+ - "@output_step": 0
+ output_name: "output"
+ inputs:
+ inner_input:
+ connections:
+ - "@output_step": 0
+ output_name: "output"
+ "@input_subworkflow_step": 0
"""
UNSCHEDULED_STEP = object()
diff --git a/test/unit/workflows/workflow_support.py b/test/unit/workflows/workflow_support.py
index 6f4f567cb66d..b460bfe1061f 100644
--- a/test/unit/workflows/workflow_support.py
+++ b/test/unit/workflows/workflow_support.py
@@ -99,20 +99,28 @@ def yaml_to_model(has_dict, id_offset=100):
for key, value in step.items():
if key == "input_connections":
- connections = []
- for conn_dict in value:
- conn = model.WorkflowStepConnection()
- for conn_key, conn_value in conn_dict.items():
- if conn_key == "@output_step":
- target_step = workflow.steps[conn_value]
- conn_value = target_step
- conn_key = "output_step"
- if conn_key == "@input_subworkflow_step":
- conn_value = step["subworkflow"].step_by_index(conn_value)
- conn_key = "input_subworkflow_step"
- setattr(conn, conn_key, conn_value)
- connections.append(conn)
- value = connections
+ raise NotImplementedError()
+ if key == "inputs":
+ inputs = []
+ for input_name, input_def in value.items():
+ step_input = model.WorkflowStepInput(workflow_step)
+ step_input.name = input_name
+ connections = []
+ for conn_dict in input_def.get("connections", []):
+ conn = model.WorkflowStepConnection()
+ for conn_key, conn_value in conn_dict.items():
+ if conn_key == "@output_step":
+ target_step = workflow.steps[conn_value]
+ conn_value = target_step
+ conn_key = "output_step"
+ if conn_key == "@input_subworkflow_step":
+ conn_value = step["subworkflow"].step_by_index(conn_value)
+ conn_key = "input_subworkflow_step"
+ setattr(conn, conn_key, conn_value)
+ connections.append(conn)
+ step_input.connections = connections
+ inputs.append(step_input)
+ value = inputs
if key == "workflow_outputs":
value = [partial(_dict_to_workflow_output, workflow_step)(_) for _ in value]
setattr(workflow_step, key, value)
diff --git a/tox.ini b/tox.ini
index 03d5632475c3..6ed37617a7ba 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,7 @@
[tox]
# envlist is the list of environments that are tested when `tox` is run without any option
# hyphens in an environment name are used to delimit factors
-envlist = check_py3_compatibility, py27-first_startup, py27-lint, py27-lint_docstring_include_list, py27-unit, py34-first_startup, py34-lint, py34-unit, qunit, validate_test_tools
+envlist = check_py3_compatibility, py27-first_startup, py27-lint, py27-lint_docstring_include_list, py27-unit, py34-first_startup, py34-lint, py34-unit, qunit, validate_test_tools, cwl
skipsdist = True
[testenv:check_py3_compatibility]
@@ -82,3 +82,25 @@ whitelist_externals = bash
[testenv:web_controller_line_count]
commands = bash .ci/check_controller.sh
whitelist_externals = bash
+
+[testenv:cwl_green]
+commands = bash run_tests.sh -api test/api/test_cwl_conformance_green_v1_0.py
+whitelist_externals = bash
+setenv =
+ GALAXY_VIRTUAL_ENV={envdir}
+deps =
+ nose
+ NoseHTML
+ mock
+ mock-ssh-server
+
+[testenv:cwl]
+commands = bash run_tests.sh -api test/api/test_cwl_conformance_v1_0.py
+whitelist_externals = bash
+setenv =
+ GALAXY_VIRTUAL_ENV={envdir}
+deps =
+ nose
+ NoseHTML
+ mock
+ mock-ssh-server