From 22fa1805c6b7c68efcfa0b59caee5f78a9617de4 Mon Sep 17 00:00:00 2001 From: Tomas Baca Date: Wed, 3 Apr 2024 11:52:04 +0200 Subject: [PATCH] using catkin_pkg for determining the build order --- .ci/catkin_pkg/__init__.py | 38 + .ci/catkin_pkg/changelog.py | 566 ++++++++++++++ .ci/catkin_pkg/changelog_generator.py | 298 ++++++++ .ci/catkin_pkg/changelog_generator_vcs.py | 411 ++++++++++ .ci/catkin_pkg/cli/__init__.py | 0 .ci/catkin_pkg/cli/create_pkg.py | 70 ++ .ci/catkin_pkg/cli/find_pkg.py | 29 + .ci/catkin_pkg/cli/generate_changelog.py | 137 ++++ .ci/catkin_pkg/cli/package_version.py | 39 + .ci/catkin_pkg/cli/prepare_release.py | 451 +++++++++++ .ci/catkin_pkg/cli/tag_changelog.py | 114 +++ .ci/catkin_pkg/cli/test_changelog.py | 46 ++ .ci/catkin_pkg/cmake.py | 80 ++ .ci/catkin_pkg/condition.py | 161 ++++ .ci/catkin_pkg/group_dependency.py | 62 ++ .ci/catkin_pkg/group_membership.py | 52 ++ .ci/catkin_pkg/metapackage.py | 173 +++++ .ci/catkin_pkg/package.py | 876 ++++++++++++++++++++++ .ci/catkin_pkg/package_templates.py | 445 +++++++++++ .ci/catkin_pkg/package_version.py | 213 ++++++ .ci/catkin_pkg/packages.py | 186 +++++ .ci/catkin_pkg/python_setup.py | 177 +++++ .ci/catkin_pkg/rospack.py | 43 ++ .ci/catkin_pkg/terminal_color.py | 126 ++++ .ci/catkin_pkg/tool_detection.py | 79 ++ .ci/catkin_pkg/topological_order.py | 310 ++++++++ .ci/catkin_pkg/workspace_vcs.py | 63 ++ .ci/catkin_pkg/workspaces.py | 108 +++ .ci/get_build_matrix.sh | 34 +- .ci/get_build_order.py | 19 + .gitignore | 3 + 31 files changed, 5390 insertions(+), 19 deletions(-) create mode 100644 .ci/catkin_pkg/__init__.py create mode 100644 .ci/catkin_pkg/changelog.py create mode 100644 .ci/catkin_pkg/changelog_generator.py create mode 100644 .ci/catkin_pkg/changelog_generator_vcs.py create mode 100644 .ci/catkin_pkg/cli/__init__.py create mode 100644 .ci/catkin_pkg/cli/create_pkg.py create mode 100644 .ci/catkin_pkg/cli/find_pkg.py create mode 100644 .ci/catkin_pkg/cli/generate_changelog.py create 
mode 100644 .ci/catkin_pkg/cli/package_version.py create mode 100644 .ci/catkin_pkg/cli/prepare_release.py create mode 100644 .ci/catkin_pkg/cli/tag_changelog.py create mode 100644 .ci/catkin_pkg/cli/test_changelog.py create mode 100644 .ci/catkin_pkg/cmake.py create mode 100644 .ci/catkin_pkg/condition.py create mode 100644 .ci/catkin_pkg/group_dependency.py create mode 100644 .ci/catkin_pkg/group_membership.py create mode 100644 .ci/catkin_pkg/metapackage.py create mode 100644 .ci/catkin_pkg/package.py create mode 100644 .ci/catkin_pkg/package_templates.py create mode 100644 .ci/catkin_pkg/package_version.py create mode 100644 .ci/catkin_pkg/packages.py create mode 100644 .ci/catkin_pkg/python_setup.py create mode 100644 .ci/catkin_pkg/rospack.py create mode 100644 .ci/catkin_pkg/terminal_color.py create mode 100644 .ci/catkin_pkg/tool_detection.py create mode 100644 .ci/catkin_pkg/topological_order.py create mode 100644 .ci/catkin_pkg/workspace_vcs.py create mode 100644 .ci/catkin_pkg/workspaces.py create mode 100755 .ci/get_build_order.py diff --git a/.ci/catkin_pkg/__init__.py b/.ci/catkin_pkg/__init__.py new file mode 100644 index 0000000000..39940e5add --- /dev/null +++ b/.ci/catkin_pkg/__init__.py @@ -0,0 +1,38 @@ +# Software License Agreement (BSD License) +# +# Copyright (c) 2012, Willow Garage, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Willow Garage, Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +"""Library for retrieving information about catkin packages.""" + +# same version as in: +# - setup.py +# - stdeb.cfg +__version__ = '1.0.0' diff --git a/.ci/catkin_pkg/changelog.py b/.ci/catkin_pkg/changelog.py new file mode 100644 index 0000000000..c3f0304084 --- /dev/null +++ b/.ci/catkin_pkg/changelog.py @@ -0,0 +1,566 @@ +# Software License Agreement (BSD License) +# +# Copyright (c) 2013, Open Source Robotics Foundation, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Open Source Robotics Foundation, Inc. 
nor +# the names of its contributors may be used to endorse or promote +# products derived from this software without specific prior +# written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Processes ROS changelogs so that they can be used in binary packaging. + +The Changelog format is described in REP-0132: + +http://ros.org/reps/rep-0132.html +""" + +from __future__ import print_function +from __future__ import unicode_literals + +import logging +import os +import re +import sys + +import dateutil.parser +import docutils +import docutils.core +import pkg_resources + +_py3 = sys.version_info[0] >= 3 + +try: + _unicode = unicode +except NameError: + _unicode = str + +__author__ = 'William Woodall' +__email__ = 'william@osrfoundation.org' +__maintainer__ = 'William Woodall' + +log = logging.getLogger('changelog') + +CHANGELOG_FILENAME = 'CHANGELOG.rst' + +example_rst = """\ +^^^^^^^^^^^^^^^^^^^^^^^^^ +Changelog for package foo +^^^^^^^^^^^^^^^^^^^^^^^^^ + +0.1 +=== +Free form text about this minor release. 
+ +0.1.27 (forthcoming) +-------------------- +* Great new feature + +0.1.26 (2012-12-26) +------------------- +* Utilizes caching to improve query performance (fix https://github.com/ros/ros_comm/pull/2) +* Simplified API calls based on (https://github.com/ros/robot_model): + + * Note that these changes are based on REP 192 + * Also they fix a problem related to initialization + +* Fixed synchronization issue on startup + +.. not mentioning secret feature on purpose + +0.1.25 (2012-11-25) +------------------- + +- Added thread safety +- Replaced custom XML parser with `TinyXML `_. +- Fixed regression introduced in 0.1.22 +- New syntax for foo:: + + foo('bar') + +- Added a safety check for XML parsing + +---- + +The library should now compile under ``Win32`` + +0.1.0 (2012-10-01) +------------------ + +*First* public **stable** release + +0.0 +=== + +0.0.1 (2012-01-31) +------------------ + +1. Initial release +2. Initial bugs +""" + + +def bullet_list_class_from_docutils(bullet_list, bullet_type=None): + """ + Process elements of bullet list into an encapsulating class. + + :param bullet_list: ``docutils.nodes.bullet_list`` list to be processed + :param bullet_type: ``str`` either 'bullet' or 'enumerated' + :returns: ``BulletList`` object representing a docutils bullet_list + """ + content = BulletList(bullet_type=bullet_type) + for child in bullet_list.children: + if isinstance(child, docutils.nodes.list_item): + content.bullets.append(mixed_text_from_docutils(child)) + else: + log.debug("Skipped bullet_list child: '{0}'".format(child)) + return content + + +def mixed_text_from_docutils(node): + """ + Take most Text-ish docutils objects and converts them to MixedText. 
+ + :param node: ``docutils.nodes.{paragraph, list_item, ...}`` text-ish + :returns: ``MixedText`` representing the given docutils object + """ + content = MixedText() + for child in node.children: + if isinstance(child, docutils.nodes.paragraph): + content.texts.extend(mixed_text_from_docutils(child).texts) + elif isinstance(child, docutils.nodes.Text): + content.texts.append(child.astext()) + elif isinstance(child, docutils.nodes.reference): + content.texts.append(reference_from_docutils(child)) + elif isinstance(child, docutils.nodes.emphasis): + content.texts.append('*{0}*'.format(child.astext())) + elif isinstance(child, docutils.nodes.strong): + content.texts.append('**{0}**'.format(child.astext())) + elif isinstance(child, docutils.nodes.literal): + content.texts.append('``{0}``'.format(child.astext())) + elif isinstance(child, docutils.nodes.literal_block): + content.texts.append('\n\n ' + child.astext() + '\n') + elif isinstance(child, docutils.nodes.target): + pass + elif isinstance(child, docutils.nodes.system_message): + log.debug('Skipping system_message: {0}'.format(child)) + elif isinstance(child, docutils.nodes.bullet_list): + content.texts.append(bullet_list_class_from_docutils(child)) + else: + try: + # Try to add it as plain text + log.debug("Trying to add {0}'s child of type {1}: '{2}'" + .format(type(node), type(child), child)) + content.texts.append(child.astext()) + except AttributeError: + log.debug("Ignored {0} child of type {1}: '{2}'" + .format(type(node), type(child), child)) + return content + + +def get_changelog_from_path(path, package_name=None): + """ + Changelog factory, which reads a changelog file into a class. 
+ + :param path: ``str`` the path of the changelog including or excluding the filename CHANGELOG.rst + :param package_name: ``str`` the package name + :returns: ``Changelog`` changelog class or None if file was not readable + """ + changelog = Changelog(package_name) + if os.path.isdir(path): + path = os.path.join(path, CHANGELOG_FILENAME) + try: + with open(path, 'rb') as f: + populate_changelog_from_rst(changelog, f.read().decode('utf-8')) + except IOError: + return None + return changelog + + +def populate_changelog_from_rst(changelog, rst): + """ + Changelog factory, which converts the raw ReST into a class. + + :param changelog: ``Changelog`` changelog to be populated + :param rst: ``str`` raw ReST changelog + :returns: ``Changelog`` changelog that was populated + """ + document = docutils.core.publish_doctree(rst) + processes_changelog_children(changelog, document.children) + changelog.rst = rst + return changelog + + +def processes_changelog_children(changelog, children): + """ + Process docutils children into a REP-0132 changelog instance. + + Recurse into sections, check (sub-)titles if they are valid versions. 
+ + :param changelog: ``Changelog`` changelog to be populated + :param section: ``docutils.nodes.section`` section to be processed + :returns: ``Changelog`` changelog that was populated + """ + for i, child in enumerate(children): + if isinstance(child, docutils.nodes.section): + processes_changelog_children(changelog, child.children) + elif isinstance(child, docutils.nodes.title) or isinstance(child, docutils.nodes.subtitle): + version, date = None, None + # See if the title has a text element in it + if len(child.children) > 0 and any(isinstance(c, docutils.nodes.Text) for c in child.traverse()): + # Extract version and date from (sub-)title + title_text = child.astext() + try: + version, date = version_and_date_from_title(title_text) + except InvalidSectionTitle: + # Catch invalid section titles + log.debug("Ignored non-compliant title: '{0}'".format(title_text)) + continue + valid_section = None not in (version, date) + if valid_section: + contents = [] + # For each remaining sibling + for child in children[i + 1:]: + # Skip sections (nesting of valid sections not allowed) + if isinstance(child, docutils.nodes.section): + log.debug("Ignored section child: '{0}'".format(child)) + continue + # Skip title + if isinstance(child, docutils.nodes.title): + continue + # Skip comments + if isinstance(child, docutils.nodes.comment): + log.debug("Ignored section child: '{0}'".format(child)) + continue + # Process other elements into the contents + if isinstance(child, docutils.nodes.bullet_list): + contents.append(bullet_list_class_from_docutils(child)) + elif isinstance(child, docutils.nodes.enumerated_list): + contents.append(bullet_list_class_from_docutils(child, bullet_type='enumerated')) + elif isinstance(child, docutils.nodes.transition): + contents.append(Transition()) + elif isinstance(child, docutils.nodes.paragraph): + contents.append(mixed_text_from_docutils(child)) + else: + log.debug("Skipped section child: '{0}'".format(child)) + 
changelog.add_version_section(version, date, contents) + break + else: + log.debug("Ignored non-compliant title: '{0}'".format(child)) + + +def reference_from_docutils(reference): + """ + Turn a reference element into a ``Reference``. + + :param reference: ``docutils.nodes.reference`` reference element + :returns: ``Reference`` simpler object representing the reference + """ + name, refuri = None, None + for pair in reference.attlist(): + if pair[0] == 'name': + name = pair[1] + if pair[0] == 'refuri': + refuri = pair[1] + return Reference(name, refuri) + + +def version_and_date_from_title(title): + """ + Split a section title into version and date if possible. + + :param title: ``str`` raw section title to be processed + :returns: ``(str, datetime.datetime)`` + :raises: ``InvalidSectionTitle`` for non REP-0132 section titles + """ + match = re.search(r'^([0-9]+\.[0-9]+\.[0-9]+)[ ]\((.+)\)$', title) + if match is None: + raise InvalidSectionTitle(title) + version, date_str = match.groups() + try: + date = dateutil.parser.parse(date_str) + except (ValueError, TypeError) as e: + # Catch invalid dates + log.debug("Error parsing date ({0}): '{1}'".format(date_str, e)) + raise InvalidSectionTitle(title) + return version, date + + +class BulletList(object): + """Represent a bulleted list of text.""" + + def __init__(self, bullets=None, bullet_type=None): + """ + Initialize BulletList. 
+ + :param bullets: ``list(MixedText)`` list of text bullets + :param bullet_type: ``str`` either 'bullet' or 'enumerated' + """ + bullet_type = 'bullet' if bullet_type is None else bullet_type + if bullet_type not in ['bullet', 'enumerated']: + raise RuntimeError("Invalid bullet type: '{0}'".format(bullet_type)) + self.bullets = bullets or [] + self.bullet_type = bullet_type + + def __iter__(self): + for bullet in self.bullets: + yield bullet + + def __str__(self): + value = self.__unicode__() + if not _py3: + value = value.encode('ascii', 'replace') + return value + + def __unicode__(self): + return self.as_txt() + + def as_rst(self): + return self.as_txt(indent='', use_hyphen_bullet=True) + + def as_txt(self, indent='', use_hyphen_bullet=False): + bullet = '*' if self.bullet_type == 'bullet' else '#' + if use_hyphen_bullet and bullet == '*': + bullet = '-' + b = self.bullet_generator(bullet) + i = indent + n = '\n' + i + ' ' + lines = [i + next(b) + _unicode(item).replace('\n', n) for item in self] + return '\n'.join(lines) + + def bullet_generator(self, bullet): + if '#' == bullet: + bullets = [str(i) + '. 
' for i in range(1, len(self.bullets) + 1)] + else: + bullets = [bullet + ' '] * len(self.bullets) + for b in bullets: + yield b + + +class Changelog(object): + """Represents a REP-0132 changelog.""" + + def __init__(self, package_name=None): + self.__package_name = package_name + self.__versions = [] + self.__parsed_versions = [] + self.__dates = {} + self.__content = {} + self.__rst = '' + + def __str__(self): + value = self.__unicode__() + if not _py3: + value = value.encode('ascii', 'replace') + return value + + def __unicode__(self): + msg = [] + if self.__package_name: + msg.append("Changelog for package '{0}'".format(self.package_name)) + for version, date, content in self.foreach_version(reverse=True): + msg.append(' ' + version + ' ({0}):'.format(date)) + for item in content: + msg.extend([' ' + i for i in _unicode(item).splitlines()]) + return '\n'.join(msg) + + @property + def package_name(self): + return self.__package_name + + @package_name.setter + def package_name(self, package_name): + self.__package_name = package_name + + @property + def rst(self): + return self.__rst + + @rst.setter + def rst(self, rst): + self.__rst = rst + + def add_version_section(self, version, date, contents): + """ + Add a version section. 
+ + :param version: ``str`` version as a string + :param date: ``datetime.datetime`` version date + :param contents: ``list(list([str|Reference]))``` contents as a list + of lists which contain a combination of ``str`` and + ``Reference`` objects + :returns: None + """ + if version in self.__versions: + raise DuplicateVersionsException(version) + self.__parsed_versions.append(pkg_resources.parse_version(version)) + self.__parsed_versions = sorted(self.__parsed_versions) + # Cannot go parsed -> str, so sorting must be done by comparison + new_versions = [None] * len(self.__parsed_versions) + for v in self.__versions + [version]: + parsed_v = pkg_resources.parse_version(v) + index = self.__parsed_versions.index(parsed_v) + if index == -1: + raise RuntimeError('Inconsistent internal version storage state') + new_versions[index] = v + self.__versions = new_versions + self.__dates[version] = date + self.__content[version] = contents + + def foreach_version(self, reverse=False): + """ + Create a generator for iterating over the versions, dates and content. + + Versions are stored and iterated in order. + + :param reverse: ``bool`` if True then the iteration is reversed + :returns: ``generator`` for iterating over versions, dates and content + """ + for version in reversed(self.__versions) if reverse else self.__versions: + yield version, self.__dates[version], self.__content[version] + + def get_date_of_version(self, version): + """Return date of a given version as a ``datetime.datetime``.""" + if version not in self.__versions: + raise KeyError("No date for version '{0}'".format(version)) + return self.__dates[version] + + def get_content_of_version(self, version): + """ + Return changelog content for a given version. 
+ + :param version: ``str`` version + :returns: ``list(list([str|Reference]))`` content expanded + """ + if version not in self.__versions: + raise KeyError("No content for version '{0}'".format(version)) + return self.__content[version] + + +class DuplicateVersionsException(Exception): + """Raised when more than one section per version is given.""" + + def __init__(self, version): + self.version = version + Exception.__init__(self, "Version '{0}' is specified twice".format(version)) + + +class InvalidSectionTitle(Exception): + """raised on non REP-0132 section titles.""" + + def __init__(self, title): + self.title = title + msg = "Section title does not conform to REP-0132: '{0}'".format(title) + Exception.__init__(self, msg) + + +class MixedText(object): + """Represents text mixed with references and nested bullets.""" + + def __init__(self, texts=[]): + self.texts = list(texts) + + def __iter__(self): + for text in self.texts: + yield text + + def __str__(self): + value = self.__unicode__() + if not _py3: + value = value.encode('ascii', 'replace') + return value + + def __unicode__(self): + return self.to_txt() + + def to_txt(self, bullet_indent=' '): + lines = [] + for t in self: + if isinstance(t, BulletList): + bullets = [bullet_indent + x for x in _unicode(t).splitlines()] + bullets = ['', ''] + bullets + [''] + lines.extend('\n'.join(bullets)) + else: + lines.append(_unicode(t)) + return ''.join(lines) + + +class Reference(object): + """Represents a piece of text with an associated link.""" + + def __init__(self, text, link): + self.text = text + self.link = link + + def __str__(self): + value = self.__unicode__() + if not _py3: + value = value.encode('ascii', 'replace') + return value + + def __unicode__(self): + return self.as_txt() + + def as_rst(self): + """Self as rst (unicode).""" + if self.text is None: + return _unicode(self.link) + return '`{0} <{1}>`_'.format(self.text, self.link) + + def as_txt(self): + """Self formatted for plain text 
(unicode).""" + if self.text is None: + return _unicode(self.link) + return '{0} <{1}>'.format(self.text, self.link) + + +class Transition(object): + """Represents a trasition element from ReST.""" + + def __str__(self): + value = self.__unicode__() + if not _py3: + value = value.encode('ascii', 'replace') + return value + + def __unicode__(self): + return '-' * 20 + + def __iter__(self): + yield self.unicode() + + +def __test(): + package_name = 'foo' + changelog = Changelog(package_name) + print(populate_changelog_from_rst(changelog, example_rst)) + + +if __name__ == '__main__': + logging.basicConfig() + log.setLevel(logging.DEBUG) + __test() diff --git a/.ci/catkin_pkg/changelog_generator.py b/.ci/catkin_pkg/changelog_generator.py new file mode 100644 index 0000000000..0a9df03f99 --- /dev/null +++ b/.ci/catkin_pkg/changelog_generator.py @@ -0,0 +1,298 @@ +# Software License Agreement (BSD License) +# +# Copyright (c) 2013, Open Source Robotics Foundation, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Open Source Robotics Foundation, Inc. nor +# the names of its contributors may be used to endorse or promote +# products derived from this software without specific prior +# written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Generate/update ROS changelog files. + +The Changelog format is described in REP-0132: + +http://ros.org/reps/rep-0132.html +""" + +import os +import re + +from catkin_pkg.changelog import CHANGELOG_FILENAME +from catkin_pkg.changelog_generator_vcs import Tag + +FORTHCOMING_LABEL = 'Forthcoming' + + +def get_all_changes(vcs_client, skip_merges=False, only_merges=False): + tags = _get_version_tags(vcs_client) + + # query all log entries per tag range + tag2log_entries = {} + previous_tag = Tag(None) + for tag in sorted_tags(tags): + log_entries = vcs_client.get_log_entries( + from_tag=previous_tag.name, to_tag=tag.name, skip_merges=skip_merges, only_merges=only_merges) + tag2log_entries[previous_tag] = log_entries + previous_tag = tag + log_entries = vcs_client.get_log_entries( + from_tag=previous_tag.name, to_tag=None, skip_merges=skip_merges, only_merges=only_merges) + tag2log_entries[previous_tag] = log_entries + return tag2log_entries + + +def get_forthcoming_changes(vcs_client, skip_merges=False, only_merges=False): + tags = _get_version_tags(vcs_client) + latest_tag_name = _get_latest_version_tag_name(vcs_client) + + # query log entries since latest tag only + tag2log_entries = {} + from_tag = Tag(None) + to_tag = Tag(latest_tag_name) + for tag in sorted_tags(tags): + if to_tag.name is None: + to_tag = tag + # ignore non-forthcoming log entries but keep version to identify injection point of forthcoming + 
tag2log_entries[tag] = None + log_entries = vcs_client.get_log_entries( + from_tag=from_tag.name, to_tag=to_tag.name, skip_merges=skip_merges, only_merges=only_merges) + tag2log_entries[from_tag] = log_entries + return tag2log_entries + + +def _get_version_tags(vcs_client): + # get all tags in descending order + tags = vcs_client.get_tags() + version_tags = [t for t in tags if re.match(r'^v?\d+\.\d+.\d+$', t.name)] + return version_tags + + +def _get_latest_version_tag_name(vcs_client): + # get latest tag + tag_name = vcs_client.get_latest_tag_name() + if not re.match(r'^v?\d+\.\d+.\d+$', tag_name): + raise RuntimeError( + "The tag name '{}' doesn't match the version pattern v?x.y.z".format(tag_name)) + return tag_name + + +def generate_changelogs(base_path, packages, tag2log_entries, logger=None, vcs_client=None, skip_contributors=False): + for pkg_path, package in packages.items(): + changelog_path = os.path.join(base_path, pkg_path, CHANGELOG_FILENAME) + if os.path.exists(changelog_path): + continue + # generate package specific changelog file + if logger: + logger.debug("- creating '%s'" % os.path.join(pkg_path, CHANGELOG_FILENAME)) + pkg_tag2log_entries = filter_package_changes(tag2log_entries, pkg_path) + data = generate_changelog_file(package.name, pkg_tag2log_entries, vcs_client=vcs_client, skip_contributors=skip_contributors) + with open(changelog_path, 'wb') as f: + f.write(data.encode('utf-8')) + + +def update_changelogs(base_path, packages, tag2log_entries, logger=None, vcs_client=None, skip_contributors=False): + for pkg_path in packages.keys(): + # update package specific changelog file + if logger: + logger.debug("- updating '%s'" % os.path.join(pkg_path, CHANGELOG_FILENAME)) + pkg_tag2log_entries = filter_package_changes(tag2log_entries, pkg_path) + changelog_path = os.path.join(base_path, pkg_path, CHANGELOG_FILENAME) + with open(changelog_path, 'rb') as f: + data = f.read().decode('utf-8') + data = update_changelog_file(data, pkg_tag2log_entries, 
vcs_client=vcs_client, skip_contributors=skip_contributors) + with open(changelog_path, 'wb') as f: + f.write(data.encode('utf-8')) + + +def filter_package_changes(tag2log_entries, pkg_path): + pkg_tag2log_entries = {} + # collect all log entries relevant for this package + for tag, log_entries in tag2log_entries.items(): + if log_entries is None: + pkg_log_entries = None + else: + pkg_log_entries = [] + for log_entry in log_entries: + if log_entry.affects_path(pkg_path): + pkg_log_entries.append(log_entry) + pkg_tag2log_entries[tag] = pkg_log_entries + return pkg_tag2log_entries + + +def generate_changelog_file(pkg_name, tag2log_entries, vcs_client=None, skip_contributors=False): + blocks = [] + blocks.append(generate_package_headline(pkg_name)) + + for tag in sorted_tags(tag2log_entries.keys()): + log_entries = tag2log_entries[tag] + if log_entries is not None: + blocks.append(generate_version_block(version_from_tag(tag.name), tag.timestamp, log_entries, vcs_client=vcs_client, skip_contributors=skip_contributors)) + + return '\n'.join(blocks) + + +def update_changelog_file(data, tag2log_entries, vcs_client=None, skip_contributors=False): + tags = sorted_tags(tag2log_entries.keys()) + for i, tag in enumerate(tags): + log_entries = tag2log_entries[tag] + if log_entries is None: + continue + content = generate_version_content(log_entries, vcs_client=vcs_client, skip_contributors=skip_contributors) + + # check if version section exists + match = get_version_section_match(data, version_from_tag(tag.name)) + if match: + # prepend content to existing section + data = prepend_version_content(data, version_from_tag(tag.name), content) + assert data is not None + else: + # find injection point of earliest following version + for next_tag in list(tags)[i:]: + match = get_version_section_match(data, version_from_tag(next_tag.name)) + if match: + block = generate_version_block(version_from_tag(tag.name), tag.timestamp, log_entries, vcs_client=vcs_client, 
skip_contributors=skip_contributors) + data = data[:match.start()] + block + '\n' + data[match.start():] + break + if not match: + if tag.name is None: + raise RuntimeError('Could not find section "%s"' % next_tag.name) + else: + raise RuntimeError('Could neither find section "%s" nor any other section' % tag.name) + return data + + +def get_version_section_match(data, version): + pattern = get_version_section_pattern(version) + matches = re.finditer(pattern, data, flags=re.MULTILINE) + matches = list(matches) + if len(matches) > 1: + raise RuntimeError('Found multiple matching sections') + return matches[0] if matches else None + + +def get_version_section_pattern(version): + valid_section_characters = '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~' + headline = get_version_headline(version, None) + pattern = '^(' + re.escape(headline) + r'( \([0-9 \-:|+]+\))?)\r?\n([' + re.escape(valid_section_characters) + ']+)\r?\n?$' + return pattern + + +def prepend_version_content(data, version, content): + pattern = get_version_section_pattern(version) + + def replace_section(match): + headline = match.group(1) + section = match.group(3) + data = content.rstrip() + if data: + data += '\n' + return headline + '\n' + section + '\n' + data + + data, count = re.subn(pattern, replace_section, data, flags=re.MULTILINE) + if count > 1: + raise RuntimeError('Found multiple matching sections') + return data if count == 1 else None + + +def version_from_tag(tag_name): + if tag_name is None: + return None + if tag_name.startswith('v'): + return tag_name[1:] + return tag_name + + +def sorted_tags(tags): + # first return the forthcoming tag + for tag in tags: + if not tag.name: + yield tag + # then return the tags in descending order + name_and_tag = [(t.name, t) for t in tags if t.name] + name_and_tag.sort(key=lambda x: [int(y) for y in version_from_tag(x[0]).split('.')]) + name_and_tag.reverse() + for (_, tag) in name_and_tag: + yield tag + + +def generate_package_headline(pkg_name): + headline 
= 'Changelog for package %s' % pkg_name + section_marker = '^' * len(headline) + return '%s\n%s\n%s\n' % (section_marker, headline, section_marker) + + +def generate_version_block(version, timestamp, log_entries, vcs_client=None, skip_contributors=False): + data = generate_version_headline(version, timestamp) + data += generate_version_content(log_entries, vcs_client=vcs_client, skip_contributors=skip_contributors) + return data + + +def generate_version_headline(version, timestamp): + headline = get_version_headline(version, timestamp) + return '%s\n%s\n' % (headline, '-' * len(headline)) + + +def get_version_headline(version, timestamp): + if not version: + return FORTHCOMING_LABEL + headline = version + if timestamp: + headline += ' (%s)' % timestamp + return headline + + +def generate_version_content(log_entries, vcs_client=None, skip_contributors=False): + data = '' + all_authors = set() + for entry in log_entries: + msg = entry.msg + lines = msg.splitlines() + lines = [line.strip() for line in lines] + lines = [line for line in lines if line and not line.startswith('Signed-off-by:')] + lines = [escape_trailing_underscores(line) for line in lines] + data += '* %s\n' % (replace_repository_references(lines[0], vcs_client=vcs_client) if lines else '') + for line in lines[1:]: + data += ' %s\n' % replace_repository_references(line, vcs_client=vcs_client) + all_authors.add(entry.author) + if all_authors and not skip_contributors: + data += '* Contributors: %s\n' % ', '.join(sorted(all_authors)) + return data + + +def escape_trailing_underscores(line): + if line.endswith('_'): + line = line[:-1] + r'\_' + # match words ending with an underscore which are not followed by another word + # and insert a backslash before the underscore to escape it + line = re.sub(r'(\w+)_([^\w])', '\\1\\_\\2', line) + return line + + +def replace_repository_references(line, vcs_client=None): + if vcs_client: + line = vcs_client.replace_repository_references(line) + return line diff 
--git a/.ci/catkin_pkg/changelog_generator_vcs.py b/.ci/catkin_pkg/changelog_generator_vcs.py new file mode 100644 index 0000000000..75d7175cc2 --- /dev/null +++ b/.ci/catkin_pkg/changelog_generator_vcs.py @@ -0,0 +1,411 @@ +# Software License Agreement (BSD License) +# +# Copyright (c) 2013, Open Source Robotics Foundation, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Open Source Robotics Foundation, Inc. nor +# the names of its contributors may be used to endorse or promote +# products derived from this software without specific prior +# written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+ +"""Extract log information from repositories.""" + +import os +import re +import shutil +import subprocess +import tempfile + + +try: + from shutil import which +except ImportError: + # fallback for Python < 3.3 + def which(cmd): + for path in os.getenv('PATH').split(os.path.pathsep): + file_path = os.path.join(path, cmd) + if os.path.isfile(file_path): + return file_path + return None + + +class Tag(object): + + def __init__(self, name, timestamp=None): + self.name = name + self.timestamp = timestamp + + +class LogEntry(object): + + def __init__(self, msg, affected_paths, author): + self.msg = msg + self.author = author + self._affected_paths = [p for p in affected_paths if p] + + def affects_path(self, path): + for apath in self._affected_paths: + # if the path is the root of the repository + # it is affected by all changes + if path == '.': + return True + if apath.startswith(os.path.join(path, '')): + return True + return False + + +class VcsClientBase(object): + + def __init__(self, path): + self.path = path + + def get_tags(self): + raise NotImplementedError() + + def get_latest_tag_name(self): + raise NotImplementedError() + + def get_log_entries(self, from_tag, to_tag, skip_merges=False, only_merges=False): + raise NotImplementedError() + + def replace_repository_references(self, line): + return line + + def _run_command(self, cmd, env=None): + cwd = os.path.abspath(self.path) + result = {'cmd': ' '.join(cmd), 'cwd': cwd} + try: + proc = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env) + output, _ = proc.communicate() + result['output'] = output.rstrip().decode('utf-8') + result['returncode'] = proc.returncode + except subprocess.CalledProcessError as e: + result['output'] = e.output + result['returncode'] = e.returncode + return result + + def _truncate_timestamps(self, tags): + # truncate timestamps to shortest unique representation + # - date only + # - date including hours and minutes + # - date include hours, 
minutes and seconds + lengths = [10, 16, 19] + for length in lengths: + # filter tags which have not been truncated yet + considered_tags = [t for t in tags if len(t.timestamp) > length] + # count tags which timestamps have the same truncated representation + grouped_by_timestamp = {} + for t in considered_tags: + truncated_timestamp = t.timestamp[:length] + if truncated_timestamp not in grouped_by_timestamp: + grouped_by_timestamp[truncated_timestamp] = [] + grouped_by_timestamp[truncated_timestamp].append(t) + # truncate timestamp of tags which are unique + for truncated_timestamp, similar_tags in grouped_by_timestamp.items(): + if len(similar_tags) == 1: + similar_tags[0].timestamp = truncated_timestamp + + +class GitClient(VcsClientBase): + + type = 'git' # noqa: A003 + + def __init__(self, path): + super(GitClient, self).__init__(path) + self._executable = which('git') + self._repo_hosting = None + self._github_base_url = 'https://github.com/' + self._github_path = None + self._gitlab_base_url = 'https://gitlab.com/' + self._gitlab_path = None + + # query author + def _get_author(self, hash_): + cmd = [self._executable, 'log', hash_, '-n', '1', '--format=format:%aN'] + result = self._run_command(cmd) + if result['returncode']: + raise RuntimeError('Could not fetch author:\n%s' % result['output']) + return result['output'] + + def get_tags(self): + # Get a decorated log, use the refnames to find the ancestor tags + cmd_tag = [self._executable, 'log', '--simplify-by-decoration', '--decorate', '--pretty=oneline'] + result_tag = self._run_command(cmd_tag) + if result_tag['returncode']: + raise RuntimeError('Could not fetch tags:\n%s' % result_tag['output']) + # Parse a comma-separated list of refname decorators out of the log + decorations = ', '.join(re.findall(r'^[a-f0-9]+ \(([^)]*)\) .', result_tag['output'], re.MULTILINE)) + ',' + # Extract only refnames that are tags + tag_names = re.findall('tag: ([^,]+)[,]', decorations) + + tags = [] + for tag_name in 
tag_names: + cmd = [self._executable, 'log', tag_name, '-n', '1', '--format=format:%ai'] + result = self._run_command(cmd) + if result['returncode']: + raise RuntimeError('Could not fetch timestamp:\n%s' % result['output']) + tags.append(Tag(tag_name, result['output'])) + self._truncate_timestamps(tags) + return tags + + def get_latest_tag_name(self): + cmd_describe = [self._executable, 'describe', '--abbrev=0', '--tags'] + result_describe = self._run_command(cmd_describe) + if result_describe['returncode']: + raise RuntimeError('Could not fetch latest tag:\n%s' % result_describe['output']) + tag_name = result_describe['output'] + return tag_name + + def get_log_entries(self, from_tag, to_tag, skip_merges=False, only_merges=False): + # query all hashes in the range + cmd = [self._executable, 'log'] + if from_tag or to_tag: + cmd.append('%s%s' % ('%s..' % to_tag if to_tag else '', from_tag if from_tag else '')) + cmd.append('--format=format:%H') + if skip_merges and only_merges: + raise RuntimeError('Both "skip_merges" and "only_merges" are set to True, which contradicts.') + if skip_merges: + cmd.append('--no-merges') + if only_merges: + cmd.append('--merges') + result = self._run_command(cmd) + if result['returncode']: + raise RuntimeError('Could not fetch commit hashes:\n%s' % result['output']) + + log_entries = [] + if result['output']: + # query further information for each changeset + hashes = result['output'].splitlines() + for hash_ in hashes: + # query commit message + cmd = [self._executable, 'log', hash_, '-n', '1', '--format=format:%B'] + result = self._run_command(cmd) + if result['returncode']: + raise RuntimeError('Could not fetch commit message:\n%s' % result['output']) + if result['output'] == from_tag: + continue + msg = result['output'] + # query affected paths + cmd = [self._executable, 'show', '--first-parent', hash_, '--name-only', '--format=format:""'] + result = self._run_command(cmd) + if result['returncode']: + raise RuntimeError('Could not 
fetch affected paths:\n%s' % result['output']) + affected_paths = result['output'].splitlines() + log_entries.append(LogEntry(msg, affected_paths, self._get_author(hash_))) + return log_entries + + def replace_repository_references(self, line): + if self._repo_hosting is None: + self._repo_hosting = False + try: + self._determine_repo_hosting() + except RuntimeError: + pass + if self._repo_hosting == 'github': + line = self._replace_github_issue_references(line) + elif self._repo_hosting == 'gitlab': + line = self._replace_gitlab_issue_references(line) + return line + + def _determine_repo_hosting(self): + cmd = [self._executable, 'config', '--get', 'remote.origin.url'] + result = self._run_command(cmd) + if result['returncode']: + raise RuntimeError('Could not fetch remote url:\n%s' % result['output']) + + # detect github hosting + prefixes = ['git@github.com:', 'https://github.com/', 'git://github.com/'] + for prefix in prefixes: + if result['output'].startswith(prefix): + self._repo_hosting = 'github' + path = result['output'][len(prefix):] + if path.endswith('.git'): + path = path[:-4] + self._github_path = path + break + + # detect gitlab hosting + prefixes = ['git@gitlab.com:', 'https://gitlab.com/', 'git://gitlab.com/'] + for prefix in prefixes: + if result['output'].startswith(prefix): + self._repo_hosting = 'gitlab' + path = result['output'][len(prefix):] + if path.endswith('.git'): + path = path[:-4] + self._gitlab_path = path + break + + def _replace_github_issue_references(self, line): + valid_name = '[\\w._-]+' + issue_pattern = '#(\\d+)' + + def replace_issue_number(match): + issue_url = self._github_base_url + if match.group(1): + path = match.group(1) + issue_url += path + else: + path = '' + issue_url += self._github_path + issue_number = match.group(2) + issue_url += '/issues/' + issue_number + return '`%s#%s <%s>`_' % (path, issue_number, issue_url) + line = re.sub(('(%s/%s)?' 
% (valid_name, valid_name)) + issue_pattern, replace_issue_number, line) + return line + + def _replace_gitlab_issue_references(self, line): + valid_name = '[\\w._-]+' + issue_pattern = '#(\\d+)' + merge_request_pattern = '!(\\d+)' + + def replace_issue_number(match): + issue_url = self._gitlab_base_url + if match.group(1): + path = match.group(1) + issue_url += path + else: + path = '' + issue_url += self._gitlab_path + issue_number = match.group(3) + issue_url += '/-/issues/' + issue_number + return '`%s#%s <%s>`_' % (path, issue_number, issue_url) + line = re.sub(('(%s(/%s)+)?' % (valid_name, valid_name)) + issue_pattern, replace_issue_number, line) + + def replace_merge_request_number(match): + merge_request_url = self._gitlab_base_url + if match.group(1): + path = match.group(1) + merge_request_url += path + else: + path = '' + merge_request_url += self._gitlab_path + merge_request_number = match.group(3) + merge_request_url += '/-/merge_requests/' + merge_request_number + return '`%s!%s <%s>`_' % (path, merge_request_number, merge_request_url) + line = re.sub(('(%s(/%s)+)?' 
% (valid_name, valid_name)) + merge_request_pattern, replace_merge_request_number, line) + return line + + +class HgClient(VcsClientBase): + + type = 'hg' # noqa: A003 + + def __init__(self, path): + super(HgClient, self).__init__(path) + self._executable = which('hg') + + # query author + def _get_author(self, hash_): + cmd = [self._executable, 'log', '-r', hash_, '--template', '{author}'] + result = self._run_command(cmd) + if result['returncode']: + raise RuntimeError('Could not fetch author:\n%s' % result['output']) + return result['output'] + + def get_tags(self): + cmd_tag = [self._executable, 'tags', '-q'] + result_tag = self._run_command(cmd_tag) + if result_tag['returncode']: + raise RuntimeError('Could not fetch tags:\n%s' % result_tag['output']) + tag_names = result_tag['output'].splitlines() + + tags = [] + for tag_name in tag_names: + cmd = [self._executable, 'log', '-r', tag_name, '--template', '{date|isodatesec}'] + result = self._run_command(cmd) + if result['returncode']: + raise RuntimeError('Could not fetch timestamp:\n%s' % result['output']) + tags.append(Tag(tag_name, result['output'])) + self._truncate_timestamps(tags) + return tags + + def get_latest_tag_name(self): + cmd_log = [self._executable, 'log', '--rev', '.', '--template', '{latesttag}'] + result_log = self._run_command(cmd_log) + if result_log['returncode']: + raise RuntimeError('Could not fetch latest tag:\n%s' % result_log['output']) + tag_name = result_log['output'] + if tag_name == 'null': + raise RuntimeError('Could not find latest tagn') + return tag_name + + def get_log_entries(self, from_tag, to_tag, skip_merges=False, only_merges=False): + # query all hashes in the range + # ascending chronological order since than it is easier to handle empty tag names + revrange = '%s:%s' % ((to_tag if to_tag else ''), (from_tag if from_tag else 'tip')) + if to_tag: + revrange += '-%s' % to_tag + if from_tag: + revrange += '-%s' % from_tag + cmd = [self._executable, 'log', '-r', revrange, 
'--template', '{rev}\n'] + result = self._run_command(cmd) + if result['returncode']: + raise RuntimeError('Could not fetch commit hashes:\n%s' % result['output']) + + tmp_base = tempfile.mkdtemp('-hg-style') + try: + style_file = os.path.join(tmp_base, 'hg-changeset-files-per-line.style') + with open(style_file, 'w') as f: + f.write("changeset = '{files}'\n") + f.write("file = '{file}\\n'\n") + + log_entries = [] + if result['output']: + # query further information for each changeset + revs = reversed(result['output'].splitlines()) + for rev in revs: + # query commit message + cmd = [self._executable, 'log', '-r', rev, '-l', '1', '--template', '{desc}'] + result = self._run_command(cmd) + if result['returncode']: + raise RuntimeError('Could not fetch commit message:\n%s' % result['output']) + if result['output'] == from_tag: + continue + msg = result['output'] + # query affected paths + cmd = [self._executable, 'log', '-r', rev, '-l', '1', '--style', style_file] + result = self._run_command(cmd) + if result['returncode']: + raise RuntimeError('Could not fetch affected paths:\n%s' % result['output']) + affected_paths = result['output'].splitlines() + log_entries.append(LogEntry(msg, affected_paths, self._get_author(rev))) + finally: + shutil.rmtree(tmp_base) + return log_entries + + +def get_vcs_client(base_path): + vcs_clients = [] + vcs_clients.append(GitClient) + vcs_clients.append(HgClient) + client_types = [c.type for c in vcs_clients] + if len(client_types) != len(set(client_types)): + raise RuntimeError('Multiple vcs clients share the same type: %s' % ', '.join(sorted(client_types))) + + for vcs_client in vcs_clients: + if os.path.exists(os.path.join(base_path, '.%s' % vcs_client.type)): + return vcs_client(base_path) + raise RuntimeError('Could not detect repository type - currently supports: %s' % ', '.join([c.type for c in vcs_clients])) diff --git a/.ci/catkin_pkg/cli/__init__.py b/.ci/catkin_pkg/cli/__init__.py new file mode 100644 index 
0000000000..e69de29bb2 diff --git a/.ci/catkin_pkg/cli/create_pkg.py b/.ci/catkin_pkg/cli/create_pkg.py new file mode 100644 index 0000000000..6626b533ca --- /dev/null +++ b/.ci/catkin_pkg/cli/create_pkg.py @@ -0,0 +1,70 @@ +"""This script creates the skeletton of a catkin package.""" + +from __future__ import print_function + +import argparse +import os +import sys + +from catkin_pkg.package_templates import create_package_files, PackageTemplate + + +def main(argv=sys.argv[1:], parent_path=os.getcwd()): + parser = argparse.ArgumentParser( + description='Creates a new catkin package') + parser.add_argument('name', + nargs=1, + help='The name for the package') + parser.add_argument('--meta', + action='store_true', + help='Creates meta-package files') + parser.add_argument('dependencies', + nargs='*', + help='Catkin package Dependencies') + parser.add_argument('-s', '--sys-deps', + nargs='*', + help='System Dependencies') + parser.add_argument('-b', '--boost-comps', + nargs='*', + help='Boost Components') + parser.add_argument('-V', '--pkg_version', + action='store', + help='Initial Package version') + parser.add_argument('-D', '--description', + action='store', + help='Description') + parser.add_argument('-l', '--license', + action='append', + help='Name for License, (e.g. 
BSD, MIT, GPLv3...)') + parser.add_argument('-a', '--author', + action='append', + help='A single author, may be used multiple times') + parser.add_argument('-m', '--maintainer', + action='append', + help='A single maintainer, may be used multiple times') + rosdistro_name = os.environ['ROS_DISTRO'] if 'ROS_DISTRO' in os.environ else None + parser.add_argument('--rosdistro', required=rosdistro_name is None, default=rosdistro_name, help='The ROS distro (default: environment variable ROS_DISTRO if defined)') + + args = parser.parse_args(argv) + + try: + package_name = args.name[0] + target_path = os.path.join(parent_path, package_name) + package_template = PackageTemplate._create_package_template( + package_name=package_name, + description=args.description, + licenses=args.license or [], + maintainer_names=args.maintainer, + author_names=args.author, + version=args.pkg_version, + catkin_deps=args.dependencies, + system_deps=args.sys_deps, + boost_comps=args.boost_comps) + create_package_files(target_path=target_path, + package_template=package_template, + rosdistro=args.rosdistro, + newfiles={}, + meta=args.meta) + print('Successfully created files in %s. Please adjust the values in package.xml.' 
% target_path) + except ValueError as vae: + parser.error(str(vae)) diff --git a/.ci/catkin_pkg/cli/find_pkg.py b/.ci/catkin_pkg/cli/find_pkg.py new file mode 100644 index 0000000000..726ebed010 --- /dev/null +++ b/.ci/catkin_pkg/cli/find_pkg.py @@ -0,0 +1,29 @@ +"""This script finds a catkin packages.""" + +from __future__ import print_function + +import argparse +import os +import sys + +from catkin_pkg.packages import find_packages + + +def main(argv=sys.argv[1:]): + parser = argparse.ArgumentParser(description='Find a catkin package') + parser.add_argument('pkg', help='The name of the package') + parser.add_argument('base_path', nargs='?', default=os.curdir, help='The base path to crawl for packages') + + args = parser.parse_args(argv) + + try: + packages = find_packages(args.base_path) + catkin_pkg = [path for path, p in packages.items() if p.name == args.pkg] + if catkin_pkg: + print(catkin_pkg[0]) + else: + print("Could not find package '%s'." % args.pkg, file=sys.stderr) + sys.exit(2) + except RuntimeError as e: + print('ERROR: ' + str(e), file=sys.stderr) + sys.exit(1) diff --git a/.ci/catkin_pkg/cli/generate_changelog.py b/.ci/catkin_pkg/cli/generate_changelog.py new file mode 100644 index 0000000000..082299185c --- /dev/null +++ b/.ci/catkin_pkg/cli/generate_changelog.py @@ -0,0 +1,137 @@ +"""This script generates REP-0132 CHANGELOG.rst files for git or hg repositories.""" + +from __future__ import print_function + +import argparse +import logging +import os +import sys + +from catkin_pkg.changelog import CHANGELOG_FILENAME +from catkin_pkg.changelog_generator import generate_changelog_file, generate_changelogs, get_all_changes, get_forthcoming_changes, update_changelogs +from catkin_pkg.changelog_generator_vcs import get_vcs_client +from catkin_pkg.packages import find_packages + +try: + raw_input +except NameError: + raw_input = input # noqa: A001 + + +def prompt_continue(msg, default): + """Prompt the user for continuation.""" + if default: + msg += ' 
[Y/n]?' + else: + msg += ' [y/N]?' + + while True: + response = raw_input(msg) + if not response: + response = 'y' if default else 'n' + else: + response = response.lower() + + if response in ['y', 'n']: + return response == 'y' + + print("Response '%s' was not recognized, please use one of the following options: y, Y, n, N" % response, file=sys.stderr) + + +def main(sysargs=None): + parser = argparse.ArgumentParser(description='Generate a REP-0132 %s' % CHANGELOG_FILENAME) + group_merge = parser.add_mutually_exclusive_group() + parser.add_argument( + '-a', '--all', action='store_true', default=False, + help='Generate changelog for all versions instead of only the forthcoming one (only supported when no changelog file exists yet)') + group_merge.add_argument( + '--only-merges', action='store_true', default=False, + help='Only add merge commits to the changelog') + parser.add_argument( + '--print-root', action='store_true', default=False, + help='Output changelog content to the console as if there would be only one package in the root of the repository') + parser.add_argument( + '--skip-contributors', action='store_true', default=False, + help='Skip adding the list of contributors to the changelog') + group_merge.add_argument( + '--skip-merges', action='store_true', default=False, + help='Skip adding merge commits to the changelog') + parser.add_argument( + '-y', '--non-interactive', action='store_true', default=False, + help="Run without user interaction, confirming all questions with 'yes'") + args = parser.parse_args(sysargs) + + base_path = '.' 
+ logging.basicConfig(format='%(message)s', level=logging.DEBUG) + + vcs_client = get_vcs_client(base_path) + + if args.print_root: + # printing status messages to stderr to allow piping the changelog to a file + if args.all: + print('Querying all tags and commit information...', file=sys.stderr) + tag2log_entries = get_all_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges) + print('Generating changelog output with all versions...', file=sys.stderr) + else: + print('Querying commit information since latest tag...', file=sys.stderr) + tag2log_entries = get_forthcoming_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges) + print('Generating changelog files with forthcoming version...', file=sys.stderr) + print('', file=sys.stderr) + data = generate_changelog_file('repository-level', tag2log_entries, vcs_client=vcs_client) + print(data) + return 0 + + # find packages + packages = find_packages(base_path) + if not packages: + raise RuntimeError('No packages found') + print('Found packages: %s' % ', '.join(sorted(p.name for p in packages.values()))) + + # check for missing changelogs + missing_changelogs = [] + for pkg_path, package in packages.items(): + changelog_path = os.path.join(base_path, pkg_path, CHANGELOG_FILENAME) + if not os.path.exists(changelog_path): + missing_changelogs.append(package.name) + + if args.all and not missing_changelogs: + raise RuntimeError('All packages already have a changelog. 
Either remove (some of) them before using --all or invoke the script without --all.') + + if args.all and len(missing_changelogs) != len(packages): + ignored = set([p.name for p in packages.values()]) - set(missing_changelogs) + print('The following packages already have a changelog file and will be ignored: %s' % ', '.join(sorted(ignored)), file=sys.stderr) + + # prompt to switch to --all + if not args.all and missing_changelogs: + print('Some of the packages have no changelog file: %s' % ', '.join(sorted(missing_changelogs))) + print('You might consider to use --all to generate the changelogs for all versions (not only for the forthcoming version).') + if not args.non_interactive and not prompt_continue('Continue without --all option', default=False): + raise RuntimeError('Skipping generation, rerun the script with --all.') + + if args.all: + print('Querying all tags and commit information...') + tag2log_entries = get_all_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges) + print('Generating changelog files with all versions...') + generate_changelogs(base_path, packages, tag2log_entries, logger=logging, vcs_client=vcs_client, skip_contributors=args.skip_contributors) + else: + print('Querying commit information since latest tag...') + tag2log_entries = get_forthcoming_changes(vcs_client, skip_merges=args.skip_merges, only_merges=args.only_merges) + # separate packages with/without a changelog file + packages_without = {pkg_path: package for pkg_path, package in packages.items() if package.name in missing_changelogs} + if packages_without: + print('Generating changelog files with forthcoming version...') + generate_changelogs(base_path, packages_without, tag2log_entries, logger=logging, vcs_client=vcs_client, skip_contributors=args.skip_contributors) + packages_with = {pkg_path: package for pkg_path, package in packages.items() if package.name not in missing_changelogs} + if packages_with: + print('Updating forthcoming section of 
changelog files...') + update_changelogs(base_path, packages_with, tag2log_entries, logger=logging, vcs_client=vcs_client, skip_contributors=args.skip_contributors) + print('Done.') + print('Please review the extracted commit messages and consolidate the changelog entries before committing the files!') + + +def main_catching_runtime_error(*args, **kwargs): + try: + main(*args, **kwargs) + except RuntimeError as e: + print('ERROR: ' + str(e), file=sys.stderr) + sys.exit(1) diff --git a/.ci/catkin_pkg/cli/package_version.py b/.ci/catkin_pkg/cli/package_version.py new file mode 100644 index 0000000000..62eee185c4 --- /dev/null +++ b/.ci/catkin_pkg/cli/package_version.py @@ -0,0 +1,39 @@ +from __future__ import print_function + +import argparse +import os +import sys + +from catkin_pkg.package_version import bump_version +from catkin_pkg.package_version import update_versions +from catkin_pkg.packages import find_packages, verify_equal_package_versions + +# find the import relatively if available to work before installing catkin or overlaying installed version +if os.path.exists(os.path.join(os.path.dirname(__file__), '..', 'python', 'catkin', '__init__.py')): + sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'python')) + + +def main(): + parser = argparse.ArgumentParser(description='Show or bump the version number in package.xml files.') + parser.add_argument('path', nargs='?', default='.', help='The path to a parent folder which contains package.xml files (default: .)') + parser.add_argument('--bump', choices=('major', 'minor', 'patch'), help='Which part of the version number to bump?') + args = parser.parse_args() + + try: + packages = find_packages(args.path) + if not packages: + print('No packages found', file=sys.stderr) + sys.exit(1) + version = verify_equal_package_versions(packages.values()) + + # only print the version number + if args.bump is None: + print(version) + + else: + # bump the version number + new_version = bump_version(version, 
args.bump) + update_versions(packages, new_version) + print('%s -> %s' % (version, new_version)) + except Exception as e: # noqa: B902 + sys.exit(str(e)) diff --git a/.ci/catkin_pkg/cli/prepare_release.py b/.ci/catkin_pkg/cli/prepare_release.py new file mode 100644 index 0000000000..b7b5c9a497 --- /dev/null +++ b/.ci/catkin_pkg/cli/prepare_release.py @@ -0,0 +1,451 @@ +from __future__ import print_function + +import argparse +import os +import re +import subprocess +import sys + +from catkin_pkg import metapackage +from catkin_pkg.changelog import CHANGELOG_FILENAME, get_changelog_from_path +from catkin_pkg.package import InvalidPackage, PACKAGE_MANIFEST_FILENAME +from catkin_pkg.package_version import bump_version +from catkin_pkg.package_version import get_forthcoming_label, update_changelog_sections, update_versions +from catkin_pkg.packages import find_packages, verify_equal_package_versions +from catkin_pkg.terminal_color import disable_ANSI_colors, fmt +from catkin_pkg.workspace_vcs import get_repository_type, vcs_remotes + +try: + from shutil import which +except ImportError: + # fallback for Python < 3.3 + def which(exe): + for path in os.getenv('PATH').split(os.path.pathsep): + file_path = os.path.join(path, exe) + if os.path.isfile(file_path): + return file_path + return None + +try: + raw_input +except NameError: + raw_input = input # noqa: A001 + + +def has_changes(base_path, path, vcs_type): + cmd = [_find_executable(vcs_type), 'diff', path] + try: + output = subprocess.check_output(cmd, cwd=base_path) + except subprocess.CalledProcessError as e: + raise RuntimeError(fmt("@{rf}Failed to check if '@{boldon}%s@{boldoff}' has modifications: %s" % (path, str(e)))) + return output.decode('utf-8').rstrip() != '' + + +def prompt_continue(msg, default): + """Prompt the user for continuation.""" + if default: + msg += fmt(' @{yf}[Y/n]@{reset}?') + else: + msg += fmt(' @{yf}[y/N]@{reset}?') + + while True: + _flush_stdin() + try: + response = raw_input(msg) + 
except EOFError: + response = '' + if not response: + response = 'y' if default else 'n' + else: + response = response.lower() + + if response in ['y', 'n']: + return response == 'y' + + print( + fmt( + "@{yf}Response '@{boldon}%s@{boldoff}' was not recognized, please use one of the following options: %s" % + (response, ', '.join([('@{boldon}%s@{boldoff}' % x) for x in ['y', 'Y', 'n', 'N']])) + ), file=sys.stderr) + + +def _flush_stdin(): + try: + from termios import tcflush, TCIFLUSH + tcflush(sys.stdin, TCIFLUSH) + except ImportError: + # fallback if not supported on some platforms + pass + + +def get_git_branch(base_path): + cmd_branch = [_find_executable('git'), 'rev-parse', '--abbrev-ref', 'HEAD'] + try: + branch = subprocess.check_output(cmd_branch, cwd=base_path) + except subprocess.CalledProcessError as e: + raise RuntimeError(fmt('@{rf}Could not determine git branch: %s' % str(e))) + return branch.decode('utf-8').rstrip() + + +def get_git_remote(base_path): + branch = get_git_branch(base_path) + + cmd_remote = [_find_executable('git'), 'config', '--get', 'branch.%s.remote' % branch] + try: + remote = subprocess.check_output(cmd_remote, cwd=base_path) + except subprocess.CalledProcessError as e: + msg = 'Could not determine git remote: %s' % str(e) + msg += "\n\nMay be the branch '%s' is not tracking a remote branch?" 
% branch + raise RuntimeError(fmt('@{rf}%s' % msg)) + return remote.decode('utf-8').rstrip() + + +# Dry-run the push (git only, 'push -n') to verify the remote/credentials before any local changes are made. +def try_repo_push(base_path, vcs_type): + if vcs_type in ['git']: + print('Trying to push to remote repository (dry run)...') + cmd = [_find_executable(vcs_type), 'push'] + if vcs_type == 'git': + cmd.extend(['-n'] + [get_git_remote(base_path), get_git_branch(base_path)]) + try: + subprocess.check_call(cmd, cwd=base_path) + except (subprocess.CalledProcessError, RuntimeError) as e: + raise RuntimeError(fmt('@{rf}Failed to dry push to repository: %s' % str(e))) + + +# Return True when 'status' reports nothing pending; otherwise print the raw status output and return False. +def check_clean_working_copy(base_path, vcs_type): + if vcs_type in ['bzr', 'hg', 'svn']: + cmd = [_find_executable(vcs_type), 'status'] + elif vcs_type in ['git']: + cmd = [_find_executable(vcs_type), 'status', '-s', '-u'] + else: + assert False, 'Unknown vcs type: %s' % vcs_type + try: + output = subprocess.check_output(cmd, cwd=base_path) + except subprocess.CalledProcessError as e: + raise RuntimeError(fmt('@{rf}Failed to check working copy state: %s' % str(e))) + output = output.decode('utf-8').rstrip() + if output != '': + print(output) + return False + return True + + +# Commit all release-relevant files (package.xml, optional setup.py, changelogs); returns the command so callers can display it. +def commit_files(base_path, vcs_type, packages, packages_with_changelogs, message, dry_run=False): + cmd = [_find_executable(vcs_type), 'commit', '-m', message] + cmd += [os.path.join(p, PACKAGE_MANIFEST_FILENAME) for p in packages.keys()] + cmd += [s for s in [os.path.join(p, 'setup.py') for p in packages.keys()] if os.path.exists(s)] + cmd += [path for path, _, _ in packages_with_changelogs.values()] + if not dry_run: + try: + subprocess.check_call(cmd, cwd=base_path) + except subprocess.CalledProcessError as e: + raise RuntimeError(fmt('@{rf}Failed to commit package.xml files: %s' % str(e))) + return cmd + + +# Create the release tag; for svn, derive a tags/ URL from the checkout URL ('svn cp'), requiring --tag-prefix for subfolder layouts. +def tag_repository(base_path, vcs_type, tag_name, has_tag_prefix, dry_run=False): + if vcs_type in ['bzr', 'git', 'hg']: + cmd = [_find_executable(vcs_type), 'tag', tag_name] + elif vcs_type == 'svn': + svn_url = 
vcs_remotes(base_path, 'svn')[5:] + if os.path.basename(svn_url) == 'trunk': + # tag "trunk" + base_url = os.path.dirname(svn_url) + elif os.path.basename(os.path.dirname(svn_url)) == 'branches': + # tag a direct subfolder of "branches" + base_url = os.path.dirname(os.path.dirname(svn_url)) + elif svn_url.rfind('/trunk/') != -1: + # tag any subfolder of trunk but require a tag prefix + if not has_tag_prefix: + raise RuntimeError(fmt('@{rf}When tagging a subfolder you must use --tag-prefix to make your tag name unique')) + base_url = svn_url[:svn_url.rfind('/trunk/')] + elif svn_url.rfind('/branches/') != -1: + # tag any subfolder of trunk but require a tag prefix + if not has_tag_prefix: + raise RuntimeError(fmt('@{rf}When tagging a subfolder you must use --tag-prefix to make your tag name unique')) + base_url = svn_url[:svn_url.rfind('/branches/')] + else: + raise RuntimeError(fmt("@{rf}Could not determine base URL of SVN repository '%s'" % svn_url)) + tag_url = '%s/tags/%s' % (base_url, tag_name) + cmd = ['svn', 'cp', '-m', '"tagging %s"' % tag_name, svn_url, tag_url] + else: + assert False, 'Unknown vcs type: %s' % vcs_type + if not dry_run: + try: + subprocess.check_call(cmd, cwd=base_path) + except subprocess.CalledProcessError as e: + raise RuntimeError(fmt('@{rf}Failed to tag repository: %s' % str(e))) + return cmd + + +# Push the release commit, then the tag, to the remote (git only); returns the commands for display. +def push_changes(base_path, vcs_type, tag_name, dry_run=False): + commands = [] + + # push changes to the repository + cmd = [_find_executable(vcs_type), 'push'] + if vcs_type == 'git': + cmd.extend([get_git_remote(base_path), get_git_branch(base_path)]) + commands.append(cmd) + if not dry_run: + try: + subprocess.check_call(cmd, cwd=base_path) + except subprocess.CalledProcessError as e: + raise RuntimeError(fmt('@{rf}Failed to push changes to the repository: %s\n\nYou need to manually push the changes/tag to the repository.' 
% str(e))) + + # push tags to the repository + if vcs_type in ['git']: + cmd = [_find_executable(vcs_type), 'push', get_git_remote(base_path), tag_name] + commands.append(cmd) + if not dry_run: + try: + subprocess.check_call(cmd, cwd=base_path) + except subprocess.CalledProcessError as e: + raise RuntimeError(fmt('@{rf}Failed to push tag to the repository: %s\n\nYou need to manually push the new tag to the repository.' % str(e))) + + return commands + + +# Resolve the vcs client executable on PATH or raise. +def _find_executable(vcs_type): + file_path = which(vcs_type) + if file_path is None: + raise RuntimeError(fmt('@{rf}Could not find vcs binary: %s' % vcs_type)) + return file_path + + +# Console entry point: print RuntimeError messages instead of tracebacks, exit 1. +def main(): + try: + _main() + except RuntimeError as e: + print(e, file=sys.stderr) + sys.exit(1) + + +# Core of catkin_prepare_release: validate the workspace, bump versions, update changelogs, commit, tag and push. +def _main(): + parser = argparse.ArgumentParser( + description='Runs the commands to bump the version number, commit the modified %s files and create a tag in the repository.' % PACKAGE_MANIFEST_FILENAME) + parser.add_argument('--bump', choices=('major', 'minor', 'patch'), default='patch', help='Which part of the version number to bump? 
(default: %(default)s)') + parser.add_argument('--version', help='Specify a specific version to use') + parser.add_argument('--no-color', action='store_true', default=False, help='Disables colored output') + parser.add_argument('--no-push', action='store_true', default=False, help='Disables pushing to remote repository') + parser.add_argument('-t', '--tag-prefix', default='', help='Add this prefix to the created release tag') + parser.add_argument('-y', '--non-interactive', action='store_true', default=False, help="Run without user interaction, confirming all questions with 'yes'") + args = parser.parse_args() + +# Enforce loose semver (positive integers, no leading zeros) for an explicitly requested version. + if args.version and not re.match(r'^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)$', args.version): + parser.error('The passed version must follow the conventions (positive integers x.y.z with no leading zeros)') + + if args.tag_prefix and ' ' in args.tag_prefix: + parser.error('The tag prefix must not contain spaces') + + # force --no-color if stdout is non-interactive + if not sys.stdout.isatty(): + args.no_color = True + # disable colors if asked + if args.no_color: + disable_ANSI_colors() + + base_path = '.' 
+ + print(fmt('@{gf}Prepare the source repository for a release.')) + + # determine repository type + vcs_type = get_repository_type(base_path) + if vcs_type is None: + raise RuntimeError(fmt("@{rf}Could not determine repository type of @{boldon}'%s'@{boldoff}" % base_path)) + print(fmt('Repository type: @{boldon}%s@{boldoff}' % vcs_type)) + + # find packages + try: + packages = find_packages(base_path) + except InvalidPackage as e: + raise RuntimeError(fmt("@{rf}Invalid package at path @{boldon}'%s'@{boldoff}:\n %s" % (os.path.abspath(base_path), str(e)))) + if not packages: + raise RuntimeError(fmt('@{rf}No packages found')) + print('Found packages: %s' % ', '.join([fmt('@{bf}@{boldon}%s@{boldoff}@{reset}' % p.name) for p in packages.values()])) + + # complain about packages with unsupported build_type as they might require additional steps before being released + # complain about packages with upper case character since they won't be releasable with bloom + unsupported_pkg_names = [] + invalid_pkg_names = [] + valid_build_types = ['catkin', 'ament_cmake', 'ament_python'] + for package in packages.values(): + build_types = package.get_unconditional_build_types() + if any(build_type not in valid_build_types for build_type in build_types): + unsupported_pkg_names.append(package.name) + if package.name != package.name.lower(): + invalid_pkg_names.append(package.name) + +# NOTE(review): the warning text below has grammar typos ('package are', stray apostrophe); left unchanged since it is runtime output. + if unsupported_pkg_names: + print( + fmt( + "@{yf}Warning: the following package are not of build_type %s and may require manual steps to release': %s" % + (str(valid_build_types), ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(unsupported_pkg_names)])) + ), file=sys.stderr) + if not args.non_interactive and not prompt_continue('Continue anyway', default=False): + raise RuntimeError(fmt('@{rf}Aborted release, verify that unsupported packages are ready to be released or release manually.')) + if invalid_pkg_names: + print( + fmt( + "@{yf}Warning: the following package names contain upper case 
characters which violate both ROS and Debian naming conventions': %s" % + ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(invalid_pkg_names)]) + ), file=sys.stderr) + if not args.non_interactive and not prompt_continue('Continue anyway', default=False): + raise RuntimeError(fmt('@{rf}Aborted release, fix the names of the packages.')) + +# Collect files with pending local edits; the release aborts later if any are found. + local_modifications = [] + for pkg_path, package in packages.items(): + # verify that the package.xml files don't have modifications pending + package_xml_path = os.path.join(pkg_path, PACKAGE_MANIFEST_FILENAME) + if has_changes(base_path, package_xml_path, vcs_type): + local_modifications.append(package_xml_path) + # verify that metapackages are valid + if package.is_metapackage(): + try: + metapackage.validate_metapackage(pkg_path, package) + except metapackage.InvalidMetapackage as e: + raise RuntimeError(fmt( + "@{rf}Invalid metapackage at path '@{boldon}%s@{boldoff}':\n %s\n\nSee requirements for metapackages: %s" % + (os.path.abspath(pkg_path), str(e), metapackage.DEFINITION_URL))) + # verify that the setup.py files don't have modifications pending + setup_py_path = os.path.join(pkg_path, 'setup.py') + if os.path.exists(setup_py_path) and has_changes(base_path, setup_py_path, vcs_type): + local_modifications.append(setup_py_path) + + # fetch current version and verify that all packages have same version number + old_version = verify_equal_package_versions(packages.values()) + if args.version: + new_version = args.version + else: + new_version = bump_version(old_version, args.bump) + tag_name = args.tag_prefix + new_version + + if ( + not args.non_interactive and + not prompt_continue( + fmt( + "Prepare release of version '@{bf}@{boldon}%s@{boldoff}@{reset}'%s" % + (new_version, " (tagged as '@{bf}@{boldon}%s@{boldoff}@{reset}')" % tag_name if args.tag_prefix else '') + ), default=True) + ): + raise RuntimeError(fmt("@{rf}Aborted release, use option '--bump' to release a different version and/or '--tag-prefix' to add 
a prefix to the tag name.")) + + # check for changelog entries + missing_changelogs = [] + missing_changelogs_but_forthcoming = {} + for pkg_path, package in packages.items(): + changelog_path = os.path.join(pkg_path, CHANGELOG_FILENAME) + if not os.path.exists(changelog_path): + missing_changelogs.append(package.name) + continue + # verify that the changelog files don't have modifications pending + if has_changes(base_path, changelog_path, vcs_type): + local_modifications.append(changelog_path) + changelog = get_changelog_from_path(changelog_path, package.name) + try: + changelog.get_content_of_version(new_version) + except KeyError: + # check that forthcoming section exists + forthcoming_label = get_forthcoming_label(changelog.rst) + if forthcoming_label: + missing_changelogs_but_forthcoming[package.name] = (changelog_path, changelog, forthcoming_label) + else: + missing_changelogs.append(package.name) + + if local_modifications: + raise RuntimeError(fmt('@{rf}The following files have modifications, please commit/revert them before:' + ''.join([('\n- @{boldon}%s@{boldoff}' % path) for path in local_modifications]))) + + if missing_changelogs: + print( + fmt( + "@{yf}Warning: the following packages do not have a changelog file or entry for version '@{boldon}%s@{boldoff}': %s" % + (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(missing_changelogs)])) + ), file=sys.stderr) + if not args.non_interactive and not prompt_continue('Continue without changelogs', default=False): + raise RuntimeError(fmt("@{rf}Aborted release, populate the changelog with '@{boldon}catkin_generate_changelog@{boldoff}' and review / clean up the content.")) + + # verify that repository is pushable (if the vcs supports dry run of push) + if not args.no_push: + try_repo_push(base_path, vcs_type) + + # check for staged changes and modified and untracked files + print(fmt('@{gf}Checking if working copy is clean (no staged changes, no modified files, no untracked files)...')) 
+ is_clean = check_clean_working_copy(base_path, vcs_type) + if not is_clean: + print(fmt('@{yf}Warning: the working copy contains other changes. Consider reverting/committing/stashing them before preparing a release.'), file=sys.stderr) + if not args.non_interactive and not prompt_continue('Continue anyway', default=False): + raise RuntimeError(fmt('@{rf}Aborted release, clean the working copy before trying again.')) + + # for svn verify that we know how to tag that repository + if vcs_type in ['svn']: + tag_svn_cmd = tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '', dry_run=True) + + # tag forthcoming changelog sections + update_changelog_sections(missing_changelogs_but_forthcoming, new_version) + print(fmt( + "@{gf}Rename the forthcoming section@{reset} of the following packages to version '@{bf}@{boldon}%s@{boldoff}@{reset}': %s" % + (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(missing_changelogs_but_forthcoming.keys())])))) + + # bump version number + update_versions(packages, new_version) + print(fmt("@{gf}Bump version@{reset} of all packages from '@{bf}%s@{reset}' to '@{bf}@{boldon}%s@{boldoff}@{reset}'" % (old_version, new_version))) + +# pushed: None = undecided yet, False = user declined the push, True = committed/pushed. + pushed = None + if vcs_type in ['svn']: + # for svn everything affects the remote repository immediately + commands = [] + commands.append(commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name, dry_run=True)) + commands.append(tag_svn_cmd) + if not args.no_push: + print(fmt('@{gf}The following commands will be executed to commit the changes and tag the new version:')) + else: + print(fmt('@{gf}You can use the following commands to manually commit the changes and tag the new version:')) + for cmd in commands: + print(fmt(' @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd))) + + if not args.no_push: + if not args.non_interactive: + # confirm before modifying repository + if not prompt_continue('Execute commands which will modify the repository', 
default=True): + pushed = False + if pushed is None: + commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name) + tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '') + pushed = True + + else: + # for other vcs types the changes are first done locally + print(fmt('@{gf}Committing the package.xml files...')) + commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name) + + print(fmt("@{gf}Creating tag '@{boldon}%s@{boldoff}'..." % (tag_name))) + tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '') + + try: + commands = push_changes(base_path, vcs_type, tag_name, dry_run=True) + except RuntimeError: + print(fmt('@{yf}Warning: could not determine commands to push the changes and tag to the remote repository. Do you have a remote configured for the current branch?')) + else: + if not args.no_push: + print(fmt('@{gf}The following commands will be executed to push the changes and tag to the remote repository:')) + else: + print(fmt('@{gf}You can use the following commands to manually push the changes to the remote repository:')) + for cmd in commands: + print(fmt(' @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd))) + + if not args.no_push: + if not args.non_interactive: + # confirm commands to push to remote repository + if not prompt_continue('Execute commands to push the local commits and tags to the remote repository', default=True): + pushed = False + if pushed is None: + push_changes(base_path, vcs_type, tag_name) + pushed = True + + if pushed: + print(fmt("@{gf}The source repository has been released successfully. The next step will be '@{boldon}bloom-release@{boldoff}'.")) + else: + msg = 'The release of the source repository has been prepared successfully but the changes have not been pushed yet. ' \ + "After pushing the changes manually the next step will be '@{boldon}bloom-release@{boldoff}'." 
+ if args.no_push or pushed is False: + print(fmt('@{yf}%s' % msg)) + else: + raise RuntimeError(fmt('@{rf}%s' % msg)) diff --git a/.ci/catkin_pkg/cli/tag_changelog.py b/.ci/catkin_pkg/cli/tag_changelog.py new file mode 100644 index 0000000000..311c7dba2e --- /dev/null +++ b/.ci/catkin_pkg/cli/tag_changelog.py @@ -0,0 +1,114 @@ +"""This script renames the forthcoming section in changelog files with the upcoming version and the current date.""" + +from __future__ import print_function + +import argparse +import datetime +import os +import re +import sys + +from catkin_pkg.changelog import CHANGELOG_FILENAME, get_changelog_from_path +from catkin_pkg.changelog_generator import FORTHCOMING_LABEL +from catkin_pkg.package_version import bump_version +from catkin_pkg.packages import find_packages, verify_equal_package_versions + +import docutils.core + + +# Return the title text of the unique forthcoming section in the changelog RST, or None; raises if duplicated. +def get_forthcoming_label(rst): + document = docutils.core.publish_doctree(rst) + forthcoming_label = None + for child in document.children: + title = None + if isinstance(child, docutils.nodes.subtitle): + title = child + elif isinstance(child, docutils.nodes.section): + section = child + if len(section.children) > 0 and isinstance(section.children[0], docutils.nodes.title): + title = section.children[0] + if title and len(title.children) > 0 and isinstance(title.children[0], docutils.nodes.Text): + title_text = title.children[0].rawsource + if FORTHCOMING_LABEL.lower() in title_text.lower(): + if forthcoming_label: + raise RuntimeError('Found multiple forthcoming sections') + forthcoming_label = title_text + return forthcoming_label + + +# Replace an RST section title, rebuilding the underline adornment to the new label's length. +def rename_section(data, old_label, new_label): + valid_section_characters = '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~' + + def replace_section(match): + section_char = match.group(2)[0] + return new_label + '\n' + section_char * len(new_label) + pattern = '^(' + re.escape(old_label) + ')\n([' + re.escape(valid_section_characters) + ']+)$' + data, count = re.subn(pattern, replace_section, 
data, flags=re.MULTILINE) + if count == 0: + raise RuntimeError('Could not find section') + if count > 1: + raise RuntimeError('Found multiple matching sections') + return data + + +# CLI: rename each package's forthcoming changelog section to '<new_version> (<today>)'. +def main(sysargs=None): + parser = argparse.ArgumentParser(description='Tag the forthcoming section in the changelog files with an upcoming version number') + parser.add_argument('--bump', choices=('major', 'minor', 'patch'), default='patch', help='Which part of the version number to bump? (default: %(default)s)') + args = parser.parse_args(sysargs) + + base_path = '.' + + # find packages + packages = find_packages(base_path) + if not packages: + raise RuntimeError('No packages found') + print('Found packages: %s' % ', '.join([p.name for p in packages.values()])) + + # fetch current version and verify that all packages have same version number + old_version = verify_equal_package_versions(packages.values()) + new_version = bump_version(old_version, args.bump) + print('Tag version %s' % new_version) + + # check for changelog entries + changelogs = [] + missing_forthcoming = [] + already_tagged = [] + for pkg_path, package in packages.items(): + changelog_path = os.path.join(base_path, pkg_path, CHANGELOG_FILENAME) + if not os.path.exists(changelog_path): + missing_forthcoming.append(package.name) + continue + changelog = get_changelog_from_path(changelog_path, package.name) + if not changelog: + missing_forthcoming.append(package.name) + continue + # check that forthcoming section exists + forthcoming_label = get_forthcoming_label(changelog.rst) + if not forthcoming_label: + missing_forthcoming.append(package.name) + continue + # check that new_version section does not exist yet + try: + changelog.get_content_of_version(new_version) + already_tagged.append(package.name) + continue + except KeyError: + pass + changelogs.append((package.name, changelog_path, changelog, forthcoming_label)) + if missing_forthcoming: + print('The following packages do not have a forthcoming section in their 
changelog file: %s' % ', '.join(sorted(missing_forthcoming)), file=sys.stderr) + if already_tagged: + print("The following packages do already have a section '%s' in their changelog file: %s" % (new_version, ', '.join(sorted(already_tagged))), file=sys.stderr) + + # rename forthcoming sections to new_version including current date + new_changelog_data = [] + new_label = '%s (%s)' % (new_version, datetime.date.today().isoformat()) + for (pkg_name, changelog_path, changelog, forthcoming_label) in changelogs: + print("Renaming section '%s' to '%s' in package '%s'..." % (forthcoming_label, new_label, pkg_name)) + data = rename_section(changelog.rst, forthcoming_label, new_label) + new_changelog_data.append((changelog_path, data)) + + print('Writing updated changelog files...') + for (changelog_path, data) in new_changelog_data: + with open(changelog_path, 'wb') as f: + f.write(data.encode('utf-8')) diff --git a/.ci/catkin_pkg/cli/test_changelog.py b/.ci/catkin_pkg/cli/test_changelog.py new file mode 100644 index 0000000000..0c2aa8b07f --- /dev/null +++ b/.ci/catkin_pkg/cli/test_changelog.py @@ -0,0 +1,46 @@ +"""This script tests REP-0132 changelog files.""" + +from __future__ import print_function + +import argparse +import logging +import os +import sys + +import catkin_pkg.changelog +from catkin_pkg.changelog import Changelog, CHANGELOG_FILENAME +from catkin_pkg.changelog import populate_changelog_from_rst + + +# CLI: parse a REP-0132 CHANGELOG.rst (file or directory argument) and print it; returns 1 when the file is missing. +def main(sysargs=None): + parser = argparse.ArgumentParser( + description='Tests a REP-0132 %s' % CHANGELOG_FILENAME) + parser.add_argument( + 'changelog_file', + help='%s file to parse' % CHANGELOG_FILENAME, + default='.', + nargs='?') + + args = parser.parse_args(sysargs) + + if os.path.isdir(args.changelog_file): + changelog_file = os.path.join(args.changelog_file, CHANGELOG_FILENAME) + if not os.path.exists(changelog_file): + print("No {0} file in given directory: '{1}'" + .format(CHANGELOG_FILENAME, args.changelog_file), file=sys.stderr) + return 1 + 
else: + changelog_file = args.changelog_file + if not os.path.exists(changelog_file): + print("{0} file given does not exist: '{1}'" + .format(CHANGELOG_FILENAME, args.changelog_file), file=sys.stderr) + return 1 + + if os.path.basename(changelog_file) != CHANGELOG_FILENAME: + print('WARNING: changelog file name should be %s' % CHANGELOG_FILENAME) + + logging.basicConfig() + catkin_pkg.changelog.log.setLevel(logging.DEBUG) + changelog = Changelog() + with open(changelog_file, 'r') as f: + print(populate_changelog_from_rst(changelog, f.read())) diff --git a/.ci/catkin_pkg/cmake.py b/.ci/catkin_pkg/cmake.py new file mode 100644 index 0000000000..11aaf2815c --- /dev/null +++ b/.ci/catkin_pkg/cmake.py @@ -0,0 +1,80 @@ +# Software License Agreement (BSD License) +# +# Copyright (c) 2013, Open Source Robotics Foundation, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Open Source Robotics Foundation, Inc. nor +# the names of its contributors may be used to endorse or promote +# products derived from this software without specific prior +# written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +from __future__ import print_function + +import os +import re + + +# Location of the bundled metapackage CMakeLists.txt template, relative to this module's directory. +def get_metapackage_cmake_template_path(): + """ + Return the location of the metapackage CMakeLists.txt CMake template. + + :returns: ``str`` location of the metapackage CMakeLists.txt CMake template + """ + rel_path = os.path.join('templates', 'metapackage.cmake.in') + return os.path.join(os.path.dirname(__file__), rel_path) + + +# Read the template file and delegate to configure_string for @VAR@ substitution. +def configure_file(template_file, environment): + """ + Evaluate a .in template file used in CMake with configure_file(). + + :param template_file: path to the template, ``str`` + :param environment: dictionary of placeholders to substitute, + ``dict`` + :returns: string with evaluates template + :raises: KeyError for placeholders in the template which are not + in the environment + """ # noqa: D402 + with open(template_file, 'r') as f: + template = f.read() + return configure_string(template, environment) + + +def configure_string(template, environment): + """ + Substitute variables enclosed by @ characters. 
+ + :param template: the template, ``str`` + :param environment: dictionary of placeholders to substitute, + ``dict`` + :returns: string with evaluates template + :raises: KeyError for placeholders in the template which are not + in the environment + """ +# A KeyError from the environment lookup propagates to the caller, per the docstring. + def substitute(match): + var = match.group(0)[1:-1] + return environment[var] + return re.sub('@[a-zA-Z0-9_]+@', substitute, template) diff --git a/.ci/catkin_pkg/condition.py b/.ci/catkin_pkg/condition.py new file mode 100644 index 0000000000..22f0b099fc --- /dev/null +++ b/.ci/catkin_pkg/condition.py @@ -0,0 +1,161 @@ +# Copyright 2017 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import operator + +import pyparsing as pp + +# operatorPrecedence renamed to infixNotation in 1.5.7 +try: + from pyparsing import infixNotation +except ImportError: + from pyparsing import operatorPrecedence as infixNotation + + +# A missing condition (None) is vacuously true; otherwise parse once and evaluate against the context dict. +def evaluate_condition(condition, context): + if condition is None: + return True + expr = _get_condition_expression() + try: + parse_results = expr.parseString(condition, parseAll=True) + except pp.ParseException as e: + raise ValueError( + "condition '%s' failed to parse: %s" % (condition, e)) + return parse_results[0](context) + + +_condition_expression = None + + +# Build the pyparsing grammar lazily and cache it in the module-level global above. +def _get_condition_expression(): + global _condition_expression + if not _condition_expression: +# NOTE(review): this local 'operator' shadows the stdlib module imported above; harmless here since _Operator captured it at class-creation time, but worth renaming upstream. + operator = pp.Regex('==|!=|>=|>|<=|<').setName('operator') + operator.setParseAction(_Operator) + + identifier = pp.Word('$', pp.alphanums + '_', min=2).setName('identifier') + identifier.setParseAction(_Identifier) + + value = pp.Word(pp.alphanums + '_-').setName('value') + value.setParseAction(_Value) + + double_quoted_value = pp.QuotedString('"').setName( + 'double_quoted_value') + double_quoted_value.setParseAction(_Value) + single_quoted_value = pp.QuotedString("'").setName( + 'single_quoted_value') + single_quoted_value.setParseAction(_Value) + + comparison_term = identifier | value | double_quoted_value | \ + single_quoted_value + + condition = pp.Group(comparison_term + operator + comparison_term).setName('condition') + condition.setParseAction(_Condition) + + _condition_expression = infixNotation( + condition, [ + ('and', 2, pp.opAssoc.LEFT, _And), + ('or', 2, pp.opAssoc.LEFT, _Or), + ]) + return _condition_expression + + +# Parse node for a comparison operator token; maps the token to the stdlib operator function. +class _Operator: + operators = { + '==': operator.eq, + '!=': operator.ne, + '<=': operator.le, + '<': operator.lt, + '>=': operator.ge, + '>': operator.gt, + } + + def __init__(self, t): + self.value = t[0] + + def __call__(self, arg1, arg2, context): + assert self.value in self.operators + return self.operators[self.value](arg1(context), 
arg2(context)) + + def __str__(self): + return self.value + + __repr__ = __str__ + + +# '$name' placeholder: resolved from the context dict at evaluation time; missing keys become ''. +class _Identifier: + + def __init__(self, t): + self.value = t[0] + + def __call__(self, context): + return str(context.get(self.value[1:], '')) + + def __str__(self): + return self.value + + __repr__ = __str__ + + +class _Value: + + def __init__(self, t): + self.value = t[0] + + def __call__(self, context): + return self.value + + def __str__(self): + return self.value + + __repr__ = __str__ + + +class _Condition: + + def __init__(self, t): + self.value = t[0] + + def __call__(self, context): + return self.value[1](self.value[0], self.value[2], context) + + def __str__(self): + return ' '.join(map(str, self.value)) + + __repr__ = __str__ + + +# Base for 'and'/'or' nodes: operands are every other parse token; evalop is all()/any() on the lazily-evaluated args. +class _BinOp: + + def __init__(self, t): + self.args = t[0][0::2] + + def __call__(self, context): + return self.evalop(a(context) for a in self.args) + + def __str__(self): + sep = ' %s ' % self.reprsymbol + return '(' + sep.join(map(str, self.args)) + ')' + + __repr__ = __str__ + + +class _And(_BinOp): + reprsymbol = 'and' + evalop = all + + +class _Or(_BinOp): + reprsymbol = 'or' + evalop = any diff --git a/.ci/catkin_pkg/group_dependency.py b/.ci/catkin_pkg/group_dependency.py new file mode 100644 index 0000000000..f9946e07ba --- /dev/null +++ b/.ci/catkin_pkg/group_dependency.py @@ -0,0 +1,62 @@ +# Copyright 2017 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from catkin_pkg.condition import evaluate_condition + + +# A dependency on a named package group; 'members' is resolved from the packages' evaluated group memberships. +class GroupDependency: + __slots__ = [ + 'name', + 'condition', + 'evaluated_condition', + 'members', + ] + + def __init__(self, name, condition=None, members=None): + self.name = name + self.condition = condition + self.members = members + self.evaluated_condition = None + + def __eq__(self, other): + if not isinstance(other, GroupDependency): + return False + return all(getattr(self, attr) == getattr(other, attr) + for attr in self.__slots__) + + def __str__(self): + return self.name + + def evaluate_condition(self, context): + """ + Evaluate the condition. + + The result is also stored in the member variable `evaluated_condition`. + + :param context: A dictionary with key value pairs to replace variables + starting with $ in the condition. + + :returns: True if the condition evaluates to True, else False + :raises: :exc:`ValueError` if the condition fails to parse + """ + self.evaluated_condition = evaluate_condition(self.condition, context) + return self.evaluated_condition + +# Recompute 'members': names of packages whose (already condition-evaluated) group memberships include this group. + def extract_group_members(self, packages): + self.members = set() + for pkg in packages: + for g in pkg.member_of_groups: + assert g.evaluated_condition is not None + if self.name in (g.name for g in pkg.member_of_groups if g.evaluated_condition): + self.members.add(pkg.name) diff --git a/.ci/catkin_pkg/group_membership.py b/.ci/catkin_pkg/group_membership.py new file mode 100644 index 0000000000..8c87c0dd38 --- /dev/null +++ b/.ci/catkin_pkg/group_membership.py @@ -0,0 +1,52 @@ +# Copyright 2017 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from catkin_pkg.condition import evaluate_condition + + +# Declares that a package is a member of a named group, optionally guarded by a condition expression. +class GroupMembership: + __slots__ = [ + 'name', + 'condition', + 'evaluated_condition', + ] + + def __init__(self, name, condition=None): + self.name = name + self.condition = condition + self.evaluated_condition = None + + def __eq__(self, other): + if not isinstance(other, GroupMembership): + return False + return all(getattr(self, attr) == getattr(other, attr) + for attr in self.__slots__) + + def __str__(self): + return self.name + + def evaluate_condition(self, context): + """ + Evaluate the condition. + + The result is also stored in the member variable `evaluated_condition`. + + :param context: A dictionary with key value pairs to replace variables + starting with $ in the condition. + + :returns: True if the condition evaluates to True, else False + :raises: :exc:`ValueError` if the condition fails to parse + """ + self.evaluated_condition = evaluate_condition(self.condition, context) + return self.evaluated_condition diff --git a/.ci/catkin_pkg/metapackage.py b/.ci/catkin_pkg/metapackage.py new file mode 100644 index 0000000000..377bbcb0f0 --- /dev/null +++ b/.ci/catkin_pkg/metapackage.py @@ -0,0 +1,173 @@ +# Software License Agreement (BSD License) +# +# Copyright (c) 2013, Open Source Robotics Foundation, Inc. +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Open Source Robotics Foundation, Inc. nor +# the names of its contributors may be used to endorse or promote +# products derived from this software without specific prior +# written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Checks metapackages for compliance with REP-0127. 
+ +Reference: http://ros.org/reps/rep-0127.html#metapackage +""" + +from __future__ import print_function + +import os +import re + +from catkin_pkg.cmake import configure_file +from catkin_pkg.cmake import get_metapackage_cmake_template_path + +__author__ = 'William Woodall' +__email__ = 'william@osrfoundation.org' +__maintainer__ = 'William Woodall' + +DEFINITION_URL = 'http://ros.org/reps/rep-0127.html#metapackage' + + +# Raised when a package fails REP-0127 metapackage validation; the message is prefixed with the package name. +class InvalidMetapackage(Exception): + + def __init__(self, msg, path, package): + self.path = path + self.package = package + Exception.__init__(self, "Metapackage '%s': %s" % (package.name, msg)) + + +def get_expected_cmakelists_txt(metapackage_name): + """ + Return the expected boilerplate CMakeLists.txt file for a metapackage. + + :param metapackage_name: name of the metapackage + :type metapackage_name: str + :returns: expected CMakeLists.txt file + :rtype: str + """ + env = { + 'name': metapackage_name, + 'metapackage_arguments': '' + } + return configure_file(get_metapackage_cmake_template_path(), env) + + +def has_cmakelists_txt(path): + """ + Return True if the given path contains a CMakeLists.txt, otherwise False. + + :param path: path to folder potentially containing CMakeLists.txt + :type path: str + :returns: True if path contains CMakeLists.txt, else False + :rtype: bool + """ + cmakelists_txt_path = os.path.join(path, 'CMakeLists.txt') + return os.path.isfile(cmakelists_txt_path) + + +def get_cmakelists_txt(path): + """ + Fetch the CMakeLists.txt from a given path. 
+ + :param path: path to the folder containing the CMakeLists.txt + :type path: str + :returns: contents of CMakeLists.txt file in given path + :rtype: str + :raises OSError: if there is no CMakeLists.txt in given path + """ + cmakelists_txt_path = os.path.join(path, 'CMakeLists.txt') + with open(cmakelists_txt_path, 'r') as f: + return f.read() + + +# NOTE(review): returns a re.Match or None (truthy/falsy) although the docstring advertises bool. +def has_valid_cmakelists_txt(path, metapackage_name): + """ + Return True if the given path contains a valid CMakeLists.txt, otherwise False. + + A valid CMakeLists.txt for a metapackage is defined by REP-0127 + + :param path: path to folder containing CMakeLists.txt + :type path: str + :param metapackage_name: name of the metapackage being tested + :type metapackage_name: str + :returns: True if the path contains a valid CMakeLists.txt, else False + :rtype: bool + :raises OSError: if there is no CMakeLists.txt in given path + """ + cmakelists_txt = get_cmakelists_txt(path) + expected = get_expected_cmakelists_txt(metapackage_name) +# The template hardcodes CMake version '2.8.3'; splitting on it accepts any x.y.z version in the user's file. + prefix, suffix = expected.split('2.8.3', 1) + if not cmakelists_txt.startswith(prefix): + return False + if not cmakelists_txt.endswith(suffix): + return False + version = cmakelists_txt[len(prefix):-len(suffix)] + return re.match(r'^\d+\.\d+\.\d+$', version) + + +def validate_metapackage(path, package): + """ + Validate the given package (catkin_pkg.package.Package) as a metapackage. 
+ + This validates the metapackage against the definition from REP-0127 + + :param path: directory of the package being checked + :type path: str + :param package: package to be validated + :type package: :py:class:`catkin_pkg.package.Package` + :raises InvalidMetapackage: if package is not a valid metapackage + :raises OSError: if there is not package.xml at the given path + """ + # Is there actually a package at the given path, else raise + # Cannot do package_exists_at from catkin_pkg.packages because of circular dep + if not os.path.isdir(path) or not os.path.isfile(os.path.join(path, 'package.xml')): + raise OSError("No package.xml found at path: '%s'" % path) + # Is it a metapackage, else raise + if not package.is_metapackage(): + raise InvalidMetapackage('No tag in section of package.xml', path, package) + # Is there a CMakeLists.txt, else raise + if not has_cmakelists_txt(path): + raise InvalidMetapackage('No CMakeLists.txt', path, package) + # Is the CMakeLists.txt correct, else raise + if not has_valid_cmakelists_txt(path, package.name): + expected = get_expected_cmakelists_txt(package.name) + expected = expected.replace('2.8.3', '') + raise InvalidMetapackage("""\ +Invalid CMakeLists.txt +Expected: +<<<%s>>> +Got: +<<<%s>>>""" % (expected, get_cmakelists_txt(path)), path, package + ) + # Does it buildtool depend on catkin, else raise + if not package.has_buildtool_depend_on_catkin(): + raise InvalidMetapackage('No buildtool dependency on catkin', path, package) + # Does it have only run depends, else raise + if package.has_invalid_metapackage_dependencies(): + raise InvalidMetapackage( + 'Has build, buildtool, and/or test depends, but only run depends are allowed (except buildtool catkin)', + path, package) diff --git a/.ci/catkin_pkg/package.py b/.ci/catkin_pkg/package.py new file mode 100644 index 0000000000..2843c697cb --- /dev/null +++ b/.ci/catkin_pkg/package.py @@ -0,0 +1,876 @@ +# Software License Agreement (BSD License) +# +# Copyright (c) 2012, 
Willow Garage, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Willow Garage, Inc. nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+ +"""Library for parsing package.xml and providing an object representation.""" + +from __future__ import print_function + +from copy import deepcopy +import os +import re +import sys +import xml.dom.minidom as dom +from xml.parsers.expat import ExpatError + +from catkin_pkg.condition import evaluate_condition + +PACKAGE_MANIFEST_FILENAME = 'package.xml' +PACKAGE_MANIFEST_SCHEMA_URLS = [ + 'http://download.ros.org/schema/package_format1.xsd', + 'http://download.ros.org/schema/package_format2.xsd', + 'http://download.ros.org/schema/package_format3.xsd', +] + + +class Package(object): + """Object representation of a package manifest file.""" + + __slots__ = [ + 'package_format', + 'name', + 'version', + 'version_compatibility', + 'description', + 'plaintext_description', + 'maintainers', + 'licenses', + 'urls', + 'authors', + 'build_depends', + 'buildtool_depends', + 'build_export_depends', + 'buildtool_export_depends', + 'exec_depends', + 'test_depends', + 'doc_depends', + 'conflicts', + 'replaces', + 'group_depends', + 'member_of_groups', + 'exports', + 'filename' + ] + + def __init__(self, filename=None, **kwargs): + """ + Initialize Package. + + :param filename: location of package.xml. Necessary if + converting ``${prefix}`` in ```` values, ``str``. 
+ """ + # initialize all slots ending with "s" with lists, all other with plain values + for attr in self.__slots__: + if attr.endswith('s'): + value = list(kwargs[attr]) if attr in kwargs else [] + setattr(self, attr, value) + else: + value = kwargs[attr] if attr in kwargs else None + setattr(self, attr, value) + if 'depends' in kwargs: + for d in kwargs['depends']: + for slot in [self.build_depends, self.build_export_depends, self.exec_depends]: + if d not in slot: + slot.append(deepcopy(d)) + del kwargs['depends'] + if 'run_depends' in kwargs: + for d in kwargs['run_depends']: + for slot in [self.build_export_depends, self.exec_depends]: + if d not in slot: + slot.append(deepcopy(d)) + del kwargs['run_depends'] + self.filename = filename + self.licenses = [license_ if isinstance(license_, License) else License(license_) for license_ in self.licenses] + # verify that no unknown keywords are passed + unknown = set(kwargs.keys()).difference(self.__slots__) + if unknown: + raise TypeError('Unknown properties: %s' % ', '.join(unknown)) + + def __getattr__(self, name): + if name == 'run_depends': + # merge different dependencies if they are not exactly equal + # potentially having the same dependency name multiple times with different attributes + run_depends = [] + [run_depends.append(deepcopy(d)) for d in self.exec_depends + self.build_export_depends if d not in run_depends] + return run_depends + raise AttributeError(name) + + def __getitem__(self, key): + if key in self.__slots__ + ['run_depends']: + return getattr(self, key) + raise KeyError('Unknown key "%s"' % key) + + def __iter__(self): + for slot in self.__slots__: + yield slot + + def __str__(self): + data = {} + for attr in self.__slots__: + data[attr] = getattr(self, attr) + return str(data) + + def has_buildtool_depend_on_catkin(self): + """ + Return True if this Package buildtool depends on catkin, otherwise False. 
+ + :returns: True if the given package buildtool depends on catkin + :rtype: bool + """ + return 'catkin' in (d.name for d in self.buildtool_depends) + + def get_build_type(self): + """ + Return value of export/build_type element, or 'catkin' if unspecified. + + :returns: package build type + :rtype: str + :raises: :exc:`InvalidPackage` + """ + # for backward compatibility a build type without an evaluated + # condition is still being considered (i.e. evaluated_condition is None) + build_type_exports = [ + e.content for e in self.exports + if e.tagname == 'build_type' and e.evaluated_condition is not False] + if not build_type_exports: + return 'catkin' + if len(build_type_exports) == 1: + return build_type_exports[0] + raise InvalidPackage('Only one element is permitted.', self.filename) + + def get_unconditional_build_types(self): + """ + Return values of export/build_type elements without conditional filtering, or ['catkin'] if unspecified. + + :returns: package build types + :rtype: List[str] + """ + build_type_exports = [e.content for e in self.exports if e.tagname == 'build_type'] + if not build_type_exports: + return ['catkin'] + return build_type_exports + + def has_invalid_metapackage_dependencies(self): + """ + Return True if this package has invalid dependencies for a metapackage. + + This is defined by REP-0127 as any non-run_depends dependencies other then a buildtool_depend on catkin. + + :returns: True if the given package has any invalid dependencies, otherwise False + :rtype: bool + """ + buildtool_depends = [d.name for d in self.buildtool_depends if d.name != 'catkin'] + return len(self.build_depends + buildtool_depends + self.test_depends) > 0 + + def is_metapackage(self): + """ + Return True if this pacakge is a metapackage, otherwise False. 
+ + :returns: True if metapackage, else False + :rtype: bool + """ + return 'metapackage' in (e.tagname for e in self.exports) + + def evaluate_conditions(self, context): + """ + Evaluate the conditions of all dependencies and memberships. + + :param context: A dictionary with key value pairs to replace variables + starting with $ in the condition. + :raises: :exc:`ValueError` if any condition fails to parse + """ + for attr in ( + 'build_depends', + 'buildtool_depends', + 'build_export_depends', + 'buildtool_export_depends', + 'exec_depends', + 'test_depends', + 'doc_depends', + 'conflicts', + 'replaces', + 'group_depends', + 'member_of_groups', + 'exports', + ): + conditionals = getattr(self, attr) + for conditional in conditionals: + conditional.evaluate_condition(context) + + def validate(self, warnings=None): + """ + Make sure all standards for packages are met. + + :param package: Package to check + :param warnings: Print warnings if None or return them in the given list + :raises InvalidPackage: in case validation fails + """ + errors = [] + new_warnings = [] + + if self.package_format: + if not re.match('^[1-9][0-9]*$', str(self.package_format)): + errors.append('The "format" attribute of the package must contain a positive integer if present') + + if not self.name: + errors.append('Package name must not be empty') + # accepting upper case letters and hyphens only for backward compatibility + if not re.match('^[a-zA-Z0-9][a-zA-Z0-9_-]*$', self.name): + errors.append('Package name "%s" does not follow naming conventions' % self.name) + else: + if not re.match('^[a-z][a-z0-9_-]*$', self.name): + new_warnings.append( + 'Package name "%s" does not follow the naming conventions. It should start with ' + 'a lower case letter and only contain lower case letters, digits, underscores, and dashes.' 
% self.name) + + version_regexp = r'^[0-9]+\.[0-9]+\.[0-9]+$' + if not self.version: + errors.append('Package version must not be empty') + elif not re.match(version_regexp, self.version): + errors.append('Package version "%s" does not follow version conventions' % self.version) + elif not re.match(r'^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)$', self.version): + new_warnings.append('Package "%s" does not follow the version conventions. It should not contain leading zeros (unless the number is 0).' % self.name) + if self.version_compatibility: + if not re.match(version_regexp, self.version_compatibility): + errors.append( + "Package compatibility version '%s' does not follow " + 'version conventions' % self.version_compatibility) + + if not self.description: + errors.append('Package description must not be empty') + + if not self.maintainers: + errors.append("Package '{0}' must declare at least one maintainer".format(self.name)) + for maintainer in self.maintainers: + try: + maintainer.validate() + except InvalidPackage as e: + errors.append(e.msg) + if not maintainer.email: + errors.append('Maintainers must have an email address') + + if not self.licenses: + errors.append('The package node must contain at least one "license" tag') + if [license_ for license_ in self.licenses if not license_.strip()]: + errors.append('The license tag must neither be empty nor only contain whitespaces') + + if self.authors is not None: + for author in self.authors: + try: + author.validate() + except InvalidPackage as e: + errors.append(e.msg) + + dep_types = { + 'build': self.build_depends, + 'buildtool': self.buildtool_depends, + 'build_export': self.build_export_depends, + 'buildtool_export': self.buildtool_export_depends, + 'exec': self.exec_depends, + 'test': self.test_depends, + 'doc': self.doc_depends + } + for dep_type, depends in dep_types.items(): + for depend in depends: + if depend.name == self.name: + errors.append('The package "%s" must not "%s_depend" on a 
package with the same name as this package' % (self.name, dep_type)) + + if ( + set([d.name for d in self.group_depends]) & + set([g.name for g in self.member_of_groups]) + ): + errors.append( + "The package must not 'group_depend' on a package which it " + 'also declares to be a member of') + + if self.is_metapackage(): + if not self.has_buildtool_depend_on_catkin(): + # TODO escalate to error in the future, or use metapackage.validate_metapackage + new_warnings.append('Metapackage "%s" must buildtool_depend on catkin.' % self.name) + if self.has_invalid_metapackage_dependencies(): + new_warnings.append('Metapackage "%s" should not have other dependencies besides a ' + 'buildtool_depend on catkin and %s.' % + (self.name, 'run_depends' if self.package_format == 1 else 'exec_depends')) + + for warning in new_warnings: + if warnings is None: + print('WARNING: ' + warning, file=sys.stderr) + elif warning not in warnings: + warnings.append(warning) + + if errors: + raise InvalidPackage('\n'.join(errors), self.filename) + + +class Dependency(object): + __slots__ = [ + 'name', + 'version_lt', 'version_lte', 'version_eq', 'version_gte', 'version_gt', + 'condition', + 'evaluated_condition', + ] + + def __init__(self, name, **kwargs): + self.evaluated_condition = None + for attr in self.__slots__: + value = kwargs[attr] if attr in kwargs else None + setattr(self, attr, value) + self.name = name + # verify that no unknown keywords are passed + unknown = set(kwargs.keys()).difference(self.__slots__) + if unknown: + raise TypeError('Unknown properties: %s' % ', '.join(unknown)) + + def __eq__(self, other): + if not isinstance(other, Dependency): + return False + return all(getattr(self, attr) == getattr(other, attr) for attr in self.__slots__ if attr != 'evaluated_condition') + + def __hash__(self): + return hash(tuple(getattr(self, slot) for slot in self.__slots__)) + + def __str__(self): + return self.name + + def __repr__(self): + kv = [] + for slot in self.__slots__: + 
attr = getattr(self, slot, None) + if attr is not None: + kv.append('{}={!r}'.format(slot, attr)) + return '{}({})'.format(self.__class__.__name__, ', '.join(kv)) + + def evaluate_condition(self, context): + """ + Evaluate the condition. + + The result is also stored in the member variable `evaluated_condition`. + + :param context: A dictionary with key value pairs to replace variables + starting with $ in the condition. + + :returns: True if the condition evaluates to True, else False + :raises: :exc:`ValueError` if the condition fails to parse + """ + self.evaluated_condition = evaluate_condition(self.condition, context) + return self.evaluated_condition + + +class Export(object): + __slots__ = ['tagname', 'attributes', 'content', 'evaluated_condition'] + + def __init__(self, tagname, content=None): + self.tagname = tagname + self.attributes = {} + self.content = content + self.evaluated_condition = None + + def __str__(self): + txt = '<%s' % self.tagname + for key in sorted(self.attributes.keys()): + txt += ' %s="%s"' % (key, self.attributes[key]) + if self.content: + txt += '>%s' % (self.content, self.tagname) + else: + txt += '/>' + return txt + + def evaluate_condition(self, context): + """ + Evaluate the condition. + + The result is also stored in the member variable `evaluated_condition`. + + :param context: A dictionary with key value pairs to replace variables + starting with $ in the condition. + + :returns: True if the condition evaluates to True, else False + :raises: :exc:`ValueError` if the condition fails to parse + """ + self.evaluated_condition = evaluate_condition(self.attributes.get('condition'), context) + return self.evaluated_condition + + +# Subclassing ``str`` to keep backward compatibility. 
class License(str):
    """A license string with an optional associated license file."""

    def __new__(cls, value, file_=None):
        obj = str.__new__(cls, str(value))
        obj.file = file_
        return obj


class Person(object):
    """A maintainer or author with an optional email address."""

    __slots__ = ['name', 'email']

    def __init__(self, name, email=None):
        self.name = name
        self.email = email

    def __str__(self):
        name = self.name
        if not isinstance(name, str):
            # Python 2 unicode fallback
            name = name.encode('utf-8')
        if self.email is not None:
            return '%s <%s>' % (name, self.email)
        else:
            return '%s' % name

    def validate(self):
        """:raises InvalidPackage: if the email address is malformed."""
        if self.email is None:
            return
        if not re.match(r'^[-a-zA-Z0-9_%+]+(\.[-a-zA-Z0-9_%+]+)*@[-a-zA-Z0-9%]+(\.[-a-zA-Z0-9%]+)*\.[a-zA-Z]{2,}$', self.email):
            raise InvalidPackage('Invalid email "%s" for person "%s"' % (self.email, self.name))


class Url(object):
    """A URL with an optional type (website, repository, bugtracker, ...)."""

    __slots__ = ['url', 'type']

    def __init__(self, url, type_=None):
        self.url = url
        self.type = type_

    def __str__(self):
        return self.url


def parse_package_for_distutils(path=None):
    """Deprecated; use catkin_pkg.python_setup.generate_distutils_setup instead."""
    print('WARNING: %s/setup.py: catkin_pkg.package.parse_package_for_distutils() is deprecated. Please use catkin_pkg.python_setup.generate_distutils_setup(**kwargs) instead.' %
          os.path.basename(os.path.abspath('.')))
    from .python_setup import generate_distutils_setup
    data = {}
    if path is not None:
        data['package_xml_path'] = path
    return generate_distutils_setup(**data)


class InvalidPackage(Exception):
    """Raised when a package manifest is malformed or fails validation."""

    def __init__(self, msg, package_path=None):
        self.msg = msg
        self.package_path = package_path
        Exception.__init__(self, self.msg)

    def __str__(self):
        result = '' if not self.package_path else "Error(s) in package '%s':\n" % self.package_path
        return result + Exception.__str__(self)


def package_exists_at(path):
    """
    Check that a package exists at the given path.

    :param path: path to a package
    :type path: str
    :returns: True if package exists in given path, else False
    :rtype: bool
    """
    return os.path.isdir(path) and os.path.isfile(os.path.join(path, PACKAGE_MANIFEST_FILENAME))


def _get_package_xml(path):
    """
    Get xml of package manifest.

    :param path: The path of the package.xml file, it may or may not
      include the filename

    :returns: a tuple with the xml as well as the path of the read file
    :raises: :exc:`IOError`
    """
    if os.path.isfile(path):
        filename = path
    elif package_exists_at(path):
        filename = os.path.join(path, PACKAGE_MANIFEST_FILENAME)
        if not os.path.isfile(filename):
            raise IOError('Directory "%s" does not contain a "%s"' % (path, PACKAGE_MANIFEST_FILENAME))
    else:
        raise IOError('Path "%s" is neither a directory containing a "%s" file nor a file' % (path, PACKAGE_MANIFEST_FILENAME))

    # Force utf8 encoding for python3.
    # This way unicode files can still be processed on non-unicode locales.
    kwargs = {}
    if sys.version_info[0] >= 3:
        kwargs['encoding'] = 'utf8'

    with open(filename, 'r', **kwargs) as f:
        return f.read(), filename


def has_ros_schema_reference(path):
    """
    Check if the XML file contains a processing instruction referencing a ROS package manifest schema.

    :param path: The path of the package.xml file, it may or may not
      include the filename
    :type path: str
    :returns: True if it contains the known reference, else False
    :rtype: bool
    :raises: :exc:`IOError`
    """
    xml, _ = _get_package_xml(path)
    return has_ros_schema_reference_string(xml)


def has_ros_schema_reference_string(data):
    """
    Check if the XML data contains a processing instruction referencing a ROS package manifest schema.

    :param data: package.xml contents
    :type data: str
    :returns: True if it contains the known reference, else False
    :rtype: bool
    """
    if sys.version_info[0] == 2 and not isinstance(data, str):
        data = data.encode('utf-8')
    try:
        root = dom.parseString(data)
    except ExpatError:
        # invalid XML
        return False

    for child in root.childNodes:
        if child.nodeType == child.PROCESSING_INSTRUCTION_NODE:
            if child.target == 'xml-model':
                # extract schema url from "xml-model" processing instruction
                schema_url = re.search(r'href="([A-Za-z0-9\._/:]*)"', child.data).group(1)
                if schema_url in PACKAGE_MANIFEST_SCHEMA_URLS:
                    return True

    return False


def parse_package(path, warnings=None):
    """
    Parse package manifest.

    :param path: The path of the package.xml file, it may or may not
      include the filename
    :param warnings: Print warnings if None or return them in the given list

    :returns: return :class:`Package` instance, populated with parsed fields
    :raises: :exc:`InvalidPackage`
    :raises: :exc:`IOError`
    """
    xml, filename = _get_package_xml(path)
    return parse_package_string(xml, filename, warnings=warnings)


def _check_known_attributes(node, known):
    """Return a list of error strings for attributes of ``node`` not in ``known``."""
    if node.hasAttributes():
        attrs = map(str, node.attributes.keys())
        # colon is the namespace separator in attributes, xmlns can be added to any tag
        unknown_attrs = [attr for attr in attrs if not (attr in known or attr == 'xmlns' or ':' in attr)]
        if unknown_attrs:
            return ['The "%s" tag must not have the following attributes: %s' % (node.tagName, ', '.join(unknown_attrs))]
    return []


def parse_package_string(data, filename=None, warnings=None):
    """
    Parse package.xml string contents.

    :param data: package.xml contents, ``str``
    :param filename: full file path for debugging, ``str``
    :param warnings: Print warnings if None or return them in the given list
    :returns: return parsed :class:`Package`
    :raises: :exc:`InvalidPackage`
    """
    if sys.version_info[0] == 2 and not isinstance(data, str):
        data = data.encode('utf-8')
    try:
        root = dom.parseString(data)
    except ExpatError as ex:
        raise InvalidPackage('The manifest contains invalid XML:\n%s' % ex, filename)

    pkg = Package(filename)

    # verify unique root node
    nodes = _get_nodes(root, 'package')
    if len(nodes) != 1:
        raise InvalidPackage('The manifest must contain a single "package" root tag', filename)
    root = nodes[0]

    # format attribute
    value = _get_node_attr(root, 'format', default=1)
    pkg.package_format = int(value)
    assert pkg.package_format in (1, 2, 3), \
        "Unable to handle package.xml format version '%d', please update catkin_pkg " \
        '(e.g. on Ubuntu/Debian use: sudo apt-get update && sudo apt-get install --only-upgrade python-catkin-pkg)' % pkg.package_format

    # name
    pkg.name = _get_node_value(_get_node(root, 'name', filename))

    # version and optional compatibility
    version_node = _get_node(root, 'version', filename)
    pkg.version = _get_node_value(version_node)
    pkg.version_compatibility = _get_node_attr(
        version_node, 'compatibility', default=None)

    # description
    pkg.description = _get_node_value(_get_node(root, 'description', filename), allow_xml=True, apply_str=False)
    pkg.plaintext_description = re.sub(' +(\n+) +', r'\1', _get_node_text(_get_node(root, 'description', filename)), flags=re.MULTILINE)

    # at least one maintainer, all must have email
    maintainers = _get_nodes(root, 'maintainer')
    for node in maintainers:
        pkg.maintainers.append(Person(
            _get_node_value(node, apply_str=False),
            _get_node_attr(node, 'email')
        ))

    # urls with optional type
    urls = _get_nodes(root, 'url')
    for node in urls:
        pkg.urls.append(Url(
            _get_node_value(node),
            _get_node_attr(node, 'type', default='website')
        ))

    # authors with optional email
    authors = _get_nodes(root, 'author')
    for node in authors:
        pkg.authors.append(Person(
            _get_node_value(node, apply_str=False),
            _get_node_attr(node, 'email', default=None)
        ))

    # at least one license
    licenses = _get_nodes(root, 'license')
    for node in licenses:
        pkg.licenses.append(License(
            _get_node_value(node),
            _get_node_attr(node, 'file', default=None)
        ))

    errors = []
    # dependencies and relationships
    pkg.build_depends = _get_dependencies(root, 'build_depend')
    pkg.buildtool_depends = _get_dependencies(root, 'buildtool_depend')
    if pkg.package_format == 1:
        run_depends = _get_dependencies(root, 'run_depend')
        for d in run_depends:
            pkg.build_export_depends.append(deepcopy(d))
            pkg.exec_depends.append(deepcopy(d))
    if pkg.package_format != 1:
        pkg.build_export_depends = _get_dependencies(root, 'build_export_depend')
        pkg.buildtool_export_depends = _get_dependencies(root, 'buildtool_export_depend')
        pkg.exec_depends = _get_dependencies(root, 'exec_depend')
        depends = _get_dependencies(root, 'depend')
        for dep in depends:
            # check for collisions with specific dependencies
            same_build_depends = ['build_depend' for d in pkg.build_depends if d == dep]
            same_build_export_depends = ['build_export_depend' for d in pkg.build_export_depends if d == dep]
            same_exec_depends = ['exec_depend' for d in pkg.exec_depends if d == dep]
            if same_build_depends or same_build_export_depends or same_exec_depends:
                errors.append("The generic dependency on '%s' is redundant with: %s" % (dep.name, ', '.join(same_build_depends + same_build_export_depends + same_exec_depends)))
            # only append non-duplicates
            if not same_build_depends:
                pkg.build_depends.append(deepcopy(dep))
            if not same_build_export_depends:
                pkg.build_export_depends.append(deepcopy(dep))
            if not same_exec_depends:
                pkg.exec_depends.append(deepcopy(dep))
    pkg.doc_depends = _get_dependencies(root, 'doc_depend')
    pkg.test_depends = _get_dependencies(root, 'test_depend')
    pkg.conflicts = _get_dependencies(root, 'conflict')
    pkg.replaces = _get_dependencies(root, 'replace')

    # group dependencies and memberships
    pkg.group_depends = _get_group_dependencies(root, 'group_depend')
    pkg.member_of_groups = _get_group_memberships(root, 'member_of_group')

    if pkg.package_format == 1:
        for test_depend in pkg.test_depends:
            same_build_depends = ['build_depend' for d in pkg.build_depends if d == test_depend]
            same_run_depends = ['run_depend' for d in pkg.run_depends if d == test_depend]
            if same_build_depends or same_run_depends:
                errors.append('The test dependency on "%s" is redundant with: %s' % (test_depend.name, ', '.join(same_build_depends + same_run_depends)))

    # exports
    export_node = _get_optional_node(root, 'export', filename)
    if export_node is not None:
        exports = []
        for node in [n for n in export_node.childNodes if n.nodeType == n.ELEMENT_NODE]:
            export = Export(str(node.tagName), _get_node_value(node, allow_xml=True))
            for key, value in node.attributes.items():
                export.attributes[str(key)] = str(value)
            exports.append(export)
        pkg.exports = exports

    # verify that no unsupported tags and attributes are present
    errors += _check_known_attributes(root, ['format'])
    depend_attributes = ['version_lt', 'version_lte', 'version_eq', 'version_gte', 'version_gt']
    if pkg.package_format > 2:
        depend_attributes.append('condition')
    known = {
        'name': [],
        'version': ['compatibility'],
        'description': [],
        'maintainer': ['email'],
        'license': [],
        'url': ['type'],
        'author': ['email'],
        'build_depend': depend_attributes,
        'buildtool_depend': depend_attributes,
        'test_depend': depend_attributes,
        'conflict': depend_attributes,
        'replace': depend_attributes,
        'export': [],
    }
    if pkg.package_format == 1:
        known.update({
            'run_depend': depend_attributes,
        })
    if pkg.package_format != 1:
        known.update({
            'build_export_depend': depend_attributes,
            'buildtool_export_depend': depend_attributes,
            'depend': depend_attributes,
            'exec_depend': depend_attributes,
            'doc_depend': depend_attributes,
        })
    if pkg.package_format > 2:
        known.update({
            'group_depend': ['condition'],
            'member_of_group': ['condition']
        })
    if pkg.package_format > 2:
        known.update({
            'license': ['file'],
        })
    nodes = [n for n in root.childNodes if n.nodeType == n.ELEMENT_NODE]
    unknown_tags = set([n.tagName for n in nodes if n.tagName not in known.keys()])
    if unknown_tags:
        errors.append('The manifest of package "%s" (with format version %d) must not contain the following tags: %s' % (pkg.name, pkg.package_format, ', '.join(unknown_tags)))
        if 'run_depend' in unknown_tags and pkg.package_format >= 2:
            errors.append('Please replace <run_depend> tags with <depend> tags.')
        elif 'exec_depend' in unknown_tags and pkg.package_format < 2:
            errors.append('Either update to a newer format or replace <exec_depend> tags with <run_depend> tags.')
    for node in [n for n in nodes if n.tagName in known.keys()]:
        errors += _check_known_attributes(node, known[node.tagName])
        if node.tagName not in ['description', 'export']:
            subnodes = [n for n in node.childNodes if n.nodeType == n.ELEMENT_NODE]
            if subnodes:
                errors.append('The "%s" tag must not contain the following children: %s' % (node.tagName, ', '.join([n.tagName for n in subnodes])))

    if errors:
        raise InvalidPackage('Error(s):%s' % (''.join(['\n- %s' % e for e in errors])), filename)

    pkg.validate(warnings=warnings)

    return pkg


def _get_nodes(parent, tagname):
    """Return all direct child elements of ``parent`` named ``tagname``."""
    return [n for n in parent.childNodes if n.nodeType == n.ELEMENT_NODE and n.tagName == tagname]


def _get_node(parent, tagname, filename):
    """Return the single child element ``tagname``; raise InvalidPackage otherwise."""
    nodes = _get_nodes(parent, tagname)
    if len(nodes) != 1:
        raise InvalidPackage('The manifest must contain exactly one "%s" tag' % tagname, filename)
    return nodes[0]
def _get_optional_node(parent, tagname, filename):
    """Return the single child element ``tagname``, or None if it is absent."""
    found = _get_nodes(parent, tagname)
    if len(found) > 1:
        raise InvalidPackage('The manifest must not contain more than one "%s" tags' % tagname, filename)
    return found[0] if found else None


def _get_node_value(node, allow_xml=False, apply_str=True):
    """Return the textual content of ``node``, optionally keeping embedded XML."""
    if allow_xml:
        parts = [child.toxml() for child in node.childNodes]
    else:
        parts = [child.data for child in node.childNodes if child.nodeType == child.TEXT_NODE]
    value = ''.join(parts).strip(' \n\r\t')
    return str(value) if apply_str else value


def _get_node_text(node, strip=True):
    """Return the plain text of ``node``, collapsing whitespace and turning <br/> into newlines."""
    pieces = []
    for child in node.childNodes:
        if child.nodeType == child.TEXT_NODE:
            pieces.append(re.sub(r'\s+', ' ', child.data))
        elif child.nodeType == child.ELEMENT_NODE:
            if child.tagName == 'br':
                pieces.append('\n')
            else:
                pieces.append(_get_node_text(child, strip=False))
        else:
            # NOTE(review): this assert is always true (non-empty string), so
            # other node types are silently ignored — kept as in the original.
            assert 'unreachable'
    text = ''.join(pieces)
    return text.strip() if strip else text


def _get_node_attr(node, attr, default=False):
    """:param default: False means value is required."""
    if node.hasAttribute(attr):
        return str(node.getAttribute(attr))
    if default is False:
        raise InvalidPackage('The "%s" tag must have the attribute "%s"' % (node.tagName, attr))
    return default


def _get_dependencies(parent, tagname):
    """Build a Dependency for each ``tagname`` child, copying version/condition attributes."""
    attr_names = ('version_lt', 'version_lte', 'version_eq', 'version_gte', 'version_gt', 'condition')
    dependencies = []
    for element in _get_nodes(parent, tagname):
        dependency = Dependency(_get_node_value(element))
        for attr_name in attr_names:
            setattr(dependency, attr_name, _get_node_attr(element, attr_name, None))
        dependencies.append(dependency)
    return dependencies


def _get_group_dependencies(parent, tagname):
    """Build a GroupDependency for each ``tagname`` child with its optional condition."""
    from .group_dependency import GroupDependency
    return [
        GroupDependency(
            _get_node_value(element),
            condition=_get_node_attr(element, 'condition', default=None))
        for element in _get_nodes(parent, tagname)]


def _get_group_memberships(parent, tagname):
    """Build a GroupMembership for each ``tagname`` child with its optional condition."""
    from .group_membership import GroupMembership
    return [
        GroupMembership(
            _get_node_value(element),
            condition=_get_node_attr(element, 'condition', default=None))
        for element in _get_nodes(parent, tagname)]
IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +from __future__ import print_function + +import getpass +import os +import string +import sys + +from catkin_pkg.cmake import configure_file +from catkin_pkg.cmake import get_metapackage_cmake_template_path +from catkin_pkg.package import Dependency +from catkin_pkg.package import Package +from catkin_pkg.package import PACKAGE_MANIFEST_FILENAME +from catkin_pkg.package import Person + + +class PackageTemplate(Package): + + def __init__(self, catkin_deps=None, system_deps=None, boost_comps=None, **kwargs): + super(PackageTemplate, self).__init__(**kwargs) + self.catkin_deps = catkin_deps or [] + self.system_deps = system_deps or [] + self.boost_comps = boost_comps or [] + self.validate() + + @staticmethod + def _create_package_template(package_name, description=None, licenses=None, + maintainer_names=None, author_names=None, + version=None, catkin_deps=None, system_deps=None, + boost_comps=None): + """ + Alternative factory method mapping CLI args to argument for Package class. 
+ + :param package_name: + :param description: + :param licenses: + :param maintainer_names: + :param authors: + :param version: + :param catkin_deps: + """ + # Sort so they are alphebetical + licenses = list(licenses or ['TODO']) + licenses.sort() + if not maintainer_names: + maintainer_names = [getpass.getuser()] + maintainer_names = list(maintainer_names or []) + maintainer_names.sort() + maintainers = [] + for maintainer_name in maintainer_names: + maintainers.append( + Person(maintainer_name, + '%s@todo.todo' % maintainer_name.split()[-1]) + ) + author_names = list(author_names or []) + author_names.sort() + authors = [] + for author_name in author_names: + authors.append(Person(author_name)) + catkin_deps = list(catkin_deps or []) + catkin_deps.sort() + pkg_catkin_deps = [] + depends = [] + build_depends = [] + exec_depends = [] + buildtool_depends = [Dependency('catkin')] + for dep in catkin_deps: + if dep.lower() == 'catkin': + catkin_deps.remove(dep) + continue + if dep.lower() == 'genmsg': + sys.stderr.write('WARNING: Packages with messages or services should not depend on genmsg, but on message_generation and message_runtime\n') + buildtool_depends.append(Dependency('genmsg')) + continue + if dep.lower() == 'message_generation': + if 'message_runtime' not in catkin_deps: + sys.stderr.write('WARNING: Packages with messages or services should depend on both message_generation and message_runtime\n') + build_depends.append(Dependency('message_generation')) + continue + if dep.lower() == 'message_runtime': + if 'message_generation' not in catkin_deps: + sys.stderr.write('WARNING: Packages with messages or services should depend on both message_generation and message_runtime\n') + exec_depends.append(Dependency('message_runtime')) + continue + pkg_catkin_deps.append(Dependency(dep)) + for dep in pkg_catkin_deps: + depends.append(dep) + if boost_comps: + if not system_deps: + system_deps = ['boost'] + elif 'boost' not in system_deps: + 
system_deps.append('boost') + for dep in system_deps or []: + if not dep.lower().startswith('python-'): + depends.append(Dependency(dep)) + else: + exec_depends.append(Dependency(dep)) + package_temp = PackageTemplate( + name=package_name, + version=version or '0.0.0', + description=description or 'The %s package' % package_name, + buildtool_depends=buildtool_depends, + build_depends=build_depends, + depends=depends, + exec_depends=exec_depends, + catkin_deps=catkin_deps, + system_deps=system_deps, + boost_comps=boost_comps, + licenses=licenses, + authors=authors, + maintainers=maintainers, + urls=[]) + return package_temp + + +def read_template_file(filename, rosdistro): + template_dir = os.path.join(os.path.dirname(__file__), 'templates') + templates = [] + templates.append(os.path.join(template_dir, rosdistro, '%s.in' % filename)) + templates.append(os.path.join(template_dir, '%s.in' % filename)) + for template in templates: + if os.path.isfile(template): + with open(template, 'r') as fhand: + template_contents = fhand.read() + return template_contents + raise IOError( + 'Could not read template for ROS distro ' + "'{}' at '{}': ".format(rosdistro, ', '.join(templates)) + + 'no such file or directory' + ) + + +def _safe_write_files(newfiles, target_dir): + """ + Write file contents to target_dir/filepath for all entries of newfiles. 
+ + Aborts early if files exist in places for new files or directories + + :param newfiles: a dict {filepath: contents} + :param target_dir: a string + """ + # first check no filename conflict exists + for filename in newfiles: + target_file = os.path.join(target_dir, filename) + if os.path.exists(target_file): + raise ValueError('File exists: %s' % target_file) + dirname = os.path.dirname(target_file) + while dirname != target_dir: + if os.path.isfile(dirname): + raise ValueError('Cannot create directory, file exists: %s' % + dirname) + dirname = os.path.dirname(dirname) + + for filename, content in newfiles.items(): + target_file = os.path.join(target_dir, filename) + dirname = os.path.dirname(target_file) + if not os.path.exists(dirname): + os.makedirs(dirname) + # print(target_file, content) + with open(target_file, 'ab') as fhand: + fhand.write(content.encode()) + print('Created file %s' % os.path.relpath(target_file, os.path.dirname(target_dir))) + + +def create_package_files(target_path, package_template, rosdistro, + newfiles=None, meta=False): + """ + Create several files from templates to start a new package. 
+ + :param target_path: parent folder where to create the package + :param package_template: contains the required information + :param rosdistro: name of the distro to look up respective template + :param newfiles: dict {filepath: contents} for additional files to write + """ + if newfiles is None: + newfiles = {} + # allow to replace default templates when path string is equal + manifest_path = os.path.join(target_path, PACKAGE_MANIFEST_FILENAME) + if manifest_path not in newfiles: + newfiles[manifest_path] = \ + create_package_xml(package_template, rosdistro, meta=meta) + cmake_path = os.path.join(target_path, 'CMakeLists.txt') + if cmake_path not in newfiles: + newfiles[cmake_path] = create_cmakelists(package_template, rosdistro, meta=meta) + _safe_write_files(newfiles, target_path) + if 'roscpp' in package_template.catkin_deps: + fname = os.path.join(target_path, 'include', package_template.name) + os.makedirs(fname) + print('Created folder %s' % os.path.relpath(fname, os.path.dirname(target_path))) + if 'roscpp' in package_template.catkin_deps or \ + 'rospy' in package_template.catkin_deps: + fname = os.path.join(target_path, 'src') + os.makedirs(fname) + print('Created folder %s' % os.path.relpath(fname, os.path.dirname(target_path))) + + +class CatkinTemplate(string.Template): + """subclass to use @ instead of $ as markers.""" + + delimiter = '@' + escape = '@' + + +def create_cmakelists(package_template, rosdistro, meta=False): + """Create CMake file contents from the template. 
+ + :param package_template: contains the required information + :returns: file contents as string + """ + if meta: + template_path = get_metapackage_cmake_template_path() + temp_dict = { + 'name': package_template.name, + 'metapackage_arguments': '', + } + return configure_file(template_path, temp_dict) + else: + cmakelists_txt_template = read_template_file('CMakeLists.txt', rosdistro) + ctemp = CatkinTemplate(cmakelists_txt_template) + if package_template.catkin_deps == []: + components = '' + else: + components = ' COMPONENTS\n %s\n' % '\n '.join(package_template.catkin_deps) + boost_find_package = \ + ('' if not package_template.boost_comps + else ('find_package(Boost REQUIRED COMPONENTS %s)\n' % + ' '.join(package_template.boost_comps))) + system_find_package = '' + for sysdep in package_template.system_deps: + if sysdep == 'boost': + continue + if sysdep.startswith('python-'): + system_find_package += '# ' + system_find_package += 'find_package(%s REQUIRED)\n' % sysdep + # provide dummy values + catkin_depends = (' '.join(package_template.catkin_deps) + if package_template.catkin_deps + else 'other_catkin_pkg') + system_depends = (' '.join(package_template.system_deps) + if package_template.system_deps + else 'system_lib') + message_pkgs = [pkg for pkg in package_template.catkin_deps if pkg.endswith('_msgs')] + if message_pkgs: + message_depends = '# %s' % '# '.join(message_pkgs) + else: + message_depends = '# std_msgs # Or other packages containing msgs' + temp_dict = {'name': package_template.name, + 'components': components, + 'include_directories': _create_include_macro(package_template), + 'boost_find': boost_find_package, + 'systems_find': system_find_package, + 'catkin_depends': catkin_depends, + 'system_depends': system_depends, + 'target_libraries': _create_targetlib_args(package_template), + 'message_dependencies': message_depends + } + return ctemp.substitute(temp_dict) + + +def _create_targetlib_args(package_template): + result = '# 
${catkin_LIBRARIES}\n' + if package_template.boost_comps: + result += '# ${Boost_LIBRARIES}\n' + if package_template.system_deps: + result += (''.join(['# ${%s_LIBRARIES}\n' % + sdep for sdep in package_template.system_deps])) + return result + + +def _create_include_macro(package_template): + includes = ['# include'] + includes.append((' ' if package_template.catkin_deps else '# ') + '${catkin_INCLUDE_DIRS}') + if package_template.boost_comps: + includes.append(' ${Boost_INCLUDE_DIRS}') + if package_template.system_deps: + deplist = [] + for sysdep in package_template.system_deps: + if not sysdep.startswith('python-'): + deplist.append(sysdep) + if deplist: + todo_incl = '# TODO: Check names of system library include directories' + includes.append(todo_incl + (' (%s)' % ', '.join(deplist))) + includes.extend([' ${%s_INCLUDE_DIRS}' % sysdep for sysdep in deplist]) + result = '' + if includes: + result += '\n'.join(includes) + return result + + +def _create_depend_tag(dep_type, + name, + version_eq=None, + version_lt=None, + version_lte=None, + version_gt=None, + version_gte=None): + """Create xml snippet for package.xml.""" + version_string = [] + for key, var in {'version_eq': version_eq, + 'version_lt': version_lt, + 'version_lte': version_lte, + 'version_gt': version_gt, + 'version_gte': version_gte}.items(): + if var is not None: + version_string.append(' %s="%s"' % (key, var)) + result = ' <%s%s>%s\n' % (dep_type, + ''.join(version_string), + name, + dep_type) + return result + + +def create_package_xml(package_template, rosdistro, meta=False): + """ + Create package xml file content. 
+ + :param package_template: contains the required information + :returns: file contents as string + """ + package_xml_template = \ + read_template_file(PACKAGE_MANIFEST_FILENAME, rosdistro) + ctemp = CatkinTemplate(package_xml_template) + temp_dict = {} + for key in package_template.__slots__: + temp_dict[key] = getattr(package_template, key) + + if package_template.version_compatibility: + temp_dict['version_compatibility'] = \ + ' compatibility="%s"' % package_template.version_compatibility + else: + temp_dict['version_compatibility'] = '' + + if not package_template.description: + temp_dict['description'] = 'The %s package ...' % package_template.name + + licenses = [] + for plicense in package_template.licenses: + licenses.append(' %s\n' % plicense) + temp_dict['licenses'] = ''.join(licenses) + + def get_person_tag(tagname, person): + email_string = ( + '' if person.email is None else 'email="%s"' % person.email + ) + return ' <%s %s>%s\n' % (tagname, email_string, + person.name, tagname) + + maintainers = [] + for maintainer in package_template.maintainers: + maintainers.append(get_person_tag('maintainer', maintainer)) + temp_dict['maintainers'] = ''.join(maintainers) + + urls = [] + for url in package_template.urls: + type_string = ('' if url.type is None + else 'type="%s"' % url.type) + urls.append(' %s\n' % (type_string, url.url)) + temp_dict['urls'] = ''.join(urls) + + authors = [] + for author in package_template.authors: + authors.append(get_person_tag('author', author)) + temp_dict['authors'] = ''.join(authors) + + dependencies = [] + dep_map = { + 'build_depend': package_template.build_depends, + 'build_export_depend': package_template.build_export_depends, + 'buildtool_depend': package_template.buildtool_depends, + 'exec_depend': package_template.exec_depends, + 'test_depend': package_template.test_depends, + 'conflict': package_template.conflicts, + 'replace': package_template.replaces + } + for dep_type in ['buildtool_depend', 'build_depend', 
'build_export_depend', + 'exec_depend', 'test_depend', 'conflict', 'replace']: + for dep in sorted(dep_map[dep_type], key=lambda x: x.name): + if 'depend' in dep_type: + dep_tag = _create_depend_tag( + dep_type, + dep.name, + dep.version_eq, + dep.version_lt, + dep.version_lte, + dep.version_gt, + dep.version_gte + ) + dependencies.append(dep_tag) + else: + dependencies.append(_create_depend_tag(dep_type, + dep.name)) + temp_dict['dependencies'] = ''.join(dependencies) + + exports = [] + if package_template.exports is not None: + for export in package_template.exports: + if export.content is not None: + print('WARNING: Create package does not know how to ' + 'serialize exports with content: ' + '%s, %s, ' % (export.tagname, export.attributes) + + '%s' % (export.content), + file=sys.stderr) + else: + attribs = [' %s="%s"' % (k, v) for (k, v) in export.attributes.items()] + line = ' <%s%s/>\n' % (export.tagname, ''.join(attribs)) + exports.append(line) + + if meta: + exports.append(' ') + temp_dict['exports'] = ''.join(exports) + + temp_dict['components'] = package_template.catkin_deps + + return ctemp.substitute(temp_dict) diff --git a/.ci/catkin_pkg/package_version.py b/.ci/catkin_pkg/package_version.py new file mode 100644 index 0000000000..f7d204e3a5 --- /dev/null +++ b/.ci/catkin_pkg/package_version.py @@ -0,0 +1,213 @@ +# Software License Agreement (BSD License) +# +# Copyright (c) 2012, Willow Garage, Inc. +# Copyright (c) 2013, Open Source Robotics Foundation, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Open Source Robotics Foundation, Inc. nor +# the names of its contributors may be used to endorse or promote +# products derived from this software without specific prior +# written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +from __future__ import print_function + +import datetime +import os +import re + +from catkin_pkg.changelog_generator import FORTHCOMING_LABEL +import docutils.core + + +def bump_version(version, bump='patch'): + """ + Increases version number. 
+ + :param str version: must be in version format "int.int.int" + :param str bump: one of 'patch, minor, major' + :returns: version with the given part increased, and all inferior parts reset to 0 + :rtype: str + :raises ValueError: if the version string is not in the format x.y.z + """ + # split the version number + match = re.match(r'^(\d+)\.(\d+)\.(\d+)$', version) + if match is None: + raise ValueError('Invalid version string, must be int.int.int: "%s"' % version) + new_version = match.groups() + new_version = [int(x) for x in new_version] + # find the desired index + idx = {'major': 0, 'minor': 1, 'patch': 2}[bump] + # increment the desired part + new_version[idx] += 1 + # reset all parts behind the bumped part + new_version = new_version[:idx + 1] + [0 for x in new_version[idx + 1:]] + return '%d.%d.%d' % tuple(new_version) + + +def _replace_version(package_str, new_version): + """ + Replace the version tag in contents if there is only one instance. + + :param str package_str: contents of package.xml + :param str new_version: version number + :returns: new package.xml + :rtype: str + :raises RuntimeError: + """ + # try to replace contens + new_package_str, number_of_subs = re.subn(']*)>[^<>]*', r'>%s' % new_version, package_str) + if number_of_subs != 1: + raise RuntimeError('Illegal number of version tags: %s' % (number_of_subs)) + return new_package_str + + +def _replace_setup_py_version(setup_py_str, new_version): + """ + Replace the version tag in contents if there is only one instance and it is using a literal as the version. 
+ + :param str package_str: contents of setup.py + :param str new_version: new version number + :returns: new setup.py string + :rtype: str + :raises RuntimeError: + """ + # try to replace contents + new_setup_py_str, number_of_subs = re.subn( + r'version=([\'"])\d+\.\d+\.\d+([\'"]),', + r'version=\g<1>%s\g<2>,' % new_version, + setup_py_str) + if number_of_subs == 0: + raise RuntimeError("Failed to find a normal version statement, e.g.: version='1.2.3',") + if number_of_subs != 1: + raise RuntimeError('Illegal number of version statements: %s' % (number_of_subs)) + return new_setup_py_str + + +def _check_for_version_comment(package_str, new_version): + """ + Check if a comment is present behind the version tag and return it. + + :param str package_str: contents of package.xml + :param str new_version: version number + :returns: comment if available, else None + :rtype: str + """ + version_tag = '>%s' % new_version + pattern = '%s[ \t]*%s *(.+) *%s' % (re.escape(version_tag), re.escape('')) + comment = re.search(pattern, package_str) + if comment: + comment = comment.group(1) + return comment + + +def update_versions(packages, new_version): + """ + Bulk replace of version: searches for package.xml and setup.py files directly in given folders and replaces version tag within. + + :param dict packages: dict from folder names to package xml objects in those folders + :param str new_version: version string "int.int.int" + :raises RuntimeError: if any one package.xml cannot be updated + """ + files = {} + for path, package_obj in packages.items(): + # Update any package.xml files. 
+ package_path = os.path.join(path, 'package.xml') + with open(package_path, 'r') as f: + package_str = f.read() + try: + new_package_str = _replace_version(package_str, new_version) + comment = _check_for_version_comment(new_package_str, new_version) + if comment: + print('NOTE: The package manifest "%s" contains a comment besides the version tag:\n %s' % (path, comment)) + except RuntimeError as rue: + raise RuntimeError('Could not bump version number in file %s: %s' % (package_path, str(rue))) + files[package_path] = new_package_str + # Update any setup.py files. + setup_py_path = os.path.join(path, 'setup.py') + if os.path.exists(setup_py_path): + # Only update setup.py for ament_python packages. + build_types = package_obj.get_unconditional_build_types() + if 'ament_python' in build_types: + with open(setup_py_path, 'r') as f: + setup_py_str = f.read() + try: + new_setup_py_str = _replace_setup_py_version(setup_py_str, new_version) + except RuntimeError as exc: + raise RuntimeError('Could not bump version number in file %s: %s' % (setup_py_path, str(exc))) + files[setup_py_path] = new_setup_py_str + + # if all replacements successful, write back modified package.xml + for package_path, new_package_str in files.items(): + with open(package_path, 'w') as f: + f.write(new_package_str) + + +def get_forthcoming_label(rst): + document = docutils.core.publish_doctree(rst) + forthcoming_label = None + for child in document.children: + title = None + if isinstance(child, docutils.nodes.subtitle): + title = child + elif isinstance(child, docutils.nodes.section): + section = child + if len(section.children) > 0 and isinstance(section.children[0], docutils.nodes.title): + title = section.children[0] + if title and len(title.children) > 0 and isinstance(title.children[0], docutils.nodes.Text): + title_text = title.children[0].astext() + if FORTHCOMING_LABEL.lower() in title_text.lower(): + if forthcoming_label: + raise RuntimeError('Found multiple forthcoming sections') + 
forthcoming_label = title_text + return forthcoming_label + + +def update_changelog_sections(changelogs, new_version): + # rename forthcoming sections to new_version including current date + new_changelog_data = {} + new_label = '%s (%s)' % (new_version, datetime.date.today().isoformat()) + for pkg_name, (changelog_path, changelog, forthcoming_label) in changelogs.items(): + data = rename_section(changelog.rst, forthcoming_label, new_label) + new_changelog_data[changelog_path] = data + + for changelog_path, data in new_changelog_data.items(): + with open(changelog_path, 'wb') as f: + f.write(data.encode('utf-8')) + + +def rename_section(data, old_label, new_label): + valid_section_characters = '!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~' + + def replace_section(match): + section_char = match.group(2)[0] + return new_label + '\n' + section_char * len(new_label) + pattern = '^(' + re.escape(old_label) + ')\r?\n([' + re.escape(valid_section_characters) + ']+)\r?$' + data, count = re.subn(pattern, replace_section, data, flags=re.MULTILINE) + if count == 0: + raise RuntimeError('Could not find section') + if count > 1: + raise RuntimeError('Found multiple matching sections') + return data diff --git a/.ci/catkin_pkg/packages.py b/.ci/catkin_pkg/packages.py new file mode 100644 index 0000000000..36b514494c --- /dev/null +++ b/.ci/catkin_pkg/packages.py @@ -0,0 +1,186 @@ +# Software License Agreement (BSD License) +# +# Copyright (c) 2012, Willow Garage, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Willow Garage, Inc. nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +"""Library to find packages in the filesystem.""" + +import multiprocessing +import os + +from .package import _get_package_xml +from .package import PACKAGE_MANIFEST_FILENAME +from .package import parse_package_string + + +DEFAULT_IGNORE_MARKERS = {'AMENT_IGNORE', 'CATKIN_IGNORE', 'COLCON_IGNORE'} + + +def find_package_paths(basepath, exclude_paths=None, exclude_subspaces=False, ignore_markers=DEFAULT_IGNORE_MARKERS): + """ + Crawls the filesystem to find package manifest files. + + When a subfolder contains either of the files mentioned in ``ignore_markers`` it is ignored. 
By default, these are: + - ``AMENT_IGNORE`` + - ``CATKIN_IGNORE`` + - ``COLCON_IGNORE`` + + :param basepath: The path to search in, ``str`` + :param exclude_paths: A list of paths which should not be searched, ``list`` + :param exclude_subspaces: The flag is subfolders containing a .catkin file should not be + searched, ``bool`` + :param ignore_markers: Names of files that indicate that a folder should be ignored, ``set`` + :returns: A list of relative paths containing package manifest files ``list`` + """ + paths = [] + real_exclude_paths = [os.path.realpath(p) for p in exclude_paths] if exclude_paths is not None else [] + for dirpath, dirnames, filenames in os.walk(basepath, followlinks=True): + if set(dirnames + filenames) & ignore_markers or \ + os.path.realpath(dirpath) in real_exclude_paths or \ + (exclude_subspaces and '.catkin' in filenames): + del dirnames[:] + continue + elif PACKAGE_MANIFEST_FILENAME in filenames: + paths.append(os.path.relpath(dirpath, basepath)) + del dirnames[:] + continue + # filter out hidden directories in-place + dirnames[:] = [d for d in dirnames if not d.startswith('.')] + return paths + + +def find_packages(basepath, exclude_paths=None, exclude_subspaces=False, warnings=None, ignore_markers=DEFAULT_IGNORE_MARKERS): + """ + Crawls the filesystem to find package manifest files and parses them. 
+ + :param basepath: The path to search in, ``str`` + :param exclude_paths: A list of paths which should not be searched, ``list`` + :param exclude_subspaces: The flag is subfolders containing a .catkin file should not be + searched, ``bool`` + :param warnings: Print warnings if None or return them in the given list, ``bool`` + :param ignore_markers: Names of files that indicate that a folder should be ignored, ``set`` + :returns: A dict mapping relative paths to ``Package`` objects ``dict`` + :raises: :exc:RuntimeError` If multiple packages have the same name + """ + packages = find_packages_allowing_duplicates(basepath, exclude_paths=exclude_paths, + exclude_subspaces=exclude_subspaces, warnings=warnings, + ignore_markers=ignore_markers) + package_paths_by_name = {} + for path, package in packages.items(): + if package.name not in package_paths_by_name: + package_paths_by_name[package.name] = set() + package_paths_by_name[package.name].add(path) + duplicates = dict([(name, paths) for name, paths in package_paths_by_name.items() if len(paths) > 1]) + if duplicates: + duplicates = ['Multiple packages found with the same name "%s":%s' % (name, ''.join(['\n- %s' % path_ for path_ in sorted(duplicates[name])])) for name in sorted(duplicates.keys())] + raise RuntimeError('\n'.join(duplicates)) + return packages + + +class _PackageParser(object): + + def __init__(self, capture_warnings): + self.capture_warnings = capture_warnings + + def __call__(self, xml_and_path_and_filename): + xml, path, filename = xml_and_path_and_filename + warnings = [] if self.capture_warnings else None + parsed_package = parse_package_string(xml, filename=filename, warnings=warnings) + return (path, parsed_package), warnings + + +def find_packages_allowing_duplicates(basepath, exclude_paths=None, exclude_subspaces=False, warnings=None, ignore_markers=DEFAULT_IGNORE_MARKERS): + """ + Crawls the filesystem to find package manifest files and parses them. 
+ + :param basepath: The path to search in, ``str`` + :param exclude_paths: A list of paths which should not be searched, ``list`` + :param exclude_subspaces: The flag is subfolders containing a .catkin file should not be + searched, ``bool`` + :param warnings: Print warnings if None or return them in the given list + :param ignore_markers: Names of files that indicate that a folder should be ignored, ``set`` + :returns: A dict mapping relative paths to ``Package`` objects ``dict`` + """ + package_paths = find_package_paths(basepath, exclude_paths=exclude_paths, exclude_subspaces=exclude_subspaces, ignore_markers=ignore_markers) + + xmls = {} + for path in package_paths: + xmls[path] = _get_package_xml(os.path.join(basepath, path)) + + data = [(v[0], k, v[1]) for k, v in xmls.items()] + + if not data: + return {} + + parallel = len(data) > 100 + if parallel: + try: + pool = multiprocessing.Pool() + except OSError: + # On chroot environment, multiprocessing is not available + # https://stackoverflow.com/questions/6033599/oserror-38-errno-38-with-multiprocessing + parallel = False + + if not parallel: + # use sequential loop + parsed_packages = {} + for xml, path, filename in data: + parsed_package = parse_package_string( + xml, filename=filename, warnings=warnings) + parsed_packages[path] = parsed_package + return parsed_packages + + # use multiprocessing pool + parser = _PackageParser(warnings is not None) + try: + path_parsed_packages, warnings_lists = zip(*pool.map(parser, data)) + finally: + pool.close() + pool.join() + if parser.capture_warnings: + map(warnings.extend, warnings_lists) + return dict(path_parsed_packages) + + +def verify_equal_package_versions(packages): + """ + Verify that all packages have the same version number. 
+ + :param packages: The list of ``Package`` objects, ``list`` + :returns: The version number + :raises: :exc:RuntimeError` If the version is not equal in all packages + """ + version = None + for package in packages: + if version is None: + version = package.version + elif package.version != version: + raise RuntimeError('Two packages have different version numbers (%s != %s):\n- %s\n- %s' % (package.version, version, package.filename, list(packages)[0].filename)) + return version diff --git a/.ci/catkin_pkg/python_setup.py b/.ci/catkin_pkg/python_setup.py new file mode 100644 index 0000000000..84bd64d42b --- /dev/null +++ b/.ci/catkin_pkg/python_setup.py @@ -0,0 +1,177 @@ +# Software License Agreement (BSD License) +# +# Copyright (c) 2012, Willow Garage, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Willow Garage, Inc. nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
"""Library for providing the relevant information from the package manifest for the Python setup.py file."""

import os
import sys


def generate_distutils_setup(package_xml_path=os.path.curdir, **kwargs):
    """
    Extract the information relevant for distutils from the package manifest.

    The following keys will be set:

    The "name" and "version" are taken from the eponymous tags.

    A single maintainer will set the keys "maintainer" and
    "maintainer_email" while multiple maintainers are merged into the
    "maintainer" fields (including their emails). Authors are handled
    likewise.

    The first URL of type "website" (or without a type) is used for
    the "url" field.

    The "description" is taken from the eponymous tag if it does not
    exceed 200 characters and has no newlines. If it does "description"
    contains the truncated text while "long_description" contains the
    complete.

    All licenses are merged into the "license" field.

    :param package_xml_path: path to the directory containing package.xml
    :param kwargs: All keyword arguments are passed through. The above
        mentioned keys are verified to be identical if passed as a
        keyword argument

    :returns: dict populated with parsed fields and passed keyword arguments
    :raises: :exc:`InvalidPackage`
    :raises: :exc:`IOError`
    """
    # imported lazily so the path helper functions below remain usable
    # without pulling in the package.xml parser
    from .package import InvalidPackage, parse_package

    package = parse_package(package_xml_path)

    data = {}
    data['name'] = package.name
    data['version'] = package.version

    # either set one author with one email or join all in a single field
    if len(package.authors) == 1 and package.authors[0].email is not None:
        data['author'] = package.authors[0].name
        data['author_email'] = package.authors[0].email
    else:
        data['author'] = ', '.join([('%s <%s>' % (a.name, a.email) if a.email is not None else a.name) for a in package.authors])

    # either set one maintainer with one email or join all in a single field
    if len(package.maintainers) == 1:
        data['maintainer'] = package.maintainers[0].name
        data['maintainer_email'] = package.maintainers[0].email
    else:
        data['maintainer'] = ', '.join(['%s <%s>' % (m.name, m.email) for m in package.maintainers])

    # either set the first URL with the type 'website' or the first URL of any type
    websites = [url.url for url in package.urls if url.type == 'website']
    if websites:
        data['url'] = websites[0]
    elif package.urls:
        data['url'] = package.urls[0].url

    # use only the first line of the description, truncated to 200 characters;
    # guard against an empty description, which previously raised IndexError
    # on splitlines()[0]
    description_lines = package.plaintext_description.splitlines()
    description = description_lines[0] if description_lines else ''
    if len(description) > 200:
        description = description[:197] + '...'

    data['description'] = description
    if description != package.plaintext_description:
        data['long_description'] = package.plaintext_description

    data['license'] = ', '.join(package.licenses)

    # pass keyword arguments and verify equality if generated and passed in
    for k, v in kwargs.items():
        if k in data:
            if v != data[k]:
                raise InvalidPackage('The keyword argument "%s" does not match the information from package.xml: "%s" != "%s"' % (k, v, data[k]), package_xml_path)
        else:
            data[k] = v

    return data


def get_global_bin_destination():
    """Return the install destination for global executables."""
    return 'bin'


def get_global_etc_destination():
    """Return the install destination for global configuration files."""
    return 'etc'


def get_global_include_destination():
    """Return the install destination for global headers."""
    return 'include'


def get_global_lib_destination():
    """Return the install destination for global libraries."""
    return 'lib'


def get_global_libexec_destination():
    """Return the install destination for global executable libraries."""
    return 'lib'


def get_global_python_destination():
    """Return the Python module install destination for the running interpreter."""
    dest = 'lib/python%u.%u/' % (sys.version_info[0], sys.version_info[1])
    # Debian-style installs use dist-packages instead of site-packages
    if '--install-layout=deb' not in sys.argv[1:]:
        dest += 'site-packages'
    else:
        dest += 'dist-packages'
    return dest


def get_global_share_destination():
    """Return the install destination for global shared resources."""
    return 'share'


def get_package_bin_destination(pkgname):
    """Return the install destination for a package's executables."""
    return os.path.join(get_global_libexec_destination(), pkgname)


def get_package_etc_destination(pkgname):
    """Return the install destination for a package's configuration files."""
    return os.path.join(get_global_etc_destination(), pkgname)


def get_package_include_destination(pkgname):
    """Return the install destination for a package's headers."""
    return os.path.join(get_global_include_destination(), pkgname)


def get_package_lib_destination(_pkgname):
    """Return the install destination for a package's libraries (not namespaced)."""
    return get_global_lib_destination()


def get_package_python_destination(pkgname):
    """Return the install destination for a package's Python modules."""
    return os.path.join(get_global_python_destination(), pkgname)


def get_package_share_destination(pkgname):
    """Return the install destination for a package's shared resources."""
    return os.path.join(get_global_share_destination(), pkgname)
2012, Willow Garage, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Willow Garage, Inc. nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
"""API provided for rospack to reorder include/library paths according to the chained workspaces."""

from .workspaces import get_spaces, order_paths


def reorder_paths(paths):
    """
    Reorder a space-separated string of paths according to the chained workspaces.

    :param paths: single string of paths separated by spaces (may be empty)
    :returns: single string of the same paths, reordered
    """
    if paths:
        requested = paths.split(' ')
    else:
        # an empty input string yields an empty result, not ['']
        requested = []
    ordered = order_paths(requested, get_spaces())
    return ' '.join(ordered)
"""Module to enable color terminal output."""

import os
import string

# global lookup table of ansi escape sequences,
# managed by enable_ANSI_colors() / disable_ANSI_colors()
_ansi = {}


def ansi(key):
    """Return the escape sequence for a given ansi color key."""
    return _ansi[key]


def enable_ANSI_colors():
    """Populate the global module dictionary `ansi` with ANSI escape sequences."""
    global _ansi
    colors = ['black', 'red', 'green', 'yellow', 'blue', 'purple', 'cyan', 'white']
    shorts = {'black': 'k', 'red': 'r', 'green': 'g', 'yellow': 'y',
              'blue': 'b', 'purple': 'p', 'cyan': 'c', 'white': 'w'}
    escape = '\033'

    def code(number):
        # build a full escape sequence from a numeric SGR code
        return '{0}[{1}m'.format(escape, number)

    # text attributes with their one-character aliases
    attributes = {
        'reset': 0, '|': 0, 'boldon': 1, '!': 1, 'italicson': 3, '/': 3,
        'ulon': 4, '_': 4, 'invon': 7, 'boldoff': 22, 'italicsoff': 23,
        'uloff': 24, 'invoff': 27,
    }
    _ansi = {'escape': escape}
    for name, number in attributes.items():
        _ansi[name] = code(number)

    # foreground (30..37) and background (40..47) colors, long and short names
    for offset, color in enumerate(colors):
        foreground = code(30 + offset)
        background = code(40 + offset)
        _ansi[color] = foreground
        _ansi[color + 'f'] = foreground
        _ansi[shorts[color] + 'f'] = foreground
        _ansi[color + 'b'] = background
        _ansi[shorts[color] + 'b'] = background

    # placeholders used by sanitize() to escape literal format markers
    _ansi.update({
        'atexclimation': '@!',
        'atfwdslash': '@/',
        'atunderscore': '@_',
        'atbar': '@|',
    })


def disable_ANSI_colors():
    """Set all the ANSI escape sequences to empty strings, effectively disabling console colors."""
    global _ansi
    _ansi = {key: '' for key in _ansi}


# Default to ansi colors on, except on Windows consoles
enable_ANSI_colors()
if os.name in ['nt']:
    disable_ANSI_colors()


class ColorTemplate(string.Template):
    """Template using '@' instead of '$' as the placeholder delimiter."""

    delimiter = '@'


def sanitize(msg):
    """Sanitize the existing msg, use before adding color annotations."""
    # the order matters: escape literal characters first, then rewrite the
    # doubled shortcuts into their named placeholders
    for old, new in (
            ('@', '@@'), ('{', '{{'), ('}', '}}'),
            ('@@!', '@{atexclimation}'), ('@@/', '@{atfwdslash}'),
            ('@@_', '@{atunderscore}'), ('@@|', '@{atbar}')):
        msg = msg.replace(old, new)
    return msg


def fmt(msg):
    """Replace color annotations with ansi escape sequences."""
    # expand the shortcut markers into their named placeholders first
    for shortcut, placeholder in (
            ('@!', '@{boldon}'), ('@/', '@{italicson}'),
            ('@_', '@{ulon}'), ('@|', '@{reset}')):
        msg = msg.replace(shortcut, placeholder)
    # always terminate with a reset so colors do not leak past the message
    return ColorTemplate(msg).substitute(_ansi) + ansi('reset')
"""
Common functions that can be used to mark spaces, e.g. build and devel, to indicate which tools previously built the space.

This allows the tools to detect cross tool talk and avoid it where appropriate
"""

import os

# name of the marker file placed in a space by mark_space_as_built_by()
SPACE_BUILT_BY_MARKER_FILENAME = '.built_by'


def get_previous_tool_used_on_the_space(space_path):
    """
    Return the tool used to build the space at the given path, or None.

    Returns None if the path does not exist or if there is no built by file.

    :param str space_path: path to the space in question.
    :returns: str identifying the tool used to build the space or None.
    """
    if not os.path.isdir(space_path):
        return None
    marker = os.path.join(space_path, SPACE_BUILT_BY_MARKER_FILENAME)
    if not os.path.isfile(marker):
        return None
    with open(marker, 'r') as handle:
        return handle.read().strip()


def mark_space_as_built_by(space_path, tool_name):
    """
    Place a marker file in the space at the given path, telling who built it.

    The path to the marker is created if necessary.

    :param str space_path: path to the space which should be marked.
    :param str tool_name: name of the tool doing the marking.
    :raises: OSError, others, when trying to create the folder.
    """
    if not os.path.isdir(space_path):
        # may raise if the path already exists as a file or on permission errors
        os.makedirs(space_path)
    marker = os.path.join(space_path, SPACE_BUILT_BY_MARKER_FILENAME)
    with open(marker, 'w') as handle:
        handle.write(tool_name)
import copy
import os
import sys


class _PackageDecorator(object):
    """Wrap a ``Package`` with bookkeeping needed for topological ordering."""

    def __init__(self, package, path):
        self.package = package
        self.path = path
        self.is_metapackage = 'metapackage' in (e.tagname for e in self.package.exports)
        generators = [e.content for e in self.package.exports if e.tagname == 'message_generator']
        self.message_generator = generators[0] if generators else None
        # a set containing this package name, direct build depends
        # and recursive run_depends of these build_depends
        self.depends_for_topological_order = None
        # a set containing this package name and recursive run_depends
        self._recursive_run_depends_for_topological_order = None

    def __getattr__(self, name):
        # delegate unknown attributes to the wrapped package, but never
        # dunder lookups (which would break copy/pickle protocols)
        if name.startswith('__'):
            raise AttributeError(name)
        return getattr(self.package, name)

    def calculate_depends_for_topological_order(self, packages):
        """
        Set self.depends_for_topological_order to the recursive dependencies required for topological order.

        It contains this package name, all direct build- and buildtool
        dependencies and their recursive runtime dependencies.
        The set only contains packages which are in the passed packages
        dictionary.

        :param packages: dict of name to ``_PackageDecorator``
        """
        self.depends_for_topological_order = set()
        direct_depends = (
            self.package.build_depends +
            self.package.buildtool_depends +
            self.package.test_depends)
        names = [d.name for d in direct_depends if d.evaluated_condition]

        # collect all group dependencies
        for group_depend in self.package.group_depends:
            if group_depend.evaluated_condition:
                assert group_depend.members is not None, \
                    'Group members need to be determined before'
                names += group_depend.members

        # skip external dependencies, meaning names that are not known packages
        for name in [n for n in names if n in packages.keys()]:
            if not self.is_metapackage and packages[name].is_metapackage:
                print('WARNING: package "%s" should not depend on metapackage "%s" but on its packages instead' % (self.name, name), file=sys.stderr)
            if name in self.depends_for_topological_order:
                # already accumulated through another dependency;
                # checked inside the loop since the set grows every cycle
                continue
            packages[name]._add_recursive_run_depends(packages, self.depends_for_topological_order)

    def _add_recursive_run_depends(self, packages, depends_for_topological_order):
        """
        Modify depends_for_topological_order argument by adding run_depends of self recursively.

        Only packages which are in the passed packages are added and recursed into.

        :param packages: dict of name to ``_PackageDecorator``
        :param depends_for_topological_order: set to be extended
        """
        if self._recursive_run_depends_for_topological_order is None:
            # compute lazily and memoize; the set always contains this package
            cache = set()
            cache.add(self.package.name)
            self._recursive_run_depends_for_topological_order = cache
            known_names = packages.keys()
            names = [d.name for d in self.package.run_depends if d.evaluated_condition]
            for group_depend in self.package.group_depends:
                if group_depend.evaluated_condition:
                    assert group_depend.members is not None, \
                        'Group members need to be determined before'
                    names += group_depend.members
            for name in [n for n in names
                         if n in known_names and n not in cache]:
                packages[name]._add_recursive_run_depends(packages, cache)
        depends_for_topological_order.update(self._recursive_run_depends_for_topological_order)


def topological_order(root_dir, whitelisted=None, blacklisted=None, underlay_workspaces=None):
    """
    Crawl the filesystem to find packages and use their dependencies to return a topologically ordered list.

    When a circular dependency is detected, the last item in the returned list
    is a tuple with None and a string giving a superset of the guilty packages.

    :param root_dir: The path to search in, ``str``
    :param whitelisted: A list of whitelisted package names, ``list``
    :param blacklisted: A list of blacklisted package names, ``list``
    :param underlay_workspaces: A list of underlay workspaces of packages which might provide dependencies in case of partial workspaces, ``list``
    :returns: A list of tuples containing the relative path and a ``Package`` object, ``list``
    """
    # imported here so the pure ordering functions below do not require
    # the crawling / workspace helpers
    from .packages import find_packages
    from .workspaces import get_spaces

    packages = find_packages(root_dir)

    # find packages in underlayed workspaces
    underlay_packages = {}
    if underlay_workspaces:
        for workspace in reversed(underlay_workspaces):
            # since underlay workspace might be a devel space
            # consider spaces stored in the .catkin file
            for space in get_spaces([workspace]):
                for path, package in find_packages(space).items():
                    underlay_packages[package.name] = (path, package)

    return topological_order_packages(packages, whitelisted=whitelisted, blacklisted=blacklisted, underlay_packages=dict(underlay_packages.values()))


def topological_order_packages(packages, whitelisted=None, blacklisted=None, underlay_packages=None):
    """
    Topologically order packages.

    evaluate_conditions() will be called for each package.

    If group dependencies haven't determined their members yet
    extract_group_members() will be called for each group dependency to do so.

    First returning packages which have message generators and then
    the rest based on direct build-/buildtool_depends and indirect
    recursive run_depends.

    When a circular dependency is detected, the last item in the returned list
    is a tuple with None and a string giving a superset of the guilty packages.

    :param packages: A dict mapping relative paths to ``Package`` objects ``dict``
    :param whitelisted: A list of whitelisted package names, ``list``
    :param blacklisted: A list of blacklisted package names, ``list``
    :param underlay_packages: A dict mapping relative paths to ``Package`` objects ``dict``
    :returns: A list of tuples containing the relative path and a ``Package`` object, ``list``
    """
    decorators_by_name = {}
    for path, package in packages.items():
        # skip non-whitelisted and blacklisted packages
        if whitelisted and package.name not in whitelisted:
            continue
        if blacklisted and package.name in blacklisted:
            continue
        if package.name in decorators_by_name:
            raise RuntimeError('Two packages with the same name "%s" in the workspace:\n- %s\n- %s' % (package.name, decorators_by_name[package.name].path, path))
        decorators_by_name[package.name] = _PackageDecorator(package, path)

    # underlay packages are only considered when not overlaid
    underlay_decorators_by_name = {}
    if underlay_packages:
        for path, package in underlay_packages.items():
            if package.name in decorators_by_name:
                continue
            underlay_decorators_by_name[package.name] = _PackageDecorator(package, path)
    decorators_by_name.update(underlay_decorators_by_name)

    # evaluate conditions and determine group membership
    pkgs = [decorator.package for decorator in decorators_by_name.values()]
    for pkg in pkgs:
        pkg.evaluate_conditions(os.environ)
    for pkg in pkgs:
        for group_depend in pkg.group_depends:
            if group_depend.evaluated_condition:
                group_depend.extract_group_members(pkgs)

    # calculate transitive dependencies
    for decorator in decorators_by_name.values():
        decorator.calculate_depends_for_topological_order(decorators_by_name)

    ordered = _sort_decorated_packages(decorators_by_name)
    # drop underlay packages from the result; keep cycle markers (path None)
    return [(path, package) for path, package in ordered
            if path is None or package.name not in underlay_decorators_by_name]


def _reduce_cycle_set(packages_orig):
    """
    Remove iteratively some packages from a set that are definitely not part of any cycle.

    When there is a cycle in the package dependencies,
    _sort_decorated_packages only knows the set of packages containing
    the cycle.
    :param packages_orig: A dict mapping package name to ``_PackageDecorator`` objects ``dict``
    :returns: A list of package names from the input which could not easily be detected as not being part of a cycle.
    """
    assert packages_orig
    remaining = dict(packages_orig)
    previous_depended = None
    while remaining:
        depended = set()
        for decorator in remaining.values():
            if decorator.depends_for_topological_order:
                depended |= decorator.depends_for_topological_order
        # packages nobody depends on cannot be part of a cycle
        for name in list(remaining):
            if name not in depended:
                del remaining[name]
        if previous_depended and previous_depended == depended:
            # fixed point reached: nothing more can be removed
            return remaining.keys()
        previous_depended = depended


def _sort_decorated_packages(packages_orig):
    """
    Sort packages according to dependency ordering.

    First considering the message generators and their recursive dependencies
    and then the rest of the packages.
    When a circle is detected, a tuple with None and a string giving a
    superset of the guilty packages.

    :param packages_orig: A dict mapping package name to ``_PackageDecorator`` objects ``dict``
    :returns: A List of tuples containing the relative path and a ``Package`` object ``list``
    """
    packages = copy.deepcopy(packages_orig)

    # propagate the message-generator flag to every package that a
    # message generator (recursively) depends on
    to_follow = set(name for name, decorator in packages.items() if decorator.message_generator)
    unmarked = set(packages.keys()) - to_follow
    while to_follow:
        current = to_follow.pop()
        for name in packages[current].depends_for_topological_order:
            if name in unmarked:
                packages[name].message_generator = True
                unmarked.remove(name)
                # queue for recursion
                to_follow.add(name)

    ordered_packages = []
    while packages:
        # find all packages without outstanding build dependencies
        generator_candidates = []
        other_candidates = []
        for name, decorator in packages.items():
            if decorator.depends_for_topological_order:
                continue
            if decorator.message_generator:
                generator_candidates.append(name)
            else:
                other_candidates.append(name)

        # message generators are always chosen first
        candidates = generator_candidates or other_candidates
        if not candidates:
            # circular dependency: emit a pseudo entry with path None and
            # a superset of the guilty package names, then stop
            ordered_packages.append([None, ', '.join(sorted(_reduce_cycle_set(packages)))])
            break

        # add only the first candidate; removing its name below may change
        # which packages become candidates in the next iteration
        chosen = candidates[0]
        ordered_packages.append([packages[chosen].path, packages[chosen].package])
        del packages[chosen]
        for decorator in packages.values():
            decorator.depends_for_topological_order.discard(chosen)

    return ordered_packages
+ +from __future__ import print_function + +import os +import subprocess + + +def get_repository_type(path): + for vcs_type in ['bzr', 'git', 'hg', 'svn']: + if os.path.isdir(os.path.join(path, '.%s' % vcs_type)): + return vcs_type + return None + + +def vcs_remotes(path, vcs_type=None): + if vcs_type is None: + vcs_type = get_repository_type(path) + if vcs_type == 'git': + output = subprocess.check_output(['git', 'remote', '-v'], cwd=path) + return output.decode('utf-8').rstrip() + elif vcs_type == 'hg': + output = subprocess.check_output(['hg', 'paths'], cwd=path) + return output.decode('utf-8').rstrip() + elif vcs_type == 'svn': + output = subprocess.check_output(['svn', 'info'], cwd=path) + output = output.decode('utf-8').rstrip() + for line in output.split(os.linesep): + if line.startswith('URL: '): + return line + raise RuntimeError('Could not determine URL of svn working copy') + else: + raise RuntimeError('"remotes" command not supported for vcs type "%s"' % vcs_type) diff --git a/.ci/catkin_pkg/workspaces.py b/.ci/catkin_pkg/workspaces.py new file mode 100644 index 0000000000..d6f42730ac --- /dev/null +++ b/.ci/catkin_pkg/workspaces.py @@ -0,0 +1,108 @@ +# Software License Agreement (BSD License) +# +# Copyright (c) 2012, Willow Garage, Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# * Neither the name of Willow Garage, Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +"""Library to provided logic for chained workspaces.""" + +from __future__ import print_function + +import os + +CATKIN_WORKSPACE_MARKER_FILE = '.catkin_workspace' + + +def get_spaces(paths=None): + """ + Return a list of spaces based on the CMAKE_PREFIX_PATH or passed in list of workspaces. + + It resolves the source space for each devel space and ignores non-catkin paths. 
+ :param paths_to_order: list of paths + :param prefix_paths: list of prefixes, must not end with '/' + """ + if paths is None: + if 'CMAKE_PREFIX_PATH' not in os.environ: + raise RuntimeError('Neither the environment variable CMAKE_PREFIX_PATH is set nor was a list of paths passed.') + paths = os.environ['CMAKE_PREFIX_PATH'].split(os.pathsep) if os.environ['CMAKE_PREFIX_PATH'] else [] + + spaces = [] + for path in paths: + marker = os.path.join(path, '.catkin') + # ignore non catkin paths + if not os.path.exists(marker): + continue + spaces.append(path) + + # append source spaces + with open(marker, 'r') as f: + data = f.read() + if data: + spaces += data.split(';') + return spaces + + +def order_paths(paths_to_order, prefix_paths): + """ + Return a list containing all items of paths_to_order ordered by list of prefix_paths, compared as strings. + + :param paths_to_order: list of paths + :param prefix_paths: list of prefixes, must not end with '/' + """ + # the ordered paths contains a list for each prefix plus one more which contains paths which do not match one of the prefix_paths + ordered_paths = [[] for _ in range(len(prefix_paths) + 1)] + + for path in paths_to_order: + # put each directory into the slot where it matches the prefix, or last otherwise + index = 0 + for prefix in prefix_paths: + if _is_equal_or_in_parents(prefix, path): + break + index += 1 + ordered_paths[index].append(path) + + # flatten list of lists + return [j for i in ordered_paths for j in i] + + +def _is_equal_or_in_parents(dir_, path): + dir_ = os.path.normcase(os.path.realpath(dir_)) + path = os.path.normcase(os.path.realpath(path)) + return path == dir_ or path.startswith(dir_ + os.sep) + + +def ensure_workspace_marker(base_path): + """ + Create workspace marker file at path if not existing. 
+ + :param path: target folder + """ + if not os.path.exists(os.path.join(base_path, CATKIN_WORKSPACE_MARKER_FILE)): + with open(os.path.join(base_path, CATKIN_WORKSPACE_MARKER_FILE), 'a') as fhand: + fhand.write('# This file currently only serves to mark the location of a catkin workspace for tool integration\n') diff --git a/.ci/get_build_matrix.sh b/.ci/get_build_matrix.sh index 2e599ef4e1..e9ce16fcf5 100755 --- a/.ci/get_build_matrix.sh +++ b/.ci/get_build_matrix.sh @@ -1,5 +1,9 @@ #!/bin/bash +# get the path to this script +MY_PATH=`dirname "$0"` +MY_PATH=`( cd "$MY_PATH" && pwd )` + set -e trap 'last_command=$current_command; current_command=$BASH_COMMAND' DEBUG @@ -11,26 +15,18 @@ LIST=$1 VARIANT=$2 ARCH=$3 WORKSPACE=/tmp/workspace -ARTIFACTS_FOLDER=/tmp/artifacts YAML_FILE=$LIST.yaml -./.ci_scripts/package_build/add_ros_ppa.sh >> /tmp/log.txt 2>&1 - -# dependencies need for build the deb package -sudo apt-get -y install ros-noetic-catkin python3-catkin-tools >> /tmp/log.txt 2>&1 -sudo apt-get -y install fakeroot dpkg-dev debhelper >> /tmp/log.txt 2>&1 -sudo pip3 install -U bloom >> /tmp/log.txt 2>&1 +REPOS=$($MY_PATH/parse_yaml.py $YAML_FILE $ARCH) -REPOS=$(./.ci/parse_yaml.py $YAML_FILE $ARCH) +if [ -e $WORKSPACE ]; then + rm -rf $WORKSPACE +fi -mkdir -p $WORKSPACE >> /tmp/log.txt 2>&1 -mkdir -p $ARTIFACTS_FOLDER >> /tmp/log.txt 2>&1 +mkdir -p $WORKSPACE/src >> /tmp/log.txt 2>&1 cd $WORKSPACE >> /tmp/log.txt 2>&1 -mkdir src >> /tmp/log.txt 2>&1 -source /opt/ros/noetic/setup.bash >> /tmp/log.txt 2>&1 -catkin init >> /tmp/log.txt 2>&1 cd $WORKSPACE/src >> /tmp/log.txt 2>&1 @@ -63,9 +59,11 @@ echo "$REPOS" | while IFS= read -r REPO; do done -$DEBUG && echo "Done cloning" +$DEBUG && echo "$0: Done cloning" + +BUILD_ORDER=$($MY_PATH/get_build_order.py $WORKSPACE/src) -BUILD_ORDER=$(catkin list -u) +$DEBUG && echo "$0: Build oreder: $BUILD_ORDER" FIRST=true @@ -73,11 +71,9 @@ RESULT='[' $DEBUG && echo "Sorting packages" -for ROS_PACKAGE in $BUILD_ORDER; do +for 
PKG_PATH in $BUILD_ORDER; do - cd $WORKSPACE - PKG_PATH=$(catkin locate "$ROS_PACKAGE") - cd $PKG_PATH + cd $WORKSPACE/src/$PKG_PATH $DEBUG && echo "Gonna look for package location for '$ROS_PACKAGE'" diff --git a/.ci/get_build_order.py b/.ci/get_build_order.py new file mode 100755 index 0000000000..34d5f7a181 --- /dev/null +++ b/.ci/get_build_order.py @@ -0,0 +1,19 @@ +#!/usr/bin/python3 + +import sys +from catkin_pkg.topological_order import topological_order + +def main(): + + if len(sys.argv) == 2: + ws_path = sys.argv[1] + else: + return "" + + order = topological_order(ws_path) + + for touple in order: + print("{}".format(touple[0])) + +if __name__ == '__main__': + main() diff --git a/.gitignore b/.gitignore index b6dae4d0db..93fc6b6d16 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,9 @@ *.swo *.swp +__pycache__ +__pycache__* + act.sh .ci_scripts