diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a609af1d3..13e3bc782 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -12,7 +12,23 @@ on:
jobs:
- build:
+ Format:
+ name: 🐍 Format
+ runs-on: ubuntu-latest
+ steps:
+
+ - name: 🧰 Checkout
+ uses: actions/checkout@v2
+
+      - name: 🐍 Setup Python 3.10
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.10'
+
+ - name: 🚦 Run pre-commit Action
+ uses: pre-commit/action@v2.0.0
+
+ Build:
strategy:
fail-fast: false
matrix:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 000000000..68ff1e471
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,7 @@
+# Configuration for pre-commit (https://pre-commit.com/), a tool to run
+# formatters, linters, and other productivity tools before a commit.
+repos:
+ - repo: https://github.com/psf/black
+ rev: 21.7b0
+ hooks:
+ - id: black
diff --git a/dev-requirements.txt b/dev-requirements.txt
new file mode 100644
index 000000000..2734d7ee8
--- /dev/null
+++ b/dev-requirements.txt
@@ -0,0 +1,2 @@
+pre-commit>=2.9.0
+tox
diff --git a/doc/conf.py b/doc/conf.py
index 41057b59c..249f91491 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -15,51 +15,52 @@
from datetime import datetime
import os
import sys
-sys.path.insert(0, os.path.abspath('..'))
-sys.path.insert(0, os.path.abspath('../tests/'))
-sys.path.insert(0, os.path.abspath('../tests/test_vunit/vunit_mock/'))
+
+sys.path.insert(0, os.path.abspath(".."))
+sys.path.insert(0, os.path.abspath("../tests/"))
+sys.path.insert(0, os.path.abspath("../tests/test_vunit/vunit_mock/"))
# -- Project information -----------------------------------------------------
-project = 'Edalize'
-copyright = '2019-{}, Olof Kindgren'.format(datetime.now().year)
-author = 'Olof Kindgren'
+project = "Edalize"
+copyright = "2019-{}, Olof Kindgren".format(datetime.now().year)
+author = "Olof Kindgren"
# The short X.Y version
-version = ''
+version = ""
# The full version, including alpha/beta/rc tags
-release = '0.1.3'
+release = "0.1.3"
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
-needs_sphinx = '3.0'
+needs_sphinx = "3.0"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
- 'sphinx.ext.autodoc',
- 'sphinx.ext.viewcode',
- 'sphinx.ext.napoleon',
- 'sphinx_autodoc_typehints',
- 'sphinx.ext.intersphinx',
+ "sphinx.ext.autodoc",
+ "sphinx.ext.viewcode",
+ "sphinx.ext.napoleon",
+ "sphinx_autodoc_typehints",
+ "sphinx.ext.intersphinx",
]
# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+source_suffix = ".rst"
# The master toctree document.
-master_doc = 'index'
+master_doc = "index"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -71,13 +72,15 @@
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
-intersphinx_mapping = {'python': ('https://docs.python.org/3', None),
- 'vunit': ('https://vunit.github.io/', None)}
+intersphinx_mapping = {
+ "python": ("https://docs.python.org/3", None),
+ "vunit": ("https://vunit.github.io/", None),
+}
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
@@ -93,13 +96,16 @@
try:
import sphinx_rtd_theme
- html_theme = 'sphinx_rtd_theme'
+
+ html_theme = "sphinx_rtd_theme"
except ImportError:
- sys.stderr.write('Warning: The Sphinx \'sphinx_rtd_theme\' HTML theme was '+
- 'not found. Make sure you have the theme installed to produce pretty '+
- 'HTML output. Falling back to the default theme.\n')
+ sys.stderr.write(
+ "Warning: The Sphinx 'sphinx_rtd_theme' HTML theme was "
+ + "not found. Make sure you have the theme installed to produce pretty "
+ + "HTML output. Falling back to the default theme.\n"
+ )
- html_theme = 'alabaster'
+ html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
@@ -126,7 +132,7 @@
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
-htmlhelp_basename = 'Edalizedoc'
+htmlhelp_basename = "Edalizedoc"
# -- Options for LaTeX output ------------------------------------------------
@@ -135,15 +141,12 @@
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
-
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
-
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
-
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
@@ -153,8 +156,7 @@
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- (master_doc, 'Edalize.tex', 'Edalize Documentation',
- 'Olof Kindgren', 'manual'),
+ (master_doc, "Edalize.tex", "Edalize Documentation", "Olof Kindgren", "manual"),
]
@@ -162,10 +164,7 @@
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
-man_pages = [
- (master_doc, 'edalize', 'Edalize Documentation',
- [author], 1)
-]
+man_pages = [(master_doc, "edalize", "Edalize Documentation", [author], 1)]
# -- Options for Texinfo output ----------------------------------------------
@@ -174,9 +173,15 @@
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- (master_doc, 'Edalize', 'Edalize Documentation',
- author, 'Edalize', 'Edalize is a Python Library for interacting with EDA tools.',
- 'Miscellaneous'),
+ (
+ master_doc,
+ "Edalize",
+ "Edalize Documentation",
+ author,
+ "Edalize",
+ "Edalize is a Python Library for interacting with EDA tools.",
+ "Miscellaneous",
+ ),
]
@@ -195,7 +200,7 @@
# epub_uid = ''
# A list of files that should not be packed into the epub file.
-epub_exclude_files = ['search.html']
+epub_exclude_files = ["search.html"]
# -- Extension configuration -------------------------------------------------
diff --git a/doc/dev/setup.rst b/doc/dev/setup.rst
new file mode 100644
index 000000000..4083a3da8
--- /dev/null
+++ b/doc/dev/setup.rst
@@ -0,0 +1,50 @@
+Development Setup
+=================
+
+Setup development environment
+-----------------------------
+
+.. note::
+
+ If you have already installed Edalize, remove it first using ``pip3 uninstall edalize``.
+
+To develop Edalize and test the changes, the edalize package needs to be installed in editable or development mode.
+In this mode, the ``edalize`` command is linked to the source directory, and changes made to the source code are
+immediately visible when calling ``edalize``.
+
+.. code-block:: bash
+
+ # Install all Python packages required to develop edalize
+ pip3 install --user -r dev-requirements.txt
+
+ # Install Git pre-commit hooks, e.g. for the code formatter and lint tools
+ pre-commit install
+
+ # Install the edalize package in editable mode
+ pip3 install --user -e .
+
+.. note::
+
+ All commands above use Python 3 and install software only for the current user.
+ If, after this installation, the ``edalize`` command cannot be found adjust your ``PATH`` environment variable to
+ include ``~/.local/bin``.
+
+After this installation is completed, you can
+
+* edit files in the source directory and re-run ``edalize`` to immediately see the changes,
+* run the unit tests as outlined in the section below, and
+* use linter and automated code formatters.
+
+Formatting and linting code
+---------------------------
+
+The Edalize code comes with tooling to automatically format code to conform to our expectations.
+These tools are installed and called through a tool called `pre-commit <https://pre-commit.com/>`_.
+No setup is required: whenever you do a ``git commit``, the necessary tools are called and your code is automatically formatted and checked for common mistakes.
+
+To check the whole source code ``pre-commit`` can be run directly:
+
+.. code-block:: bash
+
+ # check and fix all files
+ pre-commit run -a
diff --git a/doc/source/tests.rst b/doc/dev/tests.rst
similarity index 100%
rename from doc/source/tests.rst
rename to doc/dev/tests.rst
diff --git a/doc/source/edalize.rst b/doc/edalize.rst
similarity index 100%
rename from doc/source/edalize.rst
rename to doc/edalize.rst
diff --git a/doc/genindex.rst b/doc/genindex.rst
new file mode 100644
index 000000000..d4fc2f48f
--- /dev/null
+++ b/doc/genindex.rst
@@ -0,0 +1,4 @@
+.. # This file is a placeholder and will be replaced
+
+Index
+#####
diff --git a/doc/index.rst b/doc/index.rst
index fa501d522..4ca892032 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -18,19 +18,26 @@
Welcome to Edalize's documentation!
===================================
+Edalize is a Python Library for interacting with EDA tools.
+It can create project files for supported tools and run them in batch or GUI mode (where supported).
+
.. toctree::
- :maxdepth: 2
- :caption: Contents:
+ :caption: Reference
+ :hidden:
edam/api
- source/modules
- source/tests
+ Modules
+.. toctree::
+ :caption: Developer's Guide
+ :hidden:
-Indices and tables
-==================
+ dev/setup
+ dev/tests
+.. toctree::
+ :caption: Indices and tables
+ :hidden:
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
+ genindex
+ Module Index
diff --git a/doc/py-modindex.rst b/doc/py-modindex.rst
new file mode 100644
index 000000000..5f07d3e25
--- /dev/null
+++ b/doc/py-modindex.rst
@@ -0,0 +1,4 @@
+.. # This file is a placeholder and will be replaced
+
+Module Index
+############
diff --git a/doc/source/modules.rst b/doc/source/modules.rst
deleted file mode 100644
index 9908a3228..000000000
--- a/doc/source/modules.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Modules
-=======
-
-.. toctree::
- :maxdepth: 4
-
- edalize
diff --git a/edalize/__init__.py b/edalize/__init__.py
index 4f02eea6b..7346fa4db 100644
--- a/edalize/__init__.py
+++ b/edalize/__init__.py
@@ -7,21 +7,23 @@
from pkgutil import walk_packages
NON_TOOL_PACKAGES = [
- 'vunit_hooks',
- 'reporting',
- 'ise_reporting',
- 'vivado_reporting',
- 'quartus_reporting',
+ "vunit_hooks",
+ "reporting",
+ "ise_reporting",
+ "vivado_reporting",
+ "quartus_reporting",
]
+
def get_edatool(name):
- return getattr(import_module('{}.{}'.format(__name__, name)),
- name.capitalize())
+ return getattr(import_module("{}.{}".format(__name__, name)), name.capitalize())
+
def walk_tool_packages():
for _, pkg_name, _ in walk_packages([dirname(__file__)]):
if not pkg_name in NON_TOOL_PACKAGES:
yield pkg_name
+
def get_edatools():
return [get_edatool(pkg) for pkg in walk_tool_packages()]
diff --git a/edalize/apicula.py b/edalize/apicula.py
index b7fe54e7b..f53305999 100644
--- a/edalize/apicula.py
+++ b/edalize/apicula.py
@@ -8,48 +8,55 @@
from edalize.nextpnr import Nextpnr
from edalize.yosys import Yosys
+
class Apicula(Edatool):
- argtypes = ['vlogdefine', 'vlogparam']
+ argtypes = ["vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
options = {
- 'lists' : [],
- 'members' : [
- {'name' : 'device',
- 'type' : 'String',
- 'desc' : 'Required device option for nextpnr-gowin and gowin_pack command (e.g. GW1N-LV1QN48C6/I5)'},
- ]}
+ "lists": [],
+ "members": [
+ {
+ "name": "device",
+ "type": "String",
+ "desc": "Required device option for nextpnr-gowin and gowin_pack command (e.g. GW1N-LV1QN48C6/I5)",
+ },
+ ],
+ }
Edatool._extend_options(options, Yosys)
Edatool._extend_options(options, Nextpnr)
- return {'description' : "Open source toolchain for Gowin FPGAs. Uses yosys for synthesis and nextpnr for Place & Route",
- 'members' : options['members'],
- 'lists' : options['lists']}
+ return {
+ "description": "Open source toolchain for Gowin FPGAs. Uses yosys for synthesis and nextpnr for Place & Route",
+ "members": options["members"],
+ "lists": options["lists"],
+ }
def configure_main(self):
- #Pass apicula tool options to yosys and nextpnr
- self.edam['tool_options'] = \
- {'yosys' : {
- 'arch' : 'gowin',
- 'yosys_synth_options' : [f"-json {self.name}.json"] + self.tool_options.get('yosys_synth_options',[]),
- 'yosys_as_subtool' : True,
- 'yosys_template' : self.tool_options.get('yosys_template'),
+ # Pass apicula tool options to yosys and nextpnr
+ self.edam["tool_options"] = {
+ "yosys": {
+ "arch": "gowin",
+ "yosys_synth_options": [f"-json {self.name}.json"]
+ + self.tool_options.get("yosys_synth_options", []),
+ "yosys_as_subtool": True,
+ "yosys_template": self.tool_options.get("yosys_template"),
+ },
+ "nextpnr": {
+ "device": self.tool_options.get("device"),
+ "nextpnr_options": self.tool_options.get("nextpnr_options", []),
},
- 'nextpnr' : {
- 'device' : self.tool_options.get('device'),
- 'nextpnr_options' : self.tool_options.get('nextpnr_options', [])
- },
- }
+ }
yosys = Yosys(self.edam, self.work_root)
yosys.configure()
nextpnr = Nextpnr(yosys.edam, self.work_root)
- nextpnr.flow_config = {'arch' : 'gowin'}
+ nextpnr.flow_config = {"arch": "gowin"}
nextpnr.configure()
# Write Makefile
@@ -58,11 +65,18 @@ def configure_main(self):
commands.commands += nextpnr.commands
- #Image generation
- depends = self.name+'.pack'
- targets = self.name+'.fs'
- command = ['gowin_pack', '-d', self.tool_options.get('device'), '-o', targets, depends]
+ # Image generation
+ depends = self.name + ".pack"
+ targets = self.name + ".fs"
+ command = [
+ "gowin_pack",
+ "-d",
+ self.tool_options.get("device"),
+ "-o",
+ targets,
+ depends,
+ ]
commands.add(command, [targets], [depends])
commands.set_default_target(targets)
- commands.write(os.path.join(self.work_root, 'Makefile'))
+ commands.write(os.path.join(self.work_root, "Makefile"))
diff --git a/edalize/ascentlint.py b/edalize/ascentlint.py
index 5b7335264..70c0d0cbb 100644
--- a/edalize/ascentlint.py
+++ b/edalize/ascentlint.py
@@ -11,46 +11,52 @@
logger = logging.getLogger(__name__)
+
class Ascentlint(Edatool):
- argtypes = ['vlogdefine', 'vlogparam']
+ argtypes = ["vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : """ Real Intent Ascent Lint backend
+ return {
+ "description": """ Real Intent Ascent Lint backend
Ascent Lint performs static source code analysis on HDL code and checks for
common coding errors or coding style violations.
""",
- 'lists' : [
- {'name' : 'ascentlint_options',
- 'type' : 'String',
- 'desc' : 'Additional run options for ascentlint'}
- ]}
+ "lists": [
+ {
+ "name": "ascentlint_options",
+ "type": "String",
+ "desc": "Additional run options for ascentlint",
+ }
+ ],
+ }
def configure_main(self):
(src_files, incdirs) = self._get_fileset_files(force_slash=True)
- self._write_fileset_to_f_file(os.path.join(self.work_root, 'sources.f'),
- include_vlogparams = False)
+ self._write_fileset_to_f_file(
+ os.path.join(self.work_root, "sources.f"), include_vlogparams=False
+ )
- tcl_source_files = [f for f in src_files if f.file_type == 'tclSource']
- waiver_files = [f for f in src_files if f.file_type == 'waiver']
+ tcl_source_files = [f for f in src_files if f.file_type == "tclSource"]
+ waiver_files = [f for f in src_files if f.file_type == "waiver"]
template_vars = {
- 'name' : self.name,
- 'ascentlint_options' : ' '.join(self.tool_options.get('ascentlint_options', [])),
- 'tcl_source_files' : tcl_source_files,
- 'waiver_files' : waiver_files,
- 'toplevel' : self.toplevel,
- 'vlogparam' : self.vlogparam,
+ "name": self.name,
+ "ascentlint_options": " ".join(
+ self.tool_options.get("ascentlint_options", [])
+ ),
+ "tcl_source_files": tcl_source_files,
+ "waiver_files": waiver_files,
+ "toplevel": self.toplevel,
+ "vlogparam": self.vlogparam,
}
- self.render_template('run-ascentlint.tcl.j2',
- 'run-ascentlint.tcl',
- template_vars)
+ self.render_template(
+ "run-ascentlint.tcl.j2", "run-ascentlint.tcl", template_vars
+ )
- self.render_template('Makefile.j2',
- 'Makefile',
- template_vars)
+ self.render_template("Makefile.j2", "Makefile", template_vars)
diff --git a/edalize/diamond.py b/edalize/diamond.py
index 2ec7f4187..82e6cc5dd 100644
--- a/edalize/diamond.py
+++ b/edalize/diamond.py
@@ -10,115 +10,142 @@
logger = logging.getLogger(__name__)
+
class Diamond(Edatool):
- argtypes = ['generic', 'vlogdefine', 'vlogparam']
+ argtypes = ["generic", "vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Backend for Lattice Diamond",
- 'members' : [
- {'name' : 'part',
- 'type' : 'String',
- 'desc' : 'FPGA part number (e.g. LFE5U-45F-6BG381C)'},
- ]}
+ return {
+ "description": "Backend for Lattice Diamond",
+ "members": [
+ {
+ "name": "part",
+ "type": "String",
+ "desc": "FPGA part number (e.g. LFE5U-45F-6BG381C)",
+ },
+ ],
+ }
def configure_main(self):
- part = self.tool_options.get('part')
+ part = self.tool_options.get("part")
if not part:
raise RuntimeError("Missing required option 'part' for diamond backend")
(src_files, incdirs) = self._get_fileset_files()
has_vhdl2008 = "vhdlSource-2008" in [x.file_type for x in src_files]
-
+
lpf_file = None
- prj_name = self.name.replace('.','_')
+ prj_name = self.name.replace(".", "_")
for f in src_files:
- if f.file_type == 'LPF':
+ if f.file_type == "LPF":
if lpf_file:
- logger.warning("Multiple LPF files detected. Only the first one will be used")
+ logger.warning(
+ "Multiple LPF files detected. Only the first one will be used"
+ )
else:
lpf_file = f.name
- #FIXME: Warn about pnr without lpf
- with open(os.path.join(self.work_root, self.name+'.tcl'), 'w') as f:
+ # FIXME: Warn about pnr without lpf
+ with open(os.path.join(self.work_root, self.name + ".tcl"), "w") as f:
TCL_TEMPLATE = """#Generated by Edalize
prj_project new -name {} -dev {}{} -synthesis synplify
prj_impl option top {}
{}
"""
- f.write(TCL_TEMPLATE.format(prj_name,
- part,
- " -lpf "+lpf_file if lpf_file else "",
- self.toplevel,
- "prj_strgy set_value -strategy Strategy1 syn_vhdl2008=True" if has_vhdl2008 else ""
- ))
+ f.write(
+ TCL_TEMPLATE.format(
+ prj_name,
+ part,
+ " -lpf " + lpf_file if lpf_file else "",
+ self.toplevel,
+ "prj_strgy set_value -strategy Strategy1 syn_vhdl2008=True"
+ if has_vhdl2008
+ else "",
+ )
+ )
if incdirs:
- _s = 'prj_impl option {include path} {'
- _s += ' '.join(incdirs)
- f.write(_s + '}\n')
+ _s = "prj_impl option {include path} {"
+ _s += " ".join(incdirs)
+ f.write(_s + "}\n")
if self.generic:
- _s = ';'.join(['{}={}'.format(k, self._param_value_str(v, '"')) for k,v in self.generic.items()])
- f.write('prj_impl option HDL_PARAM {')
+ _s = ";".join(
+ [
+ "{}={}".format(k, self._param_value_str(v, '"'))
+ for k, v in self.generic.items()
+ ]
+ )
+ f.write("prj_impl option HDL_PARAM {")
f.write(_s)
- f.write('}\n')
+ f.write("}\n")
if self.vlogparam:
- _s = ';'.join(['{}={}'.format(k, self._param_value_str(v, '"')) for k,v in self.vlogparam.items()])
- f.write('prj_impl option HDL_PARAM {')
+ _s = ";".join(
+ [
+ "{}={}".format(k, self._param_value_str(v, '"'))
+ for k, v in self.vlogparam.items()
+ ]
+ )
+ f.write("prj_impl option HDL_PARAM {")
f.write(_s)
- f.write('}\n')
+ f.write("}\n")
if self.vlogdefine:
- _s = ";".join(['{}={}'.format(k,v) for k,v in self.vlogdefine.items()])
- f.write('prj_impl option VERILOG_DIRECTIVES {')
+ _s = ";".join(
+ ["{}={}".format(k, v) for k, v in self.vlogdefine.items()]
+ )
+ f.write("prj_impl option VERILOG_DIRECTIVES {")
f.write(_s)
- f.write('}\n')
+ f.write("}\n")
for src_file in src_files:
_s = self.src_file_filter(src_file)
if _s:
- f.write(_s+'\n')
- f.write('prj_project save\nexit\n')
+ f.write(_s + "\n")
+ f.write("prj_project save\nexit\n")
- with open(os.path.join(self.work_root, self.name+'_run.tcl'), 'w') as f:
- f.write("""#Generated by Edalize
+ with open(os.path.join(self.work_root, self.name + "_run.tcl"), "w") as f:
+ f.write(
+ """#Generated by Edalize
prj_project open {}.ldf
prj_run Synthesis
prj_run Export -task Bitgen
prj_project save
prj_project close
-""".format(prj_name))
- def src_file_filter(self, f):
+""".format(
+ prj_name
+ )
+ )
+ def src_file_filter(self, f):
def _vhdl_source(f):
- s = 'VHDL'
+ s = "VHDL"
if f.logical_name:
- s += ' -work '+f.logical_name
+ s += " -work " + f.logical_name
return s
file_types = {
- 'verilogSource' : 'prj_src add -format Verilog',
- 'systemVerilogSource' : 'prj_src add -format Verilog',
- 'vhdlSource' : 'prj_src add -format '+ _vhdl_source(f),
- 'tclSource' : 'source',
- 'SDC' : 'prj_src add -format SDC',
+ "verilogSource": "prj_src add -format Verilog",
+ "systemVerilogSource": "prj_src add -format Verilog",
+ "vhdlSource": "prj_src add -format " + _vhdl_source(f),
+ "tclSource": "source",
+ "SDC": "prj_src add -format SDC",
}
- _file_type = f.file_type.split('-')[0]
+ _file_type = f.file_type.split("-")[0]
if _file_type in file_types:
- return file_types[_file_type] + ' ' + f.name
- elif _file_type in ['user', 'LPF']:
- return ''
+ return file_types[_file_type] + " " + f.name
+ elif _file_type in ["user", "LPF"]:
+ return ""
else:
_s = "{} has unknown file type '{}'"
- logger.warning(_s.format(f.name,
- f.file_type))
- return ''
+ logger.warning(_s.format(f.name, f.file_type))
+ return ""
def build_main(self):
- if sys.platform == 'win32':
- tcl = 'pnmainc'
+ if sys.platform == "win32":
+ tcl = "pnmainc"
else:
- tcl = 'diamondc'
+ tcl = "diamondc"
- self._run_tool(tcl, [self.name+'.tcl'], quiet=True)
- self._run_tool(tcl, [self.name+'_run.tcl'], quiet=True)
+ self._run_tool(tcl, [self.name + ".tcl"], quiet=True)
+ self._run_tool(tcl, [self.name + "_run.tcl"], quiet=True)
def run_main(self):
pass
diff --git a/edalize/edatool.py b/edalize/edatool.py
index 6ac54ea1c..edf565a0b 100644
--- a/edalize/edatool.py
+++ b/edalize/edatool.py
@@ -12,28 +12,31 @@
logger = logging.getLogger(__name__)
-if sys.version[0] == '2':
+if sys.version[0] == "2":
FileNotFoundError = OSError
try:
import msvcrt
+
_mswindows = True
except ImportError:
_mswindows = False
-def subprocess_run_3_9(*popenargs,
- input=None, capture_output=False, timeout=None,
- check=False, **kwargs):
+
+def subprocess_run_3_9(
+ *popenargs, input=None, capture_output=False, timeout=None, check=False, **kwargs
+):
if input is not None:
- if kwargs.get('stdin') is not None:
- raise ValueError('stdin and input arguments may not both be used.')
- kwargs['stdin'] = subprocess.PIPE
+ if kwargs.get("stdin") is not None:
+ raise ValueError("stdin and input arguments may not both be used.")
+ kwargs["stdin"] = subprocess.PIPE
if capture_output:
- if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None:
- raise ValueError('stdout and stderr arguments may not be used '
- 'with capture_output.')
- kwargs['stdout'] = subprocess.PIPE
- kwargs['stderr'] = subprocess.PIPE
+ if kwargs.get("stdout") is not None or kwargs.get("stderr") is not None:
+ raise ValueError(
+ "stdout and stderr arguments may not be used " "with capture_output."
+ )
+ kwargs["stdout"] = subprocess.PIPE
+ kwargs["stderr"] = subprocess.PIPE
with subprocess.Popen(*popenargs, **kwargs) as process:
try:
@@ -58,8 +61,9 @@ def subprocess_run_3_9(*popenargs,
raise
retcode = process.poll()
if check and retcode:
- raise subprocess.CalledProcessError(retcode, process.args,
- output=stdout, stderr=stderr)
+ raise subprocess.CalledProcessError(
+ retcode, process.args, output=stdout, stderr=stderr
+ )
return subprocess.CompletedProcess(process.args, retcode, stdout, stderr)
@@ -84,9 +88,9 @@ def jinja_filter_param_value_str(value, str_quote_style="", bool_is_str=False):
"""
if type(value) == bool:
if bool_is_str:
- return 'true' if value else 'false'
+ return "true" if value else "false"
else:
- return '1' if value else '0'
+ return "1" if value else "0"
elif type(value) == str:
return str_quote_style + str(value) + str_quote_style
else:
@@ -100,83 +104,95 @@ def __call__(self, parser, namespace, values, option_string=None):
path = os.path.abspath(path)
setattr(namespace, self.dest, [path])
-class Edatool(object):
+class Edatool(object):
def __init__(self, edam=None, work_root=None, eda_api=None, verbose=True):
_tool_name = self.__class__.__name__.lower()
self.verbose = verbose
- self.stdout=None
- self.stderr=None
+ self.stdout = None
+ self.stderr = None
if not edam:
edam = eda_api
self.edam = edam
try:
- self.name = edam['name']
+ self.name = edam["name"]
except KeyError:
raise RuntimeError("Missing required parameter 'name'")
- self.tool_options = edam.get('tool_options', {}).get(_tool_name, {})
+ self.tool_options = edam.get("tool_options", {}).get(_tool_name, {})
- self.files = edam.get('files', [])
- self.toplevel = edam.get('toplevel', [])
- self.vpi_modules = edam.get('vpi', [])
+ self.files = edam.get("files", [])
+ self.toplevel = edam.get("toplevel", [])
+ self.vpi_modules = edam.get("vpi", [])
- self.hooks = edam.get('hooks', {})
- self.parameters = edam.get('parameters', {})
+ self.hooks = edam.get("hooks", {})
+ self.parameters = edam.get("parameters", {})
self.work_root = work_root
self.env = os.environ.copy()
- self.env['WORK_ROOT'] = self.work_root
+ self.env["WORK_ROOT"] = self.work_root
- self.plusarg = OrderedDict()
- self.vlogparam = OrderedDict()
- self.vlogdefine = OrderedDict()
- self.generic = OrderedDict()
- self.cmdlinearg = OrderedDict()
+ self.plusarg = OrderedDict()
+ self.vlogparam = OrderedDict()
+ self.vlogdefine = OrderedDict()
+ self.generic = OrderedDict()
+ self.cmdlinearg = OrderedDict()
args = OrderedDict()
for k, v in self.parameters.items():
- args[k] = v.get('default')
+ args[k] = v.get("default")
self._apply_parameters(args)
self.jinja_env = Environment(
- loader = PackageLoader(__package__, 'templates'),
- trim_blocks = True,
- lstrip_blocks = True,
- keep_trailing_newline = True,
+ loader=PackageLoader(__package__, "templates"),
+ trim_blocks=True,
+ lstrip_blocks=True,
+ keep_trailing_newline=True,
)
- self.jinja_env.filters['param_value_str'] = jinja_filter_param_value_str
- self.jinja_env.filters['generic_value_str'] = jinja_filter_param_value_str
+ self.jinja_env.filters["param_value_str"] = jinja_filter_param_value_str
+ self.jinja_env.filters["generic_value_str"] = jinja_filter_param_value_str
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- desc = getattr(cls, '_description', 'Options for {} backend'.format(cls.__name__))
- opts = {'description' : desc}
- for group in ['members', 'lists', 'dicts']:
+ desc = getattr(
+ cls, "_description", "Options for {} backend".format(cls.__name__)
+ )
+ opts = {"description": desc}
+ for group in ["members", "lists", "dicts"]:
if group in cls.tool_options:
opts[group] = []
for _name, _type in cls.tool_options[group].items():
- opts[group].append({'name' : _name,
- 'type' : _type,
- 'desc' : ''})
+ opts[group].append({"name": _name, "type": _type, "desc": ""})
return opts
else:
- logger.warning("Invalid API version '{}' for get_tool_options".format(api_ver))
+ logger.warning(
+ "Invalid API version '{}' for get_tool_options".format(api_ver)
+ )
@classmethod
def _extend_options(cls, options, other_class):
help = other_class.get_doc(0)
- options['members'].extend(m for m in help['members'] if m['name'] not in [i['name'] for i in options['members']])
- options['lists'].extend(m for m in help['lists'] if m['name'] not in [i['name'] for i in options['lists']])
+ options["members"].extend(
+ m
+ for m in help["members"]
+ if m["name"] not in [i["name"] for i in options["members"]]
+ )
+ options["lists"].extend(
+ m
+ for m in help["lists"]
+ if m["name"] not in [i["name"] for i in options["lists"]]
+ )
def configure(self, args=[]):
if args:
- logger.error("Edalize has stopped supporting passing arguments as a function argument. Set these values as default values in the EDAM object instead")
+ logger.error(
+ "Edalize has stopped supporting passing arguments as a function argument. Set these values as default values in the EDAM object instead"
+ )
logger.info("Setting up project")
self.configure_pre()
self.configure_main()
@@ -197,16 +213,18 @@ def build(self):
self.build_post()
def build_pre(self):
- if 'pre_build' in self.hooks:
- self._run_scripts(self.hooks['pre_build'], 'pre_build')
+ if "pre_build" in self.hooks:
+ self._run_scripts(self.hooks["pre_build"], "pre_build")
def build_main(self, target=None):
- logger.info("Building{}".format("" if target is None else "target " + " ".join(target)))
- self._run_tool('make', [] if target is None else [target], quiet=True)
+ logger.info(
+ "Building{}".format("" if target is None else "target " + " ".join(target))
+ )
+ self._run_tool("make", [] if target is None else [target], quiet=True)
def build_post(self):
- if 'post_build' in self.hooks:
- self._run_scripts(self.hooks['post_build'], 'post_build')
+ if "post_build" in self.hooks:
+ self._run_scripts(self.hooks["post_build"], "post_build")
def run(self, args={}):
logger.info("Running")
@@ -220,15 +238,15 @@ def run_pre(self, args=None):
else:
parsed_args = args
self._apply_parameters(parsed_args)
- if 'pre_run' in self.hooks:
- self._run_scripts(self.hooks['pre_run'], 'pre_run')
+ if "pre_run" in self.hooks:
+ self._run_scripts(self.hooks["pre_run"], "pre_run")
def run_main(self):
pass
def run_post(self):
- if 'post_run' in self.hooks:
- self._run_scripts(self.hooks['post_run'], 'post_run')
+ if "post_run" in self.hooks:
+ self._run_scripts(self.hooks["post_run"], "post_run")
class EdaCommands(object):
class Command(object):
@@ -248,7 +266,7 @@ def set_default_target(self, target):
self.default_target = target
def write(self, outfile):
- with open(outfile, 'w') as f:
+ with open(outfile, "w") as f:
f.write(self.header)
if not self.default_target:
raise RuntimeError("Internal Edalize error. Missing default target")
@@ -258,7 +276,7 @@ def write(self, outfile):
for c in self.commands:
f.write(f"\n{' '.join(c.targets)}:")
for d in c.depends:
- f.write(" "+d)
+ f.write(" " + d)
f.write("\n")
if c.command:
@@ -268,58 +286,69 @@ def set_default_target(self, target):
self.default_target = target
def parse_args(self, args, paramtypes):
- typedict = {'bool' : {'action' : 'store_true'},
- 'file' : {'type' : str , 'nargs' : 1, 'action' : FileAction},
- 'int' : {'type' : int , 'nargs' : 1},
- 'str' : {'type' : str , 'nargs' : 1},
- }
- progname = os.path.basename(sys.argv[0]) + ' run {}'.format(self.name)
-
- parser = argparse.ArgumentParser(prog = progname,
- conflict_handler='resolve')
+ typedict = {
+ "bool": {"action": "store_true"},
+ "file": {"type": str, "nargs": 1, "action": FileAction},
+ "int": {"type": int, "nargs": 1},
+ "str": {"type": str, "nargs": 1},
+ }
+ progname = os.path.basename(sys.argv[0]) + " run {}".format(self.name)
+
+ parser = argparse.ArgumentParser(prog=progname, conflict_handler="resolve")
param_groups = {}
- _descr = {'plusarg' : 'Verilog plusargs (Run-time option)',
- 'vlogparam' : 'Verilog parameters (Compile-time option)',
- 'vlogdefine' : 'Verilog defines (Compile-time global symbol)',
- 'generic' : 'VHDL generic (Run-time option)',
- 'cmdlinearg' : 'Command-line arguments (Run-time option)'}
+ _descr = {
+ "plusarg": "Verilog plusargs (Run-time option)",
+ "vlogparam": "Verilog parameters (Compile-time option)",
+ "vlogdefine": "Verilog defines (Compile-time global symbol)",
+ "generic": "VHDL generic (Run-time option)",
+ "cmdlinearg": "Command-line arguments (Run-time option)",
+ }
param_type_map = {}
for name, param in self.parameters.items():
- _description = param.get('description', "No description")
- _paramtype = param['paramtype']
+ _description = param.get("description", "No description")
+ _paramtype = param["paramtype"]
if _paramtype in paramtypes:
if not _paramtype in param_groups:
- param_groups[_paramtype] = \
- parser.add_argument_group(_descr[_paramtype])
+ param_groups[_paramtype] = parser.add_argument_group(
+ _descr[_paramtype]
+ )
default = None
- if not param.get('default') is None:
+ if not param.get("default") is None:
try:
- if param['datatype'] == 'bool':
- default = param['default']
+ if param["datatype"] == "bool":
+ default = param["default"]
else:
- default = [typedict[param['datatype']]['type'](param['default'])]
+ default = [
+ typedict[param["datatype"]]["type"](param["default"])
+ ]
except KeyError as e:
pass
try:
- param_groups[_paramtype].add_argument('--'+name,
- help=_description,
- default=default,
- **typedict[param['datatype']])
+ param_groups[_paramtype].add_argument(
+ "--" + name,
+ help=_description,
+ default=default,
+ **typedict[param["datatype"]],
+ )
except KeyError as e:
- raise RuntimeError("Invalid data type {} for parameter '{}'".format(str(e),
- name))
- param_type_map[name.replace('-','_')] = _paramtype
+ raise RuntimeError(
+ "Invalid data type {} for parameter '{}'".format(str(e), name)
+ )
+ param_type_map[name.replace("-", "_")] = _paramtype
else:
- logging.warn("Parameter '{}' has unsupported type '{}' for requested backend".format(name, _paramtype))
+ logging.warn(
+ "Parameter '{}' has unsupported type '{}' for requested backend".format(
+ name, _paramtype
+ )
+ )
- #backend_args.
+ # backend_args.
backend_args = parser.add_argument_group("Backend arguments")
_opts = self.__class__.get_doc(0)
- for _opt in _opts.get('members', []) + _opts.get('lists', []):
- backend_args.add_argument('--'+_opt['name'],
- help=_opt['desc'])
+ for _opt in _opts.get("members", []) + _opts.get("lists", []):
+ backend_args.add_argument("--" + _opt["name"], help=_opt["desc"])
args_dict = {}
for key, value in vars(parser.parse_args(args)).items():
@@ -334,10 +363,10 @@ def parse_args(self, args, paramtypes):
def _apply_parameters(self, args):
_opts = self.__class__.get_doc(0)
- #Parse arguments
- backend_members = [x['name'] for x in _opts.get('members', [])]
- backend_lists = [x['name'] for x in _opts.get('lists', [])]
- for key,value in args.items():
+ # Parse arguments
+ backend_members = [x["name"] for x in _opts.get("members", [])]
+ backend_lists = [x["name"] for x in _opts.get("lists", [])]
+ for key, value in args.items():
if value is None:
continue
if key in backend_members:
@@ -346,29 +375,29 @@ def _apply_parameters(self, args):
if key in backend_lists:
if not key in self.tool_options:
self.tool_options[key] = []
- self.tool_options[key] += value.split(' ')
+ self.tool_options[key] += value.split(" ")
continue
- paramtype = self.parameters[key]['paramtype']
+ paramtype = self.parameters[key]["paramtype"]
getattr(self, paramtype)[key] = value
- def render_template(self, template_file, target_file, template_vars = {}):
+ def render_template(self, template_file, target_file, template_vars={}):
"""
Render a Jinja2 template for the backend.
The template file is expected in the directory templates/BACKEND_NAME.
"""
template_dir = str(self.__class__.__name__).lower()
- template = self.jinja_env.get_template('/'.join([template_dir, template_file]))
+ template = self.jinja_env.get_template("/".join([template_dir, template_file]))
file_path = os.path.join(self.work_root, target_file)
- with open(file_path, 'w') as f:
+ with open(file_path, "w") as f:
f.write(template.render(template_vars))
def _add_include_dir(self, f, incdirs, force_slash=False):
- if f.get('is_include_file'):
- _incdir = f.get('include_path') or os.path.dirname(f['name']) or '.'
+ if f.get("is_include_file"):
+ _incdir = f.get("include_path") or os.path.dirname(f["name"]) or "."
if force_slash:
- _incdir = _incdir.replace('\\', '/')
+ _incdir = _incdir.replace("\\", "/")
if not _incdir in incdirs:
incdirs.append(_incdir)
return True
@@ -377,21 +406,20 @@ def _add_include_dir(self, f, incdirs, force_slash=False):
def _get_fileset_files(self, force_slash=False):
class File:
def __init__(self, name, file_type, logical_name):
- self.name = name
- self.file_type = file_type
+ self.name = name
+ self.file_type = file_type
self.logical_name = logical_name
+
incdirs = []
src_files = []
for f in self.files:
if not self._add_include_dir(f, incdirs, force_slash):
- _name = f['name']
+ _name = f["name"]
if force_slash:
- _name = _name.replace('\\', '/')
- file_type = f.get('file_type', '')
- logical_name = f.get('logical_name', '')
- src_files.append(File(_name,
- file_type,
- logical_name))
+ _name = _name.replace("\\", "/")
+ file_type = f.get("file_type", "")
+ logical_name = f.get("logical_name", "")
+ src_files.append(File(_name, file_type, logical_name))
return (src_files, incdirs)
def _param_value_str(self, param_value, str_quote_style="", bool_is_str=False):
@@ -400,22 +428,26 @@ def _param_value_str(self, param_value, str_quote_style="", bool_is_str=False):
def _run_scripts(self, scripts, hook_name):
for script in scripts:
_env = self.env.copy()
- if 'env' in script:
- _env.update(script['env'])
- logger.info("Running {} script {}".format(hook_name, script['name']))
+ if "env" in script:
+ _env.update(script["env"])
+ logger.info("Running {} script {}".format(hook_name, script["name"]))
logger.debug("Environment: " + str(_env))
logger.debug("Working directory: " + self.work_root)
try:
- run(script['cmd'],
- cwd = self.work_root,
- env = _env,
+ run(
+ script["cmd"],
+ cwd=self.work_root,
+ env=_env,
capture_output=not self.verbose,
- check = True)
+ check=True,
+ )
except FileNotFoundError as e:
msg = "Unable to run {} script '{}': {}"
- raise RuntimeError(msg.format(hook_name, script['name'], str(e)))
+ raise RuntimeError(msg.format(hook_name, script["name"], str(e)))
except subprocess.CalledProcessError as e:
- msg = "{} script '{}': {} exited with error code {}".format(hook_name, script['name'], e.cmd, e.returncode)
+ msg = "{} script '{}': {} exited with error code {}".format(
+ hook_name, script["name"], e.cmd, e.returncode
+ )
logger.debug(msg)
if e.stdout:
logger.info(e.stdout.decode())
@@ -427,17 +459,19 @@ def _run_scripts(self, scripts, hook_name):
def _run_tool(self, cmd, args=[], quiet=False):
logger.debug("Running " + cmd)
- logger.debug("args : " + ' '.join(args))
+ logger.debug("args : " + " ".join(args))
capture_output = quiet and not (self.verbose or self.stdout or self.stderr)
try:
- cp = run([cmd] + args,
- cwd = self.work_root,
- stdin = subprocess.PIPE,
- stdout=self.stdout,
- stderr=self.stderr,
- capture_output=capture_output,
- check=True)
+ cp = run(
+ [cmd] + args,
+ cwd=self.work_root,
+ stdin=subprocess.PIPE,
+ stdout=self.stdout,
+ stderr=self.stderr,
+ capture_output=capture_output,
+ check=True,
+ )
except FileNotFoundError:
_s = "Command '{}' not found. Make sure it is in $PATH".format(cmd)
raise RuntimeError(_s)
@@ -459,32 +493,36 @@ def _filter_verilog_files(src_file):
ft = src_file.file_type
return ft.startswith("verilogSource") or ft.startswith("systemVerilogSource")
- def _write_fileset_to_f_file(self, output_file, include_vlogparams = True, filter_func = _filter_verilog_files):
+ def _write_fileset_to_f_file(
+ self, output_file, include_vlogparams=True, filter_func=_filter_verilog_files
+ ):
"""
Write a file list (*.f) file.
Returns a list of all files which were not added to the *.f file.
"""
- with open(output_file, 'w') as f:
+ with open(output_file, "w") as f:
unused_files = []
(src_files, incdirs) = self._get_fileset_files()
for key, value in self.vlogdefine.items():
- define_str = self._param_value_str(param_value = value)
- f.write('+define+{}={}\n'.format(key, define_str))
+ define_str = self._param_value_str(param_value=value)
+ f.write("+define+{}={}\n".format(key, define_str))
if include_vlogparams:
for key, value in self.vlogparam.items():
- param_str = self._param_value_str(param_value = value, str_quote_style = '"')
- f.write('-pvalue+{}.{}={}\n'.format(self.toplevel, key, param_str))
+ param_str = self._param_value_str(
+ param_value=value, str_quote_style='"'
+ )
+ f.write("-pvalue+{}.{}={}\n".format(self.toplevel, key, param_str))
for id in incdirs:
- f.write("+incdir+" + id + '\n')
+ f.write("+incdir+" + id + "\n")
for src_file in src_files:
- if (filter_func is None or filter_func(src_file)):
- f.write(src_file.name + '\n')
+ if filter_func is None or filter_func(src_file):
+ f.write(src_file.name + "\n")
else:
unused_files.append(src_file)
diff --git a/edalize/ghdl.py b/edalize/ghdl.py
index 6ab4c0cb7..5a6a2c722 100644
--- a/edalize/ghdl.py
+++ b/edalize/ghdl.py
@@ -12,45 +12,55 @@
class Ghdl(Edatool):
- argtypes = ['vlogparam', 'generic']
+ argtypes = ["vlogparam", "generic"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "GHDL is an open source VHDL simulator, which fully supports IEEE 1076-1987, IEEE 1076-1993, IEE 1076-2002 and partially the 1076-2008 version of VHDL",
- 'lists' : [
- {'name' : 'analyze_options',
- 'type' : 'String',
- 'desc' : 'Options to use for the import (ghdl -i) and make (ghdl -m) phases'},
- {'name' : 'run_options',
- 'type' : 'String',
- 'desc' : 'Options to use for the run (ghdl -r) phase'},
- ]}
+ return {
+ "description": "GHDL is an open source VHDL simulator, which fully supports IEEE 1076-1987, IEEE 1076-1993, IEE 1076-2002 and partially the 1076-2008 version of VHDL",
+ "lists": [
+ {
+ "name": "analyze_options",
+ "type": "String",
+ "desc": "Options to use for the import (ghdl -i) and make (ghdl -m) phases",
+ },
+ {
+ "name": "run_options",
+ "type": "String",
+ "desc": "Options to use for the run (ghdl -r) phase",
+ },
+ ],
+ }
def configure_main(self):
(src_files, incdirs) = self._get_fileset_files()
- analyze_options = self.tool_options.get('analyze_options', '')
+ analyze_options = self.tool_options.get("analyze_options", "")
# Check of std=xx analyze option, this overyides the dynamic determination of vhdl standard
import re
+
rx = re.compile("^--std=([0-9]+)")
m = None
for o in analyze_options:
m = rx.match(o)
if m:
- stdarg = [ m.group() ]
+ stdarg = [m.group()]
analyze_options.remove(o)
break
if m:
- logger.warning("Analyze option "+ m.group() + " given, will override any vhdlSource-xxxx specification\n")
- standard = m.group(1)
+ logger.warning(
+ "Analyze option "
+ + m.group()
+ + " given, will override any vhdlSource-xxxx specification\n"
+ )
+ standard = m.group(1)
else:
# ghdl does not support mixing incompatible versions
# specifying 93c as std should allow 87 syntax
# 2008 can't be combined so try to parse everthing with 08 std
-
has87 = has93 = has08 = False
for f in src_files:
if f.file_type == "vhdlSource-87":
@@ -62,25 +72,25 @@ def configure_main(self):
stdarg = []
if has08:
if has87 or has93:
- logger.warning("ghdl can't mix vhdlSource-2008 with other standard version\n"+
- "Trying with treating all as vhdlSource-2008"
+ logger.warning(
+ "ghdl can't mix vhdlSource-2008 with other standard version\n"
+ + "Trying with treating all as vhdlSource-2008"
)
- stdarg = ['--std=08']
+ stdarg = ["--std=08"]
elif has87 and has93:
- stdarg = ['--std=93c']
+ stdarg = ["--std=93c"]
elif has87:
- stdarg = ['--std=87']
+ stdarg = ["--std=87"]
elif has93:
- stdarg = ['--std=93']
+ stdarg = ["--std=93"]
else:
- stdarg = ['--std=93c']
+ stdarg = ["--std=93c"]
- standard = rx.match(stdarg[0]).group(1)
+ standard = rx.match(stdarg[0]).group(1)
+ run_options = self.tool_options.get("run_options", [])
- run_options = self.tool_options.get('run_options', [])
-
- analyze_options=' '.join(analyze_options)
+ analyze_options = " ".join(analyze_options)
_vhdltypes = ("vhdlSource", "vhdlSource-87", "vhdlSource-93", "vhdlSource-2008")
@@ -112,7 +122,7 @@ def configure_main(self):
# libraries[None] which is perhaps poor form but avoids
# conflicts with user generated names
libraries[f.logical_name] = libraries.get(f.logical_name, []) + [f.name]
- vhdl_sources += (" {file}".format(file=f.name))
+ vhdl_sources += " {file}".format(file=f.name)
elif f.file_type in ["user"]:
pass
else:
@@ -128,27 +138,29 @@ def configure_main(self):
analyze_options += " -P./{}".format(lib)
make_libraries_directories += "\tmkdir -p {}\n".format(lib)
lib_opts = library_options.format(lib=lib)
- ghdlimport += "\tghdl -i $(STD) $(ANALYZE_OPTIONS) {} {}\n".format(lib_opts, " ".join(files))
+ ghdlimport += "\tghdl -i $(STD) $(ANALYZE_OPTIONS) {} {}\n".format(
+ lib_opts, " ".join(files)
+ )
self.render_template(
- 'Makefile.j2',
- 'Makefile',
+ "Makefile.j2",
+ "Makefile",
{
- 'std' : ' '.join(stdarg),
- 'toplevel' : top_unit,
- 'vhdl_sources' : vhdl_sources,
- 'standard' : standard,
- 'analyze_options' : analyze_options,
- 'run_options' : ' '.join(run_options),
- 'make_libraries_directories' : make_libraries_directories,
- 'ghdlimport' : ghdlimport,
- 'top_libraries': top_libraries
- }
+ "std": " ".join(stdarg),
+ "toplevel": top_unit,
+ "vhdl_sources": vhdl_sources,
+ "standard": standard,
+ "analyze_options": analyze_options,
+ "run_options": " ".join(run_options),
+ "make_libraries_directories": make_libraries_directories,
+ "ghdlimport": ghdlimport,
+ "top_libraries": top_libraries,
+ },
)
def run_main(self):
- cmd = 'make'
- args = ['run']
+ cmd = "make"
+ args = ["run"]
# GHDL doesn't support Verilog, but the backend used vlogparam since
# edalize didn't support generic at the time. Now that generic support
@@ -156,14 +168,17 @@ def run_main(self):
# removed in the future. For now support either option.
if self.vlogparam:
- logger.warning("GHDL backend support for vlogparam is deprecated and will be removed.\n"+
- "Use generic instead."
+ logger.warning(
+ "GHDL backend support for vlogparam is deprecated and will be removed.\n"
+ + "Use generic instead."
)
if self.vlogparam or self.generic:
- extra_options='EXTRA_OPTIONS='
+ extra_options = "EXTRA_OPTIONS="
for d in [self.vlogparam, self.generic]:
- for k,v in d.items():
- extra_options += ' -g{}={}'.format(k,self._param_value_str(v,'"',bool_is_str=True))
+ for k, v in d.items():
+ extra_options += " -g{}={}".format(
+ k, self._param_value_str(v, '"', bool_is_str=True)
+ )
args.append(extra_options)
self._run_tool(cmd, args)
diff --git a/edalize/icarus.py b/edalize/icarus.py
index 9347068cd..4e62e4536 100644
--- a/edalize/icarus.py
+++ b/edalize/icarus.py
@@ -34,40 +34,48 @@
$(RM) {name}.vpi
"""
+
class Icarus(Edatool):
- argtypes = ['plusarg', 'vlogdefine', 'vlogparam']
+ argtypes = ["plusarg", "vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Icarus Verilog is a Verilog simulation and synthesis tool. It operates as a compiler, compiling source code written in Verilog (IEEE-1364) into some target format",
- 'members' : [
- {'name' : 'timescale',
- 'type' : 'String',
- 'desc' : 'Default timescale'}],
- 'lists' : [
- {'name' : 'iverilog_options',
- 'type' : 'String',
- 'desc' : 'Additional options for iverilog'},
- ]}
+ return {
+ "description": "Icarus Verilog is a Verilog simulation and synthesis tool. It operates as a compiler, compiling source code written in Verilog (IEEE-1364) into some target format",
+ "members": [
+ {"name": "timescale", "type": "String", "desc": "Default timescale"}
+ ],
+ "lists": [
+ {
+ "name": "iverilog_options",
+ "type": "String",
+ "desc": "Additional options for iverilog",
+ },
+ ],
+ }
def configure_main(self):
- f = open(os.path.join(self.work_root, self.name+'.scr'),'w')
+ f = open(os.path.join(self.work_root, self.name + ".scr"), "w")
(src_files, incdirs) = self._get_fileset_files()
for key, value in self.vlogdefine.items():
- f.write('+define+{}={}\n'.format(key, self._param_value_str(value, '')))
+ f.write("+define+{}={}\n".format(key, self._param_value_str(value, "")))
for key, value in self.vlogparam.items():
- f.write('+parameter+{}.{}={}\n'.format(self.toplevel, key, self._param_value_str(value, '"')))
+ f.write(
+ "+parameter+{}.{}={}\n".format(
+ self.toplevel, key, self._param_value_str(value, '"')
+ )
+ )
for id in incdirs:
- f.write("+incdir+" + id+'\n')
- timescale = self.tool_options.get('timescale')
+ f.write("+incdir+" + id + "\n")
+ timescale = self.tool_options.get("timescale")
if timescale:
- with open(os.path.join(self.work_root, 'timescale.v'), 'w') as tsfile:
+ with open(os.path.join(self.work_root, "timescale.v"), "w") as tsfile:
tsfile.write("`timescale {}\n".format(timescale))
- f.write('timescale.v\n')
+ f.write("timescale.v\n")
supported_file_types = [
"verilogSource",
@@ -81,8 +89,8 @@ def configure_main(self):
]
for src_file in src_files:
if src_file.file_type in supported_file_types:
- f.write(src_file.name+'\n')
- elif src_file.file_type == 'user':
+ f.write(src_file.name + "\n")
+ elif src_file.file_type == "user":
pass
else:
_s = "{} has unknown file type '{}'"
@@ -90,39 +98,47 @@ def configure_main(self):
f.close()
- with open(os.path.join(self.work_root, 'Makefile'), 'w') as f:
+ with open(os.path.join(self.work_root, "Makefile"), "w") as f:
f.write("TARGET := {}\n".format(self.name))
- _vpi_modules = ' '.join([m['name']+'.vpi' for m in self.vpi_modules])
+ _vpi_modules = " ".join([m["name"] + ".vpi" for m in self.vpi_modules])
if _vpi_modules:
f.write("VPI_MODULES := {}\n".format(_vpi_modules))
f.write("TOPLEVEL := {}\n".format(self.toplevel))
- f.write("IVERILOG_OPTIONS := {}\n".format(' '.join(self.tool_options.get('iverilog_options', []))))
+ f.write(
+ "IVERILOG_OPTIONS := {}\n".format(
+ " ".join(self.tool_options.get("iverilog_options", []))
+ )
+ )
if self.plusarg:
plusargs = []
for key, value in self.plusarg.items():
- plusargs += ['+{}={}'.format(key, self._param_value_str(value))]
- f.write("EXTRA_OPTIONS ?= {}\n".format(' '.join(plusargs)))
+ plusargs += ["+{}={}".format(key, self._param_value_str(value))]
+ f.write("EXTRA_OPTIONS ?= {}\n".format(" ".join(plusargs)))
f.write(MAKEFILE_TEMPLATE)
for vpi_module in self.vpi_modules:
- _incs = ['-I' + s for s in vpi_module['include_dirs']]
- _libs = ['-l'+l for l in vpi_module['libs']]
- _srcs = vpi_module['src_files']
- f.write(VPI_MAKE_SECTION.format(name = vpi_module['name'],
- libs = ' '.join(_libs),
- incs = ' '.join(_incs),
- srcs = ' '.join(_srcs)))
+ _incs = ["-I" + s for s in vpi_module["include_dirs"]]
+ _libs = ["-l" + l for l in vpi_module["libs"]]
+ _srcs = vpi_module["src_files"]
+ f.write(
+ VPI_MAKE_SECTION.format(
+ name=vpi_module["name"],
+ libs=" ".join(_libs),
+ incs=" ".join(_incs),
+ srcs=" ".join(_srcs),
+ )
+ )
def run_main(self):
- args = ['run']
+ args = ["run"]
# Set plusargs
if self.plusarg:
plusargs = []
for key, value in self.plusarg.items():
- plusargs += ['+{}={}'.format(key, self._param_value_str(value))]
- args.append('EXTRA_OPTIONS='+' '.join(plusargs))
+ plusargs += ["+{}={}".format(key, self._param_value_str(value))]
+ args.append("EXTRA_OPTIONS=" + " ".join(plusargs))
- self._run_tool('make', args)
+ self._run_tool("make", args)
diff --git a/edalize/icestorm.py b/edalize/icestorm.py
index ad6e9ce4e..a0e782f6f 100644
--- a/edalize/icestorm.py
+++ b/edalize/icestorm.py
@@ -8,94 +8,106 @@
from edalize.nextpnr import Nextpnr
from edalize.yosys import Yosys
+
class Icestorm(Edatool):
- argtypes = ['vlogdefine', 'vlogparam']
+ argtypes = ["vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
options = {
- 'members' : [
- {'name' : 'pnr',
- 'type' : 'String',
- 'desc' : 'Select Place & Route tool. Legal values are *arachne* for Arachne-PNR, *next* for nextpnr or *none* to only perform synthesis. Default is next'},
+ "members": [
+ {
+ "name": "pnr",
+ "type": "String",
+ "desc": "Select Place & Route tool. Legal values are *arachne* for Arachne-PNR, *next* for nextpnr or *none* to only perform synthesis. Default is next",
+ },
+ ],
+ "lists": [
+ {
+ "name": "arachne_pnr_options",
+ "type": "String",
+ "desc": "Additional options for Arachnhe PNR",
+ },
],
- 'lists' : [
- {'name' : 'arachne_pnr_options',
- 'type' : 'String',
- 'desc' : 'Additional options for Arachnhe PNR'},
- ]}
+ }
Edatool._extend_options(options, Yosys)
Edatool._extend_options(options, Nextpnr)
- return {'description' : "Open source toolchain for Lattice iCE40 FPGAs. Uses yosys for synthesis and arachne-pnr or nextpnr for Place & Route",
- 'members' : options['members'],
- 'lists' : options['lists']}
+ return {
+ "description": "Open source toolchain for Lattice iCE40 FPGAs. Uses yosys for synthesis and arachne-pnr or nextpnr for Place & Route",
+ "members": options["members"],
+ "lists": options["lists"],
+ }
def configure_main(self):
# Write yosys script file
- yosys_synth_options = self.tool_options.get('yosys_synth_options', '')
-
- #Pass icestorm tool options to yosys and nextpnr
- self.edam['tool_options'] = \
- {'yosys' : {
- 'arch' : 'ice40',
- 'yosys_synth_options' : yosys_synth_options,
- 'yosys_as_subtool' : True,
- 'yosys_template' : self.tool_options.get('yosys_template'),
+ yosys_synth_options = self.tool_options.get("yosys_synth_options", "")
+
+ # Pass icestorm tool options to yosys and nextpnr
+ self.edam["tool_options"] = {
+ "yosys": {
+ "arch": "ice40",
+ "yosys_synth_options": yosys_synth_options,
+ "yosys_as_subtool": True,
+ "yosys_template": self.tool_options.get("yosys_template"),
+ },
+ "nextpnr": {
+ "nextpnr_options": self.tool_options.get("nextpnr_options", [])
},
- 'nextpnr' : {
- 'nextpnr_options' : self.tool_options.get('nextpnr_options', [])
- },
- }
+ }
yosys = Yosys(self.edam, self.work_root)
yosys.configure()
- pnr = self.tool_options.get('pnr', 'next')
- part = self.tool_options.get('part', None)
- if not pnr in ['arachne', 'next', 'none']:
- raise RuntimeError("Invalid pnr option '{}'. Valid values are 'arachne' for Arachne-pnr, 'next' for nextpnr or 'none' to only perform synthesis".format(pnr))
+ pnr = self.tool_options.get("pnr", "next")
+ part = self.tool_options.get("part", None)
+ if not pnr in ["arachne", "next", "none"]:
+ raise RuntimeError(
+ "Invalid pnr option '{}'. Valid values are 'arachne' for Arachne-pnr, 'next' for nextpnr or 'none' to only perform synthesis".format(
+ pnr
+ )
+ )
# Write Makefile
commands = self.EdaCommands()
commands.commands = yosys.commands
- if pnr == 'arachne':
- depends = self.name+'.blif'
- targets = self.name+'.asc'
- command = ['arachne-pnr']
- command += self.tool_options.get('arachne_pnr_options', [])
- command += ['-p', depends, '-o', targets]
+ if pnr == "arachne":
+ depends = self.name + ".blif"
+ targets = self.name + ".asc"
+ command = ["arachne-pnr"]
+ command += self.tool_options.get("arachne_pnr_options", [])
+ command += ["-p", depends, "-o", targets]
commands.add(command, [depends], [targets])
- set_default_target(self.name+'.bin')
- elif pnr == 'next':
+ set_default_target(self.name + ".bin")
+ elif pnr == "next":
nextpnr = Nextpnr(yosys.edam, self.work_root)
- nextpnr.flow_config = {'arch' : 'ice40'}
+ nextpnr.flow_config = {"arch": "ice40"}
nextpnr.configure()
commands.commands += nextpnr.commands
- commands.set_default_target(self.name+'.bin')
+ commands.set_default_target(self.name + ".bin")
else:
- commands.set_default_target(self.name+'.json')
+ commands.set_default_target(self.name + ".json")
- #Image generation
- depends = self.name+'.asc'
- targets = self.name+'.bin'
- command = ['icepack', depends, targets]
+ # Image generation
+ depends = self.name + ".asc"
+ targets = self.name + ".bin"
+ command = ["icepack", depends, targets]
commands.add(command, [targets], [depends])
- #Timing analysis
- depends = self.name+'.asc'
- targets = self.name+'.tim'
- command = ['icetime', '-tmd', part or '', depends, targets]
+ # Timing analysis
+ depends = self.name + ".asc"
+ targets = self.name + ".tim"
+ command = ["icetime", "-tmd", part or "", depends, targets]
commands.add(command, [targets], [depends])
commands.add([], ["timing"], [targets])
- #Statistics
- depends = self.name+'.asc'
- targets = self.name+'.stat'
- command = ['icebox_stat', depends, targets]
+ # Statistics
+ depends = self.name + ".asc"
+ targets = self.name + ".stat"
+ command = ["icebox_stat", depends, targets]
commands.add(command, [targets], [depends])
commands.add([], ["stats"], [targets])
- commands.write(os.path.join(self.work_root, 'Makefile'))
+ commands.write(os.path.join(self.work_root, "Makefile"))
diff --git a/edalize/ise.py b/edalize/ise.py
index f94aa16ce..e0495e59f 100644
--- a/edalize/ise.py
+++ b/edalize/ise.py
@@ -6,9 +6,10 @@
from edalize.edatool import Edatool
+
class Ise(Edatool):
- argtypes = ['vlogdefine', 'vlogparam', 'generic']
+ argtypes = ["vlogdefine", "vlogparam", "generic"]
MAKEFILE_TEMPLATE = """#Auto generated by Edalize
include config.mk
@@ -64,95 +65,132 @@ class Ise(Edatool):
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Xilinx ISE Design Suite",
- 'members' : [
- {'name' : 'family',
- 'type' : 'String',
- 'desc' : 'FPGA family (e.g. spartan6)'},
- {'name' : 'device',
- 'type' : 'String',
- 'desc' : 'FPGA device (e.g. xc6slx45)'},
- {'name' : 'package',
- 'type' : 'String',
- 'desc' : 'FPGA package (e.g. csg324)'},
- {'name' : 'speed',
- 'type' : 'String',
- 'desc' : 'FPGA speed grade (e.g. -2)'},
- ]}
+ return {
+ "description": "Xilinx ISE Design Suite",
+ "members": [
+ {
+ "name": "family",
+ "type": "String",
+ "desc": "FPGA family (e.g. spartan6)",
+ },
+ {
+ "name": "device",
+ "type": "String",
+ "desc": "FPGA device (e.g. xc6slx45)",
+ },
+ {
+ "name": "package",
+ "type": "String",
+ "desc": "FPGA package (e.g. csg324)",
+ },
+ {
+ "name": "speed",
+ "type": "String",
+ "desc": "FPGA speed grade (e.g. -2)",
+ },
+ ],
+ }
def configure_main(self):
- for i in ['family', 'device', 'package', 'speed']:
+ for i in ["family", "device", "package", "speed"]:
if not i in self.tool_options:
raise RuntimeError("Missing required option '{}'".format(i))
self._write_tcl_file()
- with open(os.path.join(self.work_root, 'Makefile'),'w') as f:
+ with open(os.path.join(self.work_root, "Makefile"), "w") as f:
f.write(self.MAKEFILE_TEMPLATE)
- with open(os.path.join(self.work_root, 'config.mk'),'w') as f:
+ with open(os.path.join(self.work_root, "config.mk"), "w") as f:
f.write("NAME := {}\n".format(self.name))
f.write("TOPLEVEL := {}\n".format(self.toplevel))
- with open(os.path.join(self.work_root, self.name+'_run.tcl'),'w') as f:
+ with open(os.path.join(self.work_root, self.name + "_run.tcl"), "w") as f:
f.write(self.TCL_RUN_FILE_TEMPLATE)
def _write_tcl_file(self):
- tcl_file = open(os.path.join(self.work_root, self.name+'.tcl'),'w')
-
- tcl_file.write(self.TCL_FILE_TEMPLATE.format(
- design = self.name,
- family = self.tool_options['family'],
- device = self.tool_options['device'],
- package = self.tool_options['package'],
- speed = self.tool_options['speed']))
+ tcl_file = open(os.path.join(self.work_root, self.name + ".tcl"), "w")
+
+ tcl_file.write(
+ self.TCL_FILE_TEMPLATE.format(
+ design=self.name,
+ family=self.tool_options["family"],
+ device=self.tool_options["device"],
+ package=self.tool_options["package"],
+ speed=self.tool_options["speed"],
+ )
+ )
if self.vlogdefine:
s = 'project set "Verilog Macros" "{}" -process "Synthesize - XST"\n'
- tcl_file.write(s.format('|'.join([k+'='+self._param_value_str(v) for k,v in self.vlogdefine.items()])))
+ tcl_file.write(
+ s.format(
+ "|".join(
+ [
+ k + "=" + self._param_value_str(v)
+ for k, v in self.vlogdefine.items()
+ ]
+ )
+ )
+ )
if self.vlogparam or self.generic:
genparam = self.vlogparam.copy()
genparam.update(self.generic)
s = 'project set "Generics, Parameters" "{}" -process "Synthesize - XST"\n'
- tcl_file.write(s.format('|'.join([k+'='+self._param_value_str(v, '\\"') for k,v in genparam.items()])))
+ tcl_file.write(
+ s.format(
+ "|".join(
+ [
+ k + "=" + self._param_value_str(v, '\\"')
+ for k, v in genparam.items()
+ ]
+ )
+ )
+ )
(src_files, incdirs) = self._get_fileset_files()
if incdirs:
- tcl_file.write('project set "Verilog Include Directories" "{}" -process "Synthesize - XST"\n'.format('|'.join(incdirs)))
+ tcl_file.write(
+ 'project set "Verilog Include Directories" "{}" -process "Synthesize - XST"\n'.format(
+ "|".join(incdirs)
+ )
+ )
_libraries = []
for f in src_files:
- if f.file_type == 'tclSource':
- tcl_file.write('source {}\n'.format(f.name))
- elif f.file_type.startswith('verilogSource'):
- tcl_file.write('xfile add {}\n'.format(f.name))
- elif f.file_type == 'UCF':
- tcl_file.write('xfile_add_exist_ok {}\n'.format(f.name))
- elif f.file_type == 'BMM':
- tcl_file.write('xfile add {}\n'.format(f.name))
- elif f.file_type.startswith('vhdlSource'):
+ if f.file_type == "tclSource":
+ tcl_file.write("source {}\n".format(f.name))
+ elif f.file_type.startswith("verilogSource"):
+ tcl_file.write("xfile add {}\n".format(f.name))
+ elif f.file_type == "UCF":
+ tcl_file.write("xfile_add_exist_ok {}\n".format(f.name))
+ elif f.file_type == "BMM":
+ tcl_file.write("xfile add {}\n".format(f.name))
+ elif f.file_type.startswith("vhdlSource"):
if f.logical_name:
if not f.logical_name in _libraries:
- tcl_file.write('lib_vhdl new {}\n'.format(f.logical_name))
+ tcl_file.write("lib_vhdl new {}\n".format(f.logical_name))
_libraries.append(f.logical_name)
- _s = 'xfile add {} -lib_vhdl {}\n'
- tcl_file.write(_s.format(f.name,
- f.logical_name))
+ _s = "xfile add {} -lib_vhdl {}\n"
+ tcl_file.write(_s.format(f.name, f.logical_name))
else:
- tcl_file.write('xfile add {}\n'.format(f.name))
- elif f.file_type == 'user':
+ tcl_file.write("xfile add {}\n".format(f.name))
+ elif f.file_type == "user":
pass
tcl_file.write('project set top "{}"\n'.format(self.toplevel))
tcl_file.close()
def run_main(self):
- pgm_file_name = os.path.join(self.work_root, self.name+'.pgm')
+ pgm_file_name = os.path.join(self.work_root, self.name + ".pgm")
self._write_pgm_file(pgm_file_name)
- self._run_tool('impact', ['-batch', pgm_file_name])
+ self._run_tool("impact", ["-batch", pgm_file_name])
def _write_pgm_file(self, pgm_file_name):
- pgm_file = open(pgm_file_name,'w')
- pgm_file.write(self.PGM_FILE_TEMPLATE.format(
- pgm_file = pgm_file_name,
- bit_file = os.path.join(self.work_root, self.toplevel+'.bit'),
- cdf_file = os.path.join(self.work_root, self.toplevel+'.cdf')))
+ pgm_file = open(pgm_file_name, "w")
+ pgm_file.write(
+ self.PGM_FILE_TEMPLATE.format(
+ pgm_file=pgm_file_name,
+ bit_file=os.path.join(self.work_root, self.toplevel + ".bit"),
+ cdf_file=os.path.join(self.work_root, self.toplevel + ".cdf"),
+ )
+ )
pgm_file.close()
diff --git a/edalize/isim.py b/edalize/isim.py
index d20377503..ed7b1726c 100644
--- a/edalize/isim.py
+++ b/edalize/isim.py
@@ -9,11 +9,12 @@
logger = logging.getLogger(__name__)
+
class Isim(Edatool):
- argtypes = ['plusarg', 'vlogdefine', 'vlogparam']
+ argtypes = ["plusarg", "vlogdefine", "vlogparam"]
- MAKEFILE_TEMPLATE="""#Auto generated by Edalize
+ MAKEFILE_TEMPLATE = """#Auto generated by Edalize
include config.mk
all: $(TARGET)
@@ -51,79 +52,117 @@ class Isim(Edatool):
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Xilinx ISim simulator from ISE design suite",
- 'lists' : [
- {'name' : 'fuse_options',
- 'type' : 'String',
- 'desc' : 'Additional options for compilation with FUSE'},
- {'name' : 'isim_options',
- 'type' : 'String',
- 'desc' : 'Additional run options for ISim'},
- ]}
+ return {
+ "description": "Xilinx ISim simulator from ISE design suite",
+ "lists": [
+ {
+ "name": "fuse_options",
+ "type": "String",
+ "desc": "Additional options for compilation with FUSE",
+ },
+ {
+ "name": "isim_options",
+ "type": "String",
+ "desc": "Additional run options for ISim",
+ },
+ ],
+ }
+
def configure_main(self):
- #Check if any VPI modules are present and display warning
+ # Check if any VPI modules are present and display warning
if len(self.vpi_modules) > 0:
- modules = [m['name'] for m in self.vpi_modules]
- logger.error('VPI modules not supported by Isim: %s' % ', '.join(modules))
+ modules = [m["name"] for m in self.vpi_modules]
+ logger.error("VPI modules not supported by Isim: %s" % ", ".join(modules))
- with open(os.path.join(self.work_root, self.name+'.prj'),'w') as f:
+ with open(os.path.join(self.work_root, self.name + ".prj"), "w") as f:
(src_files, self.incdirs) = self._get_fileset_files()
for src_file in src_files:
prefix = ""
logical_name = ""
if src_file.file_type in [
- "verilogSource",
- "verilogSource-95",
- "verilogSource-2001"]:
- prefix = 'verilog'
+ "verilogSource",
+ "verilogSource-95",
+ "verilogSource-2001",
+ ]:
+ prefix = "verilog"
elif src_file.file_type.startswith("vhdlSource"):
- prefix = 'vhdl'
+ prefix = "vhdl"
if src_file.logical_name:
- logical_name = src_file.logical_name + ' '
- elif src_file.file_type in ["systemVerilogSource",
- "systemVerilogSource-3.0",
- "systemVerilogSource-3.1",
- "systemVerilogSource-3.1a",
- "verilogSource-2005"]:
- prefix = 'sv'
+ logical_name = src_file.logical_name + " "
+ elif src_file.file_type in [
+ "systemVerilogSource",
+ "systemVerilogSource-3.0",
+ "systemVerilogSource-3.1",
+ "systemVerilogSource-3.1a",
+ "verilogSource-2005",
+ ]:
+ prefix = "sv"
elif src_file.file_type in ["user"]:
pass
else:
_s = "{} has unknown file type '{}'"
- logger.warning(_s.format(src_file.name,
- src_file.file_type))
+ logger.warning(_s.format(src_file.name, src_file.file_type))
if prefix:
- f.write('{} work {}{}\n'.format(prefix, logical_name, src_file.name))
+ f.write(
+ "{} work {}{}\n".format(prefix, logical_name, src_file.name)
+ )
- with open(os.path.join(self.work_root, 'run_'+self.name+'.tcl'),'w') as f:
+ with open(os.path.join(self.work_root, "run_" + self.name + ".tcl"), "w") as f:
f.write(self.RUN_TCL_TEMPLATE)
- with open(os.path.join(self.work_root, 'Makefile'),'w') as f:
+ with open(os.path.join(self.work_root, "Makefile"), "w") as f:
f.write(self.MAKEFILE_TEMPLATE)
- with open(os.path.join(self.work_root, 'config.mk'),'w') as f:
- vlog_defines = ' '.join(['--define {}={}'.format(k, self._param_value_str(v)) for k,v, in self.vlogdefine.items()])
- vlog_includes = ' '.join(['-i '+k for k in self.incdirs])
- vlog_params = ' '.join(['--generic_top {}={}'.format(k, self._param_value_str(v)) for k,v, in self.vlogparam.items()])
- fuse_options = ' '.join(self.tool_options.get('fuse_options', []))
- isim_options = ' '.join(self.tool_options.get('isim_options', []))
-
- _s = '-testplusarg {}={}'
- extra_options = ' '.join([_s.format(k, self._param_value_str(v)) for k,v in self.plusarg.items()])
- f.write(self.CONFIG_MK_TEMPLATE.format(target = self.name,
- toplevel = self.toplevel,
- vlog_defines = vlog_defines,
- vlog_includes = vlog_includes,
- vlog_params = vlog_params,
- fuse_options = fuse_options,
- isim_options = isim_options,
- extra_options = extra_options))
+ with open(os.path.join(self.work_root, "config.mk"), "w") as f:
+ vlog_defines = " ".join(
+ [
+ "--define {}={}".format(k, self._param_value_str(v))
+ for k, v, in self.vlogdefine.items()
+ ]
+ )
+ vlog_includes = " ".join(["-i " + k for k in self.incdirs])
+ vlog_params = " ".join(
+ [
+ "--generic_top {}={}".format(k, self._param_value_str(v))
+ for k, v, in self.vlogparam.items()
+ ]
+ )
+ fuse_options = " ".join(self.tool_options.get("fuse_options", []))
+ isim_options = " ".join(self.tool_options.get("isim_options", []))
+
+ _s = "-testplusarg {}={}"
+ extra_options = " ".join(
+ [
+ _s.format(k, self._param_value_str(v))
+ for k, v in self.plusarg.items()
+ ]
+ )
+ f.write(
+ self.CONFIG_MK_TEMPLATE.format(
+ target=self.name,
+ toplevel=self.toplevel,
+ vlog_defines=vlog_defines,
+ vlog_includes=vlog_includes,
+ vlog_params=vlog_params,
+ fuse_options=fuse_options,
+ isim_options=isim_options,
+ extra_options=extra_options,
+ )
+ )
def run_main(self):
- args = ['run']
+ args = ["run"]
# Plusargs
if self.plusarg:
- _s = '-testplusarg {}={}'
- args.append('EXTRA_OPTIONS='+' '.join([_s.format(k, self._param_value_str(v)) for k,v in self.plusarg.items()]))
- self._run_tool('make', args)
+ _s = "-testplusarg {}={}"
+ args.append(
+ "EXTRA_OPTIONS="
+ + " ".join(
+ [
+ _s.format(k, self._param_value_str(v))
+ for k, v in self.plusarg.items()
+ ]
+ )
+ )
+ self._run_tool("make", args)
diff --git a/edalize/libero.py b/edalize/libero.py
index 255e9e1c6..0fe0bba80 100644
--- a/edalize/libero.py
+++ b/edalize/libero.py
@@ -12,54 +12,73 @@ class Libero(Edatool):
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description': "The Libero backend supports Microsemi Libero to build systems and program the FPGA",
- 'members': [
- {'name': 'family',
- 'type': 'String',
- 'desc': 'FPGA family (e.g. PolarFire)'},
- {'name': 'die',
- 'type': 'String',
- 'desc': 'FPGA device (e.g. MPF300TS)'},
- {'name': 'package',
- 'type': 'String',
- 'desc': 'FPGA package type (e.g. FCG1152)'},
- {'name': 'speed',
- 'type': 'String',
- 'desc': 'FPGA speed rating (e.g. -1)'},
- {'name': 'dievoltage',
- 'type': 'String',
- 'desc': 'FPGA die voltage (e.g. 1.0)'},
- {'name': 'range',
- 'type': 'String',
- 'desc': 'FPGA temperature range (e.g. IND)'},
- {'name': 'defiostd',
- 'type': 'String',
- 'desc': 'FPGA default IO std (e.g. "LVCMOS 1.8V")'},
- {'name': 'hdl',
- 'type': 'String',
- 'desc': 'Default HDL (e.g. "VERILOG")'},
- ]
- }
-
- argtypes = ['vlogdefine', 'vlogparam', 'generic']
- mandatory_options = ['family', 'die', 'package', 'range']
+ return {
+ "description": "The Libero backend supports Microsemi Libero to build systems and program the FPGA",
+ "members": [
+ {
+ "name": "family",
+ "type": "String",
+ "desc": "FPGA family (e.g. PolarFire)",
+ },
+ {
+ "name": "die",
+ "type": "String",
+ "desc": "FPGA device (e.g. MPF300TS)",
+ },
+ {
+ "name": "package",
+ "type": "String",
+ "desc": "FPGA package type (e.g. FCG1152)",
+ },
+ {
+ "name": "speed",
+ "type": "String",
+ "desc": "FPGA speed rating (e.g. -1)",
+ },
+ {
+ "name": "dievoltage",
+ "type": "String",
+ "desc": "FPGA die voltage (e.g. 1.0)",
+ },
+ {
+ "name": "range",
+ "type": "String",
+ "desc": "FPGA temperature range (e.g. IND)",
+ },
+ {
+ "name": "defiostd",
+ "type": "String",
+ "desc": 'FPGA default IO std (e.g. "LVCMOS 1.8V")',
+ },
+ {
+ "name": "hdl",
+ "type": "String",
+ "desc": 'Default HDL (e.g. "VERILOG")',
+ },
+ ],
+ }
+
+ argtypes = ["vlogdefine", "vlogparam", "generic"]
+ mandatory_options = ["family", "die", "package", "range"]
tool_options_defaults = {
- 'range': 'IND',
+ "range": "IND",
}
def _set_tool_options_defaults(self):
for key, default_value in self.tool_options_defaults.items():
if not key in self.tool_options:
- logger.info("Set Libero tool option %s to default value %s"
- % (key, str(default_value)))
+ logger.info(
+ "Set Libero tool option %s to default value %s"
+ % (key, str(default_value))
+ )
self.tool_options[key] = default_value
def _check_mandatory_options(self):
shouldExit = 0
for key in self.mandatory_options:
if not key in self.tool_options:
- logger.error("Libero option \"%s\" must be defined", key)
+ logger.error('Libero option "%s" must be defined', key)
shouldExit = 1
if shouldExit:
raise RuntimeError("Missing required tool options")
@@ -75,9 +94,9 @@ def configure_main(self):
self._set_tool_options_defaults()
self._check_mandatory_options()
(src_files, incdirs) = self._get_fileset_files(force_slash=True)
- self.jinja_env.filters['src_file_filter'] = self.src_file_filter
- self.jinja_env.filters['constraint_file_filter'] = self.constraint_file_filter
- self.jinja_env.filters['tcl_file_filter'] = self.tcl_file_filter
+ self.jinja_env.filters["src_file_filter"] = self.src_file_filter
+ self.jinja_env.filters["constraint_file_filter"] = self.constraint_file_filter
+ self.jinja_env.filters["tcl_file_filter"] = self.tcl_file_filter
escaped_name = self.name.replace(".", "_")
@@ -91,20 +110,20 @@ def configure_main(self):
library_files[f.logical_name].append(f.name)
template_vars = {
- 'name': escaped_name,
- 'src_files': src_files,
- 'library_files': library_files,
- 'incdirs': incdirs,
- 'vlogparam': self.vlogparam,
- 'vlogdefine': self.vlogdefine,
- 'generic': self.generic,
- 'tool_options': self.tool_options,
- 'toplevel': self.toplevel,
- 'generic': self.generic,
- 'prj_root': "./prj",
- 'op': "{",
- 'cl': "}",
- 'sp': " "
+ "name": escaped_name,
+ "src_files": src_files,
+ "library_files": library_files,
+ "incdirs": incdirs,
+ "vlogparam": self.vlogparam,
+ "vlogdefine": self.vlogdefine,
+ "generic": self.generic,
+ "tool_options": self.tool_options,
+ "toplevel": self.toplevel,
+ "generic": self.generic,
+ "prj_root": "./prj",
+ "op": "{",
+ "cl": "}",
+ "sp": " ",
}
# Set preferred HDL language based on file type amount if not user defined.
@@ -114,7 +133,7 @@ def configure_main(self):
verilogFiles = 0
VHDLFiles = 0
for f in src_files:
- t = f.file_type.split('-')[0]
+ t = f.file_type.split("-")[0]
if t == "verilogSource" or t == "systemVerilogSource":
verilogFiles += 1
elif t == "vhdlSource":
@@ -125,73 +144,77 @@ def configure_main(self):
self.tool_options["hdl"] = "VHDL"
# Render the TCL project file
- self.render_template('libero-project.tcl.j2',
- escaped_name + '-project.tcl',
- template_vars)
+ self.render_template(
+ "libero-project.tcl.j2", escaped_name + "-project.tcl", template_vars
+ )
# Render the TCL run file
- self.render_template('libero-run.tcl.j2',
- escaped_name + '-run.tcl',
- template_vars)
+ self.render_template(
+ "libero-run.tcl.j2", escaped_name + "-run.tcl", template_vars
+ )
# Render the Synthesize TCL file
- self.render_template('libero-syn-user.tcl.j2',
- escaped_name + '-syn-user.tcl',
- template_vars)
+ self.render_template(
+ "libero-syn-user.tcl.j2", escaped_name + "-syn-user.tcl", template_vars
+ )
logger.info("Cores and Libero TCL Scripts generated.")
def src_file_filter(self, f):
file_types = {
- 'verilogSource': '-hdl_source {',
- 'systemVerilogSource': '-hdl_source {',
- 'vhdlSource': "-hdl_source {",
- 'PDC': '-io_pdc {',
- 'SDC': '-sdc {',
- 'FPPDC': '-fp_pdc {',
+ "verilogSource": "-hdl_source {",
+ "systemVerilogSource": "-hdl_source {",
+ "vhdlSource": "-hdl_source {",
+ "PDC": "-io_pdc {",
+ "SDC": "-sdc {",
+ "FPPDC": "-fp_pdc {",
}
- _file_type = f.file_type.split('-')[0]
+ _file_type = f.file_type.split("-")[0]
if _file_type in file_types:
# Do not return library files here
if f.logical_name:
- return ''
+ return ""
return file_types[_file_type] + f.name
- return ''
+ return ""
def tcl_file_filter(self, f):
file_types = {
- 'tclSource': 'source ',
+ "tclSource": "source ",
}
- _file_type = f.file_type.split('-')[0]
+ _file_type = f.file_type.split("-")[0]
if _file_type in file_types:
return file_types[_file_type] + f.name
- return ''
+ return ""
def constraint_file_filter(self, f, type="ALL"):
file_types = {
- 'PDC': 'constraint/io/',
- 'SDC': 'constraint/',
- 'FPPDC': 'constraint/fp/',
+ "PDC": "constraint/io/",
+ "SDC": "constraint/",
+ "FPPDC": "constraint/fp/",
}
- _file_type = f.file_type.split('-')[0]
+ _file_type = f.file_type.split("-")[0]
if _file_type in file_types:
filename = f.name.split("/")[-1]
if type == "ALL":
return file_types[_file_type] + filename
elif _file_type == type:
return file_types[_file_type] + filename
- return ''
+ return ""
def build_main(self):
logger.info("Executing Libero TCL Scripts.")
escaped_name = self.name.replace(".", "_")
if shutil.which("libero"):
- self._run_tool('libero', ['SCRIPT:' + escaped_name + '-run.tcl'])
+ self._run_tool("libero", ["SCRIPT:" + escaped_name + "-run.tcl"])
else:
- filePath = os.path.join(Path(self.work_root).relative_to(
- os.getcwd()), escaped_name + '-run.tcl')
+ filePath = os.path.join(
+ Path(self.work_root).relative_to(os.getcwd()), escaped_name + "-run.tcl"
+ )
logger.warn(
- "Libero not found on path, execute manually the script \"" + filePath + "\"")
+ 'Libero not found on path, execute manually the script "'
+ + filePath
+ + '"'
+ )
def run_main(self):
pass
diff --git a/edalize/modelsim.py b/edalize/modelsim.py
index a6d82690a..551228dd7 100644
--- a/edalize/modelsim.py
+++ b/edalize/modelsim.py
@@ -9,7 +9,7 @@
logger = logging.getLogger(__name__)
-MAKE_HEADER ="""#Generated by Edalize
+MAKE_HEADER = """#Generated by Edalize
ifndef MODEL_TECH
$(error Environment variable MODEL_TECH was not found. It should be set to /bin)
endif
@@ -69,118 +69,134 @@
$(RM) $({name}_OBJS) {name}
"""
+
class Modelsim(Edatool):
- argtypes = ['plusarg', 'vlogdefine', 'vlogparam', 'generic']
+ argtypes = ["plusarg", "vlogdefine", "vlogparam", "generic"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "ModelSim simulator from Mentor Graphics",
- 'lists' : [
- {'name' : 'vcom_options',
- 'type' : 'String',
- 'desc' : 'Additional options for compilation with vcom'},
- {'name' : 'vlog_options',
- 'type' : 'String',
- 'desc' : 'Additional options for compilation with vlog'},
- {'name' : 'vsim_options',
- 'type' : 'String',
- 'desc' : 'Additional run options for vsim'},
- ]}
+ return {
+ "description": "ModelSim simulator from Mentor Graphics",
+ "lists": [
+ {
+ "name": "vcom_options",
+ "type": "String",
+ "desc": "Additional options for compilation with vcom",
+ },
+ {
+ "name": "vlog_options",
+ "type": "String",
+ "desc": "Additional options for compilation with vlog",
+ },
+ {
+ "name": "vsim_options",
+ "type": "String",
+ "desc": "Additional run options for vsim",
+ },
+ ],
+ }
def _write_build_rtl_tcl_file(self, tcl_main):
- tcl_build_rtl = open(os.path.join(self.work_root, "edalize_build_rtl.tcl"), 'w')
+ tcl_build_rtl = open(os.path.join(self.work_root, "edalize_build_rtl.tcl"), "w")
(src_files, incdirs) = self._get_fileset_files()
- vlog_include_dirs = ['+incdir+'+d.replace('\\','/') for d in incdirs]
+ vlog_include_dirs = ["+incdir+" + d.replace("\\", "/") for d in incdirs]
libs = []
for f in src_files:
if not f.logical_name:
- f.logical_name = 'work'
+ f.logical_name = "work"
if not f.logical_name in libs:
tcl_build_rtl.write("vlib {}\n".format(f.logical_name))
libs.append(f.logical_name)
- if f.file_type.startswith("verilogSource") or \
- f.file_type.startswith("systemVerilogSource"):
- cmd = 'vlog'
+ if f.file_type.startswith("verilogSource") or f.file_type.startswith(
+ "systemVerilogSource"
+ ):
+ cmd = "vlog"
args = []
- args += self.tool_options.get('vlog_options', [])
+ args += self.tool_options.get("vlog_options", [])
for k, v in self.vlogdefine.items():
- args += ['+define+{}={}'.format(k,self._param_value_str(v))]
+ args += ["+define+{}={}".format(k, self._param_value_str(v))]
if f.file_type.startswith("systemVerilogSource"):
- args += ['-sv']
+ args += ["-sv"]
args += vlog_include_dirs
elif f.file_type.startswith("vhdlSource"):
- cmd = 'vcom'
+ cmd = "vcom"
if f.file_type.endswith("-87"):
- args = ['-87']
+ args = ["-87"]
if f.file_type.endswith("-93"):
- args = ['-93']
+ args = ["-93"]
if f.file_type.endswith("-2008"):
- args = ['-2008']
+ args = ["-2008"]
else:
args = []
- args += self.tool_options.get('vcom_options', [])
+ args += self.tool_options.get("vcom_options", [])
- elif f.file_type == 'tclSource':
+ elif f.file_type == "tclSource":
cmd = None
tcl_main.write("do {}\n".format(f.name))
- elif f.file_type == 'user':
+ elif f.file_type == "user":
cmd = None
else:
_s = "{} has unknown file type '{}'"
logger.warning(_s.format(f.name, f.file_type))
cmd = None
if cmd:
- args += ['-quiet']
- args += ['-work', f.logical_name]
- args += [f.name.replace('\\','/')]
- tcl_build_rtl.write("{} {}\n".format(cmd, ' '.join(args)))
+ args += ["-quiet"]
+ args += ["-work", f.logical_name]
+ args += [f.name.replace("\\", "/")]
+ tcl_build_rtl.write("{} {}\n".format(cmd, " ".join(args)))
def _write_makefile(self):
- vpi_make = open(os.path.join(self.work_root, "Makefile"), 'w')
+ vpi_make = open(os.path.join(self.work_root, "Makefile"), "w")
_parameters = []
for key, value in self.vlogparam.items():
- _parameters += ['{}={}'.format(key, self._param_value_str(value))]
+ _parameters += ["{}={}".format(key, self._param_value_str(value))]
for key, value in self.generic.items():
- _parameters += ['{}={}'.format(key, self._param_value_str(value, bool_is_str=True))]
+ _parameters += [
+ "{}={}".format(key, self._param_value_str(value, bool_is_str=True))
+ ]
_plusargs = []
for key, value in self.plusarg.items():
- _plusargs += ['{}={}'.format(key, self._param_value_str(value))]
-
- _vsim_options = self.tool_options.get('vsim_options', [])
-
- _modules = [m['name'] for m in self.vpi_modules]
- _clean_targets = ' '.join(["clean_"+m for m in _modules])
- _s = MAKE_HEADER.format(toplevel = self.toplevel,
- parameters = ' '.join(_parameters),
- plusargs = ' '.join(_plusargs),
- vsim_options = ' '.join(_vsim_options),
- modules = ' '.join(_modules),
- clean_targets = _clean_targets)
+ _plusargs += ["{}={}".format(key, self._param_value_str(value))]
+
+ _vsim_options = self.tool_options.get("vsim_options", [])
+
+ _modules = [m["name"] for m in self.vpi_modules]
+ _clean_targets = " ".join(["clean_" + m for m in _modules])
+ _s = MAKE_HEADER.format(
+ toplevel=self.toplevel,
+ parameters=" ".join(_parameters),
+ plusargs=" ".join(_plusargs),
+ vsim_options=" ".join(_vsim_options),
+ modules=" ".join(_modules),
+ clean_targets=_clean_targets,
+ )
vpi_make.write(_s)
for vpi_module in self.vpi_modules:
- _name = vpi_module['name']
- _objs = [os.path.splitext(s)[0]+'.o' for s in vpi_module['src_files']]
- _libs = ['-l'+l for l in vpi_module['libs']]
- _incs = ['-I'+d for d in vpi_module['include_dirs']]
- _s = VPI_MAKE_SECTION.format(name=_name,
- objs=' '.join(_objs),
- libs=' '.join(_libs),
- incs=' '.join(_incs))
+ _name = vpi_module["name"]
+ _objs = [os.path.splitext(s)[0] + ".o" for s in vpi_module["src_files"]]
+ _libs = ["-l" + l for l in vpi_module["libs"]]
+ _incs = ["-I" + d for d in vpi_module["include_dirs"]]
+ _s = VPI_MAKE_SECTION.format(
+ name=_name,
+ objs=" ".join(_objs),
+ libs=" ".join(_libs),
+ incs=" ".join(_incs),
+ )
vpi_make.write(_s)
vpi_make.close()
def configure_main(self):
- tcl_main = open(os.path.join(self.work_root, "edalize_main.tcl"), 'w')
+ tcl_main = open(os.path.join(self.work_root, "edalize_main.tcl"), "w")
tcl_main.write("onerror { quit -code 1; }\n")
tcl_main.write("do edalize_build_rtl.tcl\n")
@@ -189,13 +205,13 @@ def configure_main(self):
tcl_main.close()
def run_main(self):
- args = ['run']
+ args = ["run"]
# Set plusargs
if self.plusarg:
plusargs = []
for key, value in self.plusarg.items():
- plusargs += ['{}={}'.format(key, self._param_value_str(value))]
- args.append('PLUSARGS='+' '.join(plusargs))
+ plusargs += ["{}={}".format(key, self._param_value_str(value))]
+ args.append("PLUSARGS=" + " ".join(plusargs))
- self._run_tool('make', args)
+ self._run_tool("make", args)
diff --git a/edalize/morty.py b/edalize/morty.py
index 3a5561c66..f9a373bf3 100644
--- a/edalize/morty.py
+++ b/edalize/morty.py
@@ -9,8 +9,9 @@
logger = logging.getLogger(__name__)
+
class Morty(Edatool):
- argtypes = ['cmdlinearg', 'vlogdefine']
+ argtypes = ["cmdlinearg", "vlogdefine"]
_description = """ Morty Systemverilog pickle
@@ -33,28 +34,35 @@ class Morty(Edatool):
"""
tool_options = {
- 'lists' : {
- 'morty_options' : 'String', # runtime options (passed to morty)
+ "lists": {
+ "morty_options": "String", # runtime options (passed to morty)
}
}
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Run the (System-) Verilog pickle tool called `morty`.",
- 'lists' : [
- {'name' : 'morty_options',
- 'type' : 'String',
- 'desc' : 'Run-time options passed to morty.'},
- ]}
+ return {
+ "description": "Run the (System-) Verilog pickle tool called `morty`.",
+ "lists": [
+ {
+ "name": "morty_options",
+ "type": "String",
+ "desc": "Run-time options passed to morty.",
+ },
+ ],
+ }
def build_main(self, target=None):
args = list()
src_files_filtered = list()
(src_files, incdirs) = self._get_fileset_files()
- args += ['-I {}'.format(incdir) for incdir in incdirs]
- args += ['-D {}={}'.format(key, self._param_value_str(value)) for key, value in self.vlogdefine.items()]
+ args += ["-I {}".format(incdir) for incdir in incdirs]
+ args += [
+ "-D {}={}".format(key, self._param_value_str(value))
+ for key, value in self.vlogdefine.items()
+ ]
# Filter for Verilog source files.
for src_file in src_files:
@@ -64,9 +72,9 @@ def build_main(self, target=None):
# Append filtered file names.
args += [f.name for f in src_files_filtered]
# Append any options passed through `morty_options`.
- args += self.tool_options.get('morty_options', [])
+ args += self.tool_options.get("morty_options", [])
# Go and do your thing!
- self._run_tool('morty', args, quiet=True)
+ self._run_tool("morty", args, quiet=True)
def run_main(self):
logger.warn("Morty does not support running. Use build instead.")
diff --git a/edalize/nextpnr.py b/edalize/nextpnr.py
index cdfa35366..3efd1112b 100644
--- a/edalize/nextpnr.py
+++ b/edalize/nextpnr.py
@@ -6,18 +6,22 @@
from edalize.edatool import Edatool
-class Nextpnr(Edatool):
+class Nextpnr(Edatool):
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "a portable FPGA place and route tool",
- 'members' : [],
- 'lists' : [
- {'name' : 'nextpnr_options',
- 'type' : 'String',
- 'desc' : 'Additional options for nextpnr'},
- ]}
+ return {
+ "description": "a portable FPGA place and route tool",
+ "members": [],
+ "lists": [
+ {
+ "name": "nextpnr_options",
+ "type": "String",
+ "desc": "Additional options for nextpnr",
+ },
+ ],
+ }
def configure_main(self):
cst_file = ""
@@ -26,61 +30,77 @@ def configure_main(self):
netlist = ""
unused_files = []
for f in self.files:
- if f['file_type'] == 'CST':
+ if f["file_type"] == "CST":
if cst_file:
- raise RuntimeError("Nextpnr only supports one CST file. Found {} and {}".format(cst_file, f['name']))
- cst_file = f['name']
- if f['file_type'] == 'LPF':
+ raise RuntimeError(
+ "Nextpnr only supports one CST file. Found {} and {}".format(
+ cst_file, f["name"]
+ )
+ )
+ cst_file = f["name"]
+ if f["file_type"] == "LPF":
if lpf_file:
- raise RuntimeError("Nextpnr only supports one LPF file. Found {} and {}".format(pcf_file, f['name']))
- lpf_file = f['name']
- if f['file_type'] == 'PCF':
+ raise RuntimeError(
+ "Nextpnr only supports one LPF file. Found {} and {}".format(
+ pcf_file, f["name"]
+ )
+ )
+ lpf_file = f["name"]
+ if f["file_type"] == "PCF":
if pcf_file:
- raise RuntimeError("Nextpnr only supports one PCF file. Found {} and {}".format(pcf_file, f['name']))
- pcf_file = f['name']
- elif f['file_type'] == 'jsonNetlist':
+ raise RuntimeError(
+ "Nextpnr only supports one PCF file. Found {} and {}".format(
+ pcf_file, f["name"]
+ )
+ )
+ pcf_file = f["name"]
+ elif f["file_type"] == "jsonNetlist":
if netlist:
- raise RuntimeError("Nextpnr only supports one netlist. Found {} and {}".format(netlist, f['name']))
- netlist = f['name']
+ raise RuntimeError(
+ "Nextpnr only supports one netlist. Found {} and {}".format(
+ netlist, f["name"]
+ )
+ )
+ netlist = f["name"]
else:
unused_files.append(f)
- self.edam['files'] = unused_files
+ self.edam["files"] = unused_files
of = [
- {'name' : self.name+'.asc', 'file_type' : 'iceboxAscii'},
+ {"name": self.name + ".asc", "file_type": "iceboxAscii"},
]
- self.edam['files'] += of
+ self.edam["files"] += of
# Write Makefile
commands = self.EdaCommands()
- arch = self.flow_config['arch']
+ arch = self.flow_config["arch"]
arch_options = []
- if arch == 'ecp5':
- targets = self.name+'.config'
- constraints = ['--lpf' , lpf_file] if lpf_file else []
- output = ['--textcfg' , targets]
- elif arch == 'gowin':
- device = self.tool_options.get('device')
+ if arch == "ecp5":
+ targets = self.name + ".config"
+ constraints = ["--lpf", lpf_file] if lpf_file else []
+ output = ["--textcfg", targets]
+ elif arch == "gowin":
+ device = self.tool_options.get("device")
if not device:
raise RuntimeError("Missing required option 'device' for nextpnr-gowin")
- arch_options += ['--device', device]
- targets = self.name+'.pack'
- constraints = ['--cst' , cst_file] if cst_file else []
- output = ['--write' , targets]
+ arch_options += ["--device", device]
+ targets = self.name + ".pack"
+ constraints = ["--cst", cst_file] if cst_file else []
+ output = ["--write", targets]
else:
- targets = self.name+'.asc'
- constraints = ['--pcf' , pcf_file] if pcf_file else []
- output = ['--asc' , targets]
+ targets = self.name + ".asc"
+ constraints = ["--pcf", pcf_file] if pcf_file else []
+ output = ["--asc", targets]
depends = netlist
- command = ['nextpnr-'+ arch, '-l', 'next.log']
- command += arch_options + self.tool_options.get('nextpnr_options', [])
- command += constraints + ['--json', depends] + output
+ command = ["nextpnr-" + arch, "-l", "next.log"]
+ command += arch_options + self.tool_options.get("nextpnr_options", [])
+ command += constraints + ["--json", depends] + output
- #CLI target
+ # CLI target
commands.add(command, [targets], [depends])
- #GUI target
- commands.add(command+['--gui'], ["build-gui"], [depends])
+ # GUI target
+ commands.add(command + ["--gui"], ["build-gui"], [depends])
self.commands = commands.commands
diff --git a/edalize/openlane.py b/edalize/openlane.py
index 81f05fbf1..d107338ad 100644
--- a/edalize/openlane.py
+++ b/edalize/openlane.py
@@ -8,17 +8,19 @@
logger = logging.getLogger(__name__)
+
class Openlane(Edatool):
- argtypes = ['vlogdefine']
+ argtypes = ["vlogdefine"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Open source flow for ASIC synthesis, placement and routing",
- 'members': [
- ],
- 'lists' : []}
+ return {
+ "description": "Open source flow for ASIC synthesis, placement and routing",
+ "members": [],
+ "lists": [],
+ }
def configure_main(self):
files = []
@@ -26,24 +28,24 @@ def configure_main(self):
(src_files, incdirs) = self._get_fileset_files()
for f in src_files:
- if f.file_type.startswith('verilogSource'):
+ if f.file_type.startswith("verilogSource"):
files.append(f.name)
elif f.file_type == "tclSource":
tcl.append(f.name)
defines = ""
for k, v in self.vlogdefine.items():
- defines += ' {}={}'.format(k,self._param_value_str(v))
+ defines += " {}={}".format(k, self._param_value_str(v))
template_vars = {
- 'top' : self.toplevel,
- 'file_table' : ' '.join(files),
- 'tcl' : '\n'.join(['source ' + f for f in tcl]),
- 'defines' : defines,
+ "top": self.toplevel,
+ "file_table": " ".join(files),
+ "tcl": "\n".join(["source " + f for f in tcl]),
+ "defines": defines,
}
- script_name = 'config.tcl'
- self.render_template('openlane-script-tcl.j2', script_name, template_vars)
+ script_name = "config.tcl"
+ self.render_template("openlane-script-tcl.j2", script_name, template_vars)
- makefile_name = 'Makefile'
- self.render_template('openlane-makefile.j2', makefile_name, template_vars)
+ makefile_name = "Makefile"
+ self.render_template("openlane-makefile.j2", makefile_name, template_vars)
diff --git a/edalize/quartus.py b/edalize/quartus.py
index e743c9cef..a5954efa3 100644
--- a/edalize/quartus.py
+++ b/edalize/quartus.py
@@ -14,43 +14,63 @@
logger = logging.getLogger(__name__)
+
class Quartus(Edatool):
- argtypes = ['vlogdefine', 'vlogparam', 'generic']
+ argtypes = ["vlogdefine", "vlogparam", "generic"]
# Define Standard edition to be our default version
isPro = False
- makefile_template = {False : "quartus-std-makefile.j2",
- True : "quartus-pro-makefile.j2"}
+ makefile_template = {
+ False: "quartus-std-makefile.j2",
+ True: "quartus-pro-makefile.j2",
+ }
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "The Quartus backend supports Intel Quartus Std and Pro editions to build systems and program the FPGA",
- 'members' : [
- {'name' : 'family',
- 'type' : 'String',
- 'desc' : 'FPGA family (e.g. Cyclone V)'},
- {'name' : 'device',
- 'type' : 'String',
- 'desc' : 'FPGA device (e.g. 5CSXFC6D6F31C8ES)'},
- {'name' : 'cable',
- 'type' : 'String',
- 'desc' : "Specifies the FPGA's JTAG programming cable. Use the tool `jtagconfig` to determine the available cables."},
- {'name' : 'board_device_index',
- 'type' : 'String',
- 'desc' : "Specifies the FPGA's device number in the JTAG chain. The device index specifies the device where the flash programmer looks for the Nios® II JTAG debug module. JTAG devices are numbered relative to the JTAG chain, starting at 1. Use the tool `jtagconfig` to determine the index."},
- {'name' : 'pnr',
- 'type' : 'String',
- 'desc' : 'P&R tool. Allowed values are quartus (default), dse (to run Design Space Explorer) and none (to just run synthesis)'}],
- 'lists' : [
- {'name' : 'dse_options',
- 'type' : 'String',
- 'desc' : 'Options for DSE (Design Space Explorer)'},
- {'name' : 'quartus_options',
- 'type' : 'String',
- 'desc' : 'Additional options for Quartus'},
- ]}
+ return {
+ "description": "The Quartus backend supports Intel Quartus Std and Pro editions to build systems and program the FPGA",
+ "members": [
+ {
+ "name": "family",
+ "type": "String",
+ "desc": "FPGA family (e.g. Cyclone V)",
+ },
+ {
+ "name": "device",
+ "type": "String",
+ "desc": "FPGA device (e.g. 5CSXFC6D6F31C8ES)",
+ },
+ {
+ "name": "cable",
+ "type": "String",
+ "desc": "Specifies the FPGA's JTAG programming cable. Use the tool `jtagconfig` to determine the available cables.",
+ },
+ {
+ "name": "board_device_index",
+ "type": "String",
+ "desc": "Specifies the FPGA's device number in the JTAG chain. The device index specifies the device where the flash programmer looks for the Nios® II JTAG debug module. JTAG devices are numbered relative to the JTAG chain, starting at 1. Use the tool `jtagconfig` to determine the index.",
+ },
+ {
+ "name": "pnr",
+ "type": "String",
+ "desc": "P&R tool. Allowed values are quartus (default), dse (to run Design Space Explorer) and none (to just run synthesis)",
+ },
+ ],
+ "lists": [
+ {
+ "name": "dse_options",
+ "type": "String",
+ "desc": "Options for DSE (Design Space Explorer)",
+ },
+ {
+ "name": "quartus_options",
+ "type": "String",
+ "desc": "Additional options for Quartus",
+ },
+ ],
+ }
def __init__(self, edam=None, work_root=None, eda_api=None, verbose=False):
"""
@@ -67,21 +87,25 @@ def __init__(self, edam=None, work_root=None, eda_api=None, verbose=False):
# Acquire quartus_sh identification information from available tool if
# possible. We always default to version 18.1 Standard if a problem is encountered
version = {
- 'major': '18',
- 'minor': '1',
- 'patch': '0',
- 'date': '01/01/2019',
- 'edition': 'Standard'
+ "major": "18",
+ "minor": "1",
+ "patch": "0",
+ "date": "01/01/2019",
+ "edition": "Standard",
}
try:
- qsh_text = subprocess.Popen(["quartus_sh", "--version"], stdout=subprocess.PIPE, env=os.environ).communicate()[0]
+ qsh_text = subprocess.Popen(
+ ["quartus_sh", "--version"], stdout=subprocess.PIPE, env=os.environ
+ ).communicate()[0]
# Attempt to pattern match the output. Examples include
# Version 16.1.2 Build 203 01/18/2017 SJ Standard Edition
# Version 17.1.2 Build 304 01/31/2018 SJ Pro Edition
- version_exp = r'Version (?P\d+)\.(?P\d+)\.(?P\d+) ' + \
- r'Build (?P\d+) (?P\d{2}/\d{2}/\d{4}) (?:\w+) ' + \
- r'(?P(Lite|Standard|Pro)) Edition'
+ version_exp = (
+ r"Version (?P\d+)\.(?P\d+)\.(?P\d+) "
+ + r"Build (?P\d+) (?P\d{2}/\d{2}/\d{4}) (?:\w+) "
+ + r"(?P(Lite|Standard|Pro)) Edition"
+ )
match = re.search(version_exp, str(qsh_text))
if match != None:
@@ -91,13 +115,14 @@ def __init__(self, edam=None, work_root=None, eda_api=None, verbose=False):
# Quartus being installed. Allow these errors to be ignored
logger.warning("Unable to recognise Quartus version via quartus_sh")
- self.isPro = (version['edition'] == "Pro")
+ self.isPro = version["edition"] == "Pro"
# Quartus Pro 17 and later use 1/0 for boolean generics. Other editions
# and versions use "true"/"false" strings
- if (version['edition'] != "Pro") or (int(version['major']) < 17):
- self.jinja_env.filters['generic_value_str'] = \
- partial(self.jinja_env.filters['generic_value_str'], bool_is_str=True)
+ if (version["edition"] != "Pro") or (int(version["major"]) < 17):
+ self.jinja_env.filters["generic_value_str"] = partial(
+ self.jinja_env.filters["generic_value_str"], bool_is_str=True
+ )
def configure_main(self):
"""
@@ -108,54 +133,57 @@ def configure_main(self):
with the build steps.
"""
(src_files, incdirs) = self._get_fileset_files(force_slash=True)
- self.jinja_env.filters['src_file_filter'] = self.src_file_filter
- self.jinja_env.filters['qsys_file_filter'] = self.qsys_file_filter
+ self.jinja_env.filters["src_file_filter"] = self.src_file_filter
+ self.jinja_env.filters["qsys_file_filter"] = self.qsys_file_filter
- has_vhdl2008 = 'vhdlSource-2008' in [x.file_type for x in src_files]
- has_qsys = 'QSYS' in [x.file_type for x in src_files]
+ has_vhdl2008 = "vhdlSource-2008" in [x.file_type for x in src_files]
+ has_qsys = "QSYS" in [x.file_type for x in src_files]
escaped_name = self.name.replace(".", "_")
template_vars = {
- 'name' : escaped_name,
- 'src_files' : src_files,
- 'incdirs' : incdirs,
- 'tool_options' : self.tool_options,
- 'toplevel' : self.toplevel,
- 'vlogparam' : self.vlogparam,
- 'vlogdefine' : self.vlogdefine,
- 'generic' : self.generic,
- 'has_vhdl2008' : has_vhdl2008
+ "name": escaped_name,
+ "src_files": src_files,
+ "incdirs": incdirs,
+ "tool_options": self.tool_options,
+ "toplevel": self.toplevel,
+ "vlogparam": self.vlogparam,
+ "vlogdefine": self.vlogdefine,
+ "generic": self.generic,
+ "has_vhdl2008": has_vhdl2008,
}
# Render Makefile based on detected version
- self.render_template(self.makefile_template[self.isPro],
- 'Makefile',
- { 'name' : escaped_name,
- 'src_files' : src_files,
- 'tool_options' : self.tool_options})
+ self.render_template(
+ self.makefile_template[self.isPro],
+ "Makefile",
+ {
+ "name": escaped_name,
+ "src_files": src_files,
+ "tool_options": self.tool_options,
+ },
+ )
# Render the TCL project file
- self.render_template('quartus-project.tcl.j2',
- escaped_name + '.tcl',
- template_vars)
-
+ self.render_template(
+ "quartus-project.tcl.j2", escaped_name + ".tcl", template_vars
+ )
# Helper to extract file type
def file_type(self, f):
- return f.file_type.split('-')[0]
+ return f.file_type.split("-")[0]
# Filter for just QSYS files. This verifies that they are compatible
# with the identified Quartus version
def qsys_file_filter(self, f):
- name = ''
- if self.file_type(f) == 'QSYS':
+ name = ""
+ if self.file_type(f) == "QSYS":
# Compatibility checks
try:
qsysTree = ET.parse(os.path.join(self.work_root, f.name))
try:
- tool = qsysTree.find('component').attrib['tool']
- if tool == 'QsysPro' and self.isPro:
+ tool = qsysTree.find("component").attrib["tool"]
+ if tool == "QsysPro" and self.isPro:
name = f.name
except (AttributeError, KeyError):
# Either a component wasn't found in the QSYS file, or it
@@ -168,9 +196,9 @@ def qsys_file_filter(self, f):
# Give QSYS files special attributes to make the logic in
# the Jinja2 templates much simplier
- setattr(f, "simplename", os.path.basename(f.name).split('.qsys')[0])
- setattr(f, "srcdir", os.path.dirname(f.name) or '.')
- setattr(f, "dstdir", os.path.join('qsys', f.simplename))
+ setattr(f, "simplename", os.path.basename(f.name).split(".qsys")[0])
+ setattr(f, "srcdir", os.path.dirname(f.name) or ".")
+ setattr(f, "dstdir", os.path.join("qsys", f.simplename))
return name
@@ -179,7 +207,7 @@ def src_file_filter(self, f):
def _append_library(f):
s = ""
if f.logical_name:
- s += ' -library ' + f.logical_name
+ s += " -library " + f.logical_name
return s
def _handle_qsys(t, f):
@@ -190,74 +218,72 @@ def _handle_qsys(t, f):
if self.isPro:
return _handle_src(t, f)
else:
- f.name = os.path.join(f.dstdir, f.simplename + '.qip')
- f.file_type = 'QIP'
- return _handle_src('QIP_FILE', f)
+ f.name = os.path.join(f.dstdir, f.simplename + ".qip")
+ f.file_type = "QIP"
+ return _handle_src("QIP_FILE", f)
def _handle_src(t, f):
- s = 'set_global_assignment -name ' + t
+ s = "set_global_assignment -name " + t
s += _append_library(f)
- s += ' ' + f.name
+ s += " " + f.name
return s
def _handle_tcl(f):
return "source " + f.name
file_mapping = {
- 'verilogSource' : partial(_handle_src, 'VERILOG_FILE'),
- 'systemVerilogSource' : partial(_handle_src, 'SYSTEMVERILOG_FILE'),
- 'vhdlSource' : partial(_handle_src, 'VHDL_FILE'),
- 'SDC' : partial(_handle_src, 'SDC_FILE'),
- 'QSYS' : partial(_handle_qsys, 'QSYS_FILE'),
- 'QIP' : partial(_handle_src, 'QIP_FILE'),
- 'IP' : partial(_handle_src, 'IP_FILE'),
- 'tclSource' : partial(_handle_tcl),
+ "verilogSource": partial(_handle_src, "VERILOG_FILE"),
+ "systemVerilogSource": partial(_handle_src, "SYSTEMVERILOG_FILE"),
+ "vhdlSource": partial(_handle_src, "VHDL_FILE"),
+ "SDC": partial(_handle_src, "SDC_FILE"),
+ "QSYS": partial(_handle_qsys, "QSYS_FILE"),
+ "QIP": partial(_handle_src, "QIP_FILE"),
+ "IP": partial(_handle_src, "IP_FILE"),
+ "tclSource": partial(_handle_tcl),
}
_file_type = self.file_type(f)
if _file_type in file_mapping:
return file_mapping[_file_type](f)
- elif _file_type == 'user':
- return ''
+ elif _file_type == "user":
+ return ""
else:
_s = "{} has unknown file type '{}'"
- logger.warning(_s.format(f.name,
- f.file_type))
-
- return ''
+ logger.warning(_s.format(f.name, f.file_type))
+ return ""
def build_main(self):
logger.info("Building")
args = []
- if 'pnr' in self.tool_options:
- if self.tool_options['pnr'] == 'quartus':
+ if "pnr" in self.tool_options:
+ if self.tool_options["pnr"] == "quartus":
pass
- elif self.tool_options['pnr'] == 'dse':
- args.append('dse')
- elif self.tool_options['pnr'] == 'none':
- args.append('syn')
- self._run_tool('make', args, quiet=True)
+ elif self.tool_options["pnr"] == "dse":
+ args.append("dse")
+ elif self.tool_options["pnr"] == "none":
+ args.append("syn")
+ self._run_tool("make", args, quiet=True)
def run_main(self):
"""
Program the FPGA.
"""
- args = ['--mode=jtag']
- if 'cable' in self.tool_options:
- args += ['-c', self.tool_options['cable']]
- args += ['-o']
- args += ['p;' + self.name.replace('.', '_') + '.sof']
-
- if 'pnr' in self.tool_options:
- if self.tool_options['pnr'] == 'quartus':
+ args = ["--mode=jtag"]
+ if "cable" in self.tool_options:
+ args += ["-c", self.tool_options["cable"]]
+ args += ["-o"]
+ args += ["p;" + self.name.replace(".", "_") + ".sof"]
+
+ if "pnr" in self.tool_options:
+ if self.tool_options["pnr"] == "quartus":
pass
- elif self.tool_options['pnr'] == 'dse':
+ elif self.tool_options["pnr"] == "dse":
return
- elif self.tool_options['pnr'] == 'none':
+ elif self.tool_options["pnr"] == "none":
return
- if 'board_device_index' in self.tool_options:
- args[-1] += "@" + self.tool_options['board_device_index']
+ if "board_device_index" in self.tool_options:
+ args[-1] += "@" + self.tool_options["board_device_index"]
- self._run_tool('quartus_pgm', args)
+ self._run_tool("quartus_pgm", args)
diff --git a/edalize/quartus_reporting.py b/edalize/quartus_reporting.py
index edebaffce..b131c18a6 100644
--- a/edalize/quartus_reporting.py
+++ b/edalize/quartus_reporting.py
@@ -103,9 +103,7 @@ def report_summary(
# Get a frequency like 175.0 MHz and just return the numeric part
freq = timing["Clocks"].set_index("Clock Name")["Frequency"]
- summary["constraint"] = (
- freq.str.split(expand=True)[0].astype(float).to_dict()
- )
+ summary["constraint"] = freq.str.split(expand=True)[0].astype(float).to_dict()
# Find the Fmax summary table for the slowest corner, such as "Slow
# 1200mV 85C Model Fmax Summary". The voltage and temperature will
diff --git a/edalize/radiant.py b/edalize/radiant.py
index b8bf1582b..a7bdb5808 100644
--- a/edalize/radiant.py
+++ b/edalize/radiant.py
@@ -9,66 +9,84 @@
logger = logging.getLogger(__name__)
+
class Radiant(Edatool):
- argtypes = ['generic', 'vlogdefine', 'vlogparam']
+ argtypes = ["generic", "vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Backend for Lattice Radiant",
- 'members' : [
- {'name' : 'part',
- 'type' : 'String',
- 'desc' : 'FPGA part number (e.g. LIFCL-40-9BG400C)'},
- ]}
+ return {
+ "description": "Backend for Lattice Radiant",
+ "members": [
+ {
+ "name": "part",
+ "type": "String",
+ "desc": "FPGA part number (e.g. LIFCL-40-9BG400C)",
+ },
+ ],
+ }
def configure_main(self):
(src_files, incdirs) = self._get_fileset_files()
pdc_file = None
- prj_name = self.name.replace('.','_')
+ prj_name = self.name.replace(".", "_")
for f in src_files:
- if f.file_type == 'PDC':
+ if f.file_type == "PDC":
if pdc_file:
- logger.warning("Multiple PDC files detected. Only the first one will be used")
+ logger.warning(
+ "Multiple PDC files detected. Only the first one will be used"
+ )
else:
pdc_file = f.name
- with open(os.path.join(self.work_root, self.name+'.tcl'), 'w') as f:
+ with open(os.path.join(self.work_root, self.name + ".tcl"), "w") as f:
TCL_TEMPLATE = """#Generated by Edalize
prj_create -name {} -impl "impl" -dev {}
prj_set_impl_opt top {}
"""
- f.write(TCL_TEMPLATE.format(prj_name,
- self.tool_options['part'],
- self.toplevel,
- ))
+ f.write(
+ TCL_TEMPLATE.format(
+ prj_name,
+ self.tool_options["part"],
+ self.toplevel,
+ )
+ )
if incdirs:
- _s = 'prj_set_impl_opt {include path} {'
- _s += ' '.join(incdirs)
- f.write(_s + '}\n')
- if self.generic: # ?
- _s = ';'.join(['{}={}'.format(k, v) for k,v in self.generic.items()])
- f.write('prj_set_impl_opt HDL_PARAM {')
+ _s = "prj_set_impl_opt {include path} {"
+ _s += " ".join(incdirs)
+ f.write(_s + "}\n")
+ if self.generic: # ?
+ _s = ";".join(["{}={}".format(k, v) for k, v in self.generic.items()])
+ f.write("prj_set_impl_opt HDL_PARAM {")
f.write(_s)
- f.write('}\n')
+ f.write("}\n")
if self.vlogparam:
- _s = ';'.join(['{}={}'.format(k, self._param_value_str(v, '"')) for k,v in self.vlogparam.items()])
- f.write('prj_set_impl_opt HDL_PARAM {')
+ _s = ";".join(
+ [
+ "{}={}".format(k, self._param_value_str(v, '"'))
+ for k, v in self.vlogparam.items()
+ ]
+ )
+ f.write("prj_set_impl_opt HDL_PARAM {")
f.write(_s)
- f.write('}\n')
+ f.write("}\n")
if self.vlogdefine:
- _s = ";".join(['{}={}'.format(k,v) for k,v in self.vlogdefine.items()])
- f.write('prj_set_impl_opt VERILOG_DIRECTIVES {')
+ _s = ";".join(
+ ["{}={}".format(k, v) for k, v in self.vlogdefine.items()]
+ )
+ f.write("prj_set_impl_opt VERILOG_DIRECTIVES {")
f.write(_s)
- f.write('}\n')
+ f.write("}\n")
for src_file in src_files:
_s = self.src_file_filter(src_file)
if _s:
- f.write(_s+'\n')
- f.write('prj_save\nprj_close\n')
+ f.write(_s + "\n")
+ f.write("prj_save\nprj_close\n")
- with open(os.path.join(self.work_root, self.name+'_run.tcl'), 'w') as f:
- f.write("""#Generated by Edalize
+ with open(os.path.join(self.work_root, self.name + "_run.tcl"), "w") as f:
+ f.write(
+ """#Generated by Edalize
prj_open {}.rdf
prj_run Synthesis -impl impl -forceOne
prj_run Map -impl impl
@@ -76,11 +94,14 @@ def configure_main(self):
prj_run Export -impl impl -task Bitgen
prj_save
prj_close
-""".format(prj_name))
- def src_file_filter(self, f):
+""".format(
+ prj_name
+ )
+ )
+ def src_file_filter(self, f):
def _work_source(f):
- s = ' -work '
+ s = " -work "
if f.logical_name:
s += f.logical_name
else:
@@ -88,26 +109,25 @@ def _work_source(f):
return s
file_types = {
- 'verilogSource' : 'prj_add_source ',
- 'vhdlSource' : 'prj_add_source ',
- 'PDC' : 'prj_add_source ',
+ "verilogSource": "prj_add_source ",
+ "vhdlSource": "prj_add_source ",
+ "PDC": "prj_add_source ",
}
- _file_type = f.file_type.split('-')[0]
+ _file_type = f.file_type.split("-")[0]
if _file_type in file_types:
return file_types[_file_type] + f.name + _work_source(f)
- elif _file_type == 'tclSource':
+ elif _file_type == "tclSource":
return "source " + f.name
- elif _file_type in ['user', 'LPF']:
- return ''
+ elif _file_type in ["user", "LPF"]:
+ return ""
else:
_s = "{} has unknown file type '{}'"
- logger.warning(_s.format(f.name,
- f.file_type))
- return ''
+ logger.warning(_s.format(f.name, f.file_type))
+ return ""
def build_main(self):
- self._run_tool('radiantc', [self.name+'.tcl'], quiet=True)
- self._run_tool('radiantc', [self.name+'_run.tcl'], quiet=True)
+ self._run_tool("radiantc", [self.name + ".tcl"], quiet=True)
+ self._run_tool("radiantc", [self.name + "_run.tcl"], quiet=True)
def run_main(self):
pass
diff --git a/edalize/rivierapro.py b/edalize/rivierapro.py
index dae2c0e50..8f01730e9 100644
--- a/edalize/rivierapro.py
+++ b/edalize/rivierapro.py
@@ -13,164 +13,183 @@
run -all
exit
"""
+
+
class Rivierapro(Edatool):
- argtypes = ['plusarg', 'vlogdefine', 'vlogparam']
+ argtypes = ["plusarg", "vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Riviera Pro simulator from Aldec",
- 'members' : [
- {'name' : 'compilation_mode',
- 'type' : 'String',
- 'desc' : 'Common or separate compilation, sep - for separate compilation, common - for common compilation'}],
- 'lists' : [
- {'name' : 'vlog_options',
- 'type' : 'String',
- 'desc' : 'Additional options for compilation with vlog'},
- {'name' : 'vsim_options',
- 'type' : 'String',
- 'desc' : 'Additional run options for vsim'},
- ]}
+ return {
+ "description": "Riviera Pro simulator from Aldec",
+ "members": [
+ {
+ "name": "compilation_mode",
+ "type": "String",
+ "desc": "Common or separate compilation, sep - for separate compilation, common - for common compilation",
+ }
+ ],
+ "lists": [
+ {
+ "name": "vlog_options",
+ "type": "String",
+ "desc": "Additional options for compilation with vlog",
+ },
+ {
+ "name": "vsim_options",
+ "type": "String",
+ "desc": "Additional run options for vsim",
+ },
+ ],
+ }
def _write_build_rtl_tcl_file(self, tcl_main):
- tcl_build_rtl = open(os.path.join(self.work_root, "edalize_build_rtl.tcl"), 'w')
+ tcl_build_rtl = open(os.path.join(self.work_root, "edalize_build_rtl.tcl"), "w")
(src_files, incdirs) = self._get_fileset_files(force_slash=True)
- vlog_include_dirs = ['+incdir+'+d.replace('\\','/') for d in incdirs]
+ vlog_include_dirs = ["+incdir+" + d.replace("\\", "/") for d in incdirs]
libs = []
common_compilation_sv = []
common_compilation_vhdl = []
for f in src_files:
if not f.logical_name:
- f.logical_name = 'work'
+ f.logical_name = "work"
if not f.logical_name in libs:
tcl_build_rtl.write("vlib {}\n".format(f.logical_name))
libs.append(f.logical_name)
- if f.file_type.startswith("verilogSource") or \
- f.file_type.startswith("systemVerilogSource"):
- cmd = 'vlog'
+ if f.file_type.startswith("verilogSource") or f.file_type.startswith(
+ "systemVerilogSource"
+ ):
+ cmd = "vlog"
args = []
- args += self.tool_options.get('vlog_options', [])
+ args += self.tool_options.get("vlog_options", [])
if f.file_type.startswith("verilogSource"):
if f.file_type.endswith("95"):
- args.append('-v95')
+ args.append("-v95")
elif f.file_type.endswith("2001"):
- args.append('-v2k')
+ args.append("-v2k")
elif f.file_type.endswith("2005"):
- args.append('-v2k5')
+ args.append("-v2k5")
else:
- args += ['-sv']
+ args += ["-sv"]
for k, v in self.vlogdefine.items():
- args += ['+define+{}={}'.format(k,self._param_value_str(v))]
+ args += ["+define+{}={}".format(k, self._param_value_str(v))]
args += vlog_include_dirs
elif f.file_type.startswith("vhdlSource"):
- cmd = 'vcom'
+ cmd = "vcom"
if f.file_type.endswith("-87"):
- args = ['-87']
+ args = ["-87"]
if f.file_type.endswith("-93"):
- args = ['-93']
+ args = ["-93"]
if f.file_type.endswith("-2008"):
- args = ['-2008']
+ args = ["-2008"]
else:
args = []
- elif f.file_type == 'tclSource':
+ elif f.file_type == "tclSource":
cmd = None
tcl_main.write("do {}\n".format(f.name))
- elif f.file_type == 'user':
+ elif f.file_type == "user":
cmd = None
else:
_s = "{} has unknown file type '{}'"
logger.warning(_s.format(f.name, f.file_type))
cmd = None
if cmd:
- args += ['-quiet']
- args += ['-work', f.logical_name]
+ args += ["-quiet"]
+ args += ["-work", f.logical_name]
args += [f.name]
- if cmd == 'vlog':
+ if cmd == "vlog":
if not common_compilation_sv:
- common_compilation_sv += ['vlog']
+ common_compilation_sv += ["vlog"]
for k, v in self.vlogdefine.items():
- common_compilation_sv += ['+define+{}={}'.format(k,self._param_value_str(v))]
- common_compilation_sv += self.tool_options.get('vlog_options', [])
+ common_compilation_sv += [
+ "+define+{}={}".format(k, self._param_value_str(v))
+ ]
+ common_compilation_sv += self.tool_options.get(
+ "vlog_options", []
+ )
common_compilation_sv += vlog_include_dirs
- common_compilation_sv += ['-quiet']
- common_compilation_sv += ['-work', f.logical_name]
- common_compilation_sv += [f.name,'\\\n']
+ common_compilation_sv += ["-quiet"]
+ common_compilation_sv += ["-work", f.logical_name]
+ common_compilation_sv += [f.name, "\\\n"]
else:
- common_compilation_sv += [f.name,'\\\n']
- elif cmd == 'vcom':
+ common_compilation_sv += [f.name, "\\\n"]
+ elif cmd == "vcom":
if not common_compilation_vhdl:
- common_compilation_vhdl += ['vcom']
- common_compilation_vhdl += [f.name,'\\\n']
+ common_compilation_vhdl += ["vcom"]
+ common_compilation_vhdl += [f.name, "\\\n"]
else:
- common_compilation_vhdl += [f.name,'\\\n']
- if (self.tool_options.get('compilation_mode'))=='sep' or (self.tool_options.get('compilation_mode')==None):
- tcl_build_rtl.write("{} {}\n".format(cmd, ' '.join(args)))
+ common_compilation_vhdl += [f.name, "\\\n"]
+ if (self.tool_options.get("compilation_mode")) == "sep" or (
+ self.tool_options.get("compilation_mode") == None
+ ):
+ tcl_build_rtl.write("{} {}\n".format(cmd, " ".join(args)))
- if (self.tool_options.get('compilation_mode')=='common'):
+ if self.tool_options.get("compilation_mode") == "common":
if common_compilation_sv:
- tcl_build_rtl.write("{} \n".format(' '.join(common_compilation_sv)))
+ tcl_build_rtl.write("{} \n".format(" ".join(common_compilation_sv)))
if common_compilation_vhdl:
- tcl_build_rtl.write("{} \n".format(' '.join(common_compilation_vhdl)))
-
- if not (self.tool_options.get('compilation_mode')=='common' or self.tool_options.get('compilation_mode')==None or self.tool_options.get('compilation_mode')=='sep'):
- raise RuntimeError('wrong compilation mode, use --compilation_mode=common for common compilation or --compilation_mode=sep for separate compilation')
-
-
+ tcl_build_rtl.write("{} \n".format(" ".join(common_compilation_vhdl)))
+ if not (
+ self.tool_options.get("compilation_mode") == "common"
+ or self.tool_options.get("compilation_mode") == None
+ or self.tool_options.get("compilation_mode") == "sep"
+ ):
+ raise RuntimeError(
+ "wrong compilation mode, use --compilation_mode=common for common compilation or --compilation_mode=sep for separate compilation"
+ )
def _write_run_tcl_file(self):
- tcl_launch = open(os.path.join(self.work_root, "edalize_launch.tcl"), 'w')
+ tcl_launch = open(os.path.join(self.work_root, "edalize_launch.tcl"), "w")
- #FIXME: Handle failures. Save stdout/stderr
+ # FIXME: Handle failures. Save stdout/stderr
vpi_options = []
for vpi_module in self.vpi_modules:
- vpi_options += ['-pli', vpi_module['name']]
+ vpi_options += ["-pli", vpi_module["name"]]
- args = ['vsim']
- args += self.tool_options.get('vsim_options', [])
+ args = ["vsim"]
+ args += self.tool_options.get("vsim_options", [])
args += vpi_options
args += self.toplevel.split()
# Plusargs
for key, value in self.plusarg.items():
- args += ['+{}={}'.format(key, self._param_value_str(value))]
- #Top-level parameters
+ args += ["+{}={}".format(key, self._param_value_str(value))]
+ # Top-level parameters
for key, value in self.vlogparam.items():
- args += ['-g{}={}'.format(key, self._param_value_str(value))]
- tcl_launch.write(' '.join(args)+'\n')
+ args += ["-g{}={}".format(key, self._param_value_str(value))]
+ tcl_launch.write(" ".join(args) + "\n")
tcl_launch.close()
- tcl_run = open(os.path.join(self.work_root, "edalize_run.tcl"), 'w')
+ tcl_run = open(os.path.join(self.work_root, "edalize_run.tcl"), "w")
tcl_run.write("do edalize_launch.tcl\n")
tcl_run.write("run -all\n")
tcl_run.write("exit\n")
tcl_run.close()
def _write_build_vpi_tcl_file(self):
- tcl_build_vpi = open(os.path.join(self.work_root, "edalize_build_vpi.tcl"), 'w')
+ tcl_build_vpi = open(os.path.join(self.work_root, "edalize_build_vpi.tcl"), "w")
for vpi_module in self.vpi_modules:
- _name = vpi_module['name']
- _incs = ' '.join(['-I'+d for d in vpi_module['include_dirs']])
- _libs = ' '.join(['-l'+l for l in vpi_module['libs']])
+ _name = vpi_module["name"]
+ _incs = " ".join(["-I" + d for d in vpi_module["include_dirs"]])
+ _libs = " ".join(["-l" + l for l in vpi_module["libs"]])
_options = "-std=c99"
- _srcs = ' '.join(vpi_module['src_files'])
- _s = 'ccomp -pli -o {}.so {} {} {} {}\n'.format(vpi_module['name'],
- _incs,
- _libs,
- _options,
- _srcs)
+ _srcs = " ".join(vpi_module["src_files"])
+ _s = "ccomp -pli -o {}.so {} {} {} {}\n".format(
+ vpi_module["name"], _incs, _libs, _options, _srcs
+ )
tcl_build_vpi.write(_s)
tcl_build_vpi.close()
def configure_main(self):
- tcl_main = open(os.path.join(self.work_root, "edalize_main.tcl"), 'w')
+ tcl_main = open(os.path.join(self.work_root, "edalize_main.tcl"), "w")
tcl_main.write("do edalize_build_rtl.tcl\n")
self._write_build_rtl_tcl_file(tcl_main)
@@ -181,17 +200,21 @@ def configure_main(self):
self._write_run_tcl_file()
def build_pre(self):
- if not os.getenv('ALDEC_PATH'):
- raise RuntimeError("Environment variable ALDEC_PATH was not found. It should be set to Riviera Pro install path. Please source /etc/setenv to set it")
+ if not os.getenv("ALDEC_PATH"):
+ raise RuntimeError(
+ "Environment variable ALDEC_PATH was not found. It should be set to Riviera Pro install path. Please source /etc/setenv to set it"
+ )
super(Rivierapro, self).build_pre()
def build_main(self):
- args = ['-c', '-do', 'do edalize_main.tcl; exit']
- self._run_tool('vsim', args, quiet=True)
+ args = ["-c", "-do", "do edalize_main.tcl; exit"]
+ self._run_tool("vsim", args, quiet=True)
def run_main(self):
- if not os.getenv('ALDEC_PATH'):
- raise RuntimeError("Environment variable ALDEC_PATH was not found. It should be set to Riviera Pro install path. Please source /etc/setenv to set it")
+ if not os.getenv("ALDEC_PATH"):
+ raise RuntimeError(
+ "Environment variable ALDEC_PATH was not found. It should be set to Riviera Pro install path. Please source /etc/setenv to set it"
+ )
- args = ['-c', '-quiet', '-do', 'edalize_run.tcl']
- self._run_tool('vsim', args)
+ args = ["-c", "-quiet", "-do", "edalize_run.tcl"]
+ self._run_tool("vsim", args)
diff --git a/edalize/spyglass.py b/edalize/spyglass.py
index 14b2dba11..2166ca7f2 100644
--- a/edalize/spyglass.py
+++ b/edalize/spyglass.py
@@ -10,6 +10,7 @@
logger = logging.getLogger(__name__)
+
class Spyglass(Edatool):
_description = """ Synopsys (formerly Atrenta) Spyglass Backend
@@ -35,33 +36,32 @@ class Spyglass(Edatool):
"""
tool_options = {
- 'members' : {
- 'methodology' : 'String'
- },
- 'lists': {
- 'goals': 'String',
- 'spyglass_options': 'String',
- 'rule_parameters': 'String',
+ "members": {"methodology": "String"},
+ "lists": {
+ "goals": "String",
+ "spyglass_options": "String",
+ "rule_parameters": "String",
},
}
- argtypes = ['vlogdefine', 'vlogparam']
+ argtypes = ["vlogdefine", "vlogparam"]
tool_options_defaults = {
- 'methodology': 'GuideWare/latest/block/rtl_handoff',
- 'goals': [ 'lint/lint_rtl' ],
- 'spyglass_options': [],
- 'rule_parameters': [],
+ "methodology": "GuideWare/latest/block/rtl_handoff",
+ "goals": ["lint/lint_rtl"],
+ "spyglass_options": [],
+ "rule_parameters": [],
}
def _set_tool_options_defaults(self):
for key, default_value in self.tool_options_defaults.items():
if not key in self.tool_options:
- logger.info("Set Spyglass tool option %s to default value %s"
- % (key, str(default_value)))
+ logger.info(
+ "Set Spyglass tool option %s to default value %s"
+ % (key, str(default_value))
+ )
self.tool_options[key] = default_value
-
def configure_main(self):
"""
Configuration is the first phase of the build.
@@ -74,74 +74,74 @@ def configure_main(self):
(src_files, incdirs) = self._get_fileset_files(force_slash=True)
- self.jinja_env.filters['src_file_filter'] = self.src_file_filter
+ self.jinja_env.filters["src_file_filter"] = self.src_file_filter
has_systemVerilog = False
for src_file in src_files:
- if src_file.file_type.startswith('systemVerilogSource'):
+ if src_file.file_type.startswith("systemVerilogSource"):
has_systemVerilog = True
break
# Spyglass expects all parameters in the form module.parameter
# Always prepend the toplevel module name to be consistent with all other
# backends, which do not require this syntax.
- vlogparam_spyglass = OrderedDict((self.toplevel + "." + p, v) for (p, v) in self.vlogparam.items())
+ vlogparam_spyglass = OrderedDict(
+ (self.toplevel + "." + p, v) for (p, v) in self.vlogparam.items()
+ )
template_vars = {
- 'name' : self.name,
- 'src_files' : src_files,
- 'incdirs' : incdirs,
- 'tool_options' : self.tool_options,
- 'toplevel' : self.toplevel,
- 'vlogparam' : vlogparam_spyglass,
- 'vlogdefine' : self.vlogdefine,
- 'has_systemVerilog' : has_systemVerilog,
- 'sanitized_goals' : [],
+ "name": self.name,
+ "src_files": src_files,
+ "incdirs": incdirs,
+ "tool_options": self.tool_options,
+ "toplevel": self.toplevel,
+ "vlogparam": vlogparam_spyglass,
+ "vlogdefine": self.vlogdefine,
+ "has_systemVerilog": has_systemVerilog,
+ "sanitized_goals": [],
}
- self.render_template('spyglass-project.prj.j2',
- self.name + '.prj',
- template_vars)
+ self.render_template(
+ "spyglass-project.prj.j2", self.name + ".prj", template_vars
+ )
# Create a single TCL file for each goal
- goals = ['Design_Read'] + self.tool_options['goals']
+ goals = ["Design_Read"] + self.tool_options["goals"]
for goal in goals:
- template_vars['goal'] = goal
- sanitized_goal = re.sub(r"[^a-zA-Z0-9]", '_', goal).lower()
- template_vars['sanitized_goals'].append(sanitized_goal)
-
- self.render_template('spyglass-run-goal.tcl.j2',
- 'spyglass-run-%s.tcl' % sanitized_goal,
- template_vars)
+ template_vars["goal"] = goal
+ sanitized_goal = re.sub(r"[^a-zA-Z0-9]", "_", goal).lower()
+ template_vars["sanitized_goals"].append(sanitized_goal)
+ self.render_template(
+ "spyglass-run-goal.tcl.j2",
+ "spyglass-run-%s.tcl" % sanitized_goal,
+ template_vars,
+ )
- self.render_template('Makefile.j2',
- 'Makefile',
- template_vars)
+ self.render_template("Makefile.j2", "Makefile", template_vars)
def src_file_filter(self, f):
def _vhdl_source(f):
- s = 'read_file -type vhdl'
+ s = "read_file -type vhdl"
if f.logical_name:
- s += ' -library '+f.logical_name
+ s += " -library " + f.logical_name
return s
file_types = {
- 'verilogSource' : 'read_file -type verilog',
- 'systemVerilogSource' : 'read_file -type verilog',
- 'vhdlSource' : _vhdl_source(f),
- 'tclSource' : 'source',
- 'waiver' : 'read_file -type waiver',
- 'awl' : 'read_file -type awl',
+ "verilogSource": "read_file -type verilog",
+ "systemVerilogSource": "read_file -type verilog",
+ "vhdlSource": _vhdl_source(f),
+ "tclSource": "source",
+ "waiver": "read_file -type waiver",
+ "awl": "read_file -type awl",
}
- _file_type = f.file_type.split('-')[0]
+ _file_type = f.file_type.split("-")[0]
if _file_type in file_types:
- return file_types[_file_type] + ' ' + f.name
- elif _file_type == 'user':
- return ''
+ return file_types[_file_type] + " " + f.name
+ elif _file_type == "user":
+ return ""
else:
_s = "{} has unknown file type '{}'"
- logger.warning(_s.format(f.name,
- f.file_type))
- return ''
+ logger.warning(_s.format(f.name, f.file_type))
+ return ""
diff --git a/edalize/symbiflow.py b/edalize/symbiflow.py
index d046fc21d..35a423519 100644
--- a/edalize/symbiflow.py
+++ b/edalize/symbiflow.py
@@ -24,6 +24,7 @@ class Symbiflow(Edatool):
* Standard design sources (Verilog only)
* Constraints: unmanaged constraints with file_type SDC, pin_constraints with file_type PCF and placement constraints with file_type xdc
"""
+
argtypes = ["vlogdefine", "vlogparam", "generic"]
archs = ["xilinx", "fpga_interchange"]
fpga_interchange_families = ["xc7"]
@@ -34,9 +35,9 @@ def get_doc(cls, api_ver):
symbiflow_help = {
"members": [
{
- "name" : "arch",
- "type" : "String",
- "desc" : "Target architecture. Legal values are *xilinx* and *fpga_interchange* (this is relevant only for Nextpnr variant)."
+ "name": "arch",
+ "type": "String",
+ "desc": "Target architecture. Legal values are *xilinx* and *fpga_interchange* (this is relevant only for Nextpnr variant).",
},
{
"name": "package",
@@ -89,27 +90,31 @@ def configure_nextpnr(self):
yosys_synth_options = self.tool_options.get("yosys_synth_options", "")
yosys_template = self.tool_options.get("yosys_template")
yosys_edam = {
- "files" : self.files,
- "name" : self.name,
- "toplevel" : self.toplevel,
- "parameters" : self.parameters,
- "tool_options" : {
- "yosys" : {
- "arch" : vendor,
- "yosys_synth_options" : yosys_synth_options,
- "yosys_template" : yosys_template,
- "yosys_as_subtool" : True,
- }
- }
+ "files": self.files,
+ "name": self.name,
+ "toplevel": self.toplevel,
+ "parameters": self.parameters,
+ "tool_options": {
+ "yosys": {
+ "arch": vendor,
+ "yosys_synth_options": yosys_synth_options,
+ "yosys_template": yosys_template,
+ "yosys_as_subtool": True,
}
+ },
+ }
- yosys = getattr(import_module("edalize.yosys"), "Yosys")(yosys_edam, self.work_root)
+ yosys = getattr(import_module("edalize.yosys"), "Yosys")(
+ yosys_edam, self.work_root
+ )
yosys.configure()
# Nextpnr configuration
arch = self.tool_options.get("arch")
if arch not in self.archs:
- logger.error('Missing or invalid "arch" parameter: {} in "tool_options"'.format(arch))
+ logger.error(
+ 'Missing or invalid "arch" parameter: {} in "tool_options"'.format(arch)
+ )
package = self.tool_options.get("package")
if not package:
@@ -126,7 +131,11 @@ def configure_nextpnr(self):
break
if target_family is None and arch == "fpga_interchange":
- logger.error("Couldn't find family for part: {}. Available families: {}".format(part, ", ".join(getattr(self, "fpga_interchange_families"))))
+ logger.error(
+ "Couldn't find family for part: {}. Available families: {}".format(
+ part, ", ".join(getattr(self, "fpga_interchange_families"))
+ )
+ )
chipdb = None
device = None
@@ -163,10 +172,10 @@ def configure_nextpnr(self):
if "xc7k" in part:
bitstream_device = "kintex7"
- depends = self.name+'.json'
+ depends = self.name + ".json"
xdcs = []
for x in placement_constraints:
- xdcs += ['--xdc', x]
+ xdcs += ["--xdc", x]
commands = self.EdaCommands()
commands.commands = yosys.commands
@@ -176,50 +185,57 @@ def configure_nextpnr(self):
endif
"""
- targets = self.name+'.netlist'
- command = ['python', '-m', 'fpga_interchange.yosys_json']
- command += ['--schema_dir', '$(INTERCHANGE_SCHEMA_PATH)']
- command += ['--device', device]
- command += ['--top', self.toplevel]
+ targets = self.name + ".netlist"
+ command = ["python", "-m", "fpga_interchange.yosys_json"]
+ command += ["--schema_dir", "$(INTERCHANGE_SCHEMA_PATH)"]
+ command += ["--device", device]
+ command += ["--top", self.toplevel]
command += [depends, targets]
commands.add(command, [targets], [depends])
- depends = self.name+'.netlist'
- targets = self.name+'.phys'
- command = ['nextpnr-'+arch, '--chipdb', chipdb]
- command += ['--package', package]
+ depends = self.name + ".netlist"
+ targets = self.name + ".phys"
+ command = ["nextpnr-" + arch, "--chipdb", chipdb]
+ command += ["--package", package]
command += xdcs
- command += ['--netlist', depends]
- command += ['--write', self.name+'.routed.json']
- command += ['--phys', targets]
+ command += ["--netlist", depends]
+ command += ["--write", self.name + ".routed.json"]
+ command += ["--phys", targets]
command += [nextpnr_options]
commands.add(command, [targets], [depends])
- depends = self.name+'.phys'
- targets = self.name+'.fasm'
- command = ['python', '-m', 'fpga_interchange.fasm_generator']
- command += ['--schema_dir', '$(INTERCHANGE_SCHEMA_PATH)']
- command += ['--family', family, device, self.name+'.netlist', depends, targets]
+ depends = self.name + ".phys"
+ targets = self.name + ".fasm"
+ command = ["python", "-m", "fpga_interchange.fasm_generator"]
+ command += ["--schema_dir", "$(INTERCHANGE_SCHEMA_PATH)"]
+ command += [
+ "--family",
+ family,
+ device,
+ self.name + ".netlist",
+ depends,
+ targets,
+ ]
commands.add(command, [targets], [depends])
else:
- targets = self.name+'.fasm'
- command = ['nextpnr-'+arch, '--chipdb', chipdb]
+ targets = self.name + ".fasm"
+ command = ["nextpnr-" + arch, "--chipdb", chipdb]
command += xdcs
- command += ['--json', depends]
- command += ['--write', self.name+'.routed.json']
- command += ['--fasm', targets]
- command += ['--log', 'nextpnr.log']
+ command += ["--json", depends]
+ command += ["--write", self.name + ".routed.json"]
+ command += ["--fasm", targets]
+ command += ["--log", "nextpnr.log"]
command += [nextpnr_options]
commands.add(command, [targets], [depends])
- depends = self.name+'.fasm'
- targets = self.name+'.bit'
- command = ['symbiflow_write_bitstream', '-d', bitstream_device]
- command += ['-f', depends, '-p', partname, '-b', targets]
+ depends = self.name + ".fasm"
+ targets = self.name + ".bit"
+ command = ["symbiflow_write_bitstream", "-d", bitstream_device]
+ command += ["-f", depends, "-p", partname, "-b", targets]
commands.add(command, [targets], [depends])
commands.set_default_target(targets)
- commands.write(os.path.join(self.work_root, 'Makefile'))
+ commands.write(os.path.join(self.work_root, "Makefile"))
def configure_vpr(self):
(src_files, incdirs) = self._get_fileset_files(force_slash=True)
@@ -274,59 +290,59 @@ def configure_vpr(self):
bitstream_device = part + "_" + device_suffix
_vo = self.tool_options.get("vpr_options")
- vpr_options = ['--additional_vpr_options', f'"{_vo}"'] if _vo else []
- pcf_opts = ['-p']+pins_constraints if pins_constraints else []
- sdc_opts = ['-s']+timing_constraints if timing_constraints else []
- xdc_opts = ['-x']+placement_constraints if placement_constraints else []
+ vpr_options = ["--additional_vpr_options", f'"{_vo}"'] if _vo else []
+ pcf_opts = ["-p"] + pins_constraints if pins_constraints else []
+ sdc_opts = ["-s"] + timing_constraints if timing_constraints else []
+ xdc_opts = ["-x"] + placement_constraints if placement_constraints else []
commands = self.EdaCommands()
- #Synthesis
- targets = self.toplevel+'.eblif'
- command = ['symbiflow_synth', '-t', self.toplevel]
- command += ['-v'] + file_list
- command += ['-d', bitstream_device]
- command += ['-p' if vendor == 'xilinx' else '-P', partname]
+ # Synthesis
+ targets = self.toplevel + ".eblif"
+ command = ["symbiflow_synth", "-t", self.toplevel]
+ command += ["-v"] + file_list
+ command += ["-d", bitstream_device]
+ command += ["-p" if vendor == "xilinx" else "-P", partname]
command += xdc_opts
commands.add(command, [targets], [])
- #P&R
- eblif_opt = ['-e', self.toplevel+'.eblif']
- device_opt = ['-d', part+'_'+device_suffix]
+ # P&R
+ eblif_opt = ["-e", self.toplevel + ".eblif"]
+ device_opt = ["-d", part + "_" + device_suffix]
- depends = self.toplevel+'.eblif'
- targets = self.toplevel+'.net'
- command = ['symbiflow_pack'] + eblif_opt + device_opt + sdc_opts + vpr_options
+ depends = self.toplevel + ".eblif"
+ targets = self.toplevel + ".net"
+ command = ["symbiflow_pack"] + eblif_opt + device_opt + sdc_opts + vpr_options
commands.add(command, [targets], [depends])
- depends = self.toplevel+'.net'
- targets = self.toplevel+'.place'
- command = ['symbiflow_place'] + eblif_opt + device_opt
- command += ['-n', depends, '-P', partname]
+ depends = self.toplevel + ".net"
+ targets = self.toplevel + ".place"
+ command = ["symbiflow_place"] + eblif_opt + device_opt
+ command += ["-n", depends, "-P", partname]
command += sdc_opts + pcf_opts + vpr_options
commands.add(command, [targets], [depends])
- depends = self.toplevel+'.place'
- targets = self.toplevel+'.route'
- command = ['symbiflow_route'] + eblif_opt + device_opt
+ depends = self.toplevel + ".place"
+ targets = self.toplevel + ".route"
+ command = ["symbiflow_route"] + eblif_opt + device_opt
command += sdc_opts + vpr_options
commands.add(command, [targets], [depends])
- depends = self.toplevel+'.route'
- targets = self.toplevel+'.fasm'
- command = ['symbiflow_write_fasm'] + eblif_opt + device_opt
+ depends = self.toplevel + ".route"
+ targets = self.toplevel + ".fasm"
+ command = ["symbiflow_write_fasm"] + eblif_opt + device_opt
command += sdc_opts + vpr_options
commands.add(command, [targets], [depends])
- depends = self.toplevel+'.fasm'
- targets = self.toplevel+'.bit'
- command = ['symbiflow_write_bitstream'] + ['-d', bitstream_device]
- command += ['-f', depends]
- command += ['-p' if vendor == 'xilinx' else '-P', partname]
- command += ['-b', targets]
+ depends = self.toplevel + ".fasm"
+ targets = self.toplevel + ".bit"
+ command = ["symbiflow_write_bitstream"] + ["-d", bitstream_device]
+ command += ["-f", depends]
+ command += ["-p" if vendor == "xilinx" else "-P", partname]
+ command += ["-b", targets]
commands.add(command, [targets], [depends])
commands.set_default_target(targets)
- commands.write(os.path.join(self.work_root, 'Makefile'))
+ commands.write(os.path.join(self.work_root, "Makefile"))
def configure_main(self):
if self.tool_options.get("pnr") == "nextpnr":
@@ -334,7 +350,9 @@ def configure_main(self):
elif self.tool_options.get("pnr") in ["vtr", "vpr"]:
self.configure_vpr()
else:
- logger.error("Unsupported PnR tool: {}".format(self.tool_options.get("pnr")))
+ logger.error(
+ "Unsupported PnR tool: {}".format(self.tool_options.get("pnr"))
+ )
def run_main(self):
logger.info("Programming")
diff --git a/edalize/symbiyosys.py b/edalize/symbiyosys.py
index 0b1d3de9c..05259119f 100644
--- a/edalize/symbiyosys.py
+++ b/edalize/symbiyosys.py
@@ -102,13 +102,13 @@ class Symbiyosys(Edatool):
"""
- argtypes = ['vlogdefine', 'vlogparam']
+ argtypes = ["vlogdefine", "vlogparam"]
tool_options = {
- 'lists': {
+ "lists": {
# A list of tasks to run from the .sby file. Passed on the sby
# command line.
- 'tasknames': 'String'
+ "tasknames": "String"
}
}
@@ -128,21 +128,24 @@ def __init__(self, edam=None, work_root=None, eda_api=None, verbose=True):
# The name of the interpolated .sby file that we create in the work
# root
- self.sby_name = 'test.sby'
+ self.sby_name = "test.sby"
@staticmethod
def get_doc(api_ver):
if api_ver == 0:
- return {'description':
- 'SymbiYosys formal verification wrapper for Yosys',
- 'lists': [
- {
- 'name': 'tasknames',
- 'type': 'String',
- 'desc': ("A list of the .sby file's tasks to run. "
- "Passed on the sby command line.")
- }
- ]}
+ return {
+ "description": "SymbiYosys formal verification wrapper for Yosys",
+ "lists": [
+ {
+ "name": "tasknames",
+ "type": "String",
+ "desc": (
+ "A list of the .sby file's tasks to run. "
+ "Passed on the sby command line."
+ ),
+ }
+ ],
+ }
def _get_file_names(self):
"""Read the fileset to get our file names"""
@@ -156,7 +159,7 @@ def _get_file_names(self):
# RTL files have types verilogSource or systemVerilogSource*. We
# presumably want some of them. The .sby file has type sbyConfig: we
# want exactly one of them.
- ft_re = re.compile(r'(:?systemV|v)erilogSource')
+ ft_re = re.compile(r"(:?systemV|v)erilogSource")
for file_obj in src_files:
if ft_re.match(file_obj.file_type):
self.rtl_paths.append(file_obj.name)
@@ -166,25 +169,28 @@ def _get_file_names(self):
# work).
basename = os.path.basename(file_obj.name)
if basename in bn_to_path:
- raise RuntimeError("More than one RTL file with the same"
- "basename: {!r} and {!r}."
- .format(bn_to_path[basename],
- file_obj.name))
+ raise RuntimeError(
+ "More than one RTL file with the same"
+ "basename: {!r} and {!r}.".format(
+ bn_to_path[basename], file_obj.name
+ )
+ )
bn_to_path[basename] = file_obj.name
continue
- if file_obj.file_type == 'sbyConfigTemplate':
+ if file_obj.file_type == "sbyConfigTemplate":
sby_names.append(file_obj.name)
continue
# Ignore anything else
if len(sby_names) != 1:
- raise RuntimeError("SymbiYosys expects exactly one file with type "
- "sbyConfigTemplate (the one called "
- "something.sby.j2). We have {}."
- .format(sby_names or "none"))
+ raise RuntimeError(
+ "SymbiYosys expects exactly one file with type "
+ "sbyConfigTemplate (the one called "
+ "something.sby.j2). We have {}.".format(sby_names or "none")
+ )
return sby_names[0]
@@ -194,24 +200,30 @@ def _get_read_flags(self):
These are exposed as the {{flags}} variable in Jinja templates.
"""
- return ' '.join(['-D{}={}'.format(key, self._param_value_str(value))
- for key, value in self.vlogdefine.items()] +
- ['-I{}'.format(inc) for inc in self.incdirs])
+ return " ".join(
+ [
+ "-D{}={}".format(key, self._param_value_str(value))
+ for key, value in self.vlogdefine.items()
+ ]
+ + ["-I{}".format(inc) for inc in self.incdirs]
+ )
def _get_chparam(self):
"""
Return a string for the {{chparam}} variable.
"""
if not self.vlogparam:
- return ''
+ return ""
- chparam_lst = ['chparam']
+ chparam_lst = ["chparam"]
for key, value in self.vlogparam.items():
- chparam_lst += ['-set', key,
- self._param_value_str(param_value=value,
- str_quote_style='"')]
+ chparam_lst += [
+ "-set",
+ key,
+ self._param_value_str(param_value=value, str_quote_style='"'),
+ ]
chparam_lst.append(self.toplevel)
- return ' '.join(chparam_lst)
+ return " ".join(chparam_lst)
def _gen_reads(self, value):
"""
@@ -223,7 +235,7 @@ def _gen_reads(self, value):
See the class documentation for more details.
"""
- base_cmd = 'read {} {} '.format(value, self._get_read_flags())
+ base_cmd = "read {} {} ".format(value, self._get_read_flags())
lines = []
for path in self.rtl_paths:
@@ -233,7 +245,7 @@ def _gen_reads(self, value):
if chparam:
lines.append(chparam)
- return '\n'.join(lines)
+ return "\n".join(lines)
def _interpolate_sby(self, src):
"""
@@ -258,21 +270,22 @@ def _interpolate_sby(self, src):
try:
template = self.jinja_env.from_string(sf.read())
except jinja2.TemplateError as err:
- raise RuntimeError('Failed to load {!r} '
- 'as a Jinja2 template: {}.'
- .format(src_path, err))
+ raise RuntimeError(
+ "Failed to load {!r} "
+ "as a Jinja2 template: {}.".format(src_path, err)
+ )
- files = '\n'.join(self.rtl_paths)
+ files = "\n".join(self.rtl_paths)
template_ctxt = {
- 'chparam': self._get_chparam(),
- 'files': files,
- 'flags': self._get_read_flags(),
- 'src_files': [os.path.basename(p) for p in self.rtl_paths],
- 'top_level': self.toplevel
+ "chparam": self._get_chparam(),
+ "files": files,
+ "flags": self._get_read_flags(),
+ "src_files": [os.path.basename(p) for p in self.rtl_paths],
+ "top_level": self.toplevel,
}
- with open(dst_path, 'w') as df:
+ with open(dst_path, "w") as df:
df.write(template.render(template_ctxt))
def _dump_file_lists(self):
@@ -284,10 +297,10 @@ def _dump_file_lists(self):
RTL files goes to files.txt and the list of include directories goes to
incdirs.txt.
"""
- with open(os.path.join(self.work_root, 'files.txt'), 'w') as handle:
- handle.write('\n'.join(self.rtl_paths) + '\n')
- with open(os.path.join(self.work_root, 'incdirs.txt'), 'w') as handle:
- handle.write('\n'.join(self.incdirs) + '\n')
+ with open(os.path.join(self.work_root, "files.txt"), "w") as handle:
+ handle.write("\n".join(self.rtl_paths) + "\n")
+ with open(os.path.join(self.work_root, "incdirs.txt"), "w") as handle:
+ handle.write("\n".join(self.incdirs) + "\n")
def configure_main(self):
clean_sby_name = self._get_file_names()
@@ -298,10 +311,11 @@ def build_main(self):
pass
def run_main(self):
- tasknames = self.tool_options.get('tasknames', [])
+ tasknames = self.tool_options.get("tasknames", [])
if not isinstance(tasknames, list):
- raise RuntimeError('"tasknames" tool option should be '
- 'a list of strings. Got {!r}.'
- .format(tasknames))
+ raise RuntimeError(
+ '"tasknames" tool option should be '
+ "a list of strings. Got {!r}.".format(tasknames)
+ )
- self._run_tool('sby', ['-d', 'build', self.sby_name] + tasknames)
+ self._run_tool("sby", ["-d", "build", self.sby_name] + tasknames)
diff --git a/edalize/templates/vunit/run.py.j2 b/edalize/templates/vunit/run.py.j2
index bd43e4f7b..2ccaf4bfd 100644
--- a/edalize/templates/vunit/run.py.j2
+++ b/edalize/templates/vunit/run.py.j2
@@ -1,17 +1,21 @@
# Auto generated by Edalize
+
def load_module_from_file(name, python_file):
import importlib.util
+
spec = importlib.util.spec_from_file_location(name, python_file)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
-def load_runner_hooks(python_file = r'{{ vunit_runner_path }}'):
+
+def load_runner_hooks(python_file=r"{{ vunit_runner_path }}"):
if len(python_file) > 0:
- return load_module_from_file('vunit_runner_hooks', python_file)
+ return load_module_from_file("vunit_runner_hooks", python_file)
else:
- return __import__('edalize.vunit_hooks', fromlist=['vunit_hooks'])
+ return __import__("edalize.vunit_hooks", fromlist=["vunit_hooks"])
+
runner = load_runner_hooks().VUnitRunner()
diff --git a/edalize/trellis.py b/edalize/trellis.py
index 1102f693b..541134379 100644
--- a/edalize/trellis.py
+++ b/edalize/trellis.py
@@ -8,43 +8,44 @@
from edalize.nextpnr import Nextpnr
from edalize.yosys import Yosys
+
class Trellis(Edatool):
- argtypes = ['vlogdefine', 'vlogparam']
+ argtypes = ["vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- options = {
- 'lists' : [],
- 'members' : []}
+ options = {"lists": [], "members": []}
Edatool._extend_options(options, Yosys)
Edatool._extend_options(options, Nextpnr)
- return {'description' : "Project Trellis enables a fully open-source flow for ECP5 FPGAs using Yosys for Verilog synthesis and nextpnr for place and route",
- 'members' : options['members'],
- 'lists' : options['lists']}
+ return {
+ "description": "Project Trellis enables a fully open-source flow for ECP5 FPGAs using Yosys for Verilog synthesis and nextpnr for place and route",
+ "members": options["members"],
+ "lists": options["lists"],
+ }
def configure_main(self):
- #Pass trellis tool options to yosys and nextpnr
- self.edam['tool_options'] = \
- {'yosys' : {
- 'arch' : 'ecp5',
- 'yosys_synth_options' : self.tool_options.get('yosys_synth_options', []),
- 'yosys_as_subtool' : True,
- 'yosys_template' : self.tool_options.get('yosys_template'),
+ # Pass trellis tool options to yosys and nextpnr
+ self.edam["tool_options"] = {
+ "yosys": {
+ "arch": "ecp5",
+ "yosys_synth_options": self.tool_options.get("yosys_synth_options", []),
+ "yosys_as_subtool": True,
+ "yosys_template": self.tool_options.get("yosys_template"),
+ },
+ "nextpnr": {
+ "nextpnr_options": self.tool_options.get("nextpnr_options", [])
},
- 'nextpnr' : {
- 'nextpnr_options' : self.tool_options.get('nextpnr_options', [])
- },
- }
+ }
yosys = Yosys(self.edam, self.work_root)
yosys.configure()
nextpnr = Nextpnr(yosys.edam, self.work_root)
- nextpnr.flow_config = {'arch' : 'ecp5'}
+ nextpnr.flow_config = {"arch": "ecp5"}
nextpnr.configure()
# Write Makefile
@@ -53,11 +54,11 @@ def configure_main(self):
commands.commands += nextpnr.commands
- #Image generation
- depends = self.name+'.config'
- targets = self.name+'.bit'
- command = ['ecppack', '--svf', self.name+'.svf', depends, targets]
+ # Image generation
+ depends = self.name + ".config"
+ targets = self.name + ".bit"
+ command = ["ecppack", "--svf", self.name + ".svf", depends, targets]
commands.add(command, [targets], [depends])
- commands.set_default_target(self.name+'.bit')
- commands.write(os.path.join(self.work_root, 'Makefile'))
+ commands.set_default_target(self.name + ".bit")
+ commands.write(os.path.join(self.work_root, "Makefile"))
diff --git a/edalize/vcs.py b/edalize/vcs.py
index bd357f0ef..e4506a462 100644
--- a/edalize/vcs.py
+++ b/edalize/vcs.py
@@ -31,69 +31,74 @@ class Vcs(Edatool):
"""
tool_options = {
- 'lists' : {
- 'vcs_options' : 'String', # compile-time options (passed to VCS)
- 'run_options' : 'String', # runtime options (passed to simulation)
+ "lists": {
+ "vcs_options": "String", # compile-time options (passed to VCS)
+ "run_options": "String", # runtime options (passed to simulation)
}
}
- argtypes = ['plusarg', 'vlogdefine', 'vlogparam']
+ argtypes = ["plusarg", "vlogdefine", "vlogparam"]
-
- def _filelist_has_filetype(self, file_list, string, match_type='prefix'):
+ def _filelist_has_filetype(self, file_list, string, match_type="prefix"):
for f in file_list:
- if match_type == 'prefix' and f.file_type.startswith(string):
+ if match_type == "prefix" and f.file_type.startswith(string):
return True
- elif match_type == 'exact' and f.file_type == string:
+ elif match_type == "exact" and f.file_type == string:
return True
return False
def configure_main(self):
-
def _vcs_filelist_filter(src_file):
ft = src_file.file_type
# XXX: C source files can be passed to VCS to be compiled into DPI
# libraries; passing C sources together with RTL sources is a
# workaround until we have proper DPI support
# (https://github.com/olofk/fusesoc/issues/311).
- return ft.startswith("verilogSource") or ft.startswith("systemVerilogSource") or ft == 'cSource' or ft == 'cppSource'
-
- self._write_fileset_to_f_file(os.path.join(self.work_root, self.name + '.scr'),
- include_vlogparams=True,
- filter_func=_vcs_filelist_filter)
+ return (
+ ft.startswith("verilogSource")
+ or ft.startswith("systemVerilogSource")
+ or ft == "cSource"
+ or ft == "cppSource"
+ )
+
+ self._write_fileset_to_f_file(
+ os.path.join(self.work_root, self.name + ".scr"),
+ include_vlogparams=True,
+ filter_func=_vcs_filelist_filter,
+ )
plusargs = []
if self.plusarg:
for key, value in self.plusarg.items():
- plusargs += ['+{}={}'.format(key, self._param_value_str(value))]
+ plusargs += ["+{}={}".format(key, self._param_value_str(value))]
- vcs_options = self.tool_options.get('vcs_options', [])
+ vcs_options = self.tool_options.get("vcs_options", [])
(src_files, incdirs) = self._get_fileset_files(force_slash=True)
- if self._filelist_has_filetype(src_files, 'systemVerilog', match_type = 'prefix'):
- vcs_options.append('-sverilog')
+ if self._filelist_has_filetype(src_files, "systemVerilog", match_type="prefix"):
+ vcs_options.append("-sverilog")
- if self._filelist_has_filetype(src_files, 'verilog2001', match_type = 'exact'):
- vcs_options.append('+v2k')
+ if self._filelist_has_filetype(src_files, "verilog2001", match_type="exact"):
+ vcs_options.append("+v2k")
template_vars = {
- 'name' : self.name,
- 'vcs_options' : vcs_options,
- 'run_options' : self.tool_options.get('run_options', []),
- 'toplevel' : self.toplevel,
- 'plusargs' : plusargs
+ "name": self.name,
+ "vcs_options": vcs_options,
+ "run_options": self.tool_options.get("run_options", []),
+ "toplevel": self.toplevel,
+ "plusargs": plusargs,
}
- self.render_template('Makefile.j2', 'Makefile', template_vars)
+ self.render_template("Makefile.j2", "Makefile", template_vars)
def run_main(self):
- args = ['run']
+ args = ["run"]
# Set plusargs
if self.plusarg:
plusargs = []
for key, value in self.plusarg.items():
- plusargs += ['+{}={}'.format(key, self._param_value_str(value))]
- args.append('EXTRA_OPTIONS='+' '.join(plusargs))
+ plusargs += ["+{}={}".format(key, self._param_value_str(value))]
+ args.append("EXTRA_OPTIONS=" + " ".join(plusargs))
- self._run_tool('make', args)
+ self._run_tool("make", args)
diff --git a/edalize/veribleformat.py b/edalize/veribleformat.py
index dd17e86fe..0157db921 100644
--- a/edalize/veribleformat.py
+++ b/edalize/veribleformat.py
@@ -11,28 +11,32 @@
logger = logging.getLogger(__name__)
+
class Veribleformat(Edatool):
- argtypes = ['vlogdefine', 'vlogparam']
+ argtypes = ["vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Verible format backend (verible-verilog-format)",
- 'lists': [
- {'name' : 'verible_format_args',
- 'type' : 'String',
- 'desc' : 'Extra command line arguments passed to the Verible tool'},
- ]}
-
+ return {
+ "description": "Verible format backend (verible-verilog-format)",
+ "lists": [
+ {
+ "name": "verible_format_args",
+ "type": "String",
+ "desc": "Extra command line arguments passed to the Verible tool",
+ },
+ ],
+ }
def build_main(self):
pass
def _get_tool_args(self):
args = []
- if 'verible_format_args' in self.tool_options:
- args += self.tool_options['verible_format_args']
+ if "verible_format_args" in self.tool_options:
+ args += self.tool_options["verible_format_args"]
return args
@@ -42,7 +46,9 @@ def run_main(self):
src_files_filtered = []
for src_file in src_files:
ft = src_file.file_type
- if not ft.startswith("verilogSource") and not ft.startswith("systemVerilogSource"):
+ if not ft.startswith("verilogSource") and not ft.startswith(
+ "systemVerilogSource"
+ ):
continue
src_files_filtered.append(src_file.name)
@@ -52,11 +58,11 @@ def run_main(self):
fail = False
for src_file in src_files_filtered:
- cmd = ['verible-verilog-format'] + self._get_tool_args() + [src_file]
- logger.debug("Running " + ' '.join(cmd))
+ cmd = ["verible-verilog-format"] + self._get_tool_args() + [src_file]
+ logger.debug("Running " + " ".join(cmd))
try:
- res = subprocess.run(cmd, cwd = self.work_root, check=False)
+ res = subprocess.run(cmd, cwd=self.work_root, check=False)
except FileNotFoundError:
_s = "Command '{}' not found. Make sure it is in $PATH"
raise RuntimeError(_s.format(cmd[0]))
diff --git a/edalize/veriblelint.py b/edalize/veriblelint.py
index 8f1ba28ea..58c02e509 100644
--- a/edalize/veriblelint.py
+++ b/edalize/veriblelint.py
@@ -11,41 +11,49 @@
logger = logging.getLogger(__name__)
+
class Veriblelint(Edatool):
- argtypes = ['vlogdefine', 'vlogparam']
+ argtypes = ["vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Verible lint backend (verible-verilog-lint)",
- 'members': [
- {'name': 'ruleset',
- 'type': 'String',
- 'desc': 'Ruleset: [default|all|none]'},
- ],
- 'lists': [
- {'name' : 'verible_lint_args',
- 'type' : 'String',
- 'desc' : 'Extra command line arguments passed to the Verible tool'},
- {'name': 'rules',
- 'type': 'String',
- 'desc': 'What rules to use. Prefix a rule name with "-" to disable it.'},
- ]}
-
+ return {
+ "description": "Verible lint backend (verible-verilog-lint)",
+ "members": [
+ {
+ "name": "ruleset",
+ "type": "String",
+ "desc": "Ruleset: [default|all|none]",
+ },
+ ],
+ "lists": [
+ {
+ "name": "verible_lint_args",
+ "type": "String",
+ "desc": "Extra command line arguments passed to the Verible tool",
+ },
+ {
+ "name": "rules",
+ "type": "String",
+ "desc": 'What rules to use. Prefix a rule name with "-" to disable it.',
+ },
+ ],
+ }
def build_main(self):
pass
def _get_tool_args(self):
- args = [ '--lint_fatal', '--parse_fatal' ]
+ args = ["--lint_fatal", "--parse_fatal"]
- if 'rules' in self.tool_options:
- args.append('--rules=' + ','.join(self.tool_options['rules']))
- if 'ruleset' in self.tool_options:
- args.append('--ruleset=' + self.tool_options['ruleset'])
- if 'verible_lint_args' in self.tool_options:
- args += self.tool_options['verible_lint_args']
+ if "rules" in self.tool_options:
+ args.append("--rules=" + ",".join(self.tool_options["rules"]))
+ if "ruleset" in self.tool_options:
+ args.append("--ruleset=" + self.tool_options["ruleset"])
+ if "verible_lint_args" in self.tool_options:
+ args += self.tool_options["verible_lint_args"]
return args
@@ -59,11 +67,11 @@ def run_main(self):
ft = src_file.file_type
if ft.startswith("verilogSource") or ft.startswith("systemVerilogSource"):
- src_files_filtered.append(src_file.name)
+ src_files_filtered.append(src_file.name)
elif ft == "veribleLintRules":
- config_files_filtered.append(src_file.name)
+ config_files_filtered.append(src_file.name)
elif ft == "veribleLintWaiver":
- waiver_files_filtered.append(src_file.name)
+ waiver_files_filtered.append(src_file.name)
if len(src_files_filtered) == 0:
logger.warning("No SystemVerilog source files to be processed.")
@@ -72,18 +80,20 @@ def run_main(self):
lint_fail = False
args = self._get_tool_args()
if len(config_files_filtered) > 1:
- raise RuntimeError("Verible lint only supports a single rules file (type veribleLintRules)")
+ raise RuntimeError(
+ "Verible lint only supports a single rules file (type veribleLintRules)"
+ )
elif len(config_files_filtered) == 1:
- args.append('--rules_config=' + config_files_filtered[0])
+ args.append("--rules_config=" + config_files_filtered[0])
if waiver_files_filtered:
- args.append('--waiver_files=' + ','.join(waiver_files_filtered))
+ args.append("--waiver_files=" + ",".join(waiver_files_filtered))
for src_file in src_files_filtered:
- cmd = ['verible-verilog-lint'] + args + [src_file]
- logger.debug("Running " + ' '.join(cmd))
+ cmd = ["verible-verilog-lint"] + args + [src_file]
+ logger.debug("Running " + " ".join(cmd))
try:
- res = subprocess.run(cmd, cwd = self.work_root, check=False)
+ res = subprocess.run(cmd, cwd=self.work_root, check=False)
except FileNotFoundError:
_s = "Command '{}' not found. Make sure it is in $PATH"
raise RuntimeError(_s.format(cmd[0]))
diff --git a/edalize/verilator.py b/edalize/verilator.py
index f813e8598..a46238f3d 100644
--- a/edalize/verilator.py
+++ b/edalize/verilator.py
@@ -37,153 +37,198 @@
$(VERILATOR) -f $(VC_FILE) $(VERILATOR_OPTIONS)
"""
+
class Verilator(Edatool):
- argtypes = ['cmdlinearg', 'plusarg', 'vlogdefine', 'vlogparam']
+ argtypes = ["cmdlinearg", "plusarg", "vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Verilator is the fastest free Verilog HDL simulator, and outperforms most commercial simulators",
- 'members' : [
- {'name' : 'mode',
- 'type' : 'String',
- 'desc' : 'Select compilation mode. Legal values are *cc* for C++ testbenches, *sc* for SystemC testbenches or *lint-only* to only perform linting on the Verilog code'},
- {'name' : 'cli_parser',
- 'type' : 'String',
- 'desc' : '**Deprecated: Use run_options instead** : Select whether FuseSoC should handle command-line arguments (*managed*) or if they should be passed directly to the verilated model (*raw*). Default is *managed*'},
- {'name' : 'exe',
- 'type' : 'String',
- 'desc' : "Controls whether to create an executable. Set to 'false' when something else will do the final linking"},
- ],
-
- 'lists' : [
- {'name' : 'libs',
- 'type' : 'String',
- 'desc' : 'Extra libraries for the verilated model to link against'},
- {'name' : 'verilator_options',
- 'type' : 'String',
- 'desc' : 'Additional options for verilator'},
- {'name' : 'make_options',
- 'type' : 'String',
- 'desc' : 'Additional arguments passed to make when compiling the simulation. This is commonly used to set OPT/OPT_FAST/OPT_SLOW.'},
- {'name' : 'run_options',
- 'type' : 'String',
- 'desc' : 'Additional arguments directly passed to the verilated model'},
- ]}
+ return {
+ "description": "Verilator is the fastest free Verilog HDL simulator, and outperforms most commercial simulators",
+ "members": [
+ {
+ "name": "mode",
+ "type": "String",
+ "desc": "Select compilation mode. Legal values are *cc* for C++ testbenches, *sc* for SystemC testbenches or *lint-only* to only perform linting on the Verilog code",
+ },
+ {
+ "name": "cli_parser",
+ "type": "String",
+ "desc": "**Deprecated: Use run_options instead** : Select whether FuseSoC should handle command-line arguments (*managed*) or if they should be passed directly to the verilated model (*raw*). Default is *managed*",
+ },
+ {
+ "name": "exe",
+ "type": "String",
+ "desc": "Controls whether to create an executable. Set to 'false' when something else will do the final linking",
+ },
+ ],
+ "lists": [
+ {
+ "name": "libs",
+ "type": "String",
+ "desc": "Extra libraries for the verilated model to link against",
+ },
+ {
+ "name": "verilator_options",
+ "type": "String",
+ "desc": "Additional options for verilator",
+ },
+ {
+ "name": "make_options",
+ "type": "String",
+ "desc": "Additional arguments passed to make when compiling the simulation. This is commonly used to set OPT/OPT_FAST/OPT_SLOW.",
+ },
+ {
+ "name": "run_options",
+ "type": "String",
+ "desc": "Additional arguments directly passed to the verilated model",
+ },
+ ],
+ }
def check_managed_parser(self):
- managed = 'cli_parser' not in self.tool_options or self.tool_options['cli_parser'] == 'managed'
+ managed = (
+ "cli_parser" not in self.tool_options
+ or self.tool_options["cli_parser"] == "managed"
+ )
if not managed:
- logger.warning("The cli_parser argument is deprecated. Use run_options to pass raw arguments to verilated models")
+ logger.warning(
+ "The cli_parser argument is deprecated. Use run_options to pass raw arguments to verilated models"
+ )
def configure_main(self):
self.check_managed_parser()
if not self.toplevel:
- raise RuntimeError("'" + self.name + "' miss a mandatory parameter 'top_module'")
+ raise RuntimeError(
+ "'" + self.name + "' miss a mandatory parameter 'top_module'"
+ )
self._write_config_files()
def _write_config_files(self):
- #Future improvement: Separate include directories of c and verilog files
+ # Future improvement: Separate include directories of c and verilog files
incdirs = set()
src_files = []
(src_files, incdirs) = self._get_fileset_files(force_slash=True)
- self.verilator_file = self.name + '.vc'
+ self.verilator_file = self.name + ".vc"
- with open(os.path.join(self.work_root,self.verilator_file),'w') as f:
- f.write('--Mdir .\n')
- modes = ['sc', 'cc', 'lint-only']
+ with open(os.path.join(self.work_root, self.verilator_file), "w") as f:
+ f.write("--Mdir .\n")
+ modes = ["sc", "cc", "lint-only"]
- #Default to cc mode if not specified
- if not 'mode' in self.tool_options:
- self.tool_options['mode'] = 'cc'
+ # Default to cc mode if not specified
+ if not "mode" in self.tool_options:
+ self.tool_options["mode"] = "cc"
- if self.tool_options['mode'] in modes:
- f.write('--'+self.tool_options['mode']+'\n')
+ if self.tool_options["mode"] in modes:
+ f.write("--" + self.tool_options["mode"] + "\n")
else:
_s = "Illegal verilator mode {}. Allowed values are {}"
- raise RuntimeError(_s.format(self.tool_options['mode'],
- ', '.join(modes)))
- if 'libs' in self.tool_options:
- for lib in self.tool_options['libs']:
- f.write('-LDFLAGS {}\n'.format(lib))
+ raise RuntimeError(
+ _s.format(self.tool_options["mode"], ", ".join(modes))
+ )
+ if "libs" in self.tool_options:
+ for lib in self.tool_options["libs"]:
+ f.write("-LDFLAGS {}\n".format(lib))
for include_dir in incdirs:
- f.write("+incdir+" + include_dir + '\n')
+ f.write("+incdir+" + include_dir + "\n")
f.write("-CFLAGS -I{}\n".format(include_dir))
vlt_files = []
vlog_files = []
opt_c_files = []
for src_file in src_files:
- if src_file.file_type.startswith("systemVerilogSource") or src_file.file_type.startswith("verilogSource"):
+ if src_file.file_type.startswith(
+ "systemVerilogSource"
+ ) or src_file.file_type.startswith("verilogSource"):
vlog_files.append(src_file.name)
- elif src_file.file_type in ['cppSource', 'systemCSource', 'cSource']:
+ elif src_file.file_type in ["cppSource", "systemCSource", "cSource"]:
opt_c_files.append(src_file.name)
- elif src_file.file_type == 'vlt':
+ elif src_file.file_type == "vlt":
vlt_files.append(src_file.name)
- elif src_file.file_type == 'user':
+ elif src_file.file_type == "user":
pass
if vlt_files:
- f.write('\n'.join(vlt_files) + '\n')
- f.write('\n'.join(vlog_files) + '\n')
- f.write('--top-module {}\n'.format(self.toplevel))
- if str(self.tool_options.get('exe')).lower() != 'false':
- f.write('--exe\n')
- f.write('\n'.join(opt_c_files))
- f.write('\n')
- f.write(''.join(['-G{}={}\n'.format(key, self._param_value_str(value, str_quote_style='\\"')) for key, value in self.vlogparam.items()]))
- f.write(''.join(['-D{}={}\n'.format(key, self._param_value_str(value)) for key, value in self.vlogdefine.items()]))
-
- with open(os.path.join(self.work_root, 'Makefile'), 'w') as makefile:
+ f.write("\n".join(vlt_files) + "\n")
+ f.write("\n".join(vlog_files) + "\n")
+ f.write("--top-module {}\n".format(self.toplevel))
+ if str(self.tool_options.get("exe")).lower() != "false":
+ f.write("--exe\n")
+ f.write("\n".join(opt_c_files))
+ f.write("\n")
+ f.write(
+ "".join(
+ [
+ "-G{}={}\n".format(
+ key, self._param_value_str(value, str_quote_style='\\"')
+ )
+ for key, value in self.vlogparam.items()
+ ]
+ )
+ )
+ f.write(
+ "".join(
+ [
+ "-D{}={}\n".format(key, self._param_value_str(value))
+ for key, value in self.vlogdefine.items()
+ ]
+ )
+ )
+
+ with open(os.path.join(self.work_root, "Makefile"), "w") as makefile:
makefile.write(MAKEFILE_TEMPLATE)
- if 'verilator_options' in self.tool_options:
- verilator_options = ' '.join(self.tool_options['verilator_options'])
+ if "verilator_options" in self.tool_options:
+ verilator_options = " ".join(self.tool_options["verilator_options"])
else:
- verilator_options = ''
+ verilator_options = ""
- if 'make_options' in self.tool_options:
- make_options = ' '.join(self.tool_options['make_options'])
+ if "make_options" in self.tool_options:
+ make_options = " ".join(self.tool_options["make_options"])
else:
- make_options = ''
-
- with open(os.path.join(self.work_root, 'config.mk'), 'w') as config_mk:
- config_mk.write(CONFIG_MK_TEMPLATE.format(
- top_module = self.toplevel,
- vc_file = self.verilator_file,
- verilator_options = verilator_options,
- make_options = make_options))
+ make_options = ""
+
+ with open(os.path.join(self.work_root, "config.mk"), "w") as config_mk:
+ config_mk.write(
+ CONFIG_MK_TEMPLATE.format(
+ top_module=self.toplevel,
+ vc_file=self.verilator_file,
+ verilator_options=verilator_options,
+ make_options=make_options,
+ )
+ )
def build_main(self):
logger.info("Building simulation model")
- if not 'mode' in self.tool_options:
- self.tool_options['mode'] = 'cc'
+ if not "mode" in self.tool_options:
+ self.tool_options["mode"] = "cc"
# Do parallel builds with
make_job_count = multiprocessing.cpu_count()
- args = ['-j', str(make_job_count)]
+ args = ["-j", str(make_job_count)]
- if self.tool_options['mode'] == 'lint-only':
- args.append('V'+self.toplevel+'.mk')
- self._run_tool('make', args, quiet=True)
+ if self.tool_options["mode"] == "lint-only":
+ args.append("V" + self.toplevel + ".mk")
+ self._run_tool("make", args, quiet=True)
def run_main(self):
self.check_managed_parser()
self.args = []
for key, value in self.plusarg.items():
- self.args += ['+{}={}'.format(key, self._param_value_str(value))]
+ self.args += ["+{}={}".format(key, self._param_value_str(value))]
for key, value in self.cmdlinearg.items():
- self.args += ['--{}={}'.format(key, self._param_value_str(value))]
+ self.args += ["--{}={}".format(key, self._param_value_str(value))]
- self.args += self.tool_options.get('run_options', [])
+ self.args += self.tool_options.get("run_options", [])
- #Default to cc mode if not specified
- if not 'mode' in self.tool_options:
- self.tool_options['mode'] = 'cc'
- if self.tool_options['mode'] == 'lint-only':
+ # Default to cc mode if not specified
+ if not "mode" in self.tool_options:
+ self.tool_options["mode"] = "cc"
+ if self.tool_options["mode"] == "lint-only":
return
logger.info("Running simulation")
- self._run_tool('./V' + self.toplevel, self.args)
+ self._run_tool("./V" + self.toplevel, self.args)
diff --git a/edalize/vivado.py b/edalize/vivado.py
index 8f42474d1..28582698d 100644
--- a/edalize/vivado.py
+++ b/edalize/vivado.py
@@ -13,6 +13,7 @@
logger = logging.getLogger(__name__)
+
class Vivado(Edatool):
"""
Vivado Backend.
@@ -23,35 +24,52 @@ class Vivado(Edatool):
* Constraints: Supply xdc files with file_type=xdc or unmanaged constraints with file_type SDC
* IP: Supply the IP core xci file with file_type=xci and other files (like .prj) as file_type=user
"""
- argtypes = ['vlogdefine', 'vlogparam', 'generic']
+
+ argtypes = ["vlogdefine", "vlogparam", "generic"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "The Vivado backend executes Xilinx Vivado to build systems and program the FPGA",
- 'members' : [
- {'name' : 'part',
- 'type' : 'String',
- 'desc' : 'FPGA part number (e.g. xc7a35tcsg324-1)'},
- {'name' : 'synth',
- 'type' : 'String',
- 'desc' : 'Synthesis tool. Allowed values are vivado (default) and yosys.'},
- {'name' : 'pnr',
- 'type' : 'String',
- 'desc' : 'P&R tool. Allowed values are vivado (default) and none (to just run synthesis)'},
- {'name' : 'jobs',
- 'type' : 'Integer',
- 'desc' : 'Number of jobs. Useful for parallelizing OOC (Out Of Context) syntheses.'},
- {'name' : 'jtag_freq',
- 'type' : 'Integer',
- 'desc' : 'The frequency for jtag communication'},
- {'name' : 'source_mgmt_mode',
- 'type' : 'String',
- 'desc' : 'Source managment mode. Allowed values are None (unmanaged, default), DisplayOnly (automatically update sources) and All (automatically update sources and compile order)'},
- {'name' : 'hw_target',
- 'type' : 'Description',
- 'desc' : 'A pattern matching a board identifier. Refer to the Vivado documentation for ``get_hw_targets`` for details. Example: ``*/xilinx_tcf/Digilent/123456789123A``'},
- ]}
+ return {
+ "description": "The Vivado backend executes Xilinx Vivado to build systems and program the FPGA",
+ "members": [
+ {
+ "name": "part",
+ "type": "String",
+ "desc": "FPGA part number (e.g. xc7a35tcsg324-1)",
+ },
+ {
+ "name": "synth",
+ "type": "String",
+ "desc": "Synthesis tool. Allowed values are vivado (default) and yosys.",
+ },
+ {
+ "name": "pnr",
+ "type": "String",
+ "desc": "P&R tool. Allowed values are vivado (default) and none (to just run synthesis)",
+ },
+ {
+ "name": "jobs",
+ "type": "Integer",
+ "desc": "Number of jobs. Useful for parallelizing OOC (Out Of Context) syntheses.",
+ },
+ {
+ "name": "jtag_freq",
+ "type": "Integer",
+ "desc": "The frequency for jtag communication",
+ },
+ {
+ "name": "source_mgmt_mode",
+ "type": "String",
+ "desc": "Source managment mode. Allowed values are None (unmanaged, default), DisplayOnly (automatically update sources) and All (automatically update sources and compile order)",
+ },
+ {
+ "name": "hw_target",
+ "type": "Description",
+ "desc": "A pattern matching a board identifier. Refer to the Vivado documentation for ``get_hw_targets`` for details. Example: ``*/xilinx_tcf/Digilent/123456789123A``",
+ },
+ ],
+ }
def get_version(self):
"""
@@ -62,12 +80,14 @@ def get_version(self):
"""
version = "unknown"
try:
- vivado_text = subprocess.Popen(["vivado", "-version"], stdout=subprocess.PIPE, env=os.environ).communicate()[0]
- version_exp = r'Vivado.*(?P<version>v.*) \(.*'
+ vivado_text = subprocess.Popen(
+ ["vivado", "-version"], stdout=subprocess.PIPE, env=os.environ
+ ).communicate()[0]
+ version_exp = r"Vivado.*(?P<version>v.*) \(.*"
match = re.search(version_exp, str(vivado_text))
if match is not None:
- version = match.group('version')
+ version = match.group("version")
except Exception:
logger.warning("Unable to recognize Vivado version")
@@ -85,16 +105,16 @@ def configure_main(self):
if synth_tool == "yosys":
- self.edam['tool_options']['yosys'] = {
- 'arch' : 'xilinx',
- 'output_format' : 'edif',
- 'yosys_synth_options' : self.tool_options.get('yosys_synth_options', []),
- 'yosys_as_subtool' : True,
+ self.edam["tool_options"]["yosys"] = {
+ "arch": "xilinx",
+ "output_format": "edif",
+ "yosys_synth_options": self.tool_options.get("yosys_synth_options", []),
+ "yosys_as_subtool": True,
}
yosys = Yosys(self.edam, self.work_root)
yosys.configure()
- self.files = yosys.edam['files']
+ self.files = yosys.edam["files"]
src_files = []
incdirs = []
@@ -105,126 +125,130 @@ def configure_main(self):
for f in self.files:
cmd = ""
- if f['file_type'].startswith('verilogSource'):
- cmd = 'read_verilog'
- elif f['file_type'].startswith('systemVerilogSource'):
- cmd = 'read_verilog -sv'
- elif f['file_type'] == 'tclSource':
- cmd = 'source'
- elif f['file_type'] == 'edif':
- cmd = 'read_edif'
- edif_files.append(f['name'])
- elif f['file_type'].startswith('vhdlSource'):
- cmd = 'read_vhdl'
- if f['file_type'] == 'vhdlSource-2008':
+ if f["file_type"].startswith("verilogSource"):
+ cmd = "read_verilog"
+ elif f["file_type"].startswith("systemVerilogSource"):
+ cmd = "read_verilog -sv"
+ elif f["file_type"] == "tclSource":
+ cmd = "source"
+ elif f["file_type"] == "edif":
+ cmd = "read_edif"
+ edif_files.append(f["name"])
+ elif f["file_type"].startswith("vhdlSource"):
+ cmd = "read_vhdl"
+ if f["file_type"] == "vhdlSource-2008":
has_vhdl2008 = True
- cmd += ' -vhdl2008'
- if f.get('logical_name'):
- cmd += ' -library '+f['logical_name']
- elif f['file_type'] == 'xci':
- cmd = 'read_ip'
+ cmd += " -vhdl2008"
+ if f.get("logical_name"):
+ cmd += " -library " + f["logical_name"]
+ elif f["file_type"] == "xci":
+ cmd = "read_ip"
has_xci = True
- elif f['file_type'] == 'xdc':
- cmd = 'read_xdc'
- elif f['file_type'] == 'SDC':
- cmd = 'read_xdc -unmanaged'
- elif f['file_type'] == 'mem':
- cmd = 'read_mem'
+ elif f["file_type"] == "xdc":
+ cmd = "read_xdc"
+ elif f["file_type"] == "SDC":
+ cmd = "read_xdc -unmanaged"
+ elif f["file_type"] == "mem":
+ cmd = "read_mem"
if cmd:
if not self._add_include_dir(f, incdirs):
- src_files.append(cmd + ' {' + f['name'] + '}')
+ src_files.append(cmd + " {" + f["name"] + "}")
else:
unused_files.append(f)
template_vars = {
- 'name' : self.name,
- 'src_files' : '\n'.join(src_files),
- 'incdirs' : incdirs+['.'],
- 'tool_options' : self.tool_options,
- 'toplevel' : self.toplevel,
- 'vlogparam' : self.vlogparam,
- 'vlogdefine' : self.vlogdefine,
- 'generic' : self.generic,
- 'netlist_flow' : bool(edif_files),
- 'has_vhdl2008' : has_vhdl2008,
- 'has_xci' : has_xci,
+ "name": self.name,
+ "src_files": "\n".join(src_files),
+ "incdirs": incdirs + ["."],
+ "tool_options": self.tool_options,
+ "toplevel": self.toplevel,
+ "vlogparam": self.vlogparam,
+ "vlogdefine": self.vlogdefine,
+ "generic": self.generic,
+ "netlist_flow": bool(edif_files),
+ "has_vhdl2008": has_vhdl2008,
+ "has_xci": has_xci,
}
- self.render_template('vivado-project.tcl.j2',
- self.name+'.tcl',
- template_vars)
+ self.render_template("vivado-project.tcl.j2", self.name + ".tcl", template_vars)
- jobs = self.tool_options.get('jobs', None)
+ jobs = self.tool_options.get("jobs", None)
- run_template_vars = {
- 'jobs' : ' -jobs ' + str(jobs) if jobs is not None else ''
- }
+ run_template_vars = {"jobs": " -jobs " + str(jobs) if jobs is not None else ""}
- self.render_template('vivado-run.tcl.j2',
- self.name+"_run.tcl",
- run_template_vars)
+ self.render_template(
+ "vivado-run.tcl.j2", self.name + "_run.tcl", run_template_vars
+ )
synth_template_vars = {
- 'jobs' : ' -jobs ' + str(jobs) if jobs is not None else ''
+ "jobs": " -jobs " + str(jobs) if jobs is not None else ""
}
- self.render_template('vivado-synth.tcl.j2',
- self.name+"_synth.tcl",
- synth_template_vars)
+ self.render_template(
+ "vivado-synth.tcl.j2", self.name + "_synth.tcl", synth_template_vars
+ )
# Write Makefile
commands = self.EdaCommands()
- vivado_command = ['vivado', '-notrace', '-mode', 'batch', '-source']
+ vivado_command = ["vivado", "-notrace", "-mode", "batch", "-source"]
- #Create project file
- project_file = self.name+'.xpr'
- tcl_file = [self.name+'.tcl']
- commands.add(vivado_command+tcl_file, [project_file], tcl_file + edif_files)
+ # Create project file
+ project_file = self.name + ".xpr"
+ tcl_file = [self.name + ".tcl"]
+ commands.add(vivado_command + tcl_file, [project_file], tcl_file + edif_files)
- #Synthesis target
- if synth_tool == 'yosys':
+ # Synthesis target
+ if synth_tool == "yosys":
commands.commands += yosys.commands
- commands.add([], ['synth'], edif_files)
+ commands.add([], ["synth"], edif_files)
else:
- depends = [f'{self.name}_synth.tcl', project_file]
- targets = [f'{self.name}.runs/synth_1/__synthesis_is_complete__']
- commands.add(vivado_command+depends, targets, depends)
- commands.add([], ['synth'], targets)
+ depends = [f"{self.name}_synth.tcl", project_file]
+ targets = [f"{self.name}.runs/synth_1/__synthesis_is_complete__"]
+ commands.add(vivado_command + depends, targets, depends)
+ commands.add([], ["synth"], targets)
- #Bitstream generation
- run_tcl = self.name+'_run.tcl'
+ # Bitstream generation
+ run_tcl = self.name + "_run.tcl"
depends = [run_tcl, project_file]
- bitstream = self.name+'.bit'
- commands.add(vivado_command+depends, [bitstream], depends)
-
- commands.add(['vivado', project_file], ['build-gui'], [project_file])
-
- depends = [self.name+'_pgm.tcl', bitstream]
- command = ['vivado', '-quiet', '-nolog', '-notrace', '-mode', 'batch',
- '-source', f'{self.name}_pgm.tcl', '-tclargs']
-
- part = self.tool_options.get('part', "")
+ bitstream = self.name + ".bit"
+ commands.add(vivado_command + depends, [bitstream], depends)
+
+ commands.add(["vivado", project_file], ["build-gui"], [project_file])
+
+ depends = [self.name + "_pgm.tcl", bitstream]
+ command = [
+ "vivado",
+ "-quiet",
+ "-nolog",
+ "-notrace",
+ "-mode",
+ "batch",
+ "-source",
+ f"{self.name}_pgm.tcl",
+ "-tclargs",
+ ]
+
+ part = self.tool_options.get("part", "")
command += [part] if part else []
command += [bitstream]
- commands.add(command, ['pgm'], depends)
+ commands.add(command, ["pgm"], depends)
commands.set_default_target(bitstream)
- commands.write(os.path.join(self.work_root, 'Makefile'))
+ commands.write(os.path.join(self.work_root, "Makefile"))
- self.render_template('vivado-program.tcl.j2',
- self.name+"_pgm.tcl")
+ self.render_template("vivado-program.tcl.j2", self.name + "_pgm.tcl")
def build_main(self):
logger.info("Building")
args = []
- if 'pnr' in self.tool_options:
- if self.tool_options['pnr'] == 'vivado':
+ if "pnr" in self.tool_options:
+ if self.tool_options["pnr"] == "vivado":
pass
- elif self.tool_options['pnr'] == 'none':
- args.append('synth')
- self._run_tool('make', args, quiet=True)
+ elif self.tool_options["pnr"] == "none":
+ args.append("synth")
+ self._run_tool("make", args, quiet=True)
def run_main(self):
"""
@@ -234,10 +258,10 @@ def run_main(self):
correct FPGA board and then downloads the bitstream. The tcl script is then
executed in Vivado's batch mode.
"""
- if 'pnr' in self.tool_options:
- if self.tool_options['pnr'] == 'vivado':
+ if "pnr" in self.tool_options:
+ if self.tool_options["pnr"] == "vivado":
pass
- elif self.tool_options['pnr'] == 'none':
+ elif self.tool_options["pnr"] == "none":
return
- self._run_tool('make', ['pgm'])
+ self._run_tool("make", ["pgm"])
diff --git a/edalize/vunit.py b/edalize/vunit.py
index e68b69eea..e57a2e645 100644
--- a/edalize/vunit.py
+++ b/edalize/vunit.py
@@ -87,7 +87,9 @@ def configure_main(self):
def build_main(self):
vunit_options = self.tool_options.get("vunit_options", [])
testrunner = os.path.join(self.work_root, self.testrunner)
- self._run_tool(sys.executable, [testrunner, "--compile", "-k"] + vunit_options, quiet=True)
+ self._run_tool(
+ sys.executable, [testrunner, "--compile", "-k"] + vunit_options, quiet=True
+ )
def run_main(self):
vunit_options = self.tool_options.get("vunit_options", [])
diff --git a/edalize/vunit_hooks.py b/edalize/vunit_hooks.py
index 4627383a1..608143b21 100644
--- a/edalize/vunit_hooks.py
+++ b/edalize/vunit_hooks.py
@@ -41,4 +41,5 @@ class VUnitRunner(VUnitHooks):
"""
The default runner which will be used if no :file:`vunit_runner.py` is specified.
"""
+
pass
diff --git a/edalize/xcelium.py b/edalize/xcelium.py
index 358a27b04..9f6c67fe0 100644
--- a/edalize/xcelium.py
+++ b/edalize/xcelium.py
@@ -9,7 +9,7 @@
logger = logging.getLogger(__name__)
-MAKE_HEADER ="""#Generated by Edalize
+MAKE_HEADER = """#Generated by Edalize
ifeq (, $(shell which xmroot))
$(error "No Xcelium installation in $(PATH)")
endif
@@ -67,118 +67,136 @@
$(RM) $({name}_OBJS) {name}
"""
+
class Xcelium(Edatool):
- argtypes = ['plusarg', 'vlogdefine', 'vlogparam', 'generic']
+ argtypes = ["plusarg", "vlogdefine", "vlogparam", "generic"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Xcelium simulator from Cadence Design Systems",
- 'lists' : [
- {'name' : 'xmvhdl_options',
- 'type' : 'String',
- 'desc' : 'Additional options for compilation with xmvhdl'},
- {'name' : 'xmvlog_options',
- 'type' : 'String',
- 'desc' : 'Additional options for compilation with xmvlog'},
- {'name' : 'xmsim_options',
- 'type' : 'String',
- 'desc' : 'Additional run options for xmsim'},
- {'name' : 'xrun_options',
- 'type' : 'String',
- 'desc' : 'Additional run options for xrun'},
- ]}
+ return {
+ "description": "Xcelium simulator from Cadence Design Systems",
+ "lists": [
+ {
+ "name": "xmvhdl_options",
+ "type": "String",
+ "desc": "Additional options for compilation with xmvhdl",
+ },
+ {
+ "name": "xmvlog_options",
+ "type": "String",
+ "desc": "Additional options for compilation with xmvlog",
+ },
+ {
+ "name": "xmsim_options",
+ "type": "String",
+ "desc": "Additional run options for xmsim",
+ },
+ {
+ "name": "xrun_options",
+ "type": "String",
+ "desc": "Additional run options for xrun",
+ },
+ ],
+ }
def _write_build_rtl_f_file(self, tcl_main):
- tcl_build_rtl = open(os.path.join(self.work_root, "edalize_build_rtl.f"), 'w')
+ tcl_build_rtl = open(os.path.join(self.work_root, "edalize_build_rtl.f"), "w")
(src_files, incdirs) = self._get_fileset_files()
- vlog_include_dirs = ['+incdir+'+d.replace('\\','/') for d in incdirs]
+ vlog_include_dirs = ["+incdir+" + d.replace("\\", "/") for d in incdirs]
libs = []
for f in src_files:
if not f.logical_name:
- f.logical_name = 'worklib'
- if f.file_type.startswith("verilogSource") or \
- f.file_type.startswith("systemVerilogSource"):
- cmd = 'xmvlog'
+ f.logical_name = "worklib"
+ if f.file_type.startswith("verilogSource") or f.file_type.startswith(
+ "systemVerilogSource"
+ ):
+ cmd = "xmvlog"
args = []
- args += self.tool_options.get('xmvlog_options', [])
+ args += self.tool_options.get("xmvlog_options", [])
# Sort dictionary items, to ensure stable output, which makes testing easier
for k, v in self.vlogdefine.items():
- args += ['+define+{}={}'.format(k,self._param_value_str(v))]
+ args += ["+define+{}={}".format(k, self._param_value_str(v))]
if f.file_type.startswith("systemVerilogSource"):
- args += ['-sv']
+ args += ["-sv"]
args += vlog_include_dirs
elif f.file_type.startswith("vhdlSource"):
- cmd = 'xmvhdl'
+ cmd = "xmvhdl"
if f.file_type.endswith("-93"):
- args = ['-v93']
+ args = ["-v93"]
if f.file_type.endswith("-2008"):
- args = ['-v200x']
+ args = ["-v200x"]
else:
args = []
- args += self.tool_options.get('xmvhdl_options', [])
+ args += self.tool_options.get("xmvhdl_options", [])
- elif f.file_type == 'tclSource':
+ elif f.file_type == "tclSource":
cmd = None
tcl_main.write("-input {}\n".format(f.name))
- elif f.file_type == 'user':
+ elif f.file_type == "user":
cmd = None
else:
_s = "{} has unknown file type '{}'"
logger.warning(_s.format(f.name, f.file_type))
cmd = None
if cmd:
- args += [f.name.replace('\\','/')]
- line = "-makelib {} {} -endlib".format(f.logical_name, ' '.join(args))
- tcl_build_rtl.write(line + '\n')
+ args += [f.name.replace("\\", "/")]
+ line = "-makelib {} {} -endlib".format(f.logical_name, " ".join(args))
+ tcl_build_rtl.write(line + "\n")
def _write_makefile(self):
- vpi_make = open(os.path.join(self.work_root, "Makefile"), 'w')
+ vpi_make = open(os.path.join(self.work_root, "Makefile"), "w")
_parameters = []
for key, value in self.vlogparam.items():
- _parameters += ['{}={}'.format(key, self._param_value_str(value))]
+ _parameters += ["{}={}".format(key, self._param_value_str(value))]
for key, value in self.generic.items():
- _parameters += ['{}={}'.format(key, self._param_value_str(value, bool_is_str=True))]
+ _parameters += [
+ "{}={}".format(key, self._param_value_str(value, bool_is_str=True))
+ ]
_plusargs = []
for key, value in self.plusarg.items():
- _plusargs += ['{}={}'.format(key, self._param_value_str(value))]
-
- _xmsim_options = self.tool_options.get('xmsim_options', [])
- _xrun_options = self.tool_options.get('xrun_options', [])
-
- _modules = [m['name'] for m in self.vpi_modules]
- _clean_targets = ' '.join(["clean_"+m for m in _modules])
- _s = MAKE_HEADER.format(toplevel = self.toplevel,
- parameters = ' '.join(_parameters),
- plusargs = ' '.join(_plusargs),
- xmsim_options = ' '.join(_xmsim_options),
- xrun_options = ' '.join(_xrun_options),
- modules = ' '.join(_modules),
- clean_targets = _clean_targets)
+ _plusargs += ["{}={}".format(key, self._param_value_str(value))]
+
+ _xmsim_options = self.tool_options.get("xmsim_options", [])
+ _xrun_options = self.tool_options.get("xrun_options", [])
+
+ _modules = [m["name"] for m in self.vpi_modules]
+ _clean_targets = " ".join(["clean_" + m for m in _modules])
+ _s = MAKE_HEADER.format(
+ toplevel=self.toplevel,
+ parameters=" ".join(_parameters),
+ plusargs=" ".join(_plusargs),
+ xmsim_options=" ".join(_xmsim_options),
+ xrun_options=" ".join(_xrun_options),
+ modules=" ".join(_modules),
+ clean_targets=_clean_targets,
+ )
vpi_make.write(_s)
for vpi_module in self.vpi_modules:
- _name = vpi_module['name']
- _objs = [os.path.splitext(s)[0]+'.o' for s in vpi_module['src_files']]
- _libs = ['-l'+l for l in vpi_module['libs']]
- _incs = ['-I'+d for d in vpi_module['include_dirs']]
- _s = VPI_MAKE_SECTION.format(name=_name,
- objs=' '.join(_objs),
- libs=' '.join(_libs),
- incs=' '.join(_incs))
+ _name = vpi_module["name"]
+ _objs = [os.path.splitext(s)[0] + ".o" for s in vpi_module["src_files"]]
+ _libs = ["-l" + l for l in vpi_module["libs"]]
+ _incs = ["-I" + d for d in vpi_module["include_dirs"]]
+ _s = VPI_MAKE_SECTION.format(
+ name=_name,
+ objs=" ".join(_objs),
+ libs=" ".join(_libs),
+ incs=" ".join(_incs),
+ )
vpi_make.write(_s)
vpi_make.close()
def configure_main(self):
- tcl_main = open(os.path.join(self.work_root, "edalize_main.f"), 'w')
+ tcl_main = open(os.path.join(self.work_root, "edalize_main.f"), "w")
tcl_main.write("-f edalize_build_rtl.f\n")
self._write_build_rtl_f_file(tcl_main)
@@ -186,13 +204,13 @@ def configure_main(self):
tcl_main.close()
def run_main(self):
- args = ['run']
+ args = ["run"]
# Set plusargs
if self.plusarg:
plusargs = []
for key, value in self.plusarg.items():
- plusargs += ['{}={}'.format(key, self._param_value_str(value))]
- args.append('PLUSARGS='+' '.join(plusargs))
+ plusargs += ["{}={}".format(key, self._param_value_str(value))]
+ args.append("PLUSARGS=" + " ".join(plusargs))
- self._run_tool('make', args)
+ self._run_tool("make", args)
diff --git a/edalize/xsim.py b/edalize/xsim.py
index 637f52d52..1e7081e69 100644
--- a/edalize/xsim.py
+++ b/edalize/xsim.py
@@ -10,11 +10,12 @@
logger = logging.getLogger(__name__)
+
class Xsim(Edatool):
- argtypes = ['plusarg', 'vlogdefine', 'vlogparam', 'generic']
+ argtypes = ["plusarg", "vlogdefine", "vlogparam", "generic"]
- MAKEFILE_TEMPLATE="""#Auto generated by Edalize
+ MAKEFILE_TEMPLATE = """#Auto generated by Edalize
include config.mk
all: xsim.dir/$(TARGET)/xsimk
@@ -44,46 +45,55 @@ class Xsim(Edatool):
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "XSim simulator from the Xilinx Vivado suite",
- 'members' : [
- {'name' : 'compilation_mode',
- 'type' : 'String',
- 'desc' : 'Common or separate compilation, sep - for separate compilation, common - for common compilation'}],
- 'lists' : [
- {'name' : 'xelab_options',
- 'type' : 'String',
- 'desc' : 'Additional options for compilation with xelab'},
- {'name' : 'xsim_options',
- 'type' : 'String',
- 'desc' : 'Additional run options for XSim'},
- ]}
+ return {
+ "description": "XSim simulator from the Xilinx Vivado suite",
+ "members": [
+ {
+ "name": "compilation_mode",
+ "type": "String",
+ "desc": "Common or separate compilation, sep - for separate compilation, common - for common compilation",
+ }
+ ],
+ "lists": [
+ {
+ "name": "xelab_options",
+ "type": "String",
+ "desc": "Additional options for compilation with xelab",
+ },
+ {
+ "name": "xsim_options",
+ "type": "String",
+ "desc": "Additional run options for XSim",
+ },
+ ],
+ }
def configure_main(self):
self._write_config_files()
- #Check if any VPI modules are present and display warning
+ # Check if any VPI modules are present and display warning
if len(self.vpi_modules) > 0:
- modules = [m['name'] for m in self.vpi_modules]
- logger.error('VPI modules not supported by Xsim: %s' % ', '.join(modules))
+ modules = [m["name"] for m in self.vpi_modules]
+ logger.error("VPI modules not supported by Xsim: %s" % ", ".join(modules))
def _write_config_files(self):
- mfc = self.tool_options.get('compilation_mode') == 'common'
- with open(os.path.join(self.work_root, self.name+'.prj'),'w') as f:
+ mfc = self.tool_options.get("compilation_mode") == "common"
+ with open(os.path.join(self.work_root, self.name + ".prj"), "w") as f:
mfcu = []
(src_files, self.incdirs) = self._get_fileset_files()
for src_file in src_files:
cmd = ""
if src_file.file_type.startswith("verilogSource"):
- cmd = 'verilog'
- elif src_file.file_type == 'vhdlSource-2008':
- cmd = 'vhdl2008'
+ cmd = "verilog"
+ elif src_file.file_type == "vhdlSource-2008":
+ cmd = "vhdl2008"
elif src_file.file_type.startswith("vhdlSource"):
- cmd = 'vhdl'
+ cmd = "vhdl"
elif src_file.file_type.startswith("systemVerilogSource"):
if mfc:
mfcu.append(src_file.name)
else:
- cmd = 'sv'
+ cmd = "sv"
elif src_file.file_type in ["user"]:
pass
else:
@@ -93,14 +103,19 @@ def _write_config_files(self):
if src_file.logical_name:
lib = src_file.logical_name
else:
- lib = 'work'
- f.write('{} {} {}\n'.format(cmd, lib, src_file.name))
+ lib = "work"
+ f.write("{} {} {}\n".format(cmd, lib, src_file.name))
if mfc:
- f.write('sv work ' + ' '.join(mfcu))
+ f.write("sv work " + " ".join(mfcu))
- with open(os.path.join(self.work_root, 'config.mk'), 'w') as f:
- vlog_defines = ' '.join(['--define {}={}'.format(k,self._param_value_str(v)) for k,v, in self.vlogdefine.items()])
- vlog_includes = ' '.join(['-i '+k for k in self.incdirs])
+ with open(os.path.join(self.work_root, "config.mk"), "w") as f:
+ vlog_defines = " ".join(
+ [
+ "--define {}={}".format(k, self._param_value_str(v))
+ for k, v, in self.vlogdefine.items()
+ ]
+ )
+ vlog_includes = " ".join(["-i " + k for k in self.incdirs])
# Both parameters and generics use the same --generic_top argument
# so warn if there are overlapping values
@@ -117,25 +132,32 @@ def _write_config_files(self):
for k, v in gen_param.items()
]
)
- xelab_options = ' '.join(self.tool_options.get('xelab_options', []))
- xsim_options = ' '.join(self.tool_options.get('xsim_options' , []))
-
- f.write(self.CONFIG_MK_TEMPLATE.format(target=self.name,
- toplevel=self.toplevel,
- vlog_defines = vlog_defines,
- vlog_includes = vlog_includes,
- gen_params = gen_param_args,
- xelab_options = xelab_options,
- xsim_options = xsim_options))
-
- with open(os.path.join(self.work_root, 'Makefile'), 'w') as f:
+ xelab_options = " ".join(self.tool_options.get("xelab_options", []))
+ xsim_options = " ".join(self.tool_options.get("xsim_options", []))
+
+ f.write(
+ self.CONFIG_MK_TEMPLATE.format(
+ target=self.name,
+ toplevel=self.toplevel,
+ vlog_defines=vlog_defines,
+ vlog_includes=vlog_includes,
+ gen_params=gen_param_args,
+ xelab_options=xelab_options,
+ xsim_options=xsim_options,
+ )
+ )
+
+ with open(os.path.join(self.work_root, "Makefile"), "w") as f:
f.write(self.MAKEFILE_TEMPLATE)
def run_main(self):
- args = ['run']
+ args = ["run"]
# Plusargs
if self.plusarg:
- _s = '--testplusarg {}={}'
- args.append('EXTRA_OPTIONS='+' '.join([_s.format(k, v) for k,v in self.plusarg.items()]))
+ _s = "--testplusarg {}={}"
+ args.append(
+ "EXTRA_OPTIONS="
+ + " ".join([_s.format(k, v) for k, v in self.plusarg.items()])
+ )
- self._run_tool('make', args)
+ self._run_tool("make", args)
diff --git a/edalize/yosys.py b/edalize/yosys.py
index ec7ca7221..cce960dfb 100644
--- a/edalize/yosys.py
+++ b/edalize/yosys.py
@@ -9,41 +9,56 @@
logger = logging.getLogger(__name__)
+
class Yosys(Edatool):
- argtypes = ['vlogdefine', 'vlogparam']
+ argtypes = ["vlogdefine", "vlogparam"]
@classmethod
def get_doc(cls, api_ver):
if api_ver == 0:
- return {'description' : "Open source synthesis tool targeting many different FPGAs",
- 'members' : [
- {'name' : 'arch',
- 'type' : 'String',
- 'desc' : 'Target architecture. Legal values are *xilinx*, *ice40* and *ecp5*'},
- {'name' : 'output_format',
- 'type' : 'String',
- 'desc' : 'Output file format. Legal values are *json*, *edif*, *blif*'},
- {'name' : 'yosys_as_subtool',
- 'type' : 'bool',
- 'desc' : 'Determines if Yosys is run as a part of bigger toolchain, or as a standalone tool'},
- {'name' : 'makefile_name',
- 'type' : 'String',
- 'desc' : 'Generated makefile name, defaults to $name.mk'},
- {'name' : 'yosys_template',
- 'type' : 'String',
- 'desc' : 'TCL template file to use instead of default template'},
- ],
- 'lists' : [
- {'name' : 'yosys_synth_options',
- 'type' : 'String',
- 'desc' : 'Additional options for the synth command'},
- ]}
+ return {
+ "description": "Open source synthesis tool targeting many different FPGAs",
+ "members": [
+ {
+ "name": "arch",
+ "type": "String",
+ "desc": "Target architecture. Legal values are *xilinx*, *ice40* and *ecp5*",
+ },
+ {
+ "name": "output_format",
+ "type": "String",
+ "desc": "Output file format. Legal values are *json*, *edif*, *blif*",
+ },
+ {
+ "name": "yosys_as_subtool",
+ "type": "bool",
+ "desc": "Determines if Yosys is run as a part of bigger toolchain, or as a standalone tool",
+ },
+ {
+ "name": "makefile_name",
+ "type": "String",
+ "desc": "Generated makefile name, defaults to $name.mk",
+ },
+ {
+ "name": "yosys_template",
+ "type": "String",
+ "desc": "TCL template file to use instead of default template",
+ },
+ ],
+ "lists": [
+ {
+ "name": "yosys_synth_options",
+ "type": "String",
+ "desc": "Additional options for the synth command",
+ },
+ ],
+ }
def configure_main(self):
# write Yosys tcl script file
- yosys_template = self.tool_options.get('yosys_template')
+ yosys_template = self.tool_options.get("yosys_template")
incdirs = []
file_table = []
@@ -51,77 +66,79 @@ def configure_main(self):
for f in self.files:
cmd = ""
- if f['file_type'].startswith('verilogSource'):
- cmd = 'read_verilog'
- elif f['file_type'].startswith('systemVerilogSource'):
- cmd = 'read_verilog -sv'
- elif f['file_type'] == 'tclSource':
- cmd = 'source'
+ if f["file_type"].startswith("verilogSource"):
+ cmd = "read_verilog"
+ elif f["file_type"].startswith("systemVerilogSource"):
+ cmd = "read_verilog -sv"
+ elif f["file_type"] == "tclSource":
+ cmd = "source"
if cmd:
if not self._add_include_dir(f, incdirs):
- file_table.append(cmd + ' {' + f['name'] + '}')
+ file_table.append(cmd + " {" + f["name"] + "}")
else:
unused_files.append(f)
- self.edam['files'] = unused_files
+ self.edam["files"] = unused_files
of = [
- {'name' : self.name+'.blif', 'file_type' : 'blif'},
- {'name' : self.name+'.edif', 'file_type' : 'edif'},
- {'name' : self.name+'.json', 'file_type' : 'jsonNetlist'},
+ {"name": self.name + ".blif", "file_type": "blif"},
+ {"name": self.name + ".edif", "file_type": "edif"},
+ {"name": self.name + ".json", "file_type": "jsonNetlist"},
]
- self.edam['files'] += of
+ self.edam["files"] += of
verilog_defines = []
for key, value in self.vlogdefine.items():
- verilog_defines.append('{{{key} {value}}}'.format(key=key, value=value))
+ verilog_defines.append("{{{key} {value}}}".format(key=key, value=value))
verilog_params = []
for key, value in self.vlogparam.items():
if type(value) is str:
- value = "{\"" + value + "\"}"
+ value = '{"' + value + '"}'
_s = r"chparam -set {} {} {}"
- verilog_params.append(_s.format(key,
- self._param_value_str(value),
- self.toplevel))
+ verilog_params.append(
+ _s.format(key, self._param_value_str(value), self.toplevel)
+ )
- output_format = self.tool_options.get('output_format', 'blif')
- arch = self.tool_options.get('arch', None)
+ output_format = self.tool_options.get("output_format", "blif")
+ arch = self.tool_options.get("arch", None)
if not arch:
logger.error("ERROR: arch is not defined.")
- template = yosys_template or 'edalize_yosys_template.tcl'
+ template = yosys_template or "edalize_yosys_template.tcl"
template_vars = {
- 'verilog_defines' : "{" + " ".join(verilog_defines) + "}",
- 'verilog_params' : "\n".join(verilog_params),
- 'file_table' : "\n".join(file_table),
- 'incdirs' : ' '.join(['-I'+d for d in incdirs]),
- 'top' : self.toplevel,
- 'synth_command' : "synth_" + arch,
- 'synth_options' : " ".join(self.tool_options.get('yosys_synth_options', '')),
- 'write_command' : "write_" + output_format,
- 'default_target' : output_format,
- 'edif_opts' : '-pvector bra' if arch=='xilinx' else '',
- 'yosys_template' : template,
- 'name' : self.name
+ "verilog_defines": "{" + " ".join(verilog_defines) + "}",
+ "verilog_params": "\n".join(verilog_params),
+ "file_table": "\n".join(file_table),
+ "incdirs": " ".join(["-I" + d for d in incdirs]),
+ "top": self.toplevel,
+ "synth_command": "synth_" + arch,
+ "synth_options": " ".join(self.tool_options.get("yosys_synth_options", "")),
+ "write_command": "write_" + output_format,
+ "default_target": output_format,
+ "edif_opts": "-pvector bra" if arch == "xilinx" else "",
+ "yosys_template": template,
+ "name": self.name,
}
- self.render_template('edalize_yosys_procs.tcl.j2',
- 'edalize_yosys_procs.tcl',
- template_vars)
+ self.render_template(
+ "edalize_yosys_procs.tcl.j2", "edalize_yosys_procs.tcl", template_vars
+ )
if not yosys_template:
- self.render_template('yosys-script-tcl.j2',
- 'edalize_yosys_template.tcl',
- template_vars)
+ self.render_template(
+ "yosys-script-tcl.j2", "edalize_yosys_template.tcl", template_vars
+ )
commands = self.EdaCommands()
- commands.add(['yosys', '-l', 'yosys.log', '-p', f'"tcl {template}"'],
- [f'{self.name}.{output}' for output in ['blif', 'json','edif']],
- [template])
- if self.tool_options.get('yosys_as_subtool'):
+ commands.add(
+ ["yosys", "-l", "yosys.log", "-p", f'"tcl {template}"'],
+ [f"{self.name}.{output}" for output in ["blif", "json", "edif"]],
+ [template],
+ )
+ if self.tool_options.get("yosys_as_subtool"):
self.commands = commands.commands
else:
- commands.set_default_target(f'{self.name}.{output_format}')
- commands.write(os.path.join(self.work_root, 'Makefile'))
+ commands.set_default_target(f"{self.name}.{output_format}")
+ commands.write(os.path.join(self.work_root, "Makefile"))
diff --git a/setup.py b/setup.py
index 056af49c8..7e2b07a80 100644
--- a/setup.py
+++ b/setup.py
@@ -5,45 +5,61 @@
import os
from setuptools import setup
+
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
-setup(
- name = "edalize",
- version = "0.2.5",
- packages=['edalize'],
- package_data = {'edalize' : [
- 'templates/yosys/edalize_yosys_procs.tcl.j2',
- 'templates/yosys/yosys-script-tcl.j2',
- 'templates/spyglass/Makefile.j2',
- 'templates/spyglass/spyglass-project.prj.j2',
- 'templates/spyglass/spyglass-run-goal.tcl.j2',
- 'templates/vcs/Makefile.j2',
- 'templates/vivado/vivado-program.tcl.j2',
- 'templates/vivado/vivado-project.tcl.j2',
- 'templates/vivado/vivado-run.tcl.j2',
- 'templates/vivado/vivado-synth.tcl.j2',
- 'templates/vunit/run.py.j2',
- 'templates/quartus/quartus-project.tcl.j2',
- 'templates/quartus/quartus-std-makefile.j2',
- 'templates/quartus/quartus-pro-makefile.j2',
- 'templates/ascentlint/Makefile.j2',
- 'templates/ascentlint/run-ascentlint.tcl.j2',
- 'templates/libero/libero-project.tcl.j2',
- 'templates/libero/libero-run.tcl.j2',
- 'templates/libero/libero-syn-user.tcl.j2',
- 'templates/ghdl/Makefile.j2',
- 'templates/openlane/openlane-makefile.j2',
- 'templates/openlane/openlane-script-tcl.j2',
- ]},
- author = "Olof Kindgren",
- author_email = "olof.kindgren@gmail.com",
- description = ("Edalize is a library for interfacing EDA tools, primarily for FPGA development"),
- license = "BSD-2-Clause",
- keywords = ["VHDL", "verilog", "EDA", "hdl", "rtl", "synthesis", "FPGA", "simulation", "Xilinx", "Altera"],
- url = "https://github.com/olofk/edalize",
- long_description=read('README.rst'),
+setup(
+ name="edalize",
+ version="0.2.5",
+ packages=["edalize"],
+ package_data={
+ "edalize": [
+ "templates/yosys/edalize_yosys_procs.tcl.j2",
+ "templates/yosys/yosys-script-tcl.j2",
+ "templates/spyglass/Makefile.j2",
+ "templates/spyglass/spyglass-project.prj.j2",
+ "templates/spyglass/spyglass-run-goal.tcl.j2",
+ "templates/vcs/Makefile.j2",
+ "templates/vivado/vivado-program.tcl.j2",
+ "templates/vivado/vivado-project.tcl.j2",
+ "templates/vivado/vivado-run.tcl.j2",
+ "templates/vivado/vivado-synth.tcl.j2",
+ "templates/vunit/run.py.j2",
+ "templates/quartus/quartus-project.tcl.j2",
+ "templates/quartus/quartus-std-makefile.j2",
+ "templates/quartus/quartus-pro-makefile.j2",
+ "templates/ascentlint/Makefile.j2",
+ "templates/ascentlint/run-ascentlint.tcl.j2",
+ "templates/libero/libero-project.tcl.j2",
+ "templates/libero/libero-run.tcl.j2",
+ "templates/libero/libero-syn-user.tcl.j2",
+ "templates/ghdl/Makefile.j2",
+ "templates/openlane/openlane-makefile.j2",
+ "templates/openlane/openlane-script-tcl.j2",
+ ]
+ },
+ author="Olof Kindgren",
+ author_email="olof.kindgren@gmail.com",
+ description=(
+ "Edalize is a library for interfacing EDA tools, primarily for FPGA development"
+ ),
+ license="BSD-2-Clause",
+ keywords=[
+ "VHDL",
+ "verilog",
+ "EDA",
+ "hdl",
+ "rtl",
+ "synthesis",
+ "FPGA",
+ "simulation",
+ "Xilinx",
+ "Altera",
+ ],
+ url="https://github.com/olofk/edalize",
+ long_description=read("README.rst"),
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: BSD License",
@@ -54,12 +70,9 @@ def read(fname):
# 2.11.0 and .1 introduced an incompatible change in template output,
# which was fixed in 2.11.2 and later.
# https://github.com/pallets/jinja/issues/1138
- 'Jinja2>=2.11.3',
- ],
- tests_require=[
- 'pytest>=3.3.0',
- 'vunit_hdl>=4.0.8'
+ "Jinja2>=2.11.3",
],
+ tests_require=["pytest>=3.3.0", "vunit_hdl>=4.0.8"],
# The reporting modules have dependencies that shouldn't be required for
# all Edalize users.
extras_require={
diff --git a/tests/README.rst b/tests/README.rst
index d9e452949..c166e6391 100644
--- a/tests/README.rst
+++ b/tests/README.rst
@@ -1,17 +1,11 @@
-***************
Testing edalize
-***************
-
-Users
-=====
+###############
To run the tests, call :command:`pytest`.
-Developers
-==========
Mocks for commands
-------------------
+==================
We provide mocks (stand-ins) for all tools that we want to exercise in tests (located in :file:`tests/mock_commands/`).
These mocks are very simplified "models" of the actual tool, and are called instead of the actual tool.
@@ -25,8 +19,9 @@ In a more complex test setup (e.g. for ``vcs``),
* we make the file executable
* we set the access and modified times of generated files to the current time
+
Testcases
----------
+=========
To define a testcase, use the :func:`edalize_common.make_edalize_test` pytest factory fixture.
This defines a factory that you can call to set up a mocked-up backend appropriately.
diff --git a/tests/edalize_common.py b/tests/edalize_common.py
index 95a2a6570..b40e3ea53 100644
--- a/tests/edalize_common.py
+++ b/tests/edalize_common.py
@@ -11,7 +11,7 @@
class TestFixture:
- '''A fixture that makes an edalize backend with work_root directory
+ """A fixture that makes an edalize backend with work_root directory
Create this object using the make_edalize_test factory fixture. This passes
through its `tool_name` and sets up a temporary directory for `work_root`,
@@ -41,29 +41,41 @@ class TestFixture:
use_vpi: If true, set up backend with definitions from :attr:`VPI`.
Defaults to `False`.
- '''
- def __init__(self,
- tool_name,
- work_root,
- test_name=None,
- param_types=['plusarg', 'vlogdefine', 'vlogparam'],
- files=None,
- tool_options={},
- ref_dir='.',
- use_vpi=False,
- toplevel='top_module'):
-
- raw_ref_dir = os.path.join(tests_dir, 'test_' + tool_name, ref_dir)
-
- self.test_name = ('test_{}_0'.format(tool_name)
- if test_name is None else test_name)
+ """
+
+ def __init__(
+ self,
+ tool_name,
+ work_root,
+ test_name=None,
+ param_types=["plusarg", "vlogdefine", "vlogparam"],
+ files=None,
+ tool_options={},
+ ref_dir=".",
+ use_vpi=False,
+ toplevel="top_module",
+ ):
+
+ raw_ref_dir = os.path.join(tests_dir, "test_" + tool_name, ref_dir)
+
+ self.test_name = (
+ "test_{}_0".format(tool_name) if test_name is None else test_name
+ )
self.ref_dir = os.path.normpath(raw_ref_dir)
self.work_root = work_root
- self.backend = _setup_backend(self.test_name, tool_name, param_types,
- files, tool_options, work_root, use_vpi, toplevel)
-
- def compare_files(self, files, ref_subdir='.'):
- '''Check some files in the work root match those in the ref directory
+ self.backend = _setup_backend(
+ self.test_name,
+ tool_name,
+ param_types,
+ files,
+ tool_options,
+ work_root,
+ use_vpi,
+ toplevel,
+ )
+
+ def compare_files(self, files, ref_subdir="."):
+ """Check some files in the work root match those in the ref directory
The files argument gives the list of files to check. These are
interpreted as paths relative to the work directory and relative to
@@ -73,26 +85,27 @@ def compare_files(self, files, ref_subdir='.'):
documentation for how to use the :envvar:`GOLDEN_RUN` environment
variable to copy across a golden reference.
- '''
+ """
ref_dir = os.path.normpath(os.path.join(self.ref_dir, ref_subdir))
return compare_files(ref_dir, self.work_root, files)
def copy_to_work_root(self, path):
- shutil.copy(os.path.join(self.ref_dir, path),
- os.path.join(self.work_root, path))
+ shutil.copy(
+ os.path.join(self.ref_dir, path), os.path.join(self.work_root, path)
+ )
@pytest.fixture
def make_edalize_test(monkeypatch, tmpdir):
- '''A factory fixture to make an edalize backend with work_root directory
+ """A factory fixture to make an edalize backend with work_root directory
The returned factory method takes a `tool_name` (the name of the tool) and
the keyword arguments supported by :class:`TestFixture`. It returns a
:class:`TestFixture` object, whose `work_root` is a temporary directory.
- '''
+ """
# Prepend directory `mock_commands` to PATH environment variable
- monkeypatch.setenv('PATH', os.path.join(tests_dir, 'mock_commands'), ':')
+ monkeypatch.setenv("PATH", os.path.join(tests_dir, "mock_commands"), ":")
created = []
@@ -119,7 +132,7 @@ def compare_files(ref_dir, work_root, files):
assert os.path.exists(generated_file)
- if 'GOLDEN_RUN' in os.environ:
+ if "GOLDEN_RUN" in os.environ:
shutil.copy(generated_file, reference_file)
with open(reference_file) as fref, open(generated_file) as fgen:
@@ -131,23 +144,25 @@ def param_gen(paramtypes):
defs = OrderedDict()
for paramtype in paramtypes:
- for datatype in ['bool', 'int', 'str']:
- if datatype == 'int':
+ for datatype in ["bool", "int", "str"]:
+ if datatype == "int":
default = 42
- elif datatype == 'str':
- default = 'hello'
+ elif datatype == "str":
+ default = "hello"
else:
default = True
- defs[paramtype+'_'+datatype] = {
- 'datatype' : datatype,
- 'default' : default,
- 'description' : '',
- 'paramtype' : paramtype}
+ defs[paramtype + "_" + datatype] = {
+ "datatype": datatype,
+ "default": default,
+ "description": "",
+ "paramtype": paramtype,
+ }
return defs
-def _setup_backend(name, tool, paramtypes, files,
- tool_options, work_root, use_vpi, toplevel):
+def _setup_backend(
+ name, tool, paramtypes, files, tool_options, work_root, use_vpi, toplevel
+):
"""Set up a backend.
The backend is called *name*, is set up for *tool* with *tool_options*,
@@ -160,19 +175,21 @@ def _setup_backend(name, tool, paramtypes, files,
if use_vpi:
_vpi = VPI
for v in VPI:
- for f in v['src_files']:
+ for f in v["src_files"]:
_f = os.path.join(work_root, f)
if not os.path.exists(os.path.dirname(_f)):
os.makedirs(os.path.dirname(_f))
- with open(_f, 'a'):
+ with open(_f, "a"):
os.utime(_f, None)
- edam = {'name' : name,
- 'files' : FILES if files is None else files,
- 'parameters' : parameters,
- 'tool_options' : {tool : tool_options},
- 'toplevel' : toplevel,
- 'vpi' : _vpi}
+ edam = {
+ "name": name,
+ "files": FILES if files is None else files,
+ "parameters": parameters,
+ "tool_options": {tool: tool_options},
+ "toplevel": toplevel,
+ "vpi": _vpi,
+ }
return get_edatool(tool)(edam=edam, work_root=work_root)
@@ -204,23 +221,22 @@ def _setup_backend(name, tool, paramtypes, files,
{"name": "config.vbl", "file_type": "veribleLintRules"},
{"name": "verible_waiver.vbw", "file_type": "veribleLintWaiver"},
{"name": "verible_waiver2.vbw", "file_type": "veribleLintWaiver"},
- {'name': 'config.sby.j2', 'file_type': 'sbyConfigTemplate'},
+ {"name": "config.sby.j2", "file_type": "sbyConfigTemplate"},
{"name": "another_sv_file.sv", "file_type": "systemVerilogSource"},
{"name": "pdc_constraint_file.pdc", "file_type": "PDC"},
{"name": "pdc_floorplan_constraint_file.pdc", "file_type": "FPPDC"},
- {'name': 'lpf_file.lpf', 'file_type': 'LPF'}
+ {"name": "lpf_file.lpf", "file_type": "LPF"},
]
"""Files of all supported file types."""
VPI = [
- {'src_files': ['src/vpi_1/f1',
- 'src/vpi_1/f3'],
- 'include_dirs': ['src/vpi_1/'],
- 'libs': ['some_lib'],
- 'name': 'vpi1'},
- {'src_files': ['src/vpi_2/f4'],
- 'include_dirs': [],
- 'libs': [],
- 'name': 'vpi2'}]
+ {
+ "src_files": ["src/vpi_1/f1", "src/vpi_1/f3"],
+ "include_dirs": ["src/vpi_1/"],
+ "libs": ["some_lib"],
+ "name": "vpi1",
+ },
+ {"src_files": ["src/vpi_2/f4"], "include_dirs": [], "libs": [], "name": "vpi2"},
+]
"""Predefined VPI modules to build."""
diff --git a/tests/mock_commands/arachne-pnr b/tests/mock_commands/arachne-pnr
index ceeca1cf0..c6a4a8f93 100755
--- a/tests/mock_commands/arachne-pnr
+++ b/tests/mock_commands/arachne-pnr
@@ -4,13 +4,13 @@ import sys
is_argvalue = False
for arg in sys.argv[1:]:
- if not (is_argvalue or arg[0] == '-'):
+ if not (is_argvalue or arg[0] == "-"):
input_file = arg
- is_argvalue = (arg[0] == '-')
+ is_argvalue = arg[0] == "-"
-output_file = input_file[0:-4]+'txt'
-with open(output_file, 'a'):
+output_file = input_file[0:-4] + "txt"
+with open(output_file, "a"):
os.utime(output_file, None)
-with open('arachne-pnr.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("arachne-pnr.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/ascentlint b/tests/mock_commands/ascentlint
index 09f729612..e39fc5cda 100755
--- a/tests/mock_commands/ascentlint
+++ b/tests/mock_commands/ascentlint
@@ -1,8 +1,8 @@
#!/usr/bin/env python3
import sys
-with open('ascentlint.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("ascentlint.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
-with open('ascentlint.log', 'w') as f:
- f.write('No lint violations found\n')
+with open("ascentlint.log", "w") as f:
+ f.write("No lint violations found\n")
diff --git a/tests/mock_commands/diamondc b/tests/mock_commands/diamondc
index aa4986c6b..417c4d4af 100755
--- a/tests/mock_commands/diamondc
+++ b/tests/mock_commands/diamondc
@@ -2,5 +2,5 @@
import os
import sys
-with open('diamondc.cmd', 'a') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("diamondc.cmd", "a") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/ecppack b/tests/mock_commands/ecppack
index b98e4bd12..3886f75c5 100755
--- a/tests/mock_commands/ecppack
+++ b/tests/mock_commands/ecppack
@@ -3,9 +3,9 @@ import os
import sys
output_file = sys.argv[2]
-with open(output_file, 'a'):
+with open(output_file, "a"):
# set the access and modified times to the current time
os.utime(output_file, None)
-with open('ecppack.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("ecppack.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/ghdl b/tests/mock_commands/ghdl
index 13a963c29..1432aa98c 100755
--- a/tests/mock_commands/ghdl
+++ b/tests/mock_commands/ghdl
@@ -2,16 +2,16 @@
import os
import sys
-cmd_file = 'analyze.cmd' if sys.argv[1] == '-i' else 'elab-run.cmd'
+cmd_file = "analyze.cmd" if sys.argv[1] == "-i" else "elab-run.cmd"
-with open(cmd_file, 'a') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open(cmd_file, "a") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
-if sys.argv[1] == '-i':
+if sys.argv[1] == "-i":
for arg in sys.argv:
- if arg.startswith('--std'):
- std = arg.split('=')[1]
- output_file = 'work-obj'+std+'.cf'
- with open(output_file, 'a'):
+ if arg.startswith("--std"):
+ std = arg.split("=")[1]
+ output_file = "work-obj" + std + ".cf"
+ with open(output_file, "a"):
# set the access and modified times to the current time
os.utime(output_file, None)
diff --git a/tests/mock_commands/gowin_pack b/tests/mock_commands/gowin_pack
index ced2c16c0..ba172b66d 100755
--- a/tests/mock_commands/gowin_pack
+++ b/tests/mock_commands/gowin_pack
@@ -2,10 +2,10 @@
import os
import sys
-output_file = sys.argv[sys.argv.index('-o')+1]
-with open(output_file, 'a'):
+output_file = sys.argv[sys.argv.index("-o") + 1]
+with open(output_file, "a"):
# set the access and modified times to the current time
os.utime(output_file, None)
-with open('gowin_pack.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("gowin_pack.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/icepack b/tests/mock_commands/icepack
index 5b6c98447..b0e41b2d6 100755
--- a/tests/mock_commands/icepack
+++ b/tests/mock_commands/icepack
@@ -3,9 +3,9 @@ import os
import sys
output_file = sys.argv[2]
-with open(output_file, 'a'):
+with open(output_file, "a"):
# set the access and modified times to the current time
os.utime(output_file, None)
-with open('icepack.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("icepack.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/ip-generate b/tests/mock_commands/ip-generate
index 6733d56ed..393305c5a 100755
--- a/tests/mock_commands/ip-generate
+++ b/tests/mock_commands/ip-generate
@@ -2,5 +2,5 @@
import os
import sys
-with open('ip-generate.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("ip-generate.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/iverilog b/tests/mock_commands/iverilog
index 67a49fdd4..28d0872c5 100755
--- a/tests/mock_commands/iverilog
+++ b/tests/mock_commands/iverilog
@@ -6,10 +6,10 @@ grab_input = False
for arg in sys.argv:
if grab_input:
output_file = arg
- grab_input = (arg == '-o')
+ grab_input = arg == "-o"
-with open(output_file, 'a'):
+with open(output_file, "a"):
os.utime(output_file, None)
-with open('iverilog.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("iverilog.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/iverilog-vpi b/tests/mock_commands/iverilog-vpi
index f2995e5d5..b15102fa8 100755
--- a/tests/mock_commands/iverilog-vpi
+++ b/tests/mock_commands/iverilog-vpi
@@ -3,11 +3,11 @@ import os
import sys
for arg in sys.argv:
- if arg.startswith('--name='):
- output_file = arg[7:]+'.vpi'
+ if arg.startswith("--name="):
+ output_file = arg[7:] + ".vpi"
-with open(output_file, 'a'):
+with open(output_file, "a"):
os.utime(output_file, None)
-with open('iverilog-vpi.cmd', 'a') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("iverilog-vpi.cmd", "a") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/nextpnr-ecp5 b/tests/mock_commands/nextpnr-ecp5
index 2e27a0a63..145330a65 100755
--- a/tests/mock_commands/nextpnr-ecp5
+++ b/tests/mock_commands/nextpnr-ecp5
@@ -2,10 +2,10 @@
import os
import sys
-output_file = sys.argv[sys.argv.index('--textcfg')+1]
+output_file = sys.argv[sys.argv.index("--textcfg") + 1]
-with open(output_file, 'a'):
+with open(output_file, "a"):
os.utime(output_file, None)
-with open('nextpnr-ecp5.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("nextpnr-ecp5.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/nextpnr-gowin b/tests/mock_commands/nextpnr-gowin
index f967efea6..58d857c94 100755
--- a/tests/mock_commands/nextpnr-gowin
+++ b/tests/mock_commands/nextpnr-gowin
@@ -2,10 +2,10 @@
import os
import sys
-output_file = sys.argv[sys.argv.index('--write')+1]
+output_file = sys.argv[sys.argv.index("--write") + 1]
-with open(output_file, 'a'):
+with open(output_file, "a"):
os.utime(output_file, None)
-with open('nextpnr-gowin.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("nextpnr-gowin.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/nextpnr-ice40 b/tests/mock_commands/nextpnr-ice40
index 869f929e1..5b5bea25a 100755
--- a/tests/mock_commands/nextpnr-ice40
+++ b/tests/mock_commands/nextpnr-ice40
@@ -2,10 +2,10 @@
import os
import sys
-output_file = sys.argv[sys.argv.index('--asc')+1]
+output_file = sys.argv[sys.argv.index("--asc") + 1]
-with open(output_file, 'a'):
+with open(output_file, "a"):
os.utime(output_file, None)
-with open('nextpnr-ice40.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("nextpnr-ice40.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/qsys-generate b/tests/mock_commands/qsys-generate
index 2e3b367eb..dbfef401b 100755
--- a/tests/mock_commands/qsys-generate
+++ b/tests/mock_commands/qsys-generate
@@ -2,5 +2,5 @@
import os
import sys
-with open('qsys-generate.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
\ No newline at end of file
+with open("qsys-generate.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/quartus_asm b/tests/mock_commands/quartus_asm
index 47e14a91a..017c3b44c 100755
--- a/tests/mock_commands/quartus_asm
+++ b/tests/mock_commands/quartus_asm
@@ -2,5 +2,5 @@
import os
import sys
-with open('quartus_asm.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("quartus_asm.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/quartus_dse b/tests/mock_commands/quartus_dse
index 65f4351ef..47e359583 100755
--- a/tests/mock_commands/quartus_dse
+++ b/tests/mock_commands/quartus_dse
@@ -2,5 +2,5 @@
import os
import sys
-with open('quartus_dse.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("quartus_dse.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/quartus_fit b/tests/mock_commands/quartus_fit
index dede11151..4d764598b 100755
--- a/tests/mock_commands/quartus_fit
+++ b/tests/mock_commands/quartus_fit
@@ -2,5 +2,5 @@
import os
import sys
-with open('quartus_fit.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("quartus_fit.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/quartus_map b/tests/mock_commands/quartus_map
index b8e63de6c..6d8ce7695 100755
--- a/tests/mock_commands/quartus_map
+++ b/tests/mock_commands/quartus_map
@@ -2,5 +2,5 @@
import os
import sys
-with open('quartus_map.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("quartus_map.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/quartus_sh b/tests/mock_commands/quartus_sh
index c3f9e45fd..70d451330 100755
--- a/tests/mock_commands/quartus_sh
+++ b/tests/mock_commands/quartus_sh
@@ -12,5 +12,5 @@ if len(sys.argv) > 1 and sys.argv[1] == "--version":
print("Version 16.1.2 Build 203 01/18/2017 SJ Standard Edition")
sys.exit(0)
-with open('quartus_sh.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("quartus_sh.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/quartus_sta b/tests/mock_commands/quartus_sta
index b58bb0777..ee7b906b7 100755
--- a/tests/mock_commands/quartus_sta
+++ b/tests/mock_commands/quartus_sta
@@ -2,5 +2,5 @@
import os
import sys
-with open('quartus_sta.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("quartus_sta.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/quartus_syn b/tests/mock_commands/quartus_syn
index 63f416108..cf4962b0f 100755
--- a/tests/mock_commands/quartus_syn
+++ b/tests/mock_commands/quartus_syn
@@ -2,5 +2,5 @@
import os
import sys
-with open('quartus_syn.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("quartus_syn.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/radiantc b/tests/mock_commands/radiantc
index b42e22125..5d900458c 100755
--- a/tests/mock_commands/radiantc
+++ b/tests/mock_commands/radiantc
@@ -2,5 +2,5 @@
import os
import sys
-with open('radiantc.cmd', 'a') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("radiantc.cmd", "a") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/sby b/tests/mock_commands/sby
index f838b57d2..ba478a2fb 100755
--- a/tests/mock_commands/sby
+++ b/tests/mock_commands/sby
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
import sys
-with open('sby.cmd', 'a') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("sby.cmd", "a") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/sg_shell b/tests/mock_commands/sg_shell
index e81373f10..9c1573f75 100755
--- a/tests/mock_commands/sg_shell
+++ b/tests/mock_commands/sg_shell
@@ -2,5 +2,5 @@
import os
import sys
-with open('spyglass.cmd', 'a') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("spyglass.cmd", "a") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/vcs b/tests/mock_commands/vcs
index d71cd864c..31e10df70 100755
--- a/tests/mock_commands/vcs
+++ b/tests/mock_commands/vcs
@@ -3,11 +3,11 @@ import os
import sys
# output_file is the argument to the option `-o`
-output_file = sys.argv[sys.argv.index('-o')+1]
+output_file = sys.argv[sys.argv.index("-o") + 1]
# output_file, when executed, will record the arguments it has been called with
# in a file `run.cmd`
-with open(output_file, 'w') as f:
+with open(output_file, "w") as f:
s = """#!/usr/bin/env python3
import sys
with open('run.cmd', 'w') as f:
@@ -19,5 +19,5 @@ with open('run.cmd', 'w') as f:
os.chmod(output_file, 0o755)
# write out the arguments this mock has been called with to `vcs.cmd`
-with open('vcs.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("vcs.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/verible-verilog-format b/tests/mock_commands/verible-verilog-format
index 7dc8e83cc..6130033ee 100755
--- a/tests/mock_commands/verible-verilog-format
+++ b/tests/mock_commands/verible-verilog-format
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
import sys
-with open('verible-verilog-format.cmd', 'a') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("verible-verilog-format.cmd", "a") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/verible-verilog-lint b/tests/mock_commands/verible-verilog-lint
index 1c3f7a39b..8e5897228 100755
--- a/tests/mock_commands/verible-verilog-lint
+++ b/tests/mock_commands/verible-verilog-lint
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
import sys
-with open('verible-verilog-lint.cmd', 'a') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("verible-verilog-lint.cmd", "a") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/vivado b/tests/mock_commands/vivado
index 72ed1b32f..98a71bcf6 100755
--- a/tests/mock_commands/vivado
+++ b/tests/mock_commands/vivado
@@ -2,5 +2,5 @@
import os
import sys
-with open('vivado.cmd', 'a') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("vivado.cmd", "a") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/vsim b/tests/mock_commands/vsim
index 2019c5eab..28b9de7a3 100755
--- a/tests/mock_commands/vsim
+++ b/tests/mock_commands/vsim
@@ -2,5 +2,5 @@
import os
import sys
-with open('vsim.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("vsim.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/xcelium/tools/bin/xrun b/tests/mock_commands/xcelium/tools/bin/xrun
index 15cab54c0..85f81ebdc 100755
--- a/tests/mock_commands/xcelium/tools/bin/xrun
+++ b/tests/mock_commands/xcelium/tools/bin/xrun
@@ -2,5 +2,5 @@
import sys
import shlex
-with open('xrun.cmd', 'w') as f:
- f.write(' '.join(shlex.quote(arg) for arg in sys.argv[1:]) + '\n')
+with open("xrun.cmd", "w") as f:
+ f.write(" ".join(shlex.quote(arg) for arg in sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/xtclsh b/tests/mock_commands/xtclsh
index ed5855193..38c44bd8a 100755
--- a/tests/mock_commands/xtclsh
+++ b/tests/mock_commands/xtclsh
@@ -2,11 +2,11 @@
import os
import sys
-#Pretend xtclsh is called to create the .xise file
+# Pretend xtclsh is called to create the .xise file
if len(sys.argv) == 2:
- output_file = sys.argv[1][0:-3]+'xise'
- with open(output_file, 'a'):
+ output_file = sys.argv[1][0:-3] + "xise"
+ with open(output_file, "a"):
os.utime(output_file, None)
-with open('xtclsh.cmd', 'a') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("xtclsh.cmd", "a") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/mock_commands/yosys b/tests/mock_commands/yosys
index 17ebe7a50..66a939180 100755
--- a/tests/mock_commands/yosys
+++ b/tests/mock_commands/yosys
@@ -6,15 +6,15 @@ grab_input = False
for arg in sys.argv:
if grab_input:
input_file = arg.replace(" ", "_")
- grab_input = (arg == '-p')
+ grab_input = arg == "-p"
-output_file = input_file[0:-3]+'blif'
-with open(output_file, 'a'):
- os.utime(output_file, None)
+output_file = input_file[0:-3] + "blif"
+with open(output_file, "a"):
+ os.utime(output_file, None)
-output_file = input_file[0:-3]+'json'
-with open(output_file, 'a'):
- os.utime(output_file, None)
+output_file = input_file[0:-3] + "json"
+with open(output_file, "a"):
+ os.utime(output_file, None)
-with open('yosys.cmd', 'w') as f:
- f.write(' '.join(sys.argv[1:]) + '\n')
+with open("yosys.cmd", "w") as f:
+ f.write(" ".join(sys.argv[1:]) + "\n")
diff --git a/tests/test_apicula.py b/tests/test_apicula.py
index c476433ad..3fccb49a0 100644
--- a/tests/test_apicula.py
+++ b/tests/test_apicula.py
@@ -6,51 +6,60 @@
def run_apicula_test(tf):
tf.backend.configure()
- tf.compare_files(['Makefile', 'edalize_yosys_procs.tcl', 'edalize_yosys_template.tcl'])
+ tf.compare_files(
+ ["Makefile", "edalize_yosys_procs.tcl", "edalize_yosys_template.tcl"]
+ )
tf.backend.build()
- tf.compare_files(['yosys.cmd', 'nextpnr-gowin.cmd', 'gowin_pack.cmd'])
+ tf.compare_files(["yosys.cmd", "nextpnr-gowin.cmd", "gowin_pack.cmd"])
+
def test_apicula(make_edalize_test):
tool_options = {
- 'device' : 'GW1N-LV1QN48C6/I5',
- 'yosys_synth_options': ['some', 'yosys_synth_options'],
- 'nextpnr_options': ['a', 'few', 'nextpnr_options']
+ "device": "GW1N-LV1QN48C6/I5",
+ "yosys_synth_options": ["some", "yosys_synth_options"],
+ "nextpnr_options": ["a", "few", "nextpnr_options"],
}
- tf = make_edalize_test('apicula',
- param_types=['vlogdefine', 'vlogparam'],
- tool_options=tool_options)
+ tf = make_edalize_test(
+ "apicula", param_types=["vlogdefine", "vlogparam"], tool_options=tool_options
+ )
run_apicula_test(tf)
+
def test_apicula_minimal(make_edalize_test):
tool_options = {
- 'device' : 'GW1N-LV1QN48C6/I5',
+ "device": "GW1N-LV1QN48C6/I5",
}
- tf = make_edalize_test('apicula',
- param_types=[],
- files=[],
- tool_options = tool_options,
- ref_dir='minimal')
+ tf = make_edalize_test(
+ "apicula",
+ param_types=[],
+ files=[],
+ tool_options=tool_options,
+ ref_dir="minimal",
+ )
run_apicula_test(tf)
+
def test_apicula_multiple_cst(make_edalize_test):
- files = [{'name': 'cst_file.cst', 'file_type': 'CST'},
- {'name': 'cst_file2.cst', 'file_type': 'CST'}]
- tf = make_edalize_test('apicula',
- param_types=[],
- files=files)
+ files = [
+ {"name": "cst_file.cst", "file_type": "CST"},
+ {"name": "cst_file2.cst", "file_type": "CST"},
+ ]
+ tf = make_edalize_test("apicula", param_types=[], files=files)
with pytest.raises(RuntimeError) as e:
tf.backend.configure()
- assert "Nextpnr only supports one CST file. Found cst_file.cst and cst_file2.cst" in str(e.value)
+ assert (
+ "Nextpnr only supports one CST file. Found cst_file.cst and cst_file2.cst"
+ in str(e.value)
+ )
+
def test_apicula_no_device(make_edalize_test):
- tf = make_edalize_test('apicula',
- param_types=[])
+ tf = make_edalize_test("apicula", param_types=[])
with pytest.raises(RuntimeError) as e:
tf.backend.configure()
assert "Missing required option 'device' for nextpnr-gowin" in str(e.value)
-
diff --git a/tests/test_ascentlint.py b/tests/test_ascentlint.py
index 6627e274a..075ecadae 100644
--- a/tests/test_ascentlint.py
+++ b/tests/test_ascentlint.py
@@ -2,16 +2,18 @@
def test_ascentlint_defaults(make_edalize_test):
- """ Test the default configuration of Ascent Lint """
- tf = make_edalize_test('ascentlint',
- test_name='test_ascentlint',
- param_types=['vlogdefine', 'vlogparam'],
- ref_dir='defaults')
+ """Test the default configuration of Ascent Lint"""
+ tf = make_edalize_test(
+ "ascentlint",
+ test_name="test_ascentlint",
+ param_types=["vlogdefine", "vlogparam"],
+ ref_dir="defaults",
+ )
tf.backend.configure()
- tf.compare_files(['Makefile', 'run-ascentlint.tcl', 'sources.f'])
+ tf.compare_files(["Makefile", "run-ascentlint.tcl", "sources.f"])
tf.backend.build()
- tf.compare_files(['ascentlint.cmd'])
+ tf.compare_files(["ascentlint.cmd"])
diff --git a/tests/test_diamond.py b/tests/test_diamond.py
index 8ac4e7565..38df84d0d 100644
--- a/tests/test_diamond.py
+++ b/tests/test_diamond.py
@@ -2,21 +2,23 @@
def test_diamond(make_edalize_test):
- name = 'test_diamond_0'
- tf = make_edalize_test('diamond',
- test_name=name,
- param_types=['generic', 'vlogdefine', 'vlogparam'],
- tool_options={
- 'part': 'LFE5U-85F-6BG381C',
- })
+ name = "test_diamond_0"
+ tf = make_edalize_test(
+ "diamond",
+ test_name=name,
+ param_types=["generic", "vlogdefine", "vlogparam"],
+ tool_options={
+ "part": "LFE5U-85F-6BG381C",
+ },
+ )
tf.backend.configure()
- tf.compare_files([name + '.tcl', name + '_run.tcl'])
+ tf.compare_files([name + ".tcl", name + "_run.tcl"])
tf.backend.build()
- tf.compare_files(['diamondc.cmd'])
+ tf.compare_files(["diamondc.cmd"])
def test_diamond_minimal(tmpdir):
@@ -26,28 +28,36 @@ def test_diamond_minimal(tmpdir):
from edalize_common import compare_files, tests_dir
- ref_dir = os.path.join(tests_dir, __name__, 'minimal')
- os.environ['PATH'] = os.path.join(tests_dir, 'mock_commands')+':'+os.environ['PATH']
- tool = 'diamond'
+ ref_dir = os.path.join(tests_dir, __name__, "minimal")
+ os.environ["PATH"] = (
+ os.path.join(tests_dir, "mock_commands") + ":" + os.environ["PATH"]
+ )
+ tool = "diamond"
tool_options = {
- 'part' : 'LFE5U-85F-6BG381C',
+ "part": "LFE5U-85F-6BG381C",
}
- name = 'test_{}_minimal_0'.format(tool)
+ name = "test_{}_minimal_0".format(tool)
work_root = str(tmpdir)
- edam = {'name' : name,
- 'tool_options' : {tool : tool_options}
- }
+ edam = {"name": name, "tool_options": {tool: tool_options}}
backend = get_edatool(tool)(edam=edam, work_root=work_root)
backend.configure()
- compare_files(ref_dir, work_root, [
- name+'.tcl',
- name+'_run.tcl',
- ])
+ compare_files(
+ ref_dir,
+ work_root,
+ [
+ name + ".tcl",
+ name + "_run.tcl",
+ ],
+ )
backend.build()
- compare_files(ref_dir, work_root, [
- 'diamondc.cmd',
- ])
+ compare_files(
+ ref_dir,
+ work_root,
+ [
+ "diamondc.cmd",
+ ],
+ )
diff --git a/tests/test_edam.py b/tests/test_edam.py
index 575443482..9f1c3d6be 100644
--- a/tests/test_edam.py
+++ b/tests/test_edam.py
@@ -8,103 +8,119 @@ def test_empty_edam():
(h, edam_file) = tempfile.mkstemp()
with pytest.raises(TypeError):
- backend = get_edatool('icarus')(edam=None)
+ backend = get_edatool("icarus")(edam=None)
+
def test_incomplete_edam():
from edalize import get_edatool
with pytest.raises(RuntimeError) as excinfo:
- backend = get_edatool('icarus')(edam={'version' : '0.1.2'})
+ backend = get_edatool("icarus")(edam={"version": "0.1.2"})
assert "Missing required parameter 'name'" in str(excinfo.value)
- backend = get_edatool('icarus')(edam={
- 'version' : '0.1.2',
- 'name' : 'corename'})
+ backend = get_edatool("icarus")(edam={"version": "0.1.2", "name": "corename"})
+
def test_edam_files():
from edalize import get_edatool
- files = [{'name' : 'plain_file'},
- {'name' : 'subdir/plain_include_file',
- 'is_include_file' : True},
- {'name' : 'file_with_args',
- 'file_type' : 'verilogSource',
- 'logical_name' : 'libx'},
- {'name' : 'include_file_with_args',
- 'is_include_file' : True,
- 'file_type' : 'verilogSource',
- 'logical_name' : 'libx'}]
- edam = {'files' : files,
- 'name' : 'test_edam_files'}
-
- backend = get_edatool('icarus')(edam=edam)
+
+ files = [
+ {"name": "plain_file"},
+ {"name": "subdir/plain_include_file", "is_include_file": True},
+ {
+ "name": "file_with_args",
+ "file_type": "verilogSource",
+ "logical_name": "libx",
+ },
+ {
+ "name": "include_file_with_args",
+ "is_include_file": True,
+ "file_type": "verilogSource",
+ "logical_name": "libx",
+ },
+ ]
+ edam = {"files": files, "name": "test_edam_files"}
+
+ backend = get_edatool("icarus")(edam=edam)
(parsed_files, incdirs) = backend._get_fileset_files()
assert len(parsed_files) == 2
- assert parsed_files[0].name == 'plain_file'
- assert parsed_files[0].file_type == ''
- assert parsed_files[0].logical_name == ''
- assert parsed_files[1].name == 'file_with_args'
- assert parsed_files[1].file_type == 'verilogSource'
- assert parsed_files[1].logical_name == 'libx'
+ assert parsed_files[0].name == "plain_file"
+ assert parsed_files[0].file_type == ""
+ assert parsed_files[0].logical_name == ""
+ assert parsed_files[1].name == "file_with_args"
+ assert parsed_files[1].file_type == "verilogSource"
+ assert parsed_files[1].logical_name == "libx"
+
+ assert incdirs == ["subdir", "."]
- assert incdirs == ['subdir', '.']
def test_verilog_include_file_with_include_path():
from edalize import get_edatool
- files = [{'name' : 'some_dir/some_file',
- 'file_type' : 'verilogSource',
- 'is_include_file' : True,
- 'include_path' : 'some_dir'}]
- edam = {'files' : files,
- 'name' : 'test_edam_files'}
-
- backend = get_edatool('icarus')(edam=edam)
+
+ files = [
+ {
+ "name": "some_dir/some_file",
+ "file_type": "verilogSource",
+ "is_include_file": True,
+ "include_path": "some_dir",
+ }
+ ]
+ edam = {"files": files, "name": "test_edam_files"}
+
+ backend = get_edatool("icarus")(edam=edam)
(parsed_files, incdirs) = backend._get_fileset_files()
assert len(parsed_files) == 0
- assert incdirs == ['some_dir']
+ assert incdirs == ["some_dir"]
+
def test_verilog_include_file_with_partial_include_path():
from edalize import get_edatool
- files = [{'name' : '../some_dir/some_subdir/some_file',
- 'file_type' : 'verilogSource',
- 'is_include_file' : True,
- 'include_path' : '../some_dir'}]
- edam = {'files' : files,
- 'name' : 'test_edam_files'}
-
- backend = get_edatool('icarus')(edam=edam)
+
+ files = [
+ {
+ "name": "../some_dir/some_subdir/some_file",
+ "file_type": "verilogSource",
+ "is_include_file": True,
+ "include_path": "../some_dir",
+ }
+ ]
+ edam = {"files": files, "name": "test_edam_files"}
+
+ backend = get_edatool("icarus")(edam=edam)
(parsed_files, incdirs) = backend._get_fileset_files()
assert len(parsed_files) == 0
- assert incdirs == ['../some_dir']
+ assert incdirs == ["../some_dir"]
+
def test_edam_hook_failing(tmpdir):
import os.path
from edalize import get_edatool
tests_dir = os.path.dirname(__file__)
- ref_dir = os.path.join(tests_dir, __name__)
+ ref_dir = os.path.join(tests_dir, __name__)
- script = 'exit_1_script'
- hooks = {'pre_build' : [
- {'cmd' : ['sh', os.path.join(ref_dir, script)],
- 'name' : script}]}
+ script = "exit_1_script"
+ hooks = {
+ "pre_build": [{"cmd": ["sh", os.path.join(ref_dir, script)], "name": script}]
+ }
work_root = str(tmpdir)
- edam = {'hooks' : hooks,
- 'name' : script}
+ edam = {"hooks": hooks, "name": script}
- backend = get_edatool('icarus')(edam=edam,
- work_root=work_root)
- exc_str_exp = (r"pre_build script 'exit_1_script': "
- r"\['sh', '.+/exit_1_script'\] exited with error code 1")
+ backend = get_edatool("icarus")(edam=edam, work_root=work_root)
+ exc_str_exp = (
+ r"pre_build script 'exit_1_script': "
+ r"\['sh', '.+/exit_1_script'\] exited with error code 1"
+ )
with pytest.raises(RuntimeError, match=exc_str_exp):
backend.build_pre()
def test_edam_multiple_hooks(tmpdir):
- """ Test if more than one hook gets successfully executed. """
+ """Test if more than one hook gets successfully executed."""
import os.path
from edalize import get_edatool
@@ -125,11 +141,11 @@ def test_edam_multiple_hooks(tmpdir):
work_root = str(tmpdir)
edam = {"hooks": hooks, "name": "test_edam_multiple_hooks"}
- assert not os.path.exists(os.path.join(work_root, 'hook_1_executed.txt'))
- assert not os.path.exists(os.path.join(work_root, 'hook_2_executed.txt'))
+ assert not os.path.exists(os.path.join(work_root, "hook_1_executed.txt"))
+ assert not os.path.exists(os.path.join(work_root, "hook_2_executed.txt"))
backend = get_edatool("icarus")(edam=edam, work_root=work_root)
backend.build_pre()
- assert os.path.exists(os.path.join(work_root, 'hook_1_executed.txt'))
- assert os.path.exists(os.path.join(work_root, 'hook_2_executed.txt'))
+ assert os.path.exists(os.path.join(work_root, "hook_1_executed.txt"))
+ assert os.path.exists(os.path.join(work_root, "hook_2_executed.txt"))
diff --git a/tests/test_ghdl.py b/tests/test_ghdl.py
index 01f7568a5..491456b65 100644
--- a/tests/test_ghdl.py
+++ b/tests/test_ghdl.py
@@ -2,113 +2,118 @@
from edalize_common import make_edalize_test
-
def test_ghdl_01(make_edalize_test):
- tf = make_edalize_test('ghdl',
- ref_dir = "test01",
- param_types=['generic'],
- tool_options={
- 'analyze_options': ['some', 'analyze_options'],
- 'run_options': ['a', 'few', 'run_options']
- })
-
- for vhdl_file in ['vhdl_file.vhd', 'vhdl_lfile', 'vhdl2008_file']:
- with open(os.path.join(tf.work_root, vhdl_file), 'a'):
+ tf = make_edalize_test(
+ "ghdl",
+ ref_dir="test01",
+ param_types=["generic"],
+ tool_options={
+ "analyze_options": ["some", "analyze_options"],
+ "run_options": ["a", "few", "run_options"],
+ },
+ )
+
+ for vhdl_file in ["vhdl_file.vhd", "vhdl_lfile", "vhdl2008_file"]:
+ with open(os.path.join(tf.work_root, vhdl_file), "a"):
os.utime(os.path.join(tf.work_root, vhdl_file), None)
tf.backend.configure()
- tf.compare_files(['Makefile'])
+ tf.compare_files(["Makefile"])
tf.backend.build()
- tf.compare_files(['analyze.cmd'])
+ tf.compare_files(["analyze.cmd"])
tf.backend.run()
- tf.compare_files(['elab-run.cmd'])
-
+ tf.compare_files(["elab-run.cmd"])
LOCAL_FILES = [
- {'name' : 'vhdl_file.vhd', 'file_type' : 'vhdlSource'},
- {'name' : 'vhdl_lfile' , 'file_type' : 'vhdlSource', 'logical_name' : 'libx'},
+ {"name": "vhdl_file.vhd", "file_type": "vhdlSource"},
+ {"name": "vhdl_lfile", "file_type": "vhdlSource", "logical_name": "libx"},
]
# Test 02 - no vhdl version specified
def test_ghdl_02(make_edalize_test):
- tf = make_edalize_test('ghdl',
- ref_dir = "test02",
- test_name = "test_ghdl_02",
- param_types=['generic'],
- files = LOCAL_FILES,
- tool_options={
- 'analyze_options': ['some', 'analyze_options'],
- 'run_options': ['a', 'few', 'run_options']
- })
-
- for vhdl_file in ['vhdl_file.vhd', 'vhdl_lfile', 'vhdl2008_file']:
- with open(os.path.join(tf.work_root, vhdl_file), 'a'):
+ tf = make_edalize_test(
+ "ghdl",
+ ref_dir="test02",
+ test_name="test_ghdl_02",
+ param_types=["generic"],
+ files=LOCAL_FILES,
+ tool_options={
+ "analyze_options": ["some", "analyze_options"],
+ "run_options": ["a", "few", "run_options"],
+ },
+ )
+
+ for vhdl_file in ["vhdl_file.vhd", "vhdl_lfile", "vhdl2008_file"]:
+ with open(os.path.join(tf.work_root, vhdl_file), "a"):
os.utime(os.path.join(tf.work_root, vhdl_file), None)
tf.backend.configure()
- tf.compare_files(['Makefile'])
+ tf.compare_files(["Makefile"])
tf.backend.build()
- tf.compare_files(['analyze.cmd'])
+ tf.compare_files(["analyze.cmd"])
tf.backend.run()
- tf.compare_files(['elab-run.cmd'])
-
+ tf.compare_files(["elab-run.cmd"])
# Test 03 - vhdl Version override
def test_ghdl_03(make_edalize_test):
- tf = make_edalize_test('ghdl',
- ref_dir = "test03",
- test_name = "test_ghdl_03",
- param_types=['generic'],
- files = LOCAL_FILES,
- tool_options={
- 'analyze_options': ['--std=08','--ieee=synopsys'],
- 'run_options': ['a', 'few', 'run_options']
- })
-
- for vhdl_file in ['vhdl_file.vhd', 'vhdl_lfile', 'vhdl2008_file']:
- with open(os.path.join(tf.work_root, vhdl_file), 'a'):
+ tf = make_edalize_test(
+ "ghdl",
+ ref_dir="test03",
+ test_name="test_ghdl_03",
+ param_types=["generic"],
+ files=LOCAL_FILES,
+ tool_options={
+ "analyze_options": ["--std=08", "--ieee=synopsys"],
+ "run_options": ["a", "few", "run_options"],
+ },
+ )
+
+ for vhdl_file in ["vhdl_file.vhd", "vhdl_lfile", "vhdl2008_file"]:
+ with open(os.path.join(tf.work_root, vhdl_file), "a"):
os.utime(os.path.join(tf.work_root, vhdl_file), None)
tf.backend.configure()
- tf.compare_files(['Makefile'])
+ tf.compare_files(["Makefile"])
tf.backend.build()
- tf.compare_files(['analyze.cmd'])
+ tf.compare_files(["analyze.cmd"])
tf.backend.run()
- tf.compare_files(['elab-run.cmd'])
+ tf.compare_files(["elab-run.cmd"])
# Test 04 - Top level includes library
def test_ghdl_04(make_edalize_test):
- tf = make_edalize_test('ghdl',
- ref_dir = "test04",
- param_types=['generic'],
- tool_options={
- 'analyze_options': ['some', 'analyze_options'],
- 'run_options': ['a', 'few', 'run_options']
- },
- toplevel="libx.vhdl_lfile")
-
- for vhdl_file in ['vhdl_file.vhd', 'vhdl_lfile', 'vhdl2008_file']:
- with open(os.path.join(tf.work_root, vhdl_file), 'a'):
+ tf = make_edalize_test(
+ "ghdl",
+ ref_dir="test04",
+ param_types=["generic"],
+ tool_options={
+ "analyze_options": ["some", "analyze_options"],
+ "run_options": ["a", "few", "run_options"],
+ },
+ toplevel="libx.vhdl_lfile",
+ )
+
+ for vhdl_file in ["vhdl_file.vhd", "vhdl_lfile", "vhdl2008_file"]:
+ with open(os.path.join(tf.work_root, vhdl_file), "a"):
os.utime(os.path.join(tf.work_root, vhdl_file), None)
tf.backend.configure()
- tf.compare_files(['Makefile'])
+ tf.compare_files(["Makefile"])
tf.backend.build()
- tf.compare_files(['analyze.cmd'])
+ tf.compare_files(["analyze.cmd"])
tf.backend.run()
- tf.compare_files(['elab-run.cmd'])
+ tf.compare_files(["elab-run.cmd"])
diff --git a/tests/test_icarus.py b/tests/test_icarus.py
index f8c198aab..f85d068cc 100644
--- a/tests/test_icarus.py
+++ b/tests/test_icarus.py
@@ -2,26 +2,25 @@
def test_icarus(make_edalize_test):
- name = 'test_icarus_0'
+ name = "test_icarus_0"
tool_options = {
- 'iverilog_options': ['some', 'iverilog_options'],
- 'timescale': '1ns/1ns',
+ "iverilog_options": ["some", "iverilog_options"],
+ "timescale": "1ns/1ns",
}
- tf = make_edalize_test('icarus',
- test_name=name,
- tool_options=tool_options,
- use_vpi=True)
+ tf = make_edalize_test(
+ "icarus", test_name=name, tool_options=tool_options, use_vpi=True
+ )
tf.backend.configure()
- tf.compare_files(['Makefile', name + '.scr', 'timescale.v'])
+ tf.compare_files(["Makefile", name + ".scr", "timescale.v"])
tf.backend.build()
- tf.compare_files(['iverilog.cmd', 'iverilog-vpi.cmd'])
+ tf.compare_files(["iverilog.cmd", "iverilog-vpi.cmd"])
tf.backend.run()
- tf.compare_files(['vvp.cmd'])
+ tf.compare_files(["vvp.cmd"])
def test_icarus_minimal(tmpdir):
@@ -31,25 +30,31 @@ def test_icarus_minimal(tmpdir):
from edalize_common import compare_files, tests_dir
- ref_dir = os.path.join(tests_dir, __name__, 'minimal')
- os.environ['PATH'] = os.path.join(tests_dir, 'mock_commands')+':'+os.environ['PATH']
- tool = 'icarus'
- name = 'test_'+tool+'_minimal_0'
+ ref_dir = os.path.join(tests_dir, __name__, "minimal")
+ os.environ["PATH"] = (
+ os.path.join(tests_dir, "mock_commands") + ":" + os.environ["PATH"]
+ )
+ tool = "icarus"
+ name = "test_" + tool + "_minimal_0"
work_root = str(tmpdir)
- edam = {'name' : name,
- 'toplevel' : 'top'}
+ edam = {"name": name, "toplevel": "top"}
backend = get_edatool(tool)(edam=edam, work_root=work_root)
backend.configure()
- compare_files(ref_dir, work_root, ['Makefile',
- name+'.scr',
- ])
+ compare_files(
+ ref_dir,
+ work_root,
+ [
+ "Makefile",
+ name + ".scr",
+ ],
+ )
backend.build()
- compare_files(ref_dir, work_root, ['iverilog.cmd'])
+ compare_files(ref_dir, work_root, ["iverilog.cmd"])
backend.run()
- compare_files(ref_dir, work_root, ['vvp.cmd'])
+ compare_files(ref_dir, work_root, ["vvp.cmd"])
diff --git a/tests/test_icestorm.py b/tests/test_icestorm.py
index f80400e4e..c5166830d 100644
--- a/tests/test_icestorm.py
+++ b/tests/test_icestorm.py
@@ -3,83 +3,91 @@
from edalize_common import make_edalize_test
-def run_icestorm_test(tf, pnr_cmdfile='nextpnr-ice40.cmd'):
+def run_icestorm_test(tf, pnr_cmdfile="nextpnr-ice40.cmd"):
tf.backend.configure()
- tf.compare_files(['Makefile', 'edalize_yosys_procs.tcl', 'edalize_yosys_template.tcl'])
+ tf.compare_files(
+ ["Makefile", "edalize_yosys_procs.tcl", "edalize_yosys_template.tcl"]
+ )
- f = os.path.join(tf.work_root, 'pcf_file.pcf')
- with open(f, 'a'):
+ f = os.path.join(tf.work_root, "pcf_file.pcf")
+ with open(f, "a"):
os.utime(f, None)
tf.backend.build()
- tf.compare_files(['yosys.cmd', pnr_cmdfile, 'icepack.cmd'])
+ tf.compare_files(["yosys.cmd", pnr_cmdfile, "icepack.cmd"])
def test_icestorm(make_edalize_test):
tool_options = {
- 'yosys_synth_options': ['some', 'yosys_synth_options'],
- 'arachne_pnr_options': ['a', 'few', 'arachne_pnr_options']
+ "yosys_synth_options": ["some", "yosys_synth_options"],
+ "arachne_pnr_options": ["a", "few", "arachne_pnr_options"],
}
- tf = make_edalize_test('icestorm',
- param_types=['vlogdefine', 'vlogparam'],
- tool_options=tool_options)
+ tf = make_edalize_test(
+ "icestorm", param_types=["vlogdefine", "vlogparam"], tool_options=tool_options
+ )
run_icestorm_test(tf)
def test_icestorm_minimal(make_edalize_test):
- files = [{'name': 'pcf_file.pcf', 'file_type': 'PCF'}]
- tf = make_edalize_test('icestorm',
- param_types=[],
- files=files,
- ref_dir='minimal')
+ files = [{"name": "pcf_file.pcf", "file_type": "PCF"}]
+ tf = make_edalize_test("icestorm", param_types=[], files=files, ref_dir="minimal")
run_icestorm_test(tf)
def test_icestorm_no_pcf(make_edalize_test):
- tf = make_edalize_test('icestorm',
- param_types=[],
- files=[])
+ tf = make_edalize_test("icestorm", param_types=[], files=[])
tf.backend.configure()
+
def test_icestorm_multiple_pcf(make_edalize_test):
- files = [{'name': 'pcf_file.pcf', 'file_type': 'PCF'},
- {'name': 'pcf_file2.pcf', 'file_type': 'PCF'}]
- tf = make_edalize_test('icestorm',
- param_types=[],
- files=files)
+ files = [
+ {"name": "pcf_file.pcf", "file_type": "PCF"},
+ {"name": "pcf_file2.pcf", "file_type": "PCF"},
+ ]
+ tf = make_edalize_test("icestorm", param_types=[], files=files)
with pytest.raises(RuntimeError) as e:
tf.backend.configure()
- assert "Nextpnr only supports one PCF file. Found pcf_file.pcf and pcf_file2.pcf" in str(e.value)
+ assert (
+ "Nextpnr only supports one PCF file. Found pcf_file.pcf and pcf_file2.pcf"
+ in str(e.value)
+ )
def test_icestorm_nextpnr(make_edalize_test):
tool_options = {
- 'yosys_synth_options': ['some', 'yosys_synth_options'],
- 'arachne_pnr_options': ['a', 'few', 'arachne_pnr_options'],
- 'nextpnr_options': ['multiple', 'nextpnr_options'],
- 'pnr': 'next'
+ "yosys_synth_options": ["some", "yosys_synth_options"],
+ "arachne_pnr_options": ["a", "few", "arachne_pnr_options"],
+ "nextpnr_options": ["multiple", "nextpnr_options"],
+ "pnr": "next",
}
- tf = make_edalize_test('icestorm',
- param_types=['vlogdefine', 'vlogparam'],
- tool_options=tool_options,
- ref_dir='nextpnr')
+ tf = make_edalize_test(
+ "icestorm",
+ param_types=["vlogdefine", "vlogparam"],
+ tool_options=tool_options,
+ ref_dir="nextpnr",
+ )
- run_icestorm_test(tf, pnr_cmdfile='nextpnr-ice40.cmd')
+ run_icestorm_test(tf, pnr_cmdfile="nextpnr-ice40.cmd")
def test_icestorm_invalid_pnr(make_edalize_test):
- name = 'test_icestorm_0'
- tf = make_edalize_test('icestorm',
- test_name=name,
- param_types=['vlogdefine', 'vlogparam'],
- tool_options={'pnr': 'invalid'},
- ref_dir='nextpnr')
+ name = "test_icestorm_0"
+ tf = make_edalize_test(
+ "icestorm",
+ test_name=name,
+ param_types=["vlogdefine", "vlogparam"],
+ tool_options={"pnr": "invalid"},
+ ref_dir="nextpnr",
+ )
with pytest.raises(RuntimeError) as e:
tf.backend.configure()
- assert "Invalid pnr option 'invalid'. Valid values are 'arachne' for Arachne-pnr, 'next' for nextpnr or 'none' to only perform synthesis" in str(e.value)
+ assert (
+ "Invalid pnr option 'invalid'. Valid values are 'arachne' for Arachne-pnr, 'next' for nextpnr or 'none' to only perform synthesis"
+ in str(e.value)
+ )
diff --git a/tests/test_ise.py b/tests/test_ise.py
index 1a6bdfc82..8e25f1600 100644
--- a/tests/test_ise.py
+++ b/tests/test_ise.py
@@ -3,36 +3,37 @@
def test_ise(make_edalize_test):
- name = 'test_ise_0'
+ name = "test_ise_0"
tool_options = {
- 'family': 'spartan6',
- 'device': 'xc6slx45',
- 'package': 'csg324',
- 'speed': '-2'
+ "family": "spartan6",
+ "device": "xc6slx45",
+ "package": "csg324",
+ "speed": "-2",
}
- tf = make_edalize_test('ise',
- test_name=name,
- param_types=['vlogdefine', 'vlogparam'],
- tool_options=tool_options)
+ tf = make_edalize_test(
+ "ise",
+ test_name=name,
+ param_types=["vlogdefine", "vlogparam"],
+ tool_options=tool_options,
+ )
tf.backend.configure()
- tf.compare_files(['Makefile', 'config.mk',
- name + '.tcl', name + '_run.tcl'])
+ tf.compare_files(["Makefile", "config.mk", name + ".tcl", name + "_run.tcl"])
tf.backend.build()
- tf.compare_files(['xtclsh.cmd'])
+ tf.compare_files(["xtclsh.cmd"])
def test_ise_missing_options(make_edalize_test):
tool_options = {
- 'family': 'spartan6',
- 'device': 'xc6slx45',
- 'package': 'csg324',
+ "family": "spartan6",
+ "device": "xc6slx45",
+ "package": "csg324",
}
- tf = make_edalize_test('ise',
- param_types=['vlogdefine', 'vlogparam'],
- tool_options=tool_options)
+ tf = make_edalize_test(
+ "ise", param_types=["vlogdefine", "vlogparam"], tool_options=tool_options
+ )
with pytest.raises(RuntimeError) as e:
tf.backend.configure()
diff --git a/tests/test_isim.py b/tests/test_isim.py
index c38c5904c..0889f943c 100644
--- a/tests/test_isim.py
+++ b/tests/test_isim.py
@@ -3,21 +3,19 @@
def test_isim(make_edalize_test):
tool_options = {
- 'fuse_options': ['some', 'fuse_options'],
- 'isim_options': ['a', 'few', 'isim_options'],
+ "fuse_options": ["some", "fuse_options"],
+ "isim_options": ["a", "few", "isim_options"],
}
- tf = make_edalize_test('isim',
- tool_options=tool_options)
+ tf = make_edalize_test("isim", tool_options=tool_options)
tf.backend.configure()
- tf.compare_files(['config.mk',
- 'Makefile',
- 'run_test_isim_0.tcl',
- 'test_isim_0.prj'])
+ tf.compare_files(
+ ["config.mk", "Makefile", "run_test_isim_0.tcl", "test_isim_0.prj"]
+ )
- tf.copy_to_work_root('test_isim_0')
+ tf.copy_to_work_root("test_isim_0")
tf.backend.run()
- tf.compare_files(['run.cmd'])
+ tf.compare_files(["run.cmd"])
diff --git a/tests/test_libero.py b/tests/test_libero.py
index a6673def6..ea593273b 100644
--- a/tests/test_libero.py
+++ b/tests/test_libero.py
@@ -2,41 +2,43 @@
def test_libero(make_edalize_test):
- """ Test passing tool options to the Libero backend """
- name = 'libero-test'
- tool_options = {
- 'family': 'PolarFire',
- 'die': 'MPF300TS_ES',
- 'package': 'FCG1152'
- }
+ """Test passing tool options to the Libero backend"""
+ name = "libero-test"
+ tool_options = {"family": "PolarFire", "die": "MPF300TS_ES", "package": "FCG1152"}
- tf = make_edalize_test('libero',
- test_name=name,
- tool_options=tool_options)
+ tf = make_edalize_test("libero", test_name=name, tool_options=tool_options)
tf.backend.configure()
- tf.compare_files([name + '-project.tcl', name +
- '-run.tcl', name + '-syn-user.tcl', ])
+ tf.compare_files(
+ [
+ name + "-project.tcl",
+ name + "-run.tcl",
+ name + "-syn-user.tcl",
+ ]
+ )
def test_libero_with_params(make_edalize_test):
- """ Test passing tool options to the Libero backend """
- name = 'libero-test-all'
+ """Test passing tool options to the Libero backend"""
+ name = "libero-test-all"
tool_options = {
- 'family': 'PolarFire',
- 'die': 'MPF300TS_ES',
- 'package': 'FCG1152',
- 'speed': '-1',
- 'dievoltage': '1.0',
- 'range': 'EXT',
- 'defiostd': 'LVCMOS 1.8V',
- 'hdl': 'VHDL',
+ "family": "PolarFire",
+ "die": "MPF300TS_ES",
+ "package": "FCG1152",
+ "speed": "-1",
+ "dievoltage": "1.0",
+ "range": "EXT",
+ "defiostd": "LVCMOS 1.8V",
+ "hdl": "VHDL",
}
- tf = make_edalize_test('libero',
- test_name=name,
- tool_options=tool_options)
+ tf = make_edalize_test("libero", test_name=name, tool_options=tool_options)
tf.backend.configure()
- tf.compare_files([name + '-project.tcl', name +
- '-run.tcl', name + '-syn-user.tcl', ])
\ No newline at end of file
+ tf.compare_files(
+ [
+ name + "-project.tcl",
+ name + "-run.tcl",
+ name + "-syn-user.tcl",
+ ]
+ )
diff --git a/tests/test_modelsim.py b/tests/test_modelsim.py
index c63fed8fb..63c03f3cd 100644
--- a/tests/test_modelsim.py
+++ b/tests/test_modelsim.py
@@ -5,34 +5,33 @@
def test_modelsim(make_edalize_test):
tool_options = {
- 'vcom_options': ['various', 'vcom_options'],
- 'vlog_options': ['some', 'vlog_options'],
- 'vsim_options': ['a', 'few', 'vsim_options'],
+ "vcom_options": ["various", "vcom_options"],
+ "vlog_options": ["some", "vlog_options"],
+ "vsim_options": ["a", "few", "vsim_options"],
}
# FIXME: Add VPI tests
- tf = make_edalize_test('modelsim',
- tool_options=tool_options)
+ tf = make_edalize_test("modelsim", tool_options=tool_options)
tf.backend.configure()
- tf.compare_files(['Makefile',
- 'edalize_build_rtl.tcl',
- 'edalize_main.tcl'])
+ tf.compare_files(["Makefile", "edalize_build_rtl.tcl", "edalize_main.tcl"])
orig_env = os.environ.copy()
try:
- os.environ['MODEL_TECH'] = os.path.join(tests_dir, 'mock_commands')
+ os.environ["MODEL_TECH"] = os.path.join(tests_dir, "mock_commands")
tf.backend.build()
- os.makedirs(os.path.join(tf.work_root, 'work'))
+ os.makedirs(os.path.join(tf.work_root, "work"))
- tf.compare_files(['vsim.cmd'])
+ tf.compare_files(["vsim.cmd"])
tf.backend.run()
- assert filecmp.cmp(os.path.join(tf.ref_dir, 'vsim2.cmd'),
- os.path.join(tf.work_root, 'vsim.cmd'),
- shallow=False)
+ assert filecmp.cmp(
+ os.path.join(tf.ref_dir, "vsim2.cmd"),
+ os.path.join(tf.work_root, "vsim.cmd"),
+ shallow=False,
+ )
finally:
os.environ = orig_env
diff --git a/tests/test_morty.py b/tests/test_morty.py
index c806d3298..db797982d 100644
--- a/tests/test_morty.py
+++ b/tests/test_morty.py
@@ -3,12 +3,10 @@
def test_morty(make_edalize_test):
- tool_options = {'morty_options' : ['--prefix', 'blub']}
- paramtypes = ['vlogdefine']
+ tool_options = {"morty_options": ["--prefix", "blub"]}
+ paramtypes = ["vlogdefine"]
- tf = make_edalize_test('morty',
- tool_options=tool_options,
- param_types=paramtypes)
+ tf = make_edalize_test("morty", tool_options=tool_options, param_types=paramtypes)
tf.backend.build()
- tf.compare_files(['morty.cmd'])
+ tf.compare_files(["morty.cmd"])
diff --git a/tests/test_openlane.py b/tests/test_openlane.py
index 41e68b903..2df39e8d5 100644
--- a/tests/test_openlane.py
+++ b/tests/test_openlane.py
@@ -1,14 +1,15 @@
from edalize_common import make_edalize_test
import os
+
def test_openlane(make_edalize_test):
tool_options = {}
- paramtypes = ['vlogdefine']
+ paramtypes = ["vlogdefine"]
- tf = make_edalize_test('openlane',
- tool_options=tool_options,
- param_types=paramtypes)
+ tf = make_edalize_test(
+ "openlane", tool_options=tool_options, param_types=paramtypes
+ )
tf.backend.configure()
tf.backend.build()
- tf.compare_files(['config.tcl', 'Makefile','flow.tcl.cmd'])
+ tf.compare_files(["config.tcl", "Makefile", "flow.tcl.cmd"])
diff --git a/tests/test_quartus.py b/tests/test_quartus.py
index a7cdc1699..5e5e9245e 100644
--- a/tests/test_quartus.py
+++ b/tests/test_quartus.py
@@ -15,20 +15,50 @@
"""
-qsys_fill = {"Standard": "",
- "Pro" : "tool=\"QsysPro\""}
+qsys_fill = {"Standard": "", "Pro": 'tool="QsysPro"'}
+
+test_sets = {
+ "Standard": {
+ "Quartus": [
+ "ip-generate.cmd",
+ "quartus_asm.cmd",
+ "quartus_fit.cmd",
+ "quartus_map.cmd",
+ "quartus_sh.cmd",
+ "quartus_sta.cmd",
+ ],
+ "DSE": [
+ "ip-generate.cmd",
+ "quartus_map.cmd",
+ "quartus_sh.cmd",
+ "quartus_dse.cmd",
+ ],
+ },
+ "Pro": {
+ "Quartus": [
+ "qsys-generate.cmd",
+ "quartus_asm.cmd",
+ "quartus_fit.cmd",
+ "quartus_syn.cmd",
+ "quartus_sh.cmd",
+ "quartus_sta.cmd",
+ ],
+ "DSE": [
+ "qsys-generate.cmd",
+ "quartus_syn.cmd",
+ "quartus_sh.cmd",
+ "quartus_dse.cmd",
+ ],
+ },
+}
-test_sets = {"Standard": {"Quartus": ['ip-generate.cmd', 'quartus_asm.cmd', 'quartus_fit.cmd', 'quartus_map.cmd', 'quartus_sh.cmd', 'quartus_sta.cmd'],
- "DSE" : ['ip-generate.cmd', 'quartus_map.cmd', 'quartus_sh.cmd', 'quartus_dse.cmd']},
- "Pro" : {"Quartus": ['qsys-generate.cmd', 'quartus_asm.cmd', 'quartus_fit.cmd', 'quartus_syn.cmd', 'quartus_sh.cmd', 'quartus_sta.cmd'],
- "DSE" : ['qsys-generate.cmd', 'quartus_syn.cmd', 'quartus_sh.cmd', 'quartus_dse.cmd']}}
def test_quartus(make_edalize_test):
tool_options = {
- 'family' : 'Cyclone V',
- 'device' : '5CSXFC6D6F31C8ES',
- 'quartus_options' : ['some', 'quartus_options'],
- 'dse_options' : ['some', 'dse_options'],
+ "family": "Cyclone V",
+ "device": "5CSXFC6D6F31C8ES",
+ "quartus_options": ["some", "quartus_options"],
+ "dse_options": ["some", "dse_options"],
}
# Test each edition of Quartus
@@ -44,18 +74,20 @@ def test_quartus(make_edalize_test):
# present
os.environ["FUSESOC_QUARTUS_EDITION"] = edition
- tf = make_edalize_test('quartus',
- param_types=['vlogdefine', 'vlogparam'],
- tool_options=_tool_options,
- ref_dir=edition)
+ tf = make_edalize_test(
+ "quartus",
+ param_types=["vlogdefine", "vlogparam"],
+ tool_options=_tool_options,
+ ref_dir=edition,
+ )
# Each edition performs checks on the QSYS files present, so
# provide a minimal example
- with open(os.path.join(tf.work_root, "qsys_file"), 'w') as f:
+ with open(os.path.join(tf.work_root, "qsys_file"), "w") as f:
f.write(qsys_file.format(qsys_fill[edition]))
tf.backend.configure()
- tf.compare_files(['Makefile', tf.test_name + '.tcl'])
+ tf.compare_files(["Makefile", tf.test_name + ".tcl"])
tf.backend.build()
tf.compare_files(test_sets[edition][pnr])
diff --git a/tests/test_radiant.py b/tests/test_radiant.py
index bc77f73c9..872397ab4 100644
--- a/tests/test_radiant.py
+++ b/tests/test_radiant.py
@@ -2,21 +2,23 @@
def test_radiant(make_edalize_test):
- name = 'test_radiant_0'
- tf = make_edalize_test('radiant',
- test_name=name,
- param_types=['generic', 'vlogdefine', 'vlogparam'],
- tool_options={
- 'part': 'LIFCL-40-9BG400C',
- })
+ name = "test_radiant_0"
+ tf = make_edalize_test(
+ "radiant",
+ test_name=name,
+ param_types=["generic", "vlogdefine", "vlogparam"],
+ tool_options={
+ "part": "LIFCL-40-9BG400C",
+ },
+ )
tf.backend.configure()
- tf.compare_files([name + '.tcl', name + '_run.tcl'])
+ tf.compare_files([name + ".tcl", name + "_run.tcl"])
tf.backend.build()
- tf.compare_files(['radiantc.cmd'])
+ tf.compare_files(["radiantc.cmd"])
def test_radiant_minimal(tmpdir):
@@ -26,28 +28,36 @@ def test_radiant_minimal(tmpdir):
from edalize_common import compare_files, tests_dir
- ref_dir = os.path.join(tests_dir, __name__, 'minimal')
- os.environ['PATH'] = os.path.join(tests_dir, 'mock_commands')+':'+os.environ['PATH']
- tool = 'radiant'
+ ref_dir = os.path.join(tests_dir, __name__, "minimal")
+ os.environ["PATH"] = (
+ os.path.join(tests_dir, "mock_commands") + ":" + os.environ["PATH"]
+ )
+ tool = "radiant"
tool_options = {
- 'part' : 'LIFCL-40-9BG400C',
+ "part": "LIFCL-40-9BG400C",
}
- name = 'test_{}_minimal_0'.format(tool)
+ name = "test_{}_minimal_0".format(tool)
work_root = str(tmpdir)
- edam = {'name' : name,
- 'tool_options' : {tool : tool_options}
- }
+ edam = {"name": name, "tool_options": {tool: tool_options}}
backend = get_edatool(tool)(edam=edam, work_root=work_root)
backend.configure()
- compare_files(ref_dir, work_root, [
- name+'.tcl',
- name+'_run.tcl',
- ])
+ compare_files(
+ ref_dir,
+ work_root,
+ [
+ name + ".tcl",
+ name + "_run.tcl",
+ ],
+ )
backend.build()
- compare_files(ref_dir, work_root, [
- 'radiantc.cmd',
- ])
+ compare_files(
+ ref_dir,
+ work_root,
+ [
+ "radiantc.cmd",
+ ],
+ )
diff --git a/tests/test_reporting.py b/tests/test_reporting.py
index 21a1d3b02..96cd31b74 100644
--- a/tests/test_reporting.py
+++ b/tests/test_reporting.py
@@ -110,7 +110,7 @@ def picorv32_cyclone4_data():
def test_picorv32_quartus_cyclone4_summary(picorv32_cyclone4_data):
- """ Check all summary fields """
+ """Check all summary fields"""
summary = picorv32_cyclone4_data["summary"]
@@ -179,7 +179,7 @@ def picorv32_cyclone10_data():
def test_picorv32_quartus_cyclone10_summary(picorv32_cyclone10_data):
- """ Check all summary fields """
+ """Check all summary fields"""
summary = picorv32_cyclone10_data["summary"]
@@ -198,7 +198,7 @@ def test_picorv32_quartus_cyclone10_summary(picorv32_cyclone10_data):
def test_picorv32_quartus_cyclone10_no_header(picorv32_cyclone10_data):
- """ Check a table with no header """
+ """Check a table with no header"""
df = picorv32_cyclone10_data["resources"]["Fitter Summary"].set_index(0)
@@ -240,7 +240,7 @@ def picorv32_s6_data():
def test_picorv32_ise_spartan6_summary(picorv32_s6_data):
- """ Check all summary fields """
+ """Check all summary fields"""
summary = picorv32_s6_data["summary"]
@@ -259,7 +259,7 @@ def test_picorv32_ise_spartan6_summary(picorv32_s6_data):
def test_picorv32_ise_spartan6_multiline(picorv32_s6_data):
- """ Check multi-line headings """
+ """Check multi-line headings"""
df = picorv32_s6_data["resources"]["IOB Properties"]
assert list(df.columns) == [
@@ -277,7 +277,7 @@ def test_picorv32_ise_spartan6_multiline(picorv32_s6_data):
def test_picorv32_ise_spartan6_resources(picorv32_s6_data):
- """ Check resource values """
+ """Check resource values"""
rpt = picorv32_s6_data["resources"]
@@ -317,7 +317,7 @@ def test_picorv32_ise_spartan6_resources(picorv32_s6_data):
def test_picorv32_ise_spartan6_timing(picorv32_s6_data):
- """ Check timing values """
+ """Check timing values"""
rpt = picorv32_s6_data["timing"]
@@ -349,7 +349,7 @@ def picorv32_artix7_data():
def test_picorv32_artix7_summary(picorv32_artix7_data):
- """ Check all summary fields """
+ """Check all summary fields"""
summary = picorv32_artix7_data["summary"]
@@ -368,7 +368,7 @@ def test_picorv32_artix7_summary(picorv32_artix7_data):
def test_picorv32_artix7_resources(picorv32_artix7_data):
- """ Check selected resource report fields """
+ """Check selected resource report fields"""
rpt = picorv32_artix7_data["resources"]
@@ -384,7 +384,7 @@ def test_picorv32_artix7_resources(picorv32_artix7_data):
def test_picorv32_artix7_timing(picorv32_artix7_data):
- """ Check selected timing report fields """
+ """Check selected timing report fields"""
rpt = picorv32_artix7_data["timing"]
@@ -410,7 +410,7 @@ def picorv32_kusp_data():
def test_picorv32_kusp_summary(picorv32_kusp_data):
- """ Check all summary fields """
+ """Check all summary fields"""
summary = picorv32_kusp_data["summary"]
@@ -429,7 +429,7 @@ def test_picorv32_kusp_summary(picorv32_kusp_data):
def test_picorv32_kusp_resources(picorv32_kusp_data):
- """ Check selected resource report fields """
+ """Check selected resource report fields"""
tables = picorv32_kusp_data["resources"]
@@ -448,7 +448,7 @@ def test_picorv32_kusp_resources(picorv32_kusp_data):
def test_picorv32_kusp_timing(picorv32_kusp_data):
- """ Check selected timing report fields """
+ """Check selected timing report fields"""
rpt = picorv32_kusp_data["timing"]
diff --git a/tests/test_rivierapro.py b/tests/test_rivierapro.py
index 7e25a7046..23bf959a7 100644
--- a/tests/test_rivierapro.py
+++ b/tests/test_rivierapro.py
@@ -5,32 +5,33 @@
def test_rivierapro(make_edalize_test):
tool_options = {
- 'vlog_options': ['some', 'vlog_options'],
- 'vsim_options': ['a', 'few', 'vsim_options'],
+ "vlog_options": ["some", "vlog_options"],
+ "vsim_options": ["a", "few", "vsim_options"],
}
# FIXME: Add VPI tests
- tf = make_edalize_test('rivierapro',
- tool_options=tool_options)
+ tf = make_edalize_test("rivierapro", tool_options=tool_options)
tf.backend.configure()
- tf.compare_files(['edalize_build_rtl.tcl',
- 'edalize_launch.tcl',
- 'edalize_main.tcl'])
+ tf.compare_files(
+ ["edalize_build_rtl.tcl", "edalize_launch.tcl", "edalize_main.tcl"]
+ )
orig_env = os.environ.copy()
try:
- os.environ['ALDEC_PATH'] = os.path.join(tests_dir, 'mock_commands')
+ os.environ["ALDEC_PATH"] = os.path.join(tests_dir, "mock_commands")
tf.backend.build()
- os.makedirs(os.path.join(tf.work_root, 'work'))
+ os.makedirs(os.path.join(tf.work_root, "work"))
- tf.compare_files(['vsim.cmd'])
+ tf.compare_files(["vsim.cmd"])
tf.backend.run()
- assert filecmp.cmp(os.path.join(tf.ref_dir, 'vsim2.cmd'),
- os.path.join(tf.work_root, 'vsim.cmd'),
- shallow=False)
+ assert filecmp.cmp(
+ os.path.join(tf.ref_dir, "vsim2.cmd"),
+ os.path.join(tf.work_root, "vsim.cmd"),
+ shallow=False,
+ )
finally:
os.environ = orig_env
diff --git a/tests/test_spyglass.py b/tests/test_spyglass.py
index a62845702..2f685906e 100644
--- a/tests/test_spyglass.py
+++ b/tests/test_spyglass.py
@@ -4,33 +4,39 @@
def run_spyglass_test(tf):
tf.backend.configure()
- tf.compare_files(['Makefile',
- 'spyglass-run-design_read.tcl',
- 'spyglass-run-lint_lint_rtl.tcl',
- tf.test_name + '.prj'])
+ tf.compare_files(
+ [
+ "Makefile",
+ "spyglass-run-design_read.tcl",
+ "spyglass-run-lint_lint_rtl.tcl",
+ tf.test_name + ".prj",
+ ]
+ )
tf.backend.build()
- tf.compare_files(['spyglass.cmd'])
+ tf.compare_files(["spyglass.cmd"])
def test_spyglass_defaults(make_edalize_test):
- """ Test if the SpyGlass backend picks up the tool defaults """
- tf = make_edalize_test('spyglass',
- param_types=['vlogdefine', 'vlogparam'],
- ref_dir='defaults')
+ """Test if the SpyGlass backend picks up the tool defaults"""
+ tf = make_edalize_test(
+ "spyglass", param_types=["vlogdefine", "vlogparam"], ref_dir="defaults"
+ )
run_spyglass_test(tf)
def test_spyglass_tooloptions(make_edalize_test):
- """ Test passing tool options to the Spyglass backend """
+ """Test passing tool options to the Spyglass backend"""
tool_options = {
- 'methodology': 'GuideWare/latest/block/rtl_somethingelse',
- 'goals': ['lint/lint_rtl', 'some/othergoal'],
- 'spyglass_options': ['handlememory yes'],
- 'rule_parameters': ['handle_static_caselabels yes'],
+ "methodology": "GuideWare/latest/block/rtl_somethingelse",
+ "goals": ["lint/lint_rtl", "some/othergoal"],
+ "spyglass_options": ["handlememory yes"],
+ "rule_parameters": ["handle_static_caselabels yes"],
}
- tf = make_edalize_test('spyglass',
- param_types=['vlogdefine', 'vlogparam'],
- ref_dir='tooloptions',
- tool_options=tool_options)
+ tf = make_edalize_test(
+ "spyglass",
+ param_types=["vlogdefine", "vlogparam"],
+ ref_dir="tooloptions",
+ tool_options=tool_options,
+ )
run_spyglass_test(tf)
diff --git a/tests/test_symbiflow.py b/tests/test_symbiflow.py
index 014dcd912..d1c286349 100644
--- a/tests/test_symbiflow.py
+++ b/tests/test_symbiflow.py
@@ -16,7 +16,7 @@ def test_symbiflow_vtr(make_edalize_test):
"package": "csg324-1",
"vendor": "xilinx",
"pnr": "vtr",
- "vpr_options": "--fake_option 1000"
+ "vpr_options": "--fake_option 1000",
}
files = [
@@ -24,12 +24,14 @@ def test_symbiflow_vtr(make_edalize_test):
{"name": "top.sdc", "file_type": "SDC"},
{"name": "top.pcf", "file_type": "PCF"},
]
- tf = make_edalize_test("symbiflow",
- test_name="test_symbiflow_vtr_0",
- param_types=["vlogdefine", "vlogparam"],
- tool_options=tool_options,
- files=files,
- ref_dir="vtr")
+ tf = make_edalize_test(
+ "symbiflow",
+ test_name="test_symbiflow_vtr_0",
+ param_types=["vlogdefine", "vlogparam"],
+ tool_options=tool_options,
+ files=files,
+ ref_dir="vtr",
+ )
run_symbiflow_test(tf)
@@ -42,7 +44,7 @@ def test_symbiflow_nextpnr_xilinx(make_edalize_test):
"package": "csg324-1",
"vendor": "xilinx",
"pnr": "nextpnr",
- "nextpnr_options": "--fake_option 1000"
+ "nextpnr_options": "--fake_option 1000",
}
files = [
@@ -51,12 +53,14 @@ def test_symbiflow_nextpnr_xilinx(make_edalize_test):
]
test_name = "test_symbiflow_nextpnr_xilinx_0"
- tf = make_edalize_test("symbiflow",
- test_name=test_name,
- param_types=["vlogdefine", "vlogparam"],
- tool_options=tool_options,
- files=files,
- ref_dir=os.path.join("nextpnr", "xilinx"))
+ tf = make_edalize_test(
+ "symbiflow",
+ test_name=test_name,
+ param_types=["vlogdefine", "vlogparam"],
+ tool_options=tool_options,
+ files=files,
+ ref_dir=os.path.join("nextpnr", "xilinx"),
+ )
config_files = [
"edalize_yosys_procs.tcl",
@@ -74,7 +78,7 @@ def test_symbiflow_nextpnr_fpga_interchange(make_edalize_test):
"package": "csg324-1",
"vendor": "xilinx",
"pnr": "nextpnr",
- "nextpnr_options": "--fake_option 1000"
+ "nextpnr_options": "--fake_option 1000",
}
files = [
@@ -84,12 +88,14 @@ def test_symbiflow_nextpnr_fpga_interchange(make_edalize_test):
]
test_name = "test_symbiflow_nextpnr_fpga_interchange_0"
- tf = make_edalize_test("symbiflow",
- test_name=test_name,
- param_types=["vlogdefine", "vlogparam"],
- tool_options=tool_options,
- files=files,
- ref_dir=os.path.join("nextpnr", "fpga_interchange"))
+ tf = make_edalize_test(
+ "symbiflow",
+ test_name=test_name,
+ param_types=["vlogdefine", "vlogparam"],
+ tool_options=tool_options,
+ files=files,
+ ref_dir=os.path.join("nextpnr", "fpga_interchange"),
+ )
config_files = [
"edalize_yosys_procs.tcl",
diff --git a/tests/test_symbiyosys.py b/tests/test_symbiyosys.py
index 61eb3127c..b02a04b0c 100644
--- a/tests/test_symbiyosys.py
+++ b/tests/test_symbiyosys.py
@@ -2,24 +2,26 @@
def test_symbiyosys(make_edalize_test):
- tf = make_edalize_test('symbiyosys',
- param_types=['vlogdefine', 'vlogparam'],
- tool_options={
- 'tasknames': ['task0', 'task1'],
- })
+ tf = make_edalize_test(
+ "symbiyosys",
+ param_types=["vlogdefine", "vlogparam"],
+ tool_options={
+ "tasknames": ["task0", "task1"],
+ },
+ )
# Copy our example configuration file to the work root. The name matches an
# entry in edalize_common's FILES list. Note that we chose a name that
# doesn't collide with test.sby (the file that the tool generates, in the
# same directory).
- tf.copy_to_work_root('config.sby.j2')
+ tf.copy_to_work_root("config.sby.j2")
tf.backend.configure()
# The configure step is supposed to interpolate the .sby file and dump
# lists of RTL files and include directories. (These are needed if you want
# to use sv2v as a fusesoc pre_build hook).
- tf.compare_files(['test.sby', 'files.txt', 'incdirs.txt'])
+ tf.compare_files(["test.sby", "files.txt", "incdirs.txt"])
# The 'build' step doesn't actually do anything, but we should run it to
# check that nothing explodes.
@@ -29,4 +31,4 @@ def test_symbiyosys(make_edalize_test):
# arguments to "sby.cmd".
tf.backend.run()
- tf.compare_files(['sby.cmd'])
+ tf.compare_files(["sby.cmd"])
diff --git a/tests/test_trellis.py b/tests/test_trellis.py
index 0848782b5..ba67591cc 100644
--- a/tests/test_trellis.py
+++ b/tests/test_trellis.py
@@ -3,40 +3,45 @@
from edalize_common import make_edalize_test
-def run_trellis_test(tf, pnr_cmdfile='nextpnr-ice40.cmd'):
+def run_trellis_test(tf, pnr_cmdfile="nextpnr-ice40.cmd"):
tf.backend.configure()
- tf.compare_files(['Makefile', 'edalize_yosys_procs.tcl', 'edalize_yosys_template.tcl'])
+ tf.compare_files(
+ ["Makefile", "edalize_yosys_procs.tcl", "edalize_yosys_template.tcl"]
+ )
tf.backend.build()
- tf.compare_files(['yosys.cmd', 'nextpnr-ecp5.cmd', 'ecppack.cmd'])
+ tf.compare_files(["yosys.cmd", "nextpnr-ecp5.cmd", "ecppack.cmd"])
+
def test_trellis(make_edalize_test):
tool_options = {
- 'yosys_synth_options': ['some', 'yosys_synth_options'],
- 'nextpnr_options': ['a', 'few', 'nextpnr_options']
+ "yosys_synth_options": ["some", "yosys_synth_options"],
+ "nextpnr_options": ["a", "few", "nextpnr_options"],
}
- tf = make_edalize_test('trellis',
- param_types=['vlogdefine', 'vlogparam'],
- tool_options=tool_options)
+ tf = make_edalize_test(
+ "trellis", param_types=["vlogdefine", "vlogparam"], tool_options=tool_options
+ )
run_trellis_test(tf)
+
def test_trellis_minimal(make_edalize_test):
- tf = make_edalize_test('trellis',
- param_types=[],
- files=[],
- ref_dir='minimal')
+ tf = make_edalize_test("trellis", param_types=[], files=[], ref_dir="minimal")
run_trellis_test(tf)
+
def test_trellis_multiple_pcf(make_edalize_test):
- files = [{'name': 'pcf_file.pcf', 'file_type': 'PCF'},
- {'name': 'pcf_file2.pcf', 'file_type': 'PCF'}]
- tf = make_edalize_test('trellis',
- param_types=[],
- files=files)
+ files = [
+ {"name": "pcf_file.pcf", "file_type": "PCF"},
+ {"name": "pcf_file2.pcf", "file_type": "PCF"},
+ ]
+ tf = make_edalize_test("trellis", param_types=[], files=files)
with pytest.raises(RuntimeError) as e:
tf.backend.configure()
- assert "Nextpnr only supports one PCF file. Found pcf_file.pcf and pcf_file2.pcf" in str(e.value)
+ assert (
+ "Nextpnr only supports one PCF file. Found pcf_file.pcf and pcf_file2.pcf"
+ in str(e.value)
+ )
diff --git a/tests/test_vcs.py b/tests/test_vcs.py
index 5781490e9..c065899d4 100644
--- a/tests/test_vcs.py
+++ b/tests/test_vcs.py
@@ -4,29 +4,31 @@
def run_vcs_test(tf):
tf.backend.configure()
- tf.compare_files(['Makefile', tf.test_name + '.scr'])
+ tf.compare_files(["Makefile", tf.test_name + ".scr"])
tf.backend.build()
- tf.compare_files(['vcs.cmd'])
+ tf.compare_files(["vcs.cmd"])
tf.backend.run()
- tf.compare_files(['run.cmd'])
+ tf.compare_files(["run.cmd"])
def test_vcs_tool_options(make_edalize_test):
tool_options = {
- 'vcs_options' : [ '-debug_access+pp', '-debug_access+all' ],
- 'run_options' : [ '-licqueue' ],
+ "vcs_options": ["-debug_access+pp", "-debug_access+all"],
+ "run_options": ["-licqueue"],
}
- tf = make_edalize_test('vcs',
- test_name='test_vcs_tool_options_0',
- ref_dir='tool_options',
- tool_options=tool_options)
+ tf = make_edalize_test(
+ "vcs",
+ test_name="test_vcs_tool_options_0",
+ ref_dir="tool_options",
+ tool_options=tool_options,
+ )
run_vcs_test(tf)
def test_vcs_no_tool_options(make_edalize_test):
- tf = make_edalize_test('vcs', ref_dir='no_tool_options')
+ tf = make_edalize_test("vcs", ref_dir="no_tool_options")
run_vcs_test(tf)
@@ -37,24 +39,24 @@ def test_vcs_minimal(tmpdir):
from edalize_common import compare_files, tests_dir
- ref_dir = os.path.join(tests_dir, __name__, 'minimal')
- os.environ['PATH'] = os.path.join(tests_dir, 'mock_commands')+':'+os.environ['PATH']
- tool = 'vcs'
- name = 'test_'+tool+'_minimal_0'
+ ref_dir = os.path.join(tests_dir, __name__, "minimal")
+ os.environ["PATH"] = (
+ os.path.join(tests_dir, "mock_commands") + ":" + os.environ["PATH"]
+ )
+ tool = "vcs"
+ name = "test_" + tool + "_minimal_0"
work_root = str(tmpdir)
- edam = {'name' : name,
- 'toplevel' : 'top'}
+ edam = {"name": name, "toplevel": "top"}
backend = get_edatool(tool)(edam=edam, work_root=work_root)
backend.configure()
- compare_files(ref_dir, work_root, ['Makefile', name + '.scr' ])
+ compare_files(ref_dir, work_root, ["Makefile", name + ".scr"])
backend.build()
- compare_files(ref_dir, work_root, ['vcs.cmd'])
+ compare_files(ref_dir, work_root, ["vcs.cmd"])
backend.run()
- compare_files(ref_dir, work_root, ['run.cmd'])
-
+ compare_files(ref_dir, work_root, ["run.cmd"])
diff --git a/tests/test_veribleformat.py b/tests/test_veribleformat.py
index 5c58b997c..1daf3249b 100644
--- a/tests/test_veribleformat.py
+++ b/tests/test_veribleformat.py
@@ -2,12 +2,14 @@
def test_veribleformat_default(make_edalize_test):
- """ Test the format mode of Verible """
- tf = make_edalize_test('veribleformat',
- test_name='test_verible',
- param_types=['vlogdefine', 'vlogparam'],
- ref_dir='default')
+ """Test the format mode of Verible"""
+ tf = make_edalize_test(
+ "veribleformat",
+ test_name="test_verible",
+ param_types=["vlogdefine", "vlogparam"],
+ ref_dir="default",
+ )
tf.backend.configure()
tf.backend.build()
tf.backend.run()
- tf.compare_files(['verible-verilog-format.cmd'])
+ tf.compare_files(["verible-verilog-format.cmd"])
diff --git a/tests/test_veriblelint.py b/tests/test_veriblelint.py
index c7bc4706f..58402ba66 100644
--- a/tests/test_veriblelint.py
+++ b/tests/test_veriblelint.py
@@ -2,12 +2,14 @@
def test_veriblelint_default(make_edalize_test):
- """ Test the lint mode of Verible """
- tf = make_edalize_test('veriblelint',
- test_name='test_verible',
- param_types=['vlogdefine', 'vlogparam'],
- ref_dir='lint')
+ """Test the lint mode of Verible"""
+ tf = make_edalize_test(
+ "veriblelint",
+ test_name="test_verible",
+ param_types=["vlogdefine", "vlogparam"],
+ ref_dir="lint",
+ )
tf.backend.configure()
tf.backend.build()
tf.backend.run()
- tf.compare_files(['verible-verilog-lint.cmd'])
+ tf.compare_files(["verible-verilog-lint.cmd"])
diff --git a/tests/test_verilator.py b/tests/test_verilator.py
index 825a48695..3fdf66564 100644
--- a/tests/test_verilator.py
+++ b/tests/test_verilator.py
@@ -2,48 +2,45 @@
def test_verilator_cc(make_edalize_test):
- mode = 'cc'
+ mode = "cc"
tool_options = {
- 'libs' : ['-lelf'],
- 'mode' : mode,
- 'verilator_options' : ['-Wno-fatal', '--trace'],
- 'make_options' : ['OPT_FAST=-O2'],
+ "libs": ["-lelf"],
+ "mode": mode,
+ "verilator_options": ["-Wno-fatal", "--trace"],
+ "make_options": ["OPT_FAST=-O2"],
}
- tf = make_edalize_test('verilator',
- param_types=['cmdlinearg', 'plusarg',
- 'vlogdefine', 'vlogparam'],
- tool_options=tool_options)
+ tf = make_edalize_test(
+ "verilator",
+ param_types=["cmdlinearg", "plusarg", "vlogdefine", "vlogparam"],
+ tool_options=tool_options,
+ )
tf.backend.configure()
- tf.compare_files(['Makefile'])
- tf.compare_files(['config.mk', tf.test_name + '.vc'], ref_subdir=mode)
+ tf.compare_files(["Makefile"])
+ tf.compare_files(["config.mk", tf.test_name + ".vc"], ref_subdir=mode)
- tf.copy_to_work_root('Vtop_module')
+ tf.copy_to_work_root("Vtop_module")
tf.backend.run()
- tf.compare_files(['run.cmd'])
+ tf.compare_files(["run.cmd"])
def test_verilator_sc(make_edalize_test):
- mode = 'sc'
- tf = make_edalize_test('verilator',
- param_types=[],
- tool_options={'mode': mode})
+ mode = "sc"
+ tf = make_edalize_test("verilator", param_types=[], tool_options={"mode": mode})
tf.backend.configure()
- tf.compare_files(['Makefile'])
- tf.compare_files(['config.mk', tf.test_name + '.vc'], ref_subdir=mode)
+ tf.compare_files(["Makefile"])
+ tf.compare_files(["config.mk", tf.test_name + ".vc"], ref_subdir=mode)
def test_verilator_lint_only(make_edalize_test):
- mode = 'lint-only'
- tf = make_edalize_test('verilator',
- param_types=[],
- tool_options={'mode': mode})
+ mode = "lint-only"
+ tf = make_edalize_test("verilator", param_types=[], tool_options={"mode": mode})
tf.backend.configure()
- tf.compare_files(['Makefile'])
- tf.compare_files(['config.mk', tf.test_name + '.vc'], ref_subdir=mode)
+ tf.compare_files(["Makefile"])
+ tf.compare_files(["config.mk", tf.test_name + ".vc"], ref_subdir=mode)
diff --git a/tests/test_vivado.py b/tests/test_vivado.py
index 3df4f6653..d3570d377 100644
--- a/tests/test_vivado.py
+++ b/tests/test_vivado.py
@@ -3,19 +3,25 @@
def test_vivado(make_edalize_test):
- tf = make_edalize_test('vivado',
- param_types=['generic', 'vlogdefine', 'vlogparam'],
- tool_options={'part': 'xc7a35tcsg324-1'})
+ tf = make_edalize_test(
+ "vivado",
+ param_types=["generic", "vlogdefine", "vlogparam"],
+ tool_options={"part": "xc7a35tcsg324-1"},
+ )
tf.backend.configure()
- tf.compare_files(['Makefile',
- tf.test_name + '.tcl',
- tf.test_name + '_synth.tcl',
- tf.test_name + '_run.tcl',
- tf.test_name + '_pgm.tcl'])
+ tf.compare_files(
+ [
+ "Makefile",
+ tf.test_name + ".tcl",
+ tf.test_name + "_synth.tcl",
+ tf.test_name + "_run.tcl",
+ tf.test_name + "_pgm.tcl",
+ ]
+ )
tf.backend.build()
- tf.compare_files(['vivado.cmd'])
+ tf.compare_files(["vivado.cmd"])
@pytest.mark.parametrize("params", [("minimal", "vivado"), ("yosys", "yosys")])
@@ -28,42 +34,42 @@ def test_vivado_minimal(params, tmpdir):
test_name, synth_tool = params
- ref_dir = os.path.join(tests_dir, __name__, test_name)
- os.environ['PATH'] = os.path.join(tests_dir, 'mock_commands')+':'+os.environ['PATH']
- tool = 'vivado'
+ ref_dir = os.path.join(tests_dir, __name__, test_name)
+ os.environ["PATH"] = (
+ os.path.join(tests_dir, "mock_commands") + ":" + os.environ["PATH"]
+ )
+ tool = "vivado"
tool_options = {
- 'part' : 'xc7a35tcsg324-1',
- 'synth': synth_tool,
+ "part": "xc7a35tcsg324-1",
+ "synth": synth_tool,
}
- name = 'test_vivado_{}_0'.format(test_name)
+ name = "test_vivado_{}_0".format(test_name)
work_root = str(tmpdir)
- edam = {'name' : name,
- 'tool_options' : {'vivado' : tool_options}
- }
+ edam = {"name": name, "tool_options": {"vivado": tool_options}}
backend = get_edatool(tool)(edam=edam, work_root=work_root)
backend.configure()
config_file_list = [
- 'Makefile',
- name+'.tcl',
- name+'_run.tcl',
- name+'_pgm.tcl',
+ "Makefile",
+ name + ".tcl",
+ name + "_run.tcl",
+ name + "_pgm.tcl",
]
if synth_tool == "yosys":
- config_file_list.append('edalize_yosys_procs.tcl')
- config_file_list.append('edalize_yosys_template.tcl')
+ config_file_list.append("edalize_yosys_procs.tcl")
+ config_file_list.append("edalize_yosys_template.tcl")
else:
- config_file_list.append(name+'_synth.tcl')
+ config_file_list.append(name + "_synth.tcl")
compare_files(ref_dir, work_root, config_file_list)
- build_file_list = ['vivado.cmd']
+ build_file_list = ["vivado.cmd"]
if synth_tool == "yosys":
- build_file_list.append('yosys.cmd')
+ build_file_list.append("yosys.cmd")
backend.build()
compare_files(ref_dir, work_root, build_file_list)
diff --git a/tests/test_vunit.py b/tests/test_vunit.py
index 7b09eeebd..b3386c360 100644
--- a/tests/test_vunit.py
+++ b/tests/test_vunit.py
@@ -6,9 +6,9 @@
def test_vunit_codegen(make_edalize_test):
- tf = make_edalize_test('vunit', param_types=['cmdlinearg'])
+ tf = make_edalize_test("vunit", param_types=["cmdlinearg"])
tf.backend.configure()
- tf.compare_files(['run.py'])
+ tf.compare_files(["run.py"])
def test_vunit_hooks(tmpdir):
@@ -20,31 +20,36 @@ def test_vunit_hooks(tmpdir):
from unittest import mock
from edalize import get_edatool
- sys.path = [os.path.join(tests_dir, __name__, 'vunit_mock')] + sys.path
+ sys.path = [os.path.join(tests_dir, __name__, "vunit_mock")] + sys.path
- ref_dir = os.path.join(tests_dir, __name__, 'minimal')
- tool = 'vunit'
- name = 'test_' + tool + '_minimal_0'
+ ref_dir = os.path.join(tests_dir, __name__, "minimal")
+ tool = "vunit"
+ name = "test_" + tool + "_minimal_0"
work_root = str(tmpdir)
- files = [{'name' : os.path.join(ref_dir, 'vunit_runner_test.py'),
- 'file_type' : 'pythonSource-3.7'},
- {'name' : os.path.join(ref_dir, 'tb_minimal.vhd'),
- 'file_type' : 'vhdlSource-2008',
- 'logical_name' : 'libx'}]
+ files = [
+ {
+ "name": os.path.join(ref_dir, "vunit_runner_test.py"),
+ "file_type": "pythonSource-3.7",
+ },
+ {
+ "name": os.path.join(ref_dir, "tb_minimal.vhd"),
+ "file_type": "vhdlSource-2008",
+ "logical_name": "libx",
+ },
+ ]
- edam = {'name' : name,
- 'files' : files,
- 'toplevel' : 'top'}
+ edam = {"name": name, "files": files, "toplevel": "top"}
backend = get_edatool(tool)(edam=edam, work_root=work_root)
original_impl = edalize.edatool.run
def subprocess_intercept(args, **kwargs):
- if len(args) > 1 and args[1].endswith('run.py'):
+ if len(args) > 1 and args[1].endswith("run.py"):
import sys
- with patch.object(sys, 'argv', args):
+
+ with patch.object(sys, "argv", args):
spec = importlib.util.spec_from_file_location("__main__", args[1])
runner_script = importlib.util.module_from_spec(spec)
@@ -53,10 +58,10 @@ def subprocess_intercept(args, **kwargs):
else:
return original_impl(args, **kwargs)
- with mock.patch('edalize.edatool.run', new=subprocess_intercept):
+ with mock.patch("edalize.edatool.run", new=subprocess_intercept):
backend.configure()
- with mock.patch('edalize.vunit_hooks.VUnitRunner') as hooks_constructor:
+ with mock.patch("edalize.vunit_hooks.VUnitRunner") as hooks_constructor:
hooks = MagicMock()
vu_library = MagicMock()
vu_mock = MagicMock()
@@ -71,10 +76,10 @@ def subprocess_intercept(args, **kwargs):
vu_mock.add_library.assert_called_with("libx")
hooks.create.assert_called_once_with()
- hooks.handle_library.assert_called_with('libx', vu_library)
+ hooks.handle_library.assert_called_with("libx", vu_library)
hooks.main.assert_called_with(vu_mock)
- with mock.patch('edalize.vunit_hooks.VUnitRunner') as hooks_constructor:
+ with mock.patch("edalize.vunit_hooks.VUnitRunner") as hooks_constructor:
hooks = MagicMock()
vu_library = MagicMock()
vu_mock = MagicMock()
@@ -88,12 +93,13 @@ def subprocess_intercept(args, **kwargs):
hooks.create.assert_called_once_with()
vu_mock.add_library.assert_called_with("libx")
hooks.create.assert_called_once_with()
- hooks.handle_library.assert_called_with('libx', vu_library)
+ hooks.handle_library.assert_called_with("libx", vu_library)
hooks.main.assert_called_with(vu_mock)
-if __name__ == '__main__':
+if __name__ == "__main__":
from os.path import dirname
import sys
+
sys.path.append(dirname(dirname(__file__)))
pytest.main(args=[__file__])
diff --git a/tests/test_vunit/run.py b/tests/test_vunit/run.py
index a593a31ed..782225081 100644
--- a/tests/test_vunit/run.py
+++ b/tests/test_vunit/run.py
@@ -1,17 +1,21 @@
# Auto generated by Edalize
+
def load_module_from_file(name, python_file):
import importlib.util
+
spec = importlib.util.spec_from_file_location(name, python_file)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
-def load_runner_hooks(python_file = r''):
+
+def load_runner_hooks(python_file=r""):
if len(python_file) > 0:
- return load_module_from_file('vunit_runner_hooks', python_file)
+ return load_module_from_file("vunit_runner_hooks", python_file)
else:
- return __import__('edalize.vunit_hooks', fromlist=['vunit_hooks'])
+ return __import__("edalize.vunit_hooks", fromlist=["vunit_hooks"])
+
runner = load_runner_hooks().VUnitRunner()
diff --git a/tests/test_vunit/vunit_mock/vunit/ui.py b/tests/test_vunit/vunit_mock/vunit/ui.py
index d7dddd8cb..a219a2686 100644
--- a/tests/test_vunit/vunit_mock/vunit/ui.py
+++ b/tests/test_vunit/vunit_mock/vunit/ui.py
@@ -1,9 +1,11 @@
from unittest.mock import MagicMock
+
class Library(MagicMock):
def add_source_files(self, file):
print("add_source_files()")
+
class VUnit(MagicMock):
@staticmethod
def from_argv():
@@ -11,4 +13,4 @@ def from_argv():
def add_library(self):
print("add_library()")
- return Library()
\ No newline at end of file
+ return Library()
diff --git a/tests/test_vunit/vunit_mock/vunit/vhdl_standard.py b/tests/test_vunit/vunit_mock/vunit/vhdl_standard.py
index cddcf4818..98d83424e 100644
--- a/tests/test_vunit/vunit_mock/vunit/vhdl_standard.py
+++ b/tests/test_vunit/vunit_mock/vunit/vhdl_standard.py
@@ -1,6 +1,7 @@
from unittest.mock import MagicMock
+
class VHDL(MagicMock):
@staticmethod
def standard(std):
- pass
\ No newline at end of file
+ pass
diff --git a/tests/test_xcelium.py b/tests/test_xcelium.py
index 93f13947b..41e5c69f3 100644
--- a/tests/test_xcelium.py
+++ b/tests/test_xcelium.py
@@ -4,34 +4,33 @@
def test_xcelium(make_edalize_test):
tool_options = {
- 'xmvhdl_options' : ['various', 'xmvhdl_options'],
- 'xmvlog_options' : ['some', 'xmvlog_options'],
- 'xmsim_options' : ['a', 'few', 'xmsim_options'],
- 'xrun_options' : ['plenty', 'of', 'xrun_options'],
+ "xmvhdl_options": ["various", "xmvhdl_options"],
+ "xmvlog_options": ["some", "xmvlog_options"],
+ "xmsim_options": ["a", "few", "xmsim_options"],
+ "xrun_options": ["plenty", "of", "xrun_options"],
}
- #FIXME: Add VPI tests
- tf = make_edalize_test('xcelium', tool_options=tool_options)
+ # FIXME: Add VPI tests
+ tf = make_edalize_test("xcelium", tool_options=tool_options)
tf.backend.configure()
- tf.compare_files(['Makefile',
- 'edalize_build_rtl.f',
- 'edalize_main.f'])
+ tf.compare_files(["Makefile", "edalize_build_rtl.f", "edalize_main.f"])
orig_env = os.environ.copy()
try:
- os.environ['PATH'] = '{}:{}'.format(os.path.join(tests_dir, 'mock_commands/xcelium'),
- os.environ['PATH'])
+ os.environ["PATH"] = "{}:{}".format(
+ os.path.join(tests_dir, "mock_commands/xcelium"), os.environ["PATH"]
+ )
# For some strange reason, writing to os.environ['PATH'] doesn't update the environment. This
# leads to test fails, but only when running multiple tests. When running this test by itself,
# everything works fine without the 'putenv'.
- os.putenv('PATH', os.environ['PATH'])
+ os.putenv("PATH", os.environ["PATH"])
tf.backend.build()
- os.makedirs(os.path.join(tf.work_root, 'work'))
+ os.makedirs(os.path.join(tf.work_root, "work"))
tf.backend.run()
- tf.compare_files(['xrun.cmd'])
+ tf.compare_files(["xrun.cmd"])
finally:
os.environ = orig_env
diff --git a/tests/test_xsim.py b/tests/test_xsim.py
index 6c0a620c7..83bc991a2 100644
--- a/tests/test_xsim.py
+++ b/tests/test_xsim.py
@@ -3,50 +3,49 @@
def test_xsim(make_edalize_test):
- tool_options = {'xelab_options' : ['some', 'xelab_options'],
- 'xsim_options' : ['a', 'few', 'xsim_options']}
- paramtypes = ['plusarg', 'vlogdefine', 'vlogparam', 'generic']
+ tool_options = {
+ "xelab_options": ["some", "xelab_options"],
+ "xsim_options": ["a", "few", "xsim_options"],
+ }
+ paramtypes = ["plusarg", "vlogdefine", "vlogparam", "generic"]
- tf = make_edalize_test('xsim',
- tool_options=tool_options,
- param_types=paramtypes)
+ tf = make_edalize_test("xsim", tool_options=tool_options, param_types=paramtypes)
tf.backend.configure()
- tf.compare_files(['config.mk', 'Makefile', tf.test_name + '.prj'])
+ tf.compare_files(["config.mk", "Makefile", tf.test_name + ".prj"])
tf.backend.build()
- tf.compare_files(['xelab.cmd'])
+ tf.compare_files(["xelab.cmd"])
- xsimkdir = os.path.join(tf.work_root, 'xsim.dir', tf.test_name)
+ xsimkdir = os.path.join(tf.work_root, "xsim.dir", tf.test_name)
os.makedirs(xsimkdir)
- with open(os.path.join(xsimkdir, 'xsimk'), 'w') as f:
+ with open(os.path.join(xsimkdir, "xsimk"), "w") as f:
f.write("I am a compiled simulation kernel\n")
tf.backend.run()
- tf.compare_files(['xsim.cmd'])
+ tf.compare_files(["xsim.cmd"])
+
def test_xsim_mfc(make_edalize_test):
- tool_options = {'compilation_mode' : 'common',
- 'xelab_options' : ['some', 'xelab_options'],
- 'xsim_options' : ['a', 'few', 'xsim_options']}
- paramtypes = ['plusarg', 'vlogdefine', 'vlogparam', 'generic']
+ tool_options = {
+ "compilation_mode": "common",
+ "xelab_options": ["some", "xelab_options"],
+ "xsim_options": ["a", "few", "xsim_options"],
+ }
+ paramtypes = ["plusarg", "vlogdefine", "vlogparam", "generic"]
- tf = make_edalize_test('xsim',
- tool_options=tool_options,
- param_types=paramtypes)
+ tf = make_edalize_test("xsim", tool_options=tool_options, param_types=paramtypes)
tf.backend.configure()
- tf.compare_files(['config.mk', 'Makefile', tf.test_name + '.prj'], ref_subdir='mfc')
+ tf.compare_files(["config.mk", "Makefile", tf.test_name + ".prj"], ref_subdir="mfc")
tf.backend.build()
- tf.compare_files(['xelab.cmd'])
+ tf.compare_files(["xelab.cmd"])
- xsimkdir = os.path.join(tf.work_root, 'xsim.dir', tf.test_name)
+ xsimkdir = os.path.join(tf.work_root, "xsim.dir", tf.test_name)
os.makedirs(xsimkdir)
- with open(os.path.join(xsimkdir, 'xsimk'), 'w') as f:
+ with open(os.path.join(xsimkdir, "xsimk"), "w") as f:
f.write("I am a compiled simulation kernel\n")
tf.backend.run()
- tf.compare_files(['xsim.cmd'])
-
-
+ tf.compare_files(["xsim.cmd"])