From 8987344d8d78b80539eb8faf88250ecf5c876454 Mon Sep 17 00:00:00 2001 From: Joaquin Matres <4514346+joamatab@users.noreply.github.com> Date: Sat, 21 Oct 2023 09:21:28 -0700 Subject: [PATCH] first commit --- .changelog.d/changelog_template.jinja | 15 + .github/dependabot.yml | 11 + .github/release-drafter.yml | 44 ++ .github/workflows/pages.yml | 46 ++ .github/workflows/release-drafter.yml | 35 ++ .github/workflows/release.yml | 53 +++ .github/workflows/stale.yml | 26 ++ .github/workflows/test_code.yml | 85 ++++ .github/workflows/test_code_conda.yml | 92 ++++ .gitignore | 147 +++++++ .pre-commit-config.yaml | 76 ++++ .sourcery.yaml | 5 + CHANGELOG.md | 3 + LICENSE | 21 + Makefile | 31 ++ README.md | 9 + docs/_config.yml | 58 +++ docs/_toc.yml | 15 + docs/api.rst | 7 + docs/changelog.md | 2 + docs/index.md | 2 + docs/tutorial.md | 3 + pyproject.toml | 188 ++++++++ ubc2/__init__.py | 3 + ubc2/config.py | 26 ++ ubc2/cutback_2x2.py | 55 +++ ubc2/test_masks.py | 41 ++ ubc2/ubc_helge.py | 85 ++++ ubc2/ubc_joaquin_matres1.py | 205 +++++++++ ubc2/ubc_simon.py | 604 ++++++++++++++++++++++++++ ubc2/write_mask.py | 27 ++ 31 files changed, 2020 insertions(+) create mode 100644 .changelog.d/changelog_template.jinja create mode 100644 .github/dependabot.yml create mode 100644 .github/release-drafter.yml create mode 100644 .github/workflows/pages.yml create mode 100644 .github/workflows/release-drafter.yml create mode 100644 .github/workflows/release.yml create mode 100644 .github/workflows/stale.yml create mode 100644 .github/workflows/test_code.yml create mode 100644 .github/workflows/test_code_conda.yml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 .sourcery.yaml create mode 100644 CHANGELOG.md create mode 100644 LICENSE create mode 100644 Makefile create mode 100644 README.md create mode 100755 docs/_config.yml create mode 100644 docs/_toc.yml create mode 100644 docs/api.rst create mode 100644 docs/changelog.md create mode 100644 docs/index.md create mode 100644 docs/tutorial.md create mode 100644 pyproject.toml create mode 100644 ubc2/__init__.py create mode 100644 ubc2/config.py create mode 100644 ubc2/cutback_2x2.py create mode 100644 ubc2/test_masks.py create mode 100644 ubc2/ubc_helge.py create mode 100644 ubc2/ubc_joaquin_matres1.py create mode 100644 ubc2/ubc_simon.py create mode 100644 ubc2/write_mask.py diff --git a/.changelog.d/changelog_template.jinja b/.changelog.d/changelog_template.jinja new file mode 100644 index 0000000..b4e1f90 --- /dev/null +++ b/.changelog.d/changelog_template.jinja @@ -0,0 +1,15 @@ +{% if sections[""] %} +{% for category, val in definitions.items() if category in sections[""] %} + +### {{ definitions[category]['name'] }} + +{% for text, values in sections[""][category].items() %} +- {{ text }} {{ values | join(', ') }} +{% endfor %} + +{% endfor %} +{% else %} +No significant changes. 
+ + +{% endif %} diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..0aada4d --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" # Location of package manifests + schedule: + interval: "daily" + + - package-ecosystem: github-actions + directory: / + schedule: + interval: monthly diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml new file mode 100644 index 0000000..ae012d6 --- /dev/null +++ b/.github/release-drafter.yml @@ -0,0 +1,44 @@ +name-template: 'v$RESOLVED_VERSION' +tag-template: 'v$RESOLVED_VERSION' +change-template: '- $TITLE [#$NUMBER](https://github.com/gdsfactory/ubc2/pull/$NUMBER)' +template: | + # What's Changed + + $CHANGES + + **Full Changelog**: https://github.com/$OWNER/$REPOSITORY/compare/$PREVIOUS_TAG...v$RESOLVED_VERSION + +categories: + - title: 'Breaking' + label: 'type: breaking' + - title: 'New' + label: 'type: feature' + - title: 'Bug Fixes' + label: 'type: bug' + - title: 'Maintenance' + label: 'type: maintenance' + - title: 'Documentation' + label: 'type: docs' + - title: 'Other changes' + - title: 'Dependency Updates' + label: 'type: dependencies' + collapse-after: 5 + +version-resolver: + major: + labels: + - 'type: breaking' + minor: + labels: + - 'type: feature' + patch: + labels: + - 'type: bug' + - 'type: maintenance' + - 'type: docs' + - 'type: dependencies' + - 'type: security' + default: patch + +exclude-labels: + - 'skip-changelog' diff --git a/.github/workflows/pages.yml b/.github/workflows/pages.yml new file mode 100644 index 0000000..b80a94a --- /dev/null +++ b/.github/workflows/pages.yml @@ -0,0 +1,46 @@ +name: Sphinx docs to gh-pages + +on: + push: + branches: + - main + workflow_dispatch: + +jobs: + build-docs: + runs-on: ubuntu-latest + name: Sphinx docs to gh-pages + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Installing the library + shell: bash -l {0} + run: | + make dev + - name: make docs + run: | + make docs + - name: Upload artifact + uses: actions/upload-pages-artifact@v2 + with: + path: "./docs/_build/html/" + deploy-docs: + needs: build-docs + permissions: + pages: write + id-token: write + + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + + runs-on: ubuntu-latest + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v2 diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml new file mode 100644 index 0000000..c9205f3 --- /dev/null +++ b/.github/workflows/release-drafter.yml @@ -0,0 +1,35 @@ +name: Release Drafter + +on: + push: + branches: + - main + # pull_request: + # types: [opened, reopened, synchronize] + +permissions: + contents: read + +jobs: + update_release_draft: + permissions: + # write permission is required to create a github release + contents: write + # write permission is required for autolabeler + # otherwise, read permission is required at least + pull-requests: write + runs-on: ubuntu-latest + steps: + # (Optional) GitHub Enterprise requires GHE_HOST variable set + #- name: Set GHE_HOST + # run: | + # echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV + + # Drafts your next Release notes as Pull Requests are merged into "master" + - uses: release-drafter/release-drafter@v5 + # (Optional) specify config name to use, 
relative to .github/. Default: release-drafter.yml + # with: + # config-name: my-config.yml + # disable-autolabeler: true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..70265f5 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,53 @@ +name: Release PyPI, installers and docker container + +on: + push: + tags: "v*" + +jobs: + release_pypi: + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.x + cache-dependency-path: pyproject.toml + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install setuptools wheel twine + - name: Build and publish + env: + TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} + TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + run: | + make build + twine upload dist/* + + + release_environment: + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Install dependencies + run: | + pip install . + pip freeze > requirements.txt + - name: Publish Latest Draft + run: | + gh release edit ${{ github.ref_name }} --draft=false + gh release upload ${{ github.ref_name }} requirements.txt --clobber + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 0000000..a1d5539 --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,26 @@ +# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time. +# +# You can adjust the behavior by modifying this file. +# For more information, see: +# https://github.com/actions/stale +name: Mark stale issues and pull requests + +on: + schedule: + - cron: '22 12 * * *' + +jobs: + stale: + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + + steps: + - uses: actions/stale@v8 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + stale-issue-message: 'This issue is stale because it has been inactive for 60 days. Remove stale label or comment or this will be closed in 7 days.' + stale-pr-message: 'This pull request is stale because it has been inactive for 60 days. Remove stale label or comment or this will be closed in 7 days.' 
+ stale-issue-label: 'no-issue-activity' + stale-pr-label: 'no-pr-activity' diff --git a/.github/workflows/test_code.yml b/.github/workflows/test_code.yml new file mode 100644 index 0000000..7e4e03e --- /dev/null +++ b/.github/workflows/test_code.yml @@ -0,0 +1,85 @@ +name: Test pre-commit, code and docs + +on: + pull_request: + push: + branches: + - main + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Test pre-commit hooks + run: | + python -m pip install --upgrade pip + pip install pre-commit + pre-commit run -a + test_code: + runs-on: ${{ matrix.os }} + strategy: + max-parallel: 12 + matrix: + python-version: ["3.10"] + os: [ubuntu-latest, windows-latest, macos-latest] + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Install dependencies + run: | + make dev + - name: Test with pytest + run: pytest + test_code_coverage: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + - name: Install dependencies + run: | + make dev + - name: Test with pytest + run: | + pytest --cov + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: false + test_docs: + runs-on: ${{ matrix.os }} + strategy: + max-parallel: 12 + matrix: + python-version: ['3.10'] + os: [ubuntu-latest] + + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Install dependencies + run: | + make dev + - name: Test documentation + run: | + make docs diff --git a/.github/workflows/test_code_conda.yml b/.github/workflows/test_code_conda.yml new file mode 100644 index 0000000..70be8f4 --- /dev/null +++ b/.github/workflows/test_code_conda.yml @@ -0,0 +1,92 @@ +name: Test pre-commit, code and docs + +on: + pull_request: + push: + branches: + - main + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Test pre-commit hooks + run: | + python -m pip install --upgrade pip + pip install pre-commit + pre-commit run -a + test_code: + needs: [pre-commit] + runs-on: ${{ matrix.os }} + strategy: + max-parallel: 12 + matrix: + python-version: ["3.10"] + os: [ubuntu-latest, windows-latest, macos-latest] + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Install dependencies + run: | + make dev + - name: Test with pytest + run: pytest + test_code_coverage: + runs-on: ubuntu-latest + needs: [pre-commit] + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + - name: Install dependencies + run: | + make dev + - name: Test with pytest + run: | + pytest --cov={{ cookiecutter.package_name }} {{ 
cookiecutter.package_name }} + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: false + test_docs: + needs: [pre-commit] + runs-on: ${{ matrix.os }} + strategy: + max-parallel: 12 + matrix: + python-version: ['3.10'] + os: [ubuntu-latest] + + steps: + - uses: actions/checkout@v4 + - uses: conda-incubator/setup-miniconda@v2 + with: + python-version: '3.10' + mamba-version: "*" + channels: conda-forge,defaults + channel-priority: true + activate-environment: anaconda-client-env + - name: Add conda to system path + run: | + echo $CONDA/bin >> $GITHUB_PATH + - name: Install dependencies + run: | + make dev + - name: Test documentation + run: | + make docs diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..ddbbc7f --- /dev/null +++ b/.gitignore @@ -0,0 +1,147 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class +Pipfile + +# C extensions +*.so +*.fsp + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +extra/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +*.ipynb +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints +**/*.ipynb_checkpoints/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +*.DS_Store +.DS_Store +*Thumbs.db diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..4f25ea8 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,76 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: "9260cbc9c84c06022993bfbcc42fdbf0305c5b8e" + hooks: + - id: check-added-large-files + - id: check-case-conflict + - id: check-merge-conflict + - id: check-symlinks + - id: check-yaml + - id: debug-statements + - id: end-of-file-fixer + - id: mixed-line-ending + - id: name-tests-test + args: ["--pytest-test-first"] + - id: trailing-whitespace + + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: "6a0ba1854991b693612486cc84a2254de82d071d" + hooks: + - id: ruff + + - repo: https://github.com/psf/black + rev: "d9b8a6407e2f46304a8d36b18e4a73d8e0613519" + hooks: + - id: black + + - repo: https://github.com/shellcheck-py/shellcheck-py + rev: 953faa6870f6663ac0121ab4a800f1ce76bca31f + hooks: + - id: shellcheck + + # - repo: https://github.com/pre-commit/mirrors-mypy + # rev: "v1.0.1" + # hooks: + # - id: mypy + # exclude: ^(docs/|example-plugin/|tests/fixtures) + # additional_dependencies: + # - "pydantic" + + - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: d2425a62376c2197448cce2f825d5a0c3926b862 + hooks: + - id: pretty-format-toml + args: [--autofix] + + - repo: https://github.com/aristanetworks/j2lint.git + rev: 742a25ef5da996b9762f167ebae9bc8223e8382e + hooks: + - id: j2lint + types: [file] + files: \.(j2|yml|yaml)$ + args: [--extensions, "j2,yml,yaml", --ignore, jinja-statements-delimiter, jinja-statements-indentation, --] + exclude: .github/.* + - repo: https://github.com/codespell-project/codespell + rev: 38bc4a97f6e22827e86526148efa38f1044a97f8 + hooks: + - id: codespell + additional_dependencies: + - tomli + + # - repo: https://github.com/kynan/nbstripout + # rev: e4c5b4dcbab4afa0b88b7a9243db42b1a8d95dde + # hooks: + # - id: nbstripout + # files: ".ipynb" + # - repo: https://github.com/pre-commit/pygrep-hooks + # rev: 7b4409161486c6956bb3206ce96db5d56731b1b9 # Use the ref you want to point at + # hooks: + # - id: python-use-type-annotations + # - repo: https://github.com/PyCQA/bandit + # rev: fe1361fdcc274850d4099885a802f2c9f28aca08 + # hooks: + # - id: bandit + # args: [--exit-zero] + # # ignore all tests, not just tests data + # exclude: ^tests/ diff --git a/.sourcery.yaml b/.sourcery.yaml new file mode 100644 index 0000000..e5b68f4 --- /dev/null +++ b/.sourcery.yaml @@ -0,0 +1,5 @@ +rule_settings: + enable: [default] + disable: + - use-named-expression + python_version: '3.10' diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..874d763 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,3 @@ +# CHANGELOG + + diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..9449ab1 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2023 Joaquin + +Permission is hereby granted, free of charge, to any person obtaining 
a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..205078b --- /dev/null +++ b/Makefile @@ -0,0 +1,31 @@ +install: + pip install -e .[dev] + pre-commit install + +dev: + pip install -e .[dev,docs] + +test: + pytest -s + +update-pre: + pre-commit autoupdate --bleeding-edge + +git-rm-merged: + git branch -D `git branch --merged | grep -v \* | xargs` + +build: + rm -rf dist + pip install build + python -m build + +jupytext: + jupytext docs/**/*.ipynb --to py + +notebooks: + jupytext docs/**/*.py --to ipynb + +docs: + jb build docs + +.PHONY: drc doc docs diff --git a/README.md b/README.md new file mode 100644 index 0000000..2e3c560 --- /dev/null +++ b/README.md @@ -0,0 +1,9 @@ +# ubc2 0.0.1 + +ubc + +## Usage + +## Installation + +`make install` diff --git a/docs/_config.yml b/docs/_config.yml new file mode 100755 index 0000000..3ceeead --- /dev/null +++ b/docs/_config.yml @@ -0,0 +1,58 @@ +# Book settings +# Learn more at https://jupyterbook.org/customize/config.html + +title: ubc2 +author: Joaquin + +# Force re-execution of notebooks on each build. 
+# See https://jupyterbook.org/content/execute.html +execute: + execute_notebooks: cache + timeout: -1 + allow_errors: false + # execute_notebooks: force + # execute_notebooks: "off" + # exclude_patterns: + # - '*notebooks/devsim/01_pin_waveguide*' + +latex: + latex_engine: pdflatex # one of 'pdflatex', 'xelatex' (recommended for unicode), 'luatex', 'platex', 'uplatex' + use_jupyterbook_latex: true # use sphinx-jupyterbook-latex for pdf builds as default + +# Add a bibtex file so that we can create citations + +html: + home_page_in_navbar: true + use_edit_page_button: true + use_repository_button: true + use_issues_button: true + baseurl: https://github.com/gdsfactory/ubc2 + +# Information about where the book exists on the web +repository: + url: https://github.com/gdsfactory/ubc2 + path_to_book: docs # Optional path to your book, relative to the repository root + branch: main # Which branch of the repository should be used when creating links (optional) + +launch_buttons: + notebook_interface: jupyterlab + colab_url: "https://colab.research.google.com" + +sphinx: + extra_extensions: + - "sphinx.ext.autodoc" + - "sphinx.ext.autodoc.typehints" + - "sphinx.ext.autosummary" + - "sphinx.ext.napoleon" + - "sphinx.ext.viewcode" + - "matplotlib.sphinxext.plot_directive" + config: + #autodoc_typehints: description + autodoc_type_aliases: + "ComponentSpec": "ComponentSpec" + nb_execution_show_tb: True + nb_execution_raise_on_error: true + nb_custom_formats: + .py: + - jupytext.reads + - fmt: py diff --git a/docs/_toc.yml b/docs/_toc.yml new file mode 100644 index 0000000..2694515 --- /dev/null +++ b/docs/_toc.yml @@ -0,0 +1,15 @@ +# Table of contents +# Learn more at https://jupyterbook.org/customize/toc.html + +format: jb-book +root: index +parts: + # - caption: Tutorial + # chapters: + # - file: tutorial + # sections: + # - file: notebooks/demo + - caption: Reference + chapters: + - file: api + - file: changelog diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 0000000..0a8e605 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,7 @@ +ubc2 +=================================== + +Config +--------------------- + +.. 
automodule:: ubc2.config diff --git a/docs/changelog.md b/docs/changelog.md new file mode 100644 index 0000000..66efc0f --- /dev/null +++ b/docs/changelog.md @@ -0,0 +1,2 @@ +```{include} ../CHANGELOG.md +``` diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000..451beda --- /dev/null +++ b/docs/index.md @@ -0,0 +1,2 @@ +```{include} ../README.md +``` diff --git a/docs/tutorial.md b/docs/tutorial.md new file mode 100644 index 0000000..19c4c07 --- /dev/null +++ b/docs/tutorial.md @@ -0,0 +1,3 @@ + +```{tableofcontents} +``` diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..82f3825 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,188 @@ +# https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html + +[build-system] +build-backend = "flit_core.buildapi" +requires = ["flit_core >=3.2,<4"] + +[project] +authors = [ + {name = "Joaquin", email = "contact@gdsfactory.com"} +] +classifiers = [ + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent" +] +dependencies = [ + "ubcpdk==2.3.0" +] +description = "ubc" +keywords = ["python"] +license = {file = "LICENSE"} +name = "ubc2" +readme = "README.md" +requires-python = ">=3.10" +version = "0.0.1" + +[project.optional-dependencies] +dev = [ + "pre-commit", + "pytest", + "pytest-cov", + "pytest_regressions" +] +docs = [ + "jupytext", + "matplotlib", + "jupyter-book==0.15.1" +] + +[tool.black] +exclude = ''' # Specify the files/dirs that should be ignored by the black formatter +/( + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | env + | _build + | buck-out + | build + | dist +)/ +''' +include = '\.pyi?$' +line-length = 88 +target-version = ['py310'] + +[tool.codespell] +ignore-words-list = "te, te/tm, te, ba, fpr, fpr_spacing, ro, nd, donot, schem" + +[tool.mypy] +python_version = "3.10" +strict = true + +[tool.pylsp-mypy] +enabled = true +live_mode = true +strict = true + +[tool.pytest.ini_options] +# addopts = --tb=no +addopts = '--tb=short' +norecursedirs = ["extra/*.py"] +python_files = ["ubc2/*.py", "notebooks/*.ipynb", "tests/*.py"] +testpaths = ["ubc2/", "tests"] + +[tool.ruff] +fix = true +ignore = [ + "E501", # line too long, handled by black + "B008", # do not perform function calls in argument defaults + "C901", # too complex + "B905", # `zip()` without an explicit `strict=` parameter + "C408" # C408 Unnecessary `dict` call (rewrite as a literal) +] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "C", # flake8-comprehensions + "B", # flake8-bugbear + "UP" +] + +[tool.ruff.pydocstyle] +convention = "google" + +[tool.setuptools.package-data] +mypkg = ["*.csv", "*.yaml"] + +[tool.setuptools.packages] +find = {} + +[tool.tbump] + +[[tool.tbump.before_commit]] +cmd = "towncrier build --yes --version {new_version}" +name = "create & check changelog" + +[[tool.tbump.before_commit]] +cmd = "git add CHANGELOG.md" +name = "create & check changelog" + +[[tool.tbump.before_commit]] +cmd = "grep -q -F {new_version} CHANGELOG.md" +name = "create & check changelog" + +# For each file to patch, add a [[file]] config +# section containing the path of the file, relative to the +# tbump.toml location. 
+[[tool.tbump.file]] +src = "README.md" + +[[tool.tbump.file]] +src = "pyproject.toml" + +[[tool.tbump.file]] +src = "ubc2/__init__.py" + +[tool.tbump.git] +message_template = "Bump to {new_version}" +tag_template = "v{new_version}" + +[tool.tbump.version] +current = "0.0.1" +# Example of a semver regexp. +# Make sure this matches current_version before +# using tbump +regex = ''' + (?P\d+) + \. + (?P\d+) + \. + (?P\d+) + ''' + +[tool.towncrier] +directory = ".changelog.d" +filename = "CHANGELOG.md" +issue_format = "[#{issue}](https://github.com/gdsfactory/ubc2/issues/{issue})" +package = "gdsfactory" +start_string = "\n" +template = ".changelog.d/changelog_template.jinja" +title_format = "## [{version}](https://github.com/gdsfactory/ubc2/releases/tag/v{version}) - {project_date}" +underlines = ["", "", ""] + +[[tool.towncrier.type]] +directory = "security" +name = "Security" +showcontent = true + +[[tool.towncrier.type]] +directory = "removed" +name = "Removed" +showcontent = true + +[[tool.towncrier.type]] +directory = "deprecated" +name = "Deprecated" +showcontent = true + +[[tool.towncrier.type]] +directory = "added" +name = "Added" +showcontent = true + +[[tool.towncrier.type]] +directory = "changed" +name = "Changed" +showcontent = true + +[[tool.towncrier.type]] +directory = "fixed" +name = "Fixed" +showcontent = true diff --git a/ubc2/__init__.py b/ubc2/__init__.py new file mode 100644 index 0000000..f736492 --- /dev/null +++ b/ubc2/__init__.py @@ -0,0 +1,3 @@ +"""ubc2 - ubc""" + +__version__ = "0.0.1" diff --git a/ubc2/config.py b/ubc2/config.py new file mode 100644 index 0000000..9de3dfb --- /dev/null +++ b/ubc2/config.py @@ -0,0 +1,26 @@ +"""Store configuration.""" + +__all__ = ["PATH"] + +import pathlib + +home = pathlib.Path.home() +cwd = pathlib.Path.cwd() +cwd_config = cwd / "config.yml" + +home_config = home / ".config" / "ubc2.yml" +config_dir = home / ".config" +config_dir.mkdir(exist_ok=True) +module_path = pathlib.Path(__file__).parent.absolute() +repo_path = module_path.parent + + +class Path: + module = module_path + repo = repo_path + + +PATH = Path() + +if __name__ == "__main__": + print(PATH) diff --git a/ubc2/cutback_2x2.py b/ubc2/cutback_2x2.py new file mode 100644 index 0000000..2d6e3c5 --- /dev/null +++ b/ubc2/cutback_2x2.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +import gdsfactory as gf +from gdsfactory.component import Component +from gdsfactory.components.component_sequence import component_sequence +from gdsfactory.components.mmi2x2 import mmi2x2 +from gdsfactory.typings import ComponentSpec + + +@gf.cell +def cutback_2x2( + component: ComponentSpec = mmi2x2, + cols: int = 4, + port1: str = "o1", + port2: str = "o2", + port3: str = "o3", + port4: str = "o4", +) -> Component: + """Returns a daisy chain of 2x2 couplers for measuring their loss. + + Args: + component: for cutback. + cols: number of columns/components. + port1: name of first optical port. + port2: name of second optical port. + port3: name of third optical port. + port4: name of fourth optical port. 
+ """ + component = gf.get_component(component) + + # Define a map between symbols and (component, input port, output port) + symbol_to_component = { + "A": (component, port1, port3), + "B": (component, port4, port2), + } + + # Generate the sequence of staircases + + s = "AB" * cols + + seq = component_sequence(sequence=s, symbol_to_component=symbol_to_component) + + c = gf.Component() + ref = c << seq + c.add_ports(ref.ports) + + n = len(s) - 2 + c.copy_child_info(component) + c.info["components"] = n + return c + + +if __name__ == "__main__": + c = cutback_2x2(component=gf.c.coupler, cols=2) + c.show(show_ports=True) diff --git a/ubc2/test_masks.py b/ubc2/test_masks.py new file mode 100644 index 0000000..78092c2 --- /dev/null +++ b/ubc2/test_masks.py @@ -0,0 +1,41 @@ +"""Write all mask for the course.""" +import shutil + +import ubc1.ubc_helge as m12 +import ubc1.ubc_joaquin_matres1 as m11 +import ubc1.ubc_simon as m13 +from ubcpdk.config import PATH + + +def test_masks_2023_v1(): + """Write all masks for 2023_v1.""" + dirpath = PATH.mask + dirpath_gds = dirpath / "gds" + + if dirpath.exists(): + shutil.rmtree(dirpath) + dirpath_gds.mkdir(exist_ok=True, parents=True) + + for mask in [ + m11.test_mask1, + m11.test_mask2, + m11.test_mask3, + m11.test_mask4, + m11.test_mask5, + m11.test_mask6, + m11.test_mask7, + m12.test_mask1, + m12.test_mask2, + m13.test_mask1, + m13.test_mask3, + m13.test_mask4, + m13.test_mask5, + ]: + mask() + + for gdspath in dirpath.glob("*.gds"): + shutil.copyfile(gdspath, dirpath_gds / f"{gdspath.name}") + + +if __name__ == "__main__": + test_masks_2023_v1() diff --git a/ubc2/ubc_helge.py b/ubc2/ubc_helge.py new file mode 100644 index 0000000..3858508 --- /dev/null +++ b/ubc2/ubc_helge.py @@ -0,0 +1,85 @@ +from pathlib import Path + +import gdsfactory as gf +import ubcpdk +import ubcpdk.components as pdk +from ubc1.write_mask import pack, size, write_mask_gds_with_metadata +from ubcpdk.tech import LAYER + +add_gc = ubcpdk.components.add_fiber_array +nm = 1e-3 + + +def test_mask1() -> Path: + """DBR filters.""" + + @gf.cell + def dbr_filter(n) -> gf.Component: + c = gf.Component() + splitter = pdk.ebeam_bdc_te1550() + + splitter_1 = c << splitter + splitter_2 = c << splitter + + dbr = pdk.dbg(n=n) if n > 0 else pdk.straight(0) + dbr_1 = c << dbr + dbr_2 = c << dbr + + dbr_1.connect("o1", splitter_1["o3"]) + dbr_2.connect("o1", splitter_1["o4"]) + splitter_2.connect("o4", dbr_1["o2"]) + + bend_1 = c << pdk.bend(angle=90) + bend_1.connect("o1", splitter_1["o1"]) + bend_2 = c << pdk.bend(angle=90) + bend_2.connect("o2", splitter_2["o1"]) + + term = c << pdk.terminator_short(length=5, width1=0.5, width2=60 * nm) + term.connect("o1", splitter_2["o2"]) + + c.add_port("out1", port=bend_2["o1"]) + c.add_port("out2", port=bend_1["o2"]) + c.add_port("in1", port=splitter_1["o2"]) + # c.add_port("in2", port=splitter_1["o1"]) + + return c + + rings = [dbr_filter(length) for length in [0, 250, 500, 750, 1000, 1250]] + rings_gc = [pdk.add_fiber_array(ring, fanout_length=15) for ring in rings] + + c = pack(rings_gc) + m = c[0] + m.name = "EBeam_JoaquinMatres_Helge_1" + _ = m << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + return write_mask_gds_with_metadata(m) + + +def test_mask2() -> Path: + """Ring with different couplings.""" + e = [ + add_gc(ubcpdk.components.straight(), component_name=f"straight_{i}") + for i in range(2) + ] + e += [ + add_gc( + pdk.ring_single( + radius=12, + gap=gap, + length_x=coupling_length, + ) + ) + for gap in [0.1, 0.2, 0.3, 0.4, 0.5, 
0.6] + for coupling_length in [0, 2] + ] + + c = pack(e) + m = c[0] + m.name = "EBeam_JoaquinMatres_Helge_2" + _ = m << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + return write_mask_gds_with_metadata(m) + + +if __name__ == "__main__": + # m = test_mask1() + m = test_mask2() + gf.show(m) diff --git a/ubc2/ubc_joaquin_matres1.py b/ubc2/ubc_joaquin_matres1.py new file mode 100644 index 0000000..88db758 --- /dev/null +++ b/ubc2/ubc_joaquin_matres1.py @@ -0,0 +1,205 @@ +"""Sample mask for the edx course Q1 2023.""" + +from functools import partial +from pathlib import Path + +import gdsfactory as gf +import ubcpdk +import ubcpdk.components as pdk +from ubcpdk import tech +from ubcpdk.tech import LAYER + +from ubc2.cutback_2x2 import cutback_2x2 +from ubc2.write_mask import add_gc, pack, size, write_mask_gds_with_metadata + + +def test_mask1() -> Path: + """Add DBR cavities.""" + e = [add_gc(ubcpdk.components.straight())] + e += [add_gc(pdk.mzi(delta_length=dl)) for dl in [9.32, 93.19]] + e += [ + add_gc(pdk.ring_single(radius=12, gap=gap, length_x=coupling_length)) + for gap in [0.2] + for coupling_length in [0.1, 2.5, 4.5] + ] + e += [ + add_gc(pdk.ring_double(radius=12, gap=gap, length_x=coupling_length)) + for gap in [0.2] + for coupling_length in [0.1] + ] + + e += [ + ubcpdk.components.dbr_cavity_te(w0=w0, dw=dw) + for w0 in [0.5] + for dw in [50e-3, 100e-3, 150e-3, 200e-3] + ] + e += [add_gc(ubcpdk.components.ring_with_crossing())] + e += [ + add_gc( + ubcpdk.components.ring_with_crossing(port_name="o2", with_component=False) + ) + ] + + c = pack(e) + m = c[0] + m.name = "EBeam_JoaquinMatres_11" + _ = m << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + return write_mask_gds_with_metadata(m) + + +def test_mask2() -> Path: + """spirals for extracting straight waveguide loss""" + N = 12 + radius = 10 + + e = [ + ubcpdk.components.add_fiber_array( + component=ubcpdk.components.spiral( + N=N, + radius=radius, + y_straight_inner_top=0, + x_inner_length_cutback=0, + info=dict(does=["spiral", "te1550"]), + ) + ) + ] + + e.append( + ubcpdk.components.add_fiber_array( + component=ubcpdk.components.spiral( + N=N, + radius=radius, + y_straight_inner_top=0, + x_inner_length_cutback=185, + ) + ) + ) + + c = pack(e) + + m = c[0] + m.name = "EBeam_JoaquinMatres_12" + _ = m << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + return write_mask_gds_with_metadata(m) + + +def test_mask3() -> Path: + """contains mirror cavities and structures inside a resonator""" + e = [] + e += [add_gc(ubcpdk.components.ebeam_crossing4())] + e += [add_gc(ubcpdk.components.ebeam_adiabatic_te1550(), optical_routing_type=1)] + e += [add_gc(ubcpdk.components.ebeam_bdc_te1550())] + e += [add_gc(ubcpdk.components.ebeam_y_1550(), optical_routing_type=1)] + e += [add_gc(ubcpdk.components.ebeam_y_adiabatic_tapers(), optical_routing_type=1)] + e += [ + add_gc(ubcpdk.components.straight(), component_name=f"straight_{i}") + for i in range(2) + ] + c = pack(e) + m = c[0] + m.name = "EBeam_JoaquinMatres_13" + _ = m << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + return write_mask_gds_with_metadata(m) + + +def test_mask4() -> Path: + """MZI interferometers.""" + mzi = partial(gf.components.mzi, splitter=ubcpdk.components.ebeam_y_1550) + mzis = [mzi(delta_length=delta_length) for delta_length in [10, 40, 100]] + mzis_gc = [pdk.add_fiber_array(mzi) for mzi in mzis] + + mzis = [pdk.mzi_heater(delta_length=delta_length) for delta_length in [40]] + mzis_heater_gc = [ + 
pdk.add_fiber_array_pads_rf(mzi, orientation=90, optical_routing_type=2) + for mzi in mzis + ] + + c = pack(mzis_gc + mzis_heater_gc) + m = c[0] + m.name = "EBeam_JoaquinMatres_14" + _ = m << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + return write_mask_gds_with_metadata(m) + + +def test_mask5() -> Path: + """Ring resonators.""" + rings = [pdk.ring_single_heater(length_x=length_x) for length_x in [4, 6]] + rings = [gf.functions.rotate180(ring) for ring in rings] + rings_gc = [pdk.add_fiber_array_pads_rf(ring) for ring in rings] + + c = pack(rings_gc) + m = c[0] + m.name = "EBeam_JoaquinMatres_15" + _ = m << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + return write_mask_gds_with_metadata(m) + + +def test_mask6() -> Path: + """Splitters 1x2.""" + mmis = [] + mmis += [ + gf.components.cutback_splitter( + component=pdk.ebeam_y_adiabatic_tapers, + cols=1, + rows=7, + bend180=pdk.bend_euler180_sc, + ) + ] + mmis += [ + gf.components.cutback_splitter( + component=pdk.ebeam_y_1550, cols=6, rows=7, bend180=pdk.bend_euler180_sc + ) + ] + mmis_gc = [pdk.add_fiber_array(mmi, optical_routing_type=1) for mmi in mmis] + + c = pack(mmis_gc) + m = c[0] + m.name = "EBeam_JoaquinMatres_16" + _ = m << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + return write_mask_gds_with_metadata(m) + + +def test_mask7() -> Path: + """Splitters 2x2.""" + # mmi2x2_with_sbend = partial( + # gf.components.mmi2x2_with_sbend, + # decorator=tech.add_pins_bbox_siepic_remove_layers, + # ) + + mmi2x2_with_sbend = tech.add_pins_bbox_siepic_remove_layers( + gf.components.mmi2x2_with_sbend().flatten() + ) + mmi2x2_with_sbend.name = "mmi2x2_with_sbend" + + mmis = [] + mmis += [cutback_2x2(component=pdk.ebeam_bdc_te1550, cols=3)] + mmis += [cutback_2x2(component=mmi2x2_with_sbend, cols=6)] + + mmis += [mmi2x2_with_sbend] + mmis += [pdk.ebeam_bdc_te1550()] + + mmis_gc = [ + pdk.add_fiber_array(component=mmi, optical_routing_type=1) for mmi in mmis + ] + c = pack(mmis_gc) + + m = c[0] + m.name = "EBeam_JoaquinMatres_17" + _ = m << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + return write_mask_gds_with_metadata(m) + + +if __name__ == "__main__": + c = test_mask1() # dbr and mzi + # c = test_mask2() # spirals + # c = test_mask3() # coupler and crossing + # c = test_mask4() # heated mzis + # c = test_mask5() # heated rings + # c = test_mask6() # 1x2 mmis + # c = test_mask7() # 2x2mmis + gf.show(c) + # c = partial( + # gf.components.mmi2x2_with_sbend, + # decorator=tech.add_pins_bbox_siepic_remove_layers, + # )() + # c.show() diff --git a/ubc2/ubc_simon.py b/ubc2/ubc_simon.py new file mode 100644 index 0000000..90f52cc --- /dev/null +++ b/ubc2/ubc_simon.py @@ -0,0 +1,604 @@ +"""Sample mask for the edx course Q1 2023.""" + +from functools import partial +from pathlib import Path + +import gdsfactory as gf +import ubcpdk +import ubcpdk.components as pdk +from gdsfactory.components.bend_euler import bend_euler +from gdsfactory.components.coupler_ring import coupler_ring as _coupler_ring +from gdsfactory.components.straight import straight +from gdsfactory.components.via_stack import via_stack_heater_m3 +from gdsfactory.typings import ComponentSpec, CrossSectionSpec, Float2 +from ubc1.write_mask import write_mask_gds_with_metadata +from ubcpdk.tech import LAYER + +via_stack_heater_m3_mini = partial(via_stack_heater_m3, size=(4, 4)) + + +size = (440, 470) +add_gc = ubcpdk.components.add_fiber_array +layer_label = LAYER.TEXT +GC_PITCH = 127 + + +@gf.cell +def ring_single_heater( + gap: 
float = 0.2, + radius: float = 10.0, + length_x: float = 4.0, + length_y: float = 0.6, + coupler_ring: ComponentSpec = _coupler_ring, + bend: ComponentSpec = bend_euler, + cross_section_waveguide_heater: CrossSectionSpec = "xs_sc_heater_metal", + cross_section: CrossSectionSpec = "xs_sc", + via_stack: ComponentSpec = via_stack_heater_m3_mini, + port_orientation: tuple[float, ...] | None = (180, 0), + via_stack_offset: Float2 = (0, 0), + **kwargs, +) -> gf.Component: + """Override from gdsfactory to make ports face different directions. + + Returns a single ring with heater on top. + + ring coupler (cb: bottom) connects to two vertical straights (sl: left, sr: right), + two bends (bl, br) and horizontal straight (wg: top) + + Args: + gap: gap between for coupler. + radius: for the bend and coupler. + length_x: ring coupler length. + length_y: vertical straight length. + coupler_ring: ring coupler function. + bend: 90 degrees bend function. + cross_section_waveguide_heater: for heater. + cross_section: for regular waveguide. + via_stack: for heater to routing metal. + port_orientation: for electrical ports to promote from via_stack. + via_stack_offset: x,y offset for via_stack. + kwargs: cross_section settings. + + .. code:: + + bl-st-br + | | + sl sr length_y + | | + --==cb==-- gap + + length_x + """ + gap = gf.snap.snap_to_grid(gap, nm=2) + + coupler_ring = gf.get_component( + coupler_ring, + bend=bend, + gap=gap, + radius=radius, + length_x=length_x, + cross_section=cross_section, + cross_section_bend=cross_section_waveguide_heater, + **kwargs, + ) + + straight_side = straight( + length=length_y, + cross_section=cross_section_waveguide_heater, + **kwargs, + ) + straight_top = straight( + length=length_x, + cross_section=cross_section_waveguide_heater, + **kwargs, + ) + + bend = gf.get_component( + bend, radius=radius, cross_section=cross_section_waveguide_heater, **kwargs + ) + + c = gf.Component() + cb = c << coupler_ring + sl = c << straight_side + sr = c << straight_side + bl = c << bend + br = c << bend + st = c << straight_top + + sl.connect(port="o1", destination=cb.ports["o2"]) + bl.connect(port="o2", destination=sl.ports["o2"]) + + st.connect(port="o2", destination=bl.ports["o1"]) + br.connect(port="o2", destination=st.ports["o1"]) + sr.connect(port="o1", destination=br.ports["o1"]) + sr.connect(port="o2", destination=cb.ports["o3"]) + + c.add_port("o2", port=cb.ports["o4"]) + c.add_port("o1", port=cb.ports["o1"]) + + via = gf.get_component(via_stack) + c1 = c << via + c2 = c << via + c1.xmax = -length_x / 2 + cb.x - via_stack_offset[0] + c2.xmin = +length_x / 2 + cb.x + via_stack_offset[0] + c1.movey(via_stack_offset[1]) + c2.movey(via_stack_offset[1]) + c.add_ports(c1.get_ports_list(orientation=port_orientation[0]), prefix="e1") + c.add_ports(c2.get_ports_list(orientation=port_orientation[1]), prefix="e2") + c.auto_rename_ports() + return c + + +@gf.cell +def rings_proximity( + num_rings: int = 5, + sep_resonators: float = 2.0, + radius: float = 10.0, +) -> gf.Component: + """A sequence of multiple rings, with the first one having a heater. + + Args: + num_rings: number of rings. + sep_resonators: separation between resonators. + radius: radius of the rings. 
+ + """ + c = gf.Component() + gap = 0.2 # TODO: make variable + width = 0.5 # TODO: make variable + for index in range(num_rings): + if index in [0, num_rings // 2]: + ring = c << ring_single_heater( + length_x=2, via_stack=pdk.via_stack_heater_mtop + ) + ring.rotate(90).movex( + -index * (sep_resonators + 2 * radius + 3 * width - gap) + ) + c.add_port(f"e1_{index}", port=ring.ports["e1"]) + c.add_port(f"e2_{index}", port=ring.ports["e2"]) + else: + ring = c << gf.components.ring_single(length_x=2) + ring.rotate(90).movex( + -index * (sep_resonators + 2 * radius + 3 * width - gap) + ) + c.add_port(f"o1_{index}", port=ring.ports["o1"]) + c.add_port(f"o2_{index}", port=ring.ports["o2"]) + + return c + + +@gf.cell +def disks_proximity( + num_rings=5, + sep_resonators=5, + radius=10.0, +) -> gf.Component: + c = gf.Component() + gap = 0.2 + width = 0.5 + for index in range(num_rings): + if index in [0, num_rings // 2]: + disk = c << gf.components.disk_heater( + wrap_angle_deg=10.0, + radius=radius, + port_orientation=270, + via_stack=pdk.via_stack_heater_mtop, + heater_layer=LAYER.M1_HEATER, + ).rotate(90).movex(-index * (sep_resonators + 2 * radius + 2 * width + gap)) + c.add_port(f"e1_{index}", port=disk.ports["e2"]) + c.add_port(f"e2_{index}", port=disk.ports["e1"]) + else: + disk = c << gf.components.disk( + wrap_angle_deg=10.0, + radius=radius, + ).rotate( + 90 + ).movex(-index * (sep_resonators + 2 * radius + 2 * width + gap)) + c.add_port(f"o1_{index}", port=disk.ports["o1"]) + c.add_port(f"o2_{index}", port=disk.ports["o2"]) + return c + + +def bend_gc_array( + gc_spec: ComponentSpec = pdk.gc_te1550(), + bend_spec: ComponentSpec = gf.components.bend_euler(), +) -> gf.Component: + """Two gc's with opposite bends. + + Not completely needed, was originally intended to make routing easier. + """ + c = gf.Component() + gc_top = c << gf.get_component(gc_spec).movey(GC_PITCH) + bend_top = c << gf.get_component(bend_spec) + bend_top.connect("o1", destination=gc_top.ports["o1"]) + + gc_bot = c << gf.get_component(gc_spec) + bend_bot = c << gf.get_component(bend_spec) + bend_bot.connect("o2", destination=gc_bot.ports["o1"]) + + c.add_port(name="o1", port=bend_top["o2"]) + c.add_port(name="o2", port=bend_bot["o1"]) + return c + + +def resonator_proximity_io( + resonator_array: ComponentSpec = rings_proximity, + num_resonators=9, + sep_resonators=3, + radius_resonators=10.0, + grating_buffer=50.0, + waveguide_buffer=2.5, + gc_bus_buffer=10, +) -> gf.Component: + """Resonator proximity experiment with fiber array. + + Arguments: + resonator_array: component with resonator array (first one needs a heater). + num_resonators, sep_resonators, radius_resonators: resonator_array arguments. + grating_buffer: distance between neighbouring grating couplers. + waveguide_buffer: distance between bus waveguides. + gc_bus_buffer: distance between the closest bus waveguide and grating coupler ports. 
+ """ + c = gf.Component() + resonators = c << resonator_array( + num_rings=num_resonators, + sep_resonators=sep_resonators, + radius=radius_resonators, + ) + resonators.movey(GC_PITCH / 2) + for i in range(num_resonators + 1): + gc_array = c << gf.get_component(bend_gc_array).movex(i * grating_buffer) + gc_array.mirror() + routes = [] + if i == 0: + # Calibration, just add a waveguide + routes.append( + gf.routing.get_route(gc_array.ports["o1"], gc_array.ports["o2"]) + ) + else: + # Route top ports to top GCs + x0 = resonators.ports[f"o2_{i-1}"].x + y0 = resonators.ports[f"o2_{i-1}"].y + x2 = gc_array.ports["o1"].x + y2 = gc_array.ports["o1"].y + routes.append( + gf.routing.get_route_from_waypoints( + [ + (x0, y0), + (x0, y2 - gc_bus_buffer - waveguide_buffer * (i - 1)), + (x2, y2 - gc_bus_buffer - waveguide_buffer * (i - 1)), + (x2, y2), + ] + ) + ) + # Route bottom ports to bottom GCs + x0 = resonators.ports[f"o1_{i-1}"].x + y0 = resonators.ports[f"o1_{i-1}"].y + x2 = gc_array.ports["o2"].x + y2 = gc_array.ports["o2"].y + routes.append( + gf.routing.get_route_from_waypoints( + [ + (x0, y0), + (x0, y2 + gc_bus_buffer + waveguide_buffer * (i - 1)), + (x2, y2 + gc_bus_buffer + waveguide_buffer * (i - 1)), + (x2, y2), + ] + ) + ) + for route in routes: + c.add(route.references) + + c.add_port("e1", port=resonators.ports["e1"]) + c.add_port("e2", port=resonators.ports["e2"]) + + return c + + +def needs_fixing() -> Path: + """Ring resonators with thermal cross-talk. + + Old cell; does not pass verification + + - needs labels. + """ + c = gf.Component() + rings = c << resonator_proximity_io(num_resonators=7) + disks = c << resonator_proximity_io( + resonator_array=disks_proximity, num_resonators=7 + ).movey(-GC_PITCH - 50) + floorplan = c << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + floorplan.x = disks.xmin + size[0] / 2 - 25 + floorplan.y = disks.ymin + size[1] / 2 - 25 + + # Add electrical pads + pads = c << gf.get_component( + gf.components.pad_array, columns=3, orientation=90 + ).rotate(90).movex(130).movey(-160) + route = gf.routing.get_route_electrical( + rings.ports["e1"], pads.ports["e13"], bend="wire_corner" + ) + c.add(route.references) + route = gf.routing.get_route_electrical( + rings.ports["e2"], pads.ports["e12"], bend="wire_corner" + ) + c.add(route.references) + route = gf.routing.get_route_electrical( + disks.ports["e1"], pads.ports["e12"], bend="wire_corner" + ) + c.add(route.references) + route = gf.routing.get_route_electrical( + disks.ports["e2"], pads.ports["e11"], bend="wire_corner" + ) + c.add(route.references) + + return write_mask_gds_with_metadata(c) + + +def test_mask1() -> Path: + """Ring resonators with thermal cross-talk. 
+ + Old cell; does not pass verification + """ + rings_active = [pdk.ring_single_heater(length_x=4)] + rings_passive = [pdk.ring_single(length_x=4)] + + rings_passive = [gf.functions.rotate180(ring) for ring in rings_passive] + rings_active = [gf.functions.rotate180(ring) for ring in rings_active] + + rings_active_gc = [pdk.add_fiber_array_pads_rf(ring) for ring in rings_active] + rings_passive_gc = [pdk.add_fiber_array(ring) for ring in rings_passive] + rings_gc = rings_passive_gc + rings_active_gc + + m = gf.Component() + spacing = 1 + g = m << gf.grid( + rings_gc, + shape=(1, len(rings_gc)), + spacing=(spacing, spacing), + add_ports_prefix=False, + add_ports_suffix=True, + ) + g.xmin = 1 + g.ymin = 1 + + m.add_ports(g.ports) + _ = m << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + m.name = "EBeam_JoaquinMatres_Simon_0" + return write_mask_gds_with_metadata(m) + + +def crosstalk_experiment_parametrized_mask( + name="EBeam_JoaquinMatres_Simon_1", + num_gcs: int = 10, + num_gc_per_pitch: int = 5, + sep_resonators: float = 15.0, + ring_y_offset: float = 0.0, + resonator_func: ComponentSpec = rings_proximity, + fill_layers=None, + fill_margin=2, + fill_size=(0.5, 0.5), + padding=20, +) -> gf.Component: + """Ring resonators with thermal cross-talk. + + Args: + name: for labels. + num_gcs: number of grating couplers (should be <10). + num_gc_per_pitch: number of grating couplers within a GC pitch (5 is optimal). + sep_resonators: distance between the resonators. + ring_y_offset: manual offset for the resonator positions to make the routes DRC clean. + resonator_func: rings_proximity or disks_proximity. + fill_layers: layers to add as unity dennity fill around the rings. + fill_margin: keepout between the fill_layers and the same design layers. + fill_size: tiling size. + padding: how much to extend the fill beyond the ring component. 
+ """ + m = gf.Component() + + # GC array + spacing = GC_PITCH / num_gc_per_pitch - ( + pdk.gc_te1550().ymax - pdk.gc_te1550().ymin + ) + g = m << gf.grid( + [pdk.gc_te1550()] * num_gcs, + shape=(num_gcs, 1), + spacing=(spacing, spacing), + add_ports_prefix=False, + add_ports_suffix=True, + rotation=180, + ) + g.xmin = 25 + g.ymin = 150 + + # Pads + pad_spacing = 125 - (pdk.pad().ymax - pdk.pad().ymin) + pads = m << gf.grid( + [pdk.pad] * 4, + shape=(4, 1), + spacing=(pad_spacing, pad_spacing), + add_ports_prefix=False, + add_ports_suffix=True, + ) + pads.xmin = 360 + pads.ymin = 10 + + # Rings + rings = m << resonator_func(num_rings=num_gcs // 2, sep_resonators=sep_resonators) + rings.rotate(90) + rings.movex(g.xmin + 225).movey((pads.ymin + pads.ymax) / 2 + ring_y_offset) + if fill_layers: + for layer in fill_layers: + _ = m << gf.fill_rectangle( + rings, + fill_size=fill_size, + fill_layers=[layer], + margin=fill_margin, + fill_densities=[1.0], + avoid_layers=[layer], + ) + + # Left optical connections + right_ports = [rings.ports[f"o2_{i}"] for i in range(num_gc_per_pitch)] + left_ports = [g.ports[f"o1_{i}_0"] for i in range(num_gc_per_pitch)] + routes = gf.routing.get_bundle(right_ports, left_ports) + for route in routes: + m.add(route.references) + + # GC loopbacks for easier routing + extended_gc_ports = [] + for i in range(num_gc_per_pitch, num_gcs - 1): + bend = m << gf.get_component(gf.components.bend_euler180) + bend.connect("o2", destination=g.ports[f"o1_{i}_0"]) + escape = ( + m + << gf.get_component( + gf.components.bezier, + control_points=[(0.0, 0.0), (15.0, 0.0), (15.0, 7.5), (30.0, 7.5)], + ).mirror() + ) + escape.connect("o1", destination=bend.ports["o1"]) + straight = m << gf.get_component(gf.components.straight, length=35 - 4 * i) + straight.connect("o1", destination=escape.ports["o2"]) + bend = m << gf.get_component(gf.components.bend_euler) + bend.connect("o1", destination=straight.ports["o2"]) + extended_gc_ports.append(bend.ports["o2"]) + bend = m << gf.get_component(gf.components.bend_euler) + bend.connect("o2", destination=g.ports[f"o1_{num_gcs-1}_0"]) + extended_gc_ports.append(bend.ports["o1"]) + + # Right optical connections + right_ports = [rings.ports[f"o1_{i}"] for i in range(num_gc_per_pitch)] + left_ports = extended_gc_ports + for i, (port1, port2) in enumerate(zip(right_ports, left_ports)): + x0 = port1.x + y0 = port1.y + x2 = port2.x + y2 = port2.y + dx = 50 + (len(right_ports) - i) * 5 + y1 = rings.ymin - 50 - (len(right_ports) - i) * 5 + route = gf.routing.get_route_from_waypoints( + [(x0, y0), (x0 + dx, y0), (x0 + dx, y1), (x2, y1), (x2, y2)] + ) + m.add(route.references) + + # Electrical connections + for ring_index, pad_index in zip([0, num_gcs // 4], [0, 3]): + ring_port = rings.ports[f"e2_{ring_index}"] + pad_port = pads.ports[f"e1_{pad_index}_0"] + x0 = ring_port.x + y0 = ring_port.y + x2 = pad_port.x + y2 = pad_port.y + dx = -50 + route = gf.routing.get_route_from_waypoints( + [(x0, y0), (x0 + dx, y0), (x0 + dx, y2), (x2, y2)], + cross_section="xs_metal_routing", + bend=gf.components.wire_corner, + ) + m.add(route.references) + for ring_index, pad_index in zip([0, num_gcs // 4], [1, 2]): + ring_port = rings.ports[f"e1_{ring_index}"] + pad_port = pads.ports[f"e1_{pad_index}_0"] + x0 = ring_port.x + y0 = ring_port.y + x2 = pad_port.x + y2 = pad_port.y + dx = 50 + route = gf.routing.get_route_from_waypoints( + [(x0, y0), (x0 + dx, y0), (x0 + dx, y2), (x2, y2)], + cross_section="xs_metal_routing", + bend=gf.components.wire_corner, + ) + 
m.add(route.references) + + # Add test labels + # For every experiment, label the input GC (bottom one) + for i in range(num_gc_per_pitch, num_gcs): + unique_name = f"opt_in_TE_1550_device_{name}_{i}" + # Place label at GC port + label = gf.component_layout.Label( + text=unique_name, + origin=g.ports[f"o1_{i}_0"].center, + anchor="o", + magnification=1.0, + rotation=0.0, + layer=layer_label[0], + texttype=layer_label[1], + x_reflection=False, + ) + m.add(label) + # Place label at electrical ports + for index, padname in enumerate(["G1", "S1", "G2", "S2"][::-1]): + label = gf.component_layout.Label( + text=f"elec_{unique_name}_{padname}", + origin=(pads.xmin + 75 / 2, pads.ymin + (125) * index + 75 / 2), + anchor="o", + magnification=1.0, + rotation=0.0, + layer=layer_label[0], + texttype=layer_label[1], + x_reflection=False, + ) + m.add(label) + + m.add_ports(g.ports) + _ = m << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN) + m.name = name + return m + + +def test_mask3() -> Path: + """Rings with thermal crosstalk, close rings""" + m = crosstalk_experiment_parametrized_mask( + name="EBeam_JoaquinMatres_Simon_1", + sep_resonators=5.0, + ring_y_offset=20.0, + resonator_func=rings_proximity, + ) + return write_mask_gds_with_metadata(m) + + +def test_mask4() -> Path: + """Rings with thermal crosstalk, far rings""" + m = crosstalk_experiment_parametrized_mask( + name="EBeam_JoaquinMatres_Simon_2", + sep_resonators=20.0, + ring_y_offset=40.0, + resonator_func=rings_proximity, + ) + return write_mask_gds_with_metadata(m) + + +def test_mask5() -> Path: + """Rings with thermal crosstalk, metal fill""" + m = crosstalk_experiment_parametrized_mask( + name="EBeam_JoaquinMatres_Simon_3", + sep_resonators=20.0, + ring_y_offset=40.0, + resonator_func=rings_proximity, + fill_layers=[LAYER.M1_HEATER], + ) + return write_mask_gds_with_metadata(m) + + +def test_mask6() -> Path: + """Rings with thermal crosstalk, silicon fill""" + m = crosstalk_experiment_parametrized_mask( + name="EBeam_JoaquinMatres_Simon_4", + sep_resonators=20.0, + ring_y_offset=40.0, + resonator_func=rings_proximity, + fill_layers=[LAYER.WG, LAYER.M1_HEATER], + fill_margin=5, + fill_size=(0.5, 0.5), + ) + return write_mask_gds_with_metadata(m) + + +if __name__ == "__main__": + # m = test_mask1() + # m = test_mask3() + # m = test_mask4() + m = test_mask5() + gf.show(m) diff --git a/ubc2/write_mask.py b/ubc2/write_mask.py new file mode 100644 index 0000000..d6520ec --- /dev/null +++ b/ubc2/write_mask.py @@ -0,0 +1,27 @@ +"""Sample mask for the course.""" +from functools import partial +from pathlib import Path + +import gdsfactory as gf +import ubcpdk +from omegaconf import OmegaConf +from ubc1.config import PATH +from ubcpdk.tech import LAYER + +size = (440, 470) +add_gc = ubcpdk.components.add_fiber_array +pack = partial( + gf.pack, max_size=size, add_ports_prefix=False, add_ports_suffix=False, spacing=2 +) + + +def write_mask_gds_with_metadata(m) -> Path: + """Returns gdspath.""" + gdspath = PATH.build / f"{m.name}.gds" + m.write_gds(gdspath=gdspath, with_metadata=True) + metadata_path = gdspath.with_suffix(".yml") + OmegaConf.load(metadata_path) + gf.labels.write_labels.write_labels_gdstk( + gdspath=gdspath, layer_label=LAYER.TEXT, debug=True + ) + return gdspath
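+
+
+if __name__ == "__main__":
+    # Hypothetical usage sketch (assumption, not part of the original patch): every other
+    # module in this commit ends with an ``if __name__ == "__main__"`` block, so the same
+    # convention is sketched here; the packed component and mask name are illustrative only.
+    c = pack([add_gc(ubcpdk.components.straight())])[0]  # pack one test structure into a bin
+    c.name = "EBeam_example"
+    _ = c << gf.components.rectangle(size=size, layer=LAYER.FLOORPLAN)  # add the floorplan
+    gf.show(write_mask_gds_with_metadata(c))  # write GDS + YAML metadata, then open the result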