diff --git a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/INSTALLER b/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst b/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst deleted file mode 100644 index c37cae4..0000000 --- a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2007 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/METADATA b/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/METADATA deleted file mode 100644 index f54bb5c..0000000 --- a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/METADATA +++ /dev/null @@ -1,113 +0,0 @@ -Metadata-Version: 2.1 -Name: Jinja2 -Version: 3.1.2 -Summary: A very fast and expressive template engine. 
-Home-page: https://palletsprojects.com/p/jinja/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://jinja.palletsprojects.com/ -Project-URL: Changes, https://jinja.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/jinja/ -Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/ -Project-URL: Twitter, https://twitter.com/PalletsTeam -Project-URL: Chat, https://discord.gg/pallets -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst -Requires-Dist: MarkupSafe (>=2.0) -Provides-Extra: i18n -Requires-Dist: Babel (>=2.7) ; extra == 'i18n' - -Jinja -===== - -Jinja is a fast, expressive, extensible templating engine. Special -placeholders in the template allow writing code similar to Python -syntax. Then the template is passed data to render the final document. - -It includes: - -- Template inheritance and inclusion. -- Define and import macros within templates. -- HTML templates can use autoescaping to prevent XSS from untrusted - user input. -- A sandboxed environment can safely render untrusted templates. -- AsyncIO support for generating templates and calling async - functions. -- I18N support with Babel. -- Templates are compiled to optimized Python code just-in-time and - cached, or can be compiled ahead-of-time. -- Exceptions point to the correct line in templates to make debugging - easier. -- Extensible filters, tests, functions, and even syntax. - -Jinja's philosophy is that while application logic belongs in Python if -possible, it shouldn't make the template designer's job difficult by -restricting functionality too much. - - -Installing ---------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install -U Jinja2 - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -In A Nutshell ------------- - -.. code-block:: jinja - - {% extends "base.html" %} - {% block title %}Members{% endblock %} - {% block content %} - <ul> - {% for user in users %} - <li><a href="{{ user.url }}">{{ user.username }}</a></li> - {% endfor %} - </ul> - {% endblock %} - - -Donate ------ - -The Pallets organization develops and supports Jinja and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, `please -donate today`_. - -..
_please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://jinja.palletsprojects.com/ -- Changes: https://jinja.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/Jinja2/ -- Source Code: https://github.com/pallets/jinja/ -- Issue Tracker: https://github.com/pallets/jinja/issues/ -- Website: https://palletsprojects.com/p/jinja/ -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - - diff --git a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/RECORD b/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/RECORD deleted file mode 100644 index b145ed2..0000000 --- a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/RECORD +++ /dev/null @@ -1,59 +0,0 @@ -Jinja2-3.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Jinja2-3.1.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 -Jinja2-3.1.2.dist-info/METADATA,sha256=PZ6v2SIidMNixR7MRUX9f7ZWsPwtXanknqiZUmRbh4U,3539 -Jinja2-3.1.2.dist-info/RECORD,, -Jinja2-3.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Jinja2-3.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -Jinja2-3.1.2.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59 -Jinja2-3.1.2.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 -jinja2/__init__.py,sha256=8vGduD8ytwgD6GDSqpYc2m3aU-T7PKOAddvVXgGr_Fs,1927 -jinja2/__pycache__/__init__.cpython-311.pyc,, -jinja2/__pycache__/_identifier.cpython-311.pyc,, -jinja2/__pycache__/async_utils.cpython-311.pyc,, -jinja2/__pycache__/bccache.cpython-311.pyc,, -jinja2/__pycache__/compiler.cpython-311.pyc,, -jinja2/__pycache__/constants.cpython-311.pyc,, -jinja2/__pycache__/debug.cpython-311.pyc,, -jinja2/__pycache__/defaults.cpython-311.pyc,, -jinja2/__pycache__/environment.cpython-311.pyc,, -jinja2/__pycache__/exceptions.cpython-311.pyc,, -jinja2/__pycache__/ext.cpython-311.pyc,, -jinja2/__pycache__/filters.cpython-311.pyc,, -jinja2/__pycache__/idtracking.cpython-311.pyc,, -jinja2/__pycache__/lexer.cpython-311.pyc,, -jinja2/__pycache__/loaders.cpython-311.pyc,, -jinja2/__pycache__/meta.cpython-311.pyc,, -jinja2/__pycache__/nativetypes.cpython-311.pyc,, -jinja2/__pycache__/nodes.cpython-311.pyc,, -jinja2/__pycache__/optimizer.cpython-311.pyc,, -jinja2/__pycache__/parser.cpython-311.pyc,, -jinja2/__pycache__/runtime.cpython-311.pyc,, -jinja2/__pycache__/sandbox.cpython-311.pyc,, -jinja2/__pycache__/tests.cpython-311.pyc,, -jinja2/__pycache__/utils.cpython-311.pyc,, -jinja2/__pycache__/visitor.cpython-311.pyc,, -jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958 -jinja2/async_utils.py,sha256=dHlbTeaxFPtAOQEYOGYh_PHcDT0rsDaUJAFDl_0XtTg,2472 -jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061 -jinja2/compiler.py,sha256=Gs-N8ThJ7OWK4-reKoO8Wh1ZXz95MVphBKNVf75qBr8,72172 -jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 -jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299 -jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 -jinja2/environment.py,sha256=6uHIcc7ZblqOMdx_uYNKqRnnwAF0_nzbyeMP9FFtuh4,61349 -jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 -jinja2/ext.py,sha256=ivr3P7LKbddiXDVez20EflcO3q2aHQwz9P_PgWGHVqE,31502 -jinja2/filters.py,sha256=9js1V-h2RlyW90IhLiBGLM2U-k6SCy2F4BUUMgB3K9Q,53509 
-jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704 -jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726 -jinja2/loaders.py,sha256=BfptfvTVpClUd-leMkHczdyPNYFzp_n7PKOJ98iyHOg,23207 -jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396 -jinja2/nativetypes.py,sha256=DXgORDPRmVWgy034H0xL8eF7qYoK3DrMxs-935d0Fzk,4226 -jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550 -jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650 -jinja2/parser.py,sha256=nHd-DFHbiygvfaPtm9rcQXJChZG7DPsWfiEsqfwKerY,39595 -jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jinja2/runtime.py,sha256=5CmD5BjbEJxSiDNTFBeKCaq8qU4aYD2v6q2EluyExms,33476 -jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584 -jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905 -jinja2/utils.py,sha256=u9jXESxGn8ATZNVolwmkjUVu4SA-tLgV0W7PcSfPfdQ,23965 -jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568 diff --git a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/REQUESTED b/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/WHEEL b/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/WHEEL deleted file mode 100644 index becc9a6..0000000 --- a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt b/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt deleted file mode 100644 index 7b9666c..0000000 --- a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[babel.extractors] -jinja2 = jinja2.ext:babel_extract[i18n] diff --git a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/top_level.txt b/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/top_level.txt deleted file mode 100644 index 7f7afbf..0000000 --- a/server/venv/Lib/site-packages/Jinja2-3.1.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -jinja2 diff --git a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/INSTALLER b/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/LICENSE.rst b/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/LICENSE.rst deleted file mode 100644 index c4700f9..0000000 --- a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. 
Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/METADATA b/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/METADATA deleted file mode 100644 index c79e71f..0000000 --- a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/METADATA +++ /dev/null @@ -1,93 +0,0 @@ -Metadata-Version: 2.1 -Name: MarkupSafe -Version: 2.1.3 -Summary: Safely add untrusted strings to HTML/XML markup. -Home-page: https://palletsprojects.com/p/markupsafe/ -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://markupsafe.palletsprojects.com/ -Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/markupsafe/ -Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/ -Project-URL: Chat, https://discord.gg/pallets -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst - -MarkupSafe -========== - -MarkupSafe implements a text object that escapes characters so it is -safe to use in HTML and XML. Characters that have special meanings are -replaced so that they display as the actual characters. This mitigates -injection attacks, meaning untrusted user input can safely be displayed -on a page. - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - pip install -U MarkupSafe - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -Examples --------- - -.. 
code-block:: pycon - - >>> from markupsafe import Markup, escape - - >>> # escape replaces special characters and wraps in Markup - >>> escape("<script>alert(document.cookie);</script>") - Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;') - - >>> # wrap in Markup to mark text "safe" and prevent escaping - >>> Markup("<strong>Hello</strong>") - Markup('<strong>hello</strong>') - - >>> escape(Markup("<strong>Hello</strong>")) - Markup('<strong>hello</strong>') - - >>> # Markup is a str subclass - >>> # methods and operators escape their arguments - >>> template = Markup("Hello <em>{name}</em>") - >>> template.format(name='"World"') - Markup('Hello <em>&#34;World&#34;</em>') - - -Donate ------ - -The Pallets organization develops and supports MarkupSafe and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -`please donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://markupsafe.palletsprojects.com/ -- Changes: https://markupsafe.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/MarkupSafe/ -- Source Code: https://github.com/pallets/markupsafe/ -- Issue Tracker: https://github.com/pallets/markupsafe/issues/ -- Chat: https://discord.gg/pallets diff --git a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/RECORD b/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/RECORD deleted file mode 100644 index 3da00a1..0000000 --- a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/RECORD +++ /dev/null @@ -1,15 +0,0 @@ -MarkupSafe-2.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -MarkupSafe-2.1.3.dist-info/LICENSE.rst,sha256=RjHsDbX9kKVH4zaBcmTGeYIUM4FG-KyUtKV_lu6MnsQ,1503 -MarkupSafe-2.1.3.dist-info/METADATA,sha256=5gU_TQw6eHpTaqkI6SPeZje6KTPlJPAV82uNiL3naKE,3096 -MarkupSafe-2.1.3.dist-info/RECORD,, -MarkupSafe-2.1.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -MarkupSafe-2.1.3.dist-info/WHEEL,sha256=9wvhO-5NhjjD8YmmxAvXTPQXMDOZ50W5vklzeoqFtkM,102 -MarkupSafe-2.1.3.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 -markupsafe/__init__.py,sha256=GsRaSTjrhvg6c88PnPJNqm4MafU_mFatfXz4-h80-Qc,10642 -markupsafe/__pycache__/__init__.cpython-311.pyc,, -markupsafe/__pycache__/_native.cpython-311.pyc,, -markupsafe/_native.py,sha256=_Q7UsXCOvgdonCgqG3l5asANI6eo50EKnDM-mlwEC5M,1776 -markupsafe/_speedups.c,sha256=n3jzzaJwXcoN8nTFyA53f3vSqsWK2vujI-v6QYifjhQ,7403 -markupsafe/_speedups.cp311-win_amd64.pyd,sha256=TEUZdBQBxs061oYZQP2cGGlADVOAdHridgTeE6XQxTE,15872 -markupsafe/_speedups.pyi,sha256=f5QtwIOP0eLrxh2v5p6SmaYmlcHIGIfmz0DovaqL0OU,238 -markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/REQUESTED b/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/WHEEL b/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/WHEEL deleted file mode 100644 index 30c3ff1..0000000 --- a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) -Root-Is-Purelib: false -Tag: cp311-cp311-win_amd64 - diff --git a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/top_level.txt b/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/top_level.txt deleted file mode 100644 index 75bf729..0000000 ---
a/server/venv/Lib/site-packages/MarkupSafe-2.1.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -markupsafe diff --git a/server/venv/Lib/site-packages/PyWin32.chm b/server/venv/Lib/site-packages/PyWin32.chm deleted file mode 100644 index 7606f82..0000000 Binary files a/server/venv/Lib/site-packages/PyWin32.chm and /dev/null differ diff --git a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/INSTALLER b/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/LICENSE b/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/LICENSE deleted file mode 100644 index 2f1b8e1..0000000 --- a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2017-2021 Ingy döt Net -Copyright (c) 2006-2016 Kirill Simonov - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/METADATA b/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/METADATA deleted file mode 100644 index 6523240..0000000 --- a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/METADATA +++ /dev/null @@ -1,46 +0,0 @@ -Metadata-Version: 2.1 -Name: PyYAML -Version: 6.0.1 -Summary: YAML parser and emitter for Python -Home-page: https://pyyaml.org/ -Download-URL: https://pypi.org/project/PyYAML/ -Author: Kirill Simonov -Author-email: xi@resolvent.net -License: MIT -Project-URL: Bug Tracker, https://github.com/yaml/pyyaml/issues -Project-URL: CI, https://github.com/yaml/pyyaml/actions -Project-URL: Documentation, https://pyyaml.org/wiki/PyYAMLDocumentation -Project-URL: Mailing lists, http://lists.sourceforge.net/lists/listinfo/yaml-core -Project-URL: Source Code, https://github.com/yaml/pyyaml -Platform: Any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Cython -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Text Processing :: Markup -Requires-Python: >=3.6 -License-File: LICENSE - -YAML is a data serialization format designed for human readability -and interaction with scripting languages. PyYAML is a YAML parser -and emitter for Python. - -PyYAML features a complete YAML 1.1 parser, Unicode support, pickle -support, capable extension API, and sensible error messages. PyYAML -supports standard YAML tags and provides Python-specific tags that -allow to represent an arbitrary Python object. - -PyYAML is applicable for a broad range of tasks from complex -configuration files to object serialization and persistence. 
diff --git a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/RECORD b/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/RECORD deleted file mode 100644 index 85b830f..0000000 --- a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/RECORD +++ /dev/null @@ -1,44 +0,0 @@ -PyYAML-6.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -PyYAML-6.0.1.dist-info/LICENSE,sha256=jTko-dxEkP1jVwfLiOsmvXZBAqcoKVQwfT5RZ6V36KQ,1101 -PyYAML-6.0.1.dist-info/METADATA,sha256=i3GoINVJ0RnmgIBLEFUIA75PtRGAhQAZGeWJNZFKogc,2104 -PyYAML-6.0.1.dist-info/RECORD,, -PyYAML-6.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -PyYAML-6.0.1.dist-info/WHEEL,sha256=9wvhO-5NhjjD8YmmxAvXTPQXMDOZ50W5vklzeoqFtkM,102 -PyYAML-6.0.1.dist-info/top_level.txt,sha256=rpj0IVMTisAjh_1vG3Ccf9v5jpCQwAz6cD1IVU5ZdhQ,11 -_yaml/__init__.py,sha256=04Ae_5osxahpJHa3XBZUAf4wi6XX32gR8D6X6p64GEA,1402 -_yaml/__pycache__/__init__.cpython-311.pyc,, -yaml/__init__.py,sha256=bhl05qSeO-1ZxlSRjGrvl2m9nrXb1n9-GQatTN0Mrqc,12311 -yaml/__pycache__/__init__.cpython-311.pyc,, -yaml/__pycache__/composer.cpython-311.pyc,, -yaml/__pycache__/constructor.cpython-311.pyc,, -yaml/__pycache__/cyaml.cpython-311.pyc,, -yaml/__pycache__/dumper.cpython-311.pyc,, -yaml/__pycache__/emitter.cpython-311.pyc,, -yaml/__pycache__/error.cpython-311.pyc,, -yaml/__pycache__/events.cpython-311.pyc,, -yaml/__pycache__/loader.cpython-311.pyc,, -yaml/__pycache__/nodes.cpython-311.pyc,, -yaml/__pycache__/parser.cpython-311.pyc,, -yaml/__pycache__/reader.cpython-311.pyc,, -yaml/__pycache__/representer.cpython-311.pyc,, -yaml/__pycache__/resolver.cpython-311.pyc,, -yaml/__pycache__/scanner.cpython-311.pyc,, -yaml/__pycache__/serializer.cpython-311.pyc,, -yaml/__pycache__/tokens.cpython-311.pyc,, -yaml/_yaml.cp311-win_amd64.pyd,sha256=oMYsA1zRMc4eV0dC2R1BXedhpcXVw1pPNqQbjgsKsZU,233984 -yaml/composer.py,sha256=_Ko30Wr6eDWUeUpauUGT3Lcg9QPBnOPVlTnIMRGJ9FM,4883 -yaml/constructor.py,sha256=kNgkfaeLUkwQYY_Q6Ff1Tz2XVw_pG1xVE9Ak7z-viLA,28639 -yaml/cyaml.py,sha256=6ZrAG9fAYvdVe2FK_w0hmXoG7ZYsoYUwapG8CiC72H0,3851 -yaml/dumper.py,sha256=PLctZlYwZLp7XmeUdwRuv4nYOZ2UBnDIUy8-lKfLF-o,2837 -yaml/emitter.py,sha256=jghtaU7eFwg31bG0B7RZea_29Adi9CKmXq_QjgQpCkQ,43006 -yaml/error.py,sha256=Ah9z-toHJUbE9j-M8YpxgSRM5CgLCcwVzJgLLRF2Fxo,2533 -yaml/events.py,sha256=50_TksgQiE4up-lKo_V-nBy-tAIxkIPQxY5qDhKCeHw,2445 -yaml/loader.py,sha256=UVa-zIqmkFSCIYq_PgSGm4NSJttHY2Rf_zQ4_b1fHN0,2061 -yaml/nodes.py,sha256=gPKNj8pKCdh2d4gr3gIYINnPOaOxGhJAUiYhGRnPE84,1440 -yaml/parser.py,sha256=ilWp5vvgoHFGzvOZDItFoGjD6D42nhlZrZyjAwa0oJo,25495 -yaml/reader.py,sha256=0dmzirOiDG4Xo41RnuQS7K9rkY3xjHiVasfDMNTqCNw,6794 -yaml/representer.py,sha256=IuWP-cAW9sHKEnS0gCqSa894k1Bg4cgTxaDwIcbRQ-Y,14190 -yaml/resolver.py,sha256=9L-VYfm4mWHxUD1Vg4X7rjDRK_7VZd6b92wzq7Y2IKY,9004 -yaml/scanner.py,sha256=YEM3iLZSaQwXcQRg2l2R4MdT0zGP2F9eHkKGKnHyWQY,51279 -yaml/serializer.py,sha256=ChuFgmhU01hj4xgI8GaKv6vfM2Bujwa9i7d2FAHj7cA,4165 -yaml/tokens.py,sha256=lTQIzSVw8Mg9wv459-TjiOQe6wVziqaRlqX2_89rp54,2573 diff --git a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/REQUESTED b/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/WHEEL b/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/WHEEL deleted file mode 100644 index 30c3ff1..0000000 --- a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: 
bdist_wheel (0.40.0) -Root-Is-Purelib: false -Tag: cp311-cp311-win_amd64 - diff --git a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/top_level.txt b/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/top_level.txt deleted file mode 100644 index e6475e9..0000000 --- a/server/venv/Lib/site-packages/PyYAML-6.0.1.dist-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -_yaml -yaml diff --git a/server/venv/Lib/site-packages/_distutils_hack/__init__.py b/server/venv/Lib/site-packages/_distutils_hack/__init__.py deleted file mode 100644 index f987a53..0000000 --- a/server/venv/Lib/site-packages/_distutils_hack/__init__.py +++ /dev/null @@ -1,222 +0,0 @@ -# don't import any costly modules -import sys -import os - - -is_pypy = '__pypy__' in sys.builtin_module_names - - -def warn_distutils_present(): - if 'distutils' not in sys.modules: - return - if is_pypy and sys.version_info < (3, 7): - # PyPy for 3.6 unconditionally imports distutils, so bypass the warning - # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 - return - import warnings - - warnings.warn( - "Distutils was imported before Setuptools, but importing Setuptools " - "also replaces the `distutils` module in `sys.modules`. This may lead " - "to undesirable behaviors or errors. To avoid these issues, avoid " - "using distutils directly, ensure that setuptools is installed in the " - "traditional way (e.g. not an editable install), and/or make sure " - "that setuptools is always imported before distutils." - ) - - -def clear_distutils(): - if 'distutils' not in sys.modules: - return - import warnings - - warnings.warn("Setuptools is replacing distutils.") - mods = [ - name - for name in sys.modules - if name == "distutils" or name.startswith("distutils.") - ] - for name in mods: - del sys.modules[name] - - -def enabled(): - """ - Allow selection of distutils by environment variable. - """ - which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') - return which == 'local' - - -def ensure_local_distutils(): - import importlib - - clear_distutils() - - # With the DistutilsMetaFinder in place, - # perform an import to cause distutils to be - # loaded from setuptools._distutils. Ref #2906. - with shim(): - importlib.import_module('distutils') - - # check that submodules load as expected - core = importlib.import_module('distutils.core') - assert '_distutils' in core.__file__, core.__file__ - assert 'setuptools._distutils.log' not in sys.modules - - -def do_override(): - """ - Ensure that the local copy of distutils is preferred over stdlib. - - See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 - for more motivation. - """ - if enabled(): - warn_distutils_present() - ensure_local_distutils() - - -class _TrivialRe: - def __init__(self, *patterns): - self._patterns = patterns - - def match(self, string): - return all(pat in string for pat in self._patterns) - - -class DistutilsMetaFinder: - def find_spec(self, fullname, path, target=None): - # optimization: only consider top level modules and those - # found in the CPython test suite. 
- if path is not None and not fullname.startswith('test.'): - return - - method_name = 'spec_for_{fullname}'.format(**locals()) - method = getattr(self, method_name, lambda: None) - return method() - - def spec_for_distutils(self): - if self.is_cpython(): - return - - import importlib - import importlib.abc - import importlib.util - - try: - mod = importlib.import_module('setuptools._distutils') - except Exception: - # There are a couple of cases where setuptools._distutils - # may not be present: - # - An older Setuptools without a local distutils is - # taking precedence. Ref #2957. - # - Path manipulation during sitecustomize removes - # setuptools from the path but only after the hook - # has been loaded. Ref #2980. - # In either case, fall back to stdlib behavior. - return - - class DistutilsLoader(importlib.abc.Loader): - def create_module(self, spec): - mod.__name__ = 'distutils' - return mod - - def exec_module(self, module): - pass - - return importlib.util.spec_from_loader( - 'distutils', DistutilsLoader(), origin=mod.__file__ - ) - - @staticmethod - def is_cpython(): - """ - Suppress supplying distutils for CPython (build and tests). - Ref #2965 and #3007. - """ - return os.path.isfile('pybuilddir.txt') - - def spec_for_pip(self): - """ - Ensure stdlib distutils when running under pip. - See pypa/pip#8761 for rationale. - """ - if self.pip_imported_during_build(): - return - clear_distutils() - self.spec_for_distutils = lambda: None - - @classmethod - def pip_imported_during_build(cls): - """ - Detect if pip is being imported in a build script. Ref #2355. - """ - import traceback - - return any( - cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None) - ) - - @staticmethod - def frame_file_is_setup(frame): - """ - Return True if the indicated frame suggests a setup.py file. - """ - # some frames may not have __file__ (#2940) - return frame.f_globals.get('__file__', '').endswith('setup.py') - - def spec_for_sensitive_tests(self): - """ - Ensure stdlib distutils when running select tests under CPython. 
- - python/cpython#91169 - """ - clear_distutils() - self.spec_for_distutils = lambda: None - - sensitive_tests = ( - [ - 'test.test_distutils', - 'test.test_peg_generator', - 'test.test_importlib', - ] - if sys.version_info < (3, 10) - else [ - 'test.test_distutils', - ] - ) - - -for name in DistutilsMetaFinder.sensitive_tests: - setattr( - DistutilsMetaFinder, - f'spec_for_{name}', - DistutilsMetaFinder.spec_for_sensitive_tests, - ) - - -DISTUTILS_FINDER = DistutilsMetaFinder() - - -def add_shim(): - DISTUTILS_FINDER in sys.meta_path or insert_shim() - - -class shim: - def __enter__(self): - insert_shim() - - def __exit__(self, exc, value, tb): - remove_shim() - - -def insert_shim(): - sys.meta_path.insert(0, DISTUTILS_FINDER) - - -def remove_shim(): - try: - sys.meta_path.remove(DISTUTILS_FINDER) - except ValueError: - pass diff --git a/server/venv/Lib/site-packages/_distutils_hack/override.py b/server/venv/Lib/site-packages/_distutils_hack/override.py deleted file mode 100644 index 2cc433a..0000000 --- a/server/venv/Lib/site-packages/_distutils_hack/override.py +++ /dev/null @@ -1 +0,0 @@ -__import__('_distutils_hack').do_override() diff --git a/server/venv/Lib/site-packages/_yaml/__init__.py b/server/venv/Lib/site-packages/_yaml/__init__.py deleted file mode 100644 index 7baa8c4..0000000 --- a/server/venv/Lib/site-packages/_yaml/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# This is a stub package designed to roughly emulate the _yaml -# extension module, which previously existed as a standalone module -# and has been moved into the `yaml` package namespace. -# It does not perfectly mimic its old counterpart, but should get -# close enough for anyone who's relying on it even when they shouldn't. -import yaml - -# in some circumstances, the yaml module we imoprted may be from a different version, so we need -# to tread carefully when poking at it here (it may not have the attributes we expect) -if not getattr(yaml, '__with_libyaml__', False): - from sys import version_info - - exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError - raise exc("No module named '_yaml'") -else: - from yaml._yaml import * - import warnings - warnings.warn( - 'The _yaml extension module is now located at yaml._yaml' - ' and its location is subject to change. To use the' - ' LibYAML-based parser and emitter, import from `yaml`:' - ' `from yaml import CLoader as Loader, CDumper as Dumper`.', - DeprecationWarning - ) - del warnings - # Don't `del yaml` here because yaml is actually an existing - # namespace member of _yaml. - -__name__ = '_yaml' -# If the module is top-level (i.e. not a part of any specific package) -# then the attribute should be set to ''. 
-# https://docs.python.org/3.8/library/types.html -__package__ = '' diff --git a/server/venv/Lib/site-packages/adodbapi/__init__.py b/server/venv/Lib/site-packages/adodbapi/__init__.py deleted file mode 100644 index 0d769e0..0000000 --- a/server/venv/Lib/site-packages/adodbapi/__init__.py +++ /dev/null @@ -1,74 +0,0 @@ -"""adodbapi - A python DB API 2.0 (PEP 249) interface to Microsoft ADO - -Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole -* http://sourceforge.net/projects/adodbapi -""" -import sys -import time - -from .adodbapi import Connection, Cursor, __version__, connect, dateconverter -from .apibase import ( - BINARY, - DATETIME, - NUMBER, - ROWID, - STRING, - DatabaseError, - DataError, - Error, - FetchFailedError, - IntegrityError, - InterfaceError, - InternalError, - NotSupportedError, - OperationalError, - ProgrammingError, - Warning, - apilevel, - paramstyle, - threadsafety, -) - - -def Binary(aString): - """This function constructs an object capable of holding a binary (long) string value.""" - return bytes(aString) - - -def Date(year, month, day): - "This function constructs an object holding a date value." - return dateconverter.Date(year, month, day) - - -def Time(hour, minute, second): - "This function constructs an object holding a time value." - return dateconverter.Time(hour, minute, second) - - -def Timestamp(year, month, day, hour, minute, second): - "This function constructs an object holding a time stamp value." - return dateconverter.Timestamp(year, month, day, hour, minute, second) - - -def DateFromTicks(ticks): - """This function constructs an object holding a date value from the given ticks value - (number of seconds since the epoch; see the documentation of the standard Python time module for details). - """ - return Date(*time.gmtime(ticks)[:3]) - - -def TimeFromTicks(ticks): - """This function constructs an object holding a time value from the given ticks value - (number of seconds since the epoch; see the documentation of the standard Python time module for details). 
- """ - return Time(*time.gmtime(ticks)[3:6]) - - -def TimestampFromTicks(ticks): - """This function constructs an object holding a time stamp value from the given - ticks value (number of seconds since the epoch; - see the documentation of the standard Python time module for details).""" - return Timestamp(*time.gmtime(ticks)[:6]) - - -version = "adodbapi v" + __version__ diff --git a/server/venv/Lib/site-packages/adodbapi/ado_consts.py b/server/venv/Lib/site-packages/adodbapi/ado_consts.py deleted file mode 100644 index ecb2147..0000000 --- a/server/venv/Lib/site-packages/adodbapi/ado_consts.py +++ /dev/null @@ -1,281 +0,0 @@ -# ADO enumerated constants documented on MSDN: -# http://msdn.microsoft.com/en-us/library/ms678353(VS.85).aspx - -# IsolationLevelEnum -adXactUnspecified = -1 -adXactBrowse = 0x100 -adXactChaos = 0x10 -adXactCursorStability = 0x1000 -adXactIsolated = 0x100000 -adXactReadCommitted = 0x1000 -adXactReadUncommitted = 0x100 -adXactRepeatableRead = 0x10000 -adXactSerializable = 0x100000 - -# CursorLocationEnum -adUseClient = 3 -adUseServer = 2 - -# CursorTypeEnum -adOpenDynamic = 2 -adOpenForwardOnly = 0 -adOpenKeyset = 1 -adOpenStatic = 3 -adOpenUnspecified = -1 - -# CommandTypeEnum -adCmdText = 1 -adCmdStoredProc = 4 -adSchemaTables = 20 - -# ParameterDirectionEnum -adParamInput = 1 -adParamInputOutput = 3 -adParamOutput = 2 -adParamReturnValue = 4 -adParamUnknown = 0 -directions = { - 0: "Unknown", - 1: "Input", - 2: "Output", - 3: "InputOutput", - 4: "Return", -} - - -def ado_direction_name(ado_dir): - try: - return "adParam" + directions[ado_dir] - except: - return "unknown direction (" + str(ado_dir) + ")" - - -# ObjectStateEnum -adStateClosed = 0 -adStateOpen = 1 -adStateConnecting = 2 -adStateExecuting = 4 -adStateFetching = 8 - -# FieldAttributeEnum -adFldMayBeNull = 0x40 - -# ConnectModeEnum -adModeUnknown = 0 -adModeRead = 1 -adModeWrite = 2 -adModeReadWrite = 3 -adModeShareDenyRead = 4 -adModeShareDenyWrite = 8 -adModeShareExclusive = 12 -adModeShareDenyNone = 16 -adModeRecursive = 0x400000 - -# XactAttributeEnum -adXactCommitRetaining = 131072 -adXactAbortRetaining = 262144 - -ado_error_TIMEOUT = -2147217871 - -# DataTypeEnum - ADO Data types documented at: -# http://msdn2.microsoft.com/en-us/library/ms675318.aspx -adArray = 0x2000 -adEmpty = 0x0 -adBSTR = 0x8 -adBigInt = 0x14 -adBinary = 0x80 -adBoolean = 0xB -adChapter = 0x88 -adChar = 0x81 -adCurrency = 0x6 -adDBDate = 0x85 -adDBTime = 0x86 -adDBTimeStamp = 0x87 -adDate = 0x7 -adDecimal = 0xE -adDouble = 0x5 -adError = 0xA -adFileTime = 0x40 -adGUID = 0x48 -adIDispatch = 0x9 -adIUnknown = 0xD -adInteger = 0x3 -adLongVarBinary = 0xCD -adLongVarChar = 0xC9 -adLongVarWChar = 0xCB -adNumeric = 0x83 -adPropVariant = 0x8A -adSingle = 0x4 -adSmallInt = 0x2 -adTinyInt = 0x10 -adUnsignedBigInt = 0x15 -adUnsignedInt = 0x13 -adUnsignedSmallInt = 0x12 -adUnsignedTinyInt = 0x11 -adUserDefined = 0x84 -adVarBinary = 0xCC -adVarChar = 0xC8 -adVarNumeric = 0x8B -adVarWChar = 0xCA -adVariant = 0xC -adWChar = 0x82 -# Additional constants used by introspection but not ADO itself -AUTO_FIELD_MARKER = -1000 - -adTypeNames = { - adBSTR: "adBSTR", - adBigInt: "adBigInt", - adBinary: "adBinary", - adBoolean: "adBoolean", - adChapter: "adChapter", - adChar: "adChar", - adCurrency: "adCurrency", - adDBDate: "adDBDate", - adDBTime: "adDBTime", - adDBTimeStamp: "adDBTimeStamp", - adDate: "adDate", - adDecimal: "adDecimal", - adDouble: "adDouble", - adEmpty: "adEmpty", - adError: "adError", - adFileTime: "adFileTime", - adGUID: 
"adGUID", - adIDispatch: "adIDispatch", - adIUnknown: "adIUnknown", - adInteger: "adInteger", - adLongVarBinary: "adLongVarBinary", - adLongVarChar: "adLongVarChar", - adLongVarWChar: "adLongVarWChar", - adNumeric: "adNumeric", - adPropVariant: "adPropVariant", - adSingle: "adSingle", - adSmallInt: "adSmallInt", - adTinyInt: "adTinyInt", - adUnsignedBigInt: "adUnsignedBigInt", - adUnsignedInt: "adUnsignedInt", - adUnsignedSmallInt: "adUnsignedSmallInt", - adUnsignedTinyInt: "adUnsignedTinyInt", - adUserDefined: "adUserDefined", - adVarBinary: "adVarBinary", - adVarChar: "adVarChar", - adVarNumeric: "adVarNumeric", - adVarWChar: "adVarWChar", - adVariant: "adVariant", - adWChar: "adWChar", -} - - -def ado_type_name(ado_type): - return adTypeNames.get(ado_type, "unknown type (" + str(ado_type) + ")") - - -# here in decimal, sorted by value -# adEmpty 0 Specifies no value (DBTYPE_EMPTY). -# adSmallInt 2 Indicates a two-byte signed integer (DBTYPE_I2). -# adInteger 3 Indicates a four-byte signed integer (DBTYPE_I4). -# adSingle 4 Indicates a single-precision floating-point value (DBTYPE_R4). -# adDouble 5 Indicates a double-precision floating-point value (DBTYPE_R8). -# adCurrency 6 Indicates a currency value (DBTYPE_CY). Currency is a fixed-point number -# with four digits to the right of the decimal point. It is stored in an eight-byte signed integer scaled by 10,000. -# adDate 7 Indicates a date value (DBTYPE_DATE). A date is stored as a double, the whole part of which is -# the number of days since December 30, 1899, and the fractional part of which is the fraction of a day. -# adBSTR 8 Indicates a null-terminated character string (Unicode) (DBTYPE_BSTR). -# adIDispatch 9 Indicates a pointer to an IDispatch interface on a COM object (DBTYPE_IDISPATCH). -# adError 10 Indicates a 32-bit error code (DBTYPE_ERROR). -# adBoolean 11 Indicates a boolean value (DBTYPE_BOOL). -# adVariant 12 Indicates an Automation Variant (DBTYPE_VARIANT). -# adIUnknown 13 Indicates a pointer to an IUnknown interface on a COM object (DBTYPE_IUNKNOWN). -# adDecimal 14 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_DECIMAL). -# adTinyInt 16 Indicates a one-byte signed integer (DBTYPE_I1). -# adUnsignedTinyInt 17 Indicates a one-byte unsigned integer (DBTYPE_UI1). -# adUnsignedSmallInt 18 Indicates a two-byte unsigned integer (DBTYPE_UI2). -# adUnsignedInt 19 Indicates a four-byte unsigned integer (DBTYPE_UI4). -# adBigInt 20 Indicates an eight-byte signed integer (DBTYPE_I8). -# adUnsignedBigInt 21 Indicates an eight-byte unsigned integer (DBTYPE_UI8). -# adFileTime 64 Indicates a 64-bit value representing the number of 100-nanosecond intervals since -# January 1, 1601 (DBTYPE_FILETIME). -# adGUID 72 Indicates a globally unique identifier (GUID) (DBTYPE_GUID). -# adBinary 128 Indicates a binary value (DBTYPE_BYTES). -# adChar 129 Indicates a string value (DBTYPE_STR). -# adWChar 130 Indicates a null-terminated Unicode character string (DBTYPE_WSTR). -# adNumeric 131 Indicates an exact numeric value with a fixed precision and scale (DBTYPE_NUMERIC). -# adUserDefined 132 Indicates a user-defined variable (DBTYPE_UDT). -# adUserDefined 132 Indicates a user-defined variable (DBTYPE_UDT). -# adDBDate 133 Indicates a date value (yyyymmdd) (DBTYPE_DBDATE). -# adDBTime 134 Indicates a time value (hhmmss) (DBTYPE_DBTIME). -# adDBTimeStamp 135 Indicates a date/time stamp (yyyymmddhhmmss plus a fraction in billionths) (DBTYPE_DBTIMESTAMP). 
-# adChapter 136 Indicates a four-byte chapter value that identifies rows in a child rowset (DBTYPE_HCHAPTER). -# adPropVariant 138 Indicates an Automation PROPVARIANT (DBTYPE_PROP_VARIANT). -# adVarNumeric 139 Indicates a numeric value (Parameter object only). -# adVarChar 200 Indicates a string value (Parameter object only). -# adLongVarChar 201 Indicates a long string value (Parameter object only). -# adVarWChar 202 Indicates a null-terminated Unicode character string (Parameter object only). -# adLongVarWChar 203 Indicates a long null-terminated Unicode string value (Parameter object only). -# adVarBinary 204 Indicates a binary value (Parameter object only). -# adLongVarBinary 205 Indicates a long binary value (Parameter object only). -# adArray (Does not apply to ADOX.) 0x2000 A flag value, always combined with another data type constant, -# that indicates an array of that other data type. - -# Error codes to names -adoErrors = { - 0xE7B: "adErrBoundToCommand", - 0xE94: "adErrCannotComplete", - 0xEA4: "adErrCantChangeConnection", - 0xC94: "adErrCantChangeProvider", - 0xE8C: "adErrCantConvertvalue", - 0xE8D: "adErrCantCreate", - 0xEA3: "adErrCatalogNotSet", - 0xE8E: "adErrColumnNotOnThisRow", - 0xD5D: "adErrDataConversion", - 0xE89: "adErrDataOverflow", - 0xE9A: "adErrDelResOutOfScope", - 0xEA6: "adErrDenyNotSupported", - 0xEA7: "adErrDenyTypeNotSupported", - 0xCB3: "adErrFeatureNotAvailable", - 0xEA5: "adErrFieldsUpdateFailed", - 0xC93: "adErrIllegalOperation", - 0xCAE: "adErrInTransaction", - 0xE87: "adErrIntegrityViolation", - 0xBB9: "adErrInvalidArgument", - 0xE7D: "adErrInvalidConnection", - 0xE7C: "adErrInvalidParamInfo", - 0xE82: "adErrInvalidTransaction", - 0xE91: "adErrInvalidURL", - 0xCC1: "adErrItemNotFound", - 0xBCD: "adErrNoCurrentRecord", - 0xE83: "adErrNotExecuting", - 0xE7E: "adErrNotReentrant", - 0xE78: "adErrObjectClosed", - 0xD27: "adErrObjectInCollection", - 0xD5C: "adErrObjectNotSet", - 0xE79: "adErrObjectOpen", - 0xBBA: "adErrOpeningFile", - 0xE80: "adErrOperationCancelled", - 0xE96: "adErrOutOfSpace", - 0xE88: "adErrPermissionDenied", - 0xE9E: "adErrPropConflicting", - 0xE9B: "adErrPropInvalidColumn", - 0xE9C: "adErrPropInvalidOption", - 0xE9D: "adErrPropInvalidValue", - 0xE9F: "adErrPropNotAllSettable", - 0xEA0: "adErrPropNotSet", - 0xEA1: "adErrPropNotSettable", - 0xEA2: "adErrPropNotSupported", - 0xBB8: "adErrProviderFailed", - 0xE7A: "adErrProviderNotFound", - 0xBBB: "adErrReadFile", - 0xE93: "adErrResourceExists", - 0xE92: "adErrResourceLocked", - 0xE97: "adErrResourceOutOfScope", - 0xE8A: "adErrSchemaViolation", - 0xE8B: "adErrSignMismatch", - 0xE81: "adErrStillConnecting", - 0xE7F: "adErrStillExecuting", - 0xE90: "adErrTreePermissionDenied", - 0xE8F: "adErrURLDoesNotExist", - 0xE99: "adErrURLNamedRowDoesNotExist", - 0xE98: "adErrUnavailable", - 0xE84: "adErrUnsafeOperation", - 0xE95: "adErrVolumeNotFound", - 0xBBC: "adErrWriteFile", -} diff --git a/server/venv/Lib/site-packages/adodbapi/adodbapi.py b/server/venv/Lib/site-packages/adodbapi/adodbapi.py deleted file mode 100644 index 8f7c045..0000000 --- a/server/venv/Lib/site-packages/adodbapi/adodbapi.py +++ /dev/null @@ -1,1223 +0,0 @@ -"""adodbapi - A python DB API 2.0 (PEP 249) interface to Microsoft ADO - -Copyright (C) 2002 Henrik Ekelund, versions 2.1 and later by Vernon Cole -* http://sourceforge.net/projects/pywin32 -* https://github.com/mhammond/pywin32 -* http://sourceforge.net/projects/adodbapi - - This library is free software; you can redistribute it and/or - modify it under the terms of the 
GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - django adaptations and refactoring by Adam Vandenberg - -DB-API 2.0 specification: http://www.python.org/dev/peps/pep-0249/ - -This module source should run correctly in CPython versions 2.7 and later, -or IronPython version 2.7 and later, -or, after running through 2to3.py, CPython 3.4 or later. -""" - -__version__ = "2.6.2.0" -version = "adodbapi v" + __version__ - -import copy -import decimal -import os -import sys -import weakref - -from . import ado_consts as adc, apibase as api, process_connect_string - -try: - verbose = int(os.environ["ADODBAPI_VERBOSE"]) -except: - verbose = False -if verbose: - print(version) - -# --- define objects to smooth out IronPython <-> CPython differences -onWin32 = False # assume the worst -if api.onIronPython: - from clr import Reference - from System import ( - Activator, - Array, - Byte, - DateTime, - DBNull, - Decimal as SystemDecimal, - Type, - ) - - def Dispatch(dispatch): - type = Type.GetTypeFromProgID(dispatch) - return Activator.CreateInstance(type) - - def getIndexedValue(obj, index): - return obj.Item[index] - -else: # try pywin32 - try: - import pythoncom - import pywintypes - import win32com.client - - onWin32 = True - - def Dispatch(dispatch): - return win32com.client.Dispatch(dispatch) - - except ImportError: - import warnings - - warnings.warn( - "pywin32 package (or IronPython) required for adodbapi.", ImportWarning - ) - - def getIndexedValue(obj, index): - return obj(index) - - -from collections.abc import Mapping - -# --- define objects to smooth out Python3000 <-> Python 2.x differences -unicodeType = str -longType = int -StringTypes = str -maxint = sys.maxsize - - -# ----------------- The .connect method ----------------- -def make_COM_connecter(): - try: - if onWin32: - pythoncom.CoInitialize() # v2.1 Paj - c = Dispatch("ADODB.Connection") # connect _after_ CoIninialize v2.1.1 adamvan - except: - raise api.InterfaceError( - "Windows COM Error: Dispatch('ADODB.Connection') failed." - ) - return c - - -def connect(*args, **kwargs): # --> a db-api connection object - """Connect to a database. - - call using: - :connection_string -- An ADODB formatted connection string, see: - * http://www.connectionstrings.com - * http://www.asp101.com/articles/john/connstring/default.asp - :timeout -- A command timeout value, in seconds (default 30 seconds) - """ - co = Connection() # make an empty connection object - - kwargs = process_connect_string.process(args, kwargs, True) - - try: # connect to the database, using the connection information in kwargs - co.connect(kwargs) - return co - except Exception as e: - message = 'Error opening connection to "%s"' % co.connection_string - raise api.OperationalError(e, message) - - -# so you could use something like: -# myConnection.paramstyle = 'named' -# The programmer may also change the default. 
-# For example, if I were using django, I would say: -# import adodbapi as Database -# Database.adodbapi.paramstyle = 'format' - -# ------- other module level defaults -------- -defaultIsolationLevel = adc.adXactReadCommitted -# Set defaultIsolationLevel on module level before creating the connection. -# For example: -# import adodbapi, ado_consts -# adodbapi.adodbapi.defaultIsolationLevel=ado_consts.adXactBrowse" -# -# Set defaultCursorLocation on module level before creating the connection. -# It may be one of the "adUse..." consts. -defaultCursorLocation = adc.adUseClient # changed from adUseServer as of v 2.3.0 - -dateconverter = api.pythonDateTimeConverter() # default - - -def format_parameters(ADOparameters, show_value=False): - """Format a collection of ADO Command Parameters. - - Used by error reporting in _execute_command. - """ - try: - if show_value: - desc = [ - 'Name: %s, Dir.: %s, Type: %s, Size: %s, Value: "%s", Precision: %s, NumericScale: %s' - % ( - p.Name, - adc.directions[p.Direction], - adc.adTypeNames.get(p.Type, str(p.Type) + " (unknown type)"), - p.Size, - p.Value, - p.Precision, - p.NumericScale, - ) - for p in ADOparameters - ] - else: - desc = [ - "Name: %s, Dir.: %s, Type: %s, Size: %s, Precision: %s, NumericScale: %s" - % ( - p.Name, - adc.directions[p.Direction], - adc.adTypeNames.get(p.Type, str(p.Type) + " (unknown type)"), - p.Size, - p.Precision, - p.NumericScale, - ) - for p in ADOparameters - ] - return "[" + "\n".join(desc) + "]" - except: - return "[]" - - -def _configure_parameter(p, value, adotype, settings_known): - """Configure the given ADO Parameter 'p' with the Python 'value'.""" - - if adotype in api.adoBinaryTypes: - p.Size = len(value) - p.AppendChunk(value) - - elif isinstance(value, StringTypes): # v2.1 Jevon - L = len(value) - if adotype in api.adoStringTypes: # v2.2.1 Cole - if settings_known: - L = min(L, p.Size) # v2.1 Cole limit data to defined size - p.Value = value[:L] # v2.1 Jevon & v2.1 Cole - else: - p.Value = value # dont limit if db column is numeric - if L > 0: # v2.1 Cole something does not like p.Size as Zero - p.Size = L # v2.1 Jevon - - elif isinstance(value, decimal.Decimal): - if api.onIronPython: - s = str(value) - p.Value = s - p.Size = len(s) - else: - p.Value = value - exponent = value.as_tuple()[2] - digit_count = len(value.as_tuple()[1]) - p.Precision = digit_count - if exponent == 0: - p.NumericScale = 0 - elif exponent < 0: - p.NumericScale = -exponent - if p.Precision < p.NumericScale: - p.Precision = p.NumericScale - else: # exponent > 0: - p.NumericScale = 0 - p.Precision = digit_count + exponent - - elif type(value) in dateconverter.types: - if settings_known and adotype in api.adoDateTimeTypes: - p.Value = dateconverter.COMDate(value) - else: # probably a string - # provide the date as a string in the format 'YYYY-MM-dd' - s = dateconverter.DateObjectToIsoFormatString(value) - p.Value = s - p.Size = len(s) - - elif api.onIronPython and isinstance(value, longType): # Iron Python Long - s = str(value) # feature workaround for IPy 2.0 - p.Value = s - - elif adotype == adc.adEmpty: # ADO will not let you specify a null column - p.Type = ( - adc.adInteger - ) # so we will fake it to be an integer (just to have something) - p.Value = None # and pass in a Null *value* - - # For any other type, set the value and let pythoncom do the right thing. 
- else: - p.Value = value - - -# # # # # ----- the Class that defines a connection ----- # # # # # -class Connection(object): - # include connection attributes as class attributes required by api definition. - Warning = api.Warning - Error = api.Error - InterfaceError = api.InterfaceError - DataError = api.DataError - DatabaseError = api.DatabaseError - OperationalError = api.OperationalError - IntegrityError = api.IntegrityError - InternalError = api.InternalError - NotSupportedError = api.NotSupportedError - ProgrammingError = api.ProgrammingError - FetchFailedError = api.FetchFailedError # (special for django) - # ...class attributes... (can be overridden by instance attributes) - verbose = api.verbose - - @property - def dbapi(self): # a proposed db-api version 3 extension. - "Return a reference to the DBAPI module for this Connection." - return api - - def __init__(self): # now define the instance attributes - self.connector = None - self.paramstyle = api.paramstyle - self.supportsTransactions = False - self.connection_string = "" - self.cursors = weakref.WeakValueDictionary() - self.dbms_name = "" - self.dbms_version = "" - self.errorhandler = None # use the standard error handler for this instance - self.transaction_level = 0 # 0 == Not in a transaction, at the top level - self._autocommit = False - - def connect(self, kwargs, connection_maker=make_COM_connecter): - if verbose > 9: - print("kwargs=", repr(kwargs)) - try: - self.connection_string = ( - kwargs["connection_string"] % kwargs - ) # insert keyword arguments - except Exception as e: - self._raiseConnectionError( - KeyError, "Python string format error in connection string->" - ) - self.timeout = kwargs.get("timeout", 30) - self.mode = kwargs.get("mode", adc.adModeUnknown) - self.kwargs = kwargs - if verbose: - print('%s attempting: "%s"' % (version, self.connection_string)) - self.connector = connection_maker() - self.connector.ConnectionTimeout = self.timeout - self.connector.ConnectionString = self.connection_string - self.connector.Mode = self.mode - - try: - self.connector.Open() # Open the ADO connection - except api.Error: - self._raiseConnectionError( - api.DatabaseError, - "ADO error trying to Open=%s" % self.connection_string, - ) - - try: # Stefan Fuchs; support WINCCOLEDBProvider - if getIndexedValue(self.connector.Properties, "Transaction DDL").Value != 0: - self.supportsTransactions = True - except pywintypes.com_error: - pass # Stefan Fuchs - self.dbms_name = getIndexedValue(self.connector.Properties, "DBMS Name").Value - try: # Stefan Fuchs - self.dbms_version = getIndexedValue( - self.connector.Properties, "DBMS Version" - ).Value - except pywintypes.com_error: - pass # Stefan Fuchs - self.connector.CursorLocation = defaultCursorLocation # v2.1 Rose - if self.supportsTransactions: - self.connector.IsolationLevel = defaultIsolationLevel - self._autocommit = bool(kwargs.get("autocommit", False)) - if not self._autocommit: - self.transaction_level = ( - self.connector.BeginTrans() - ) # Disables autocommit & inits transaction_level - else: - self._autocommit = True - if "paramstyle" in kwargs: - self.paramstyle = kwargs["paramstyle"] # let setattr do the error checking - self.messages = [] - if verbose: - print("adodbapi New connection at %X" % id(self)) - - def _raiseConnectionError(self, errorclass, errorvalue): - eh = self.errorhandler - if eh is None: - eh = api.standardErrorHandler - eh(self, None, errorclass, errorvalue) - - def _closeAdoConnection(self): # all v2.1 Rose - """close the underlying ADO 
Connection object, - rolling it back first if it supports transactions.""" - if self.connector is None: - return - if not self._autocommit: - if self.transaction_level: - try: - self.connector.RollbackTrans() - except: - pass - self.connector.Close() - if verbose: - print("adodbapi Closed connection at %X" % id(self)) - - def close(self): - """Close the connection now (rather than whenever __del__ is called). - - The connection will be unusable from this point forward; - an Error (or subclass) exception will be raised if any operation is attempted with the connection. - The same applies to all cursor objects trying to use the connection. - """ - for crsr in list(self.cursors.values())[ - : - ]: # copy the list, then close each one - crsr.close(dont_tell_me=True) # close without back-link clearing - self.messages = [] - try: - self._closeAdoConnection() # v2.1 Rose - except Exception as e: - self._raiseConnectionError(sys.exc_info()[0], sys.exc_info()[1]) - - self.connector = None # v2.4.2.2 fix subtle timeout bug - # per M.Hammond: "I expect the benefits of uninitializing are probably fairly small, - # so never uninitializing will probably not cause any problems." - - def commit(self): - """Commit any pending transaction to the database. - - Note that if the database supports an auto-commit feature, - this must be initially off. An interface method may be provided to turn it back on. - Database modules that do not support transactions should implement this method with void functionality. - """ - self.messages = [] - if not self.supportsTransactions: - return - - try: - self.transaction_level = self.connector.CommitTrans() - if verbose > 1: - print("commit done on connection at %X" % id(self)) - if not ( - self._autocommit - or (self.connector.Attributes & adc.adXactAbortRetaining) - ): - # If attributes has adXactCommitRetaining it performs retaining commits that is, - # calling CommitTrans automatically starts a new transaction. Not all providers support this. - # If not, we will have to start a new transaction by this command: - self.transaction_level = self.connector.BeginTrans() - except Exception as e: - self._raiseConnectionError(api.ProgrammingError, e) - - def _rollback(self): - """In case a database does provide transactions this method causes the the database to roll back to - the start of any pending transaction. Closing a connection without committing the changes first will - cause an implicit rollback to be performed. - - If the database does not support the functionality required by the method, the interface should - throw an exception in case the method is used. - The preferred approach is to not implement the method and thus have Python generate - an AttributeError in case the method is requested. This allows the programmer to check for database - capabilities using the standard hasattr() function. - - For some dynamically configured interfaces it may not be appropriate to require dynamically making - the method available. These interfaces should then raise a NotSupportedError to indicate the - non-ability to perform the roll back when the method is invoked. 
- """ - self.messages = [] - if ( - self.transaction_level - ): # trying to roll back with no open transaction causes an error - try: - self.transaction_level = self.connector.RollbackTrans() - if verbose > 1: - print("rollback done on connection at %X" % id(self)) - if not self._autocommit and not ( - self.connector.Attributes & adc.adXactAbortRetaining - ): - # If attributes has adXactAbortRetaining it performs retaining aborts that is, - # calling RollbackTrans automatically starts a new transaction. Not all providers support this. - # If not, we will have to start a new transaction by this command: - if ( - not self.transaction_level - ): # if self.transaction_level == 0 or self.transaction_level is None: - self.transaction_level = self.connector.BeginTrans() - except Exception as e: - self._raiseConnectionError(api.ProgrammingError, e) - - def __setattr__(self, name, value): - if name == "autocommit": # extension: allow user to turn autocommit on or off - if self.supportsTransactions: - object.__setattr__(self, "_autocommit", bool(value)) - try: - self._rollback() # must clear any outstanding transactions - except: - pass - return - elif name == "paramstyle": - if value not in api.accepted_paramstyles: - self._raiseConnectionError( - api.NotSupportedError, - 'paramstyle="%s" not in:%s' - % (value, repr(api.accepted_paramstyles)), - ) - elif name == "variantConversions": - value = copy.copy( - value - ) # make a new copy -- no changes in the default, please - object.__setattr__(self, name, value) - - def __getattr__(self, item): - if ( - item == "rollback" - ): # the rollback method only appears if the database supports transactions - if self.supportsTransactions: - return ( - self._rollback - ) # return the rollback method so the caller can execute it. - else: - raise AttributeError("this data provider does not support Rollback") - elif item == "autocommit": - return self._autocommit - else: - raise AttributeError( - 'no such attribute in ADO connection object as="%s"' % item - ) - - def cursor(self): - "Return a new Cursor Object using the connection." - self.messages = [] - c = Cursor(self) - return c - - def _i_am_here(self, crsr): - "message from a new cursor proclaiming its existence" - oid = id(crsr) - self.cursors[oid] = crsr - - def _i_am_closing(self, crsr): - "message from a cursor giving connection a chance to clean up" - try: - del self.cursors[id(crsr)] - except: - pass - - def printADOerrors(self): - j = self.connector.Errors.Count - if j: - print("ADO Errors:(%i)" % j) - for e in self.connector.Errors: - print("Description: %s" % e.Description) - print("Error: %s %s " % (e.Number, adc.adoErrors.get(e.Number, "unknown"))) - if e.Number == adc.ado_error_TIMEOUT: - print( - "Timeout Error: Try using adodbpi.connect(constr,timeout=Nseconds)" - ) - print("Source: %s" % e.Source) - print("NativeError: %s" % e.NativeError) - print("SQL State: %s" % e.SQLState) - - def _suggest_error_class(self): - """Introspect the current ADO Errors and determine an appropriate error class. - - Error.SQLState is a SQL-defined error condition, per the SQL specification: - http://www.contrib.andrew.cmu.edu/~shadow/sql/sql1992.txt - - The 23000 class of errors are integrity errors. - Error 40002 is a transactional integrity error. 
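Because __getattr__ above only exposes rollback when the provider reports transaction support, callers can use the PEP 249 capability probe that the _rollback docstring describes. A sketch, assuming conn is an open adodbapi Connection on a Windows host:

```python
# hasattr() triggers __getattr__, which raises AttributeError for
# providers without transaction support -- so this probe is safe.
if hasattr(conn, "rollback"):
    conn.rollback()   # undo any pending work
else:
    print("provider has no transaction support")
```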
- """ - if self.connector is not None: - for e in self.connector.Errors: - state = str(e.SQLState) - if state.startswith("23") or state == "40002": - return api.IntegrityError - return api.DatabaseError - - def __del__(self): - try: - self._closeAdoConnection() # v2.1 Rose - except: - pass - self.connector = None - - def __enter__(self): # Connections are context managers - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type: - self._rollback() # automatic rollback on errors - else: - self.commit() - - def get_table_names(self): - schema = self.connector.OpenSchema(20) # constant = adSchemaTables - - tables = [] - while not schema.EOF: - name = getIndexedValue(schema.Fields, "TABLE_NAME").Value - tables.append(name) - schema.MoveNext() - del schema - return tables - - -# # # # # ----- the Class that defines a cursor ----- # # # # # -class Cursor(object): - ## ** api required attributes: - ## description... - ## This read-only attribute is a sequence of 7-item sequences. - ## Each of these sequences contains information describing one result column: - ## (name, type_code, display_size, internal_size, precision, scale, null_ok). - ## This attribute will be None for operations that do not return rows or if the - ## cursor has not had an operation invoked via the executeXXX() method yet. - ## The type_code can be interpreted by comparing it to the Type Objects specified in the section below. - ## rowcount... - ## This read-only attribute specifies the number of rows that the last executeXXX() produced - ## (for DQL statements like select) or affected (for DML statements like update or insert). - ## The attribute is -1 in case no executeXXX() has been performed on the cursor or - ## the rowcount of the last operation is not determinable by the interface.[7] - ## arraysize... - ## This read/write attribute specifies the number of rows to fetch at a time with fetchmany(). - ## It defaults to 1 meaning to fetch a single row at a time. - ## Implementations must observe this value with respect to the fetchmany() method, - ## but are free to interact with the database a single row at a time. - ## It may also be used in the implementation of executemany(). - ## ** extension attributes: - ## paramstyle... - ## allows the programmer to override the connection's default paramstyle - ## errorhandler... 
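A usage sketch for the context-manager protocol above: commit on a clean exit, automatic rollback if the block raises. The connection string, table, and column names are illustrative, not part of the library:

```python
import adodbapi

# Hypothetical SQL Server connection string; requires a Windows host.
constr = "Provider=SQLOLEDB.1;Data Source=.;Initial Catalog=test;Integrated Security=SSPI"
with adodbapi.connect(constr) as conn:      # __enter__ returns the connection
    with conn.cursor() as crsr:             # cursor closes when the block ends
        crsr.execute("update people set age = age + 1 where id = ?", [3])
# Leaving the with-block called conn.commit(); an exception inside it
# would have triggered the automatic _rollback() instead.
```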
- ## allows the programmer to override the connection's default error handler - - def __init__(self, connection): - self.command = None - self._ado_prepared = False - self.messages = [] - self.connection = connection - self.paramstyle = connection.paramstyle # used for overriding the paramstyle - self._parameter_names = [] - self.recordset_is_remote = False - self.rs = None # the ADO recordset for this cursor - self.converters = [] # conversion function for each column - self.columnNames = {} # names of columns {lowercase name : number,...} - self.numberOfColumns = 0 - self._description = None - self.rowcount = -1 - self.errorhandler = connection.errorhandler - self.arraysize = 1 - connection._i_am_here(self) - if verbose: - print( - "%s New cursor at %X on conn %X" - % (version, id(self), id(self.connection)) - ) - - def __iter__(self): # [2.1 Zamarev] - return iter(self.fetchone, None) # [2.1 Zamarev] - - def prepare(self, operation): - self.command = operation - self._description = None - self._ado_prepared = "setup" - - def __next__(self): - r = self.fetchone() - if r: - return r - raise StopIteration - - def __enter__(self): - "Allow database cursors to be used with context managers." - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - "Allow database cursors to be used with context managers." - self.close() - - def _raiseCursorError(self, errorclass, errorvalue): - eh = self.errorhandler - if eh is None: - eh = api.standardErrorHandler - eh(self.connection, self, errorclass, errorvalue) - - def build_column_info(self, recordset): - self.converters = [] # convertion function for each column - self.columnNames = {} # names of columns {lowercase name : number,...} - self._description = None - - # if EOF and BOF are true at the same time, there are no records in the recordset - if (recordset is None) or (recordset.State == adc.adStateClosed): - self.rs = None - self.numberOfColumns = 0 - return - self.rs = recordset # v2.1.1 bkline - self.recordset_format = api.RS_ARRAY if api.onIronPython else api.RS_WIN_32 - self.numberOfColumns = recordset.Fields.Count - try: - varCon = self.connection.variantConversions - except AttributeError: - varCon = api.variantConversions - for i in range(self.numberOfColumns): - f = getIndexedValue(self.rs.Fields, i) - try: - self.converters.append( - varCon[f.Type] - ) # conversion function for this column - except KeyError: - self._raiseCursorError( - api.InternalError, "Data column of Unknown ADO type=%s" % f.Type - ) - self.columnNames[f.Name.lower()] = i # columnNames lookup - - def _makeDescriptionFromRS(self): - # Abort if closed or no recordset. - if self.rs is None: - self._description = None - return - desc = [] - for i in range(self.numberOfColumns): - f = getIndexedValue(self.rs.Fields, i) - if self.rs.EOF or self.rs.BOF: - display_size = None - else: - display_size = ( - f.ActualSize - ) # TODO: Is this the correct defintion according to the DB API 2 Spec ? 
- null_ok = bool(f.Attributes & adc.adFldMayBeNull) # v2.1 Cole - desc.append( - ( - f.Name, - f.Type, - display_size, - f.DefinedSize, - f.Precision, - f.NumericScale, - null_ok, - ) - ) - self._description = desc - - def get_description(self): - if not self._description: - self._makeDescriptionFromRS() - return self._description - - def __getattr__(self, item): - if item == "description": - return self.get_description() - object.__getattribute__( - self, item - ) # may get here on Remote attribute calls for existing attributes - - def format_description(self, d): - """Format db_api description tuple for printing.""" - if self.description is None: - self._makeDescriptionFromRS() - if isinstance(d, int): - d = self.description[d] - desc = ( - "Name= %s, Type= %s, DispSize= %s, IntSize= %s, Precision= %s, Scale= %s NullOK=%s" - % ( - d[0], - adc.adTypeNames.get(d[1], str(d[1]) + " (unknown type)"), - d[2], - d[3], - d[4], - d[5], - d[6], - ) - ) - return desc - - def close(self, dont_tell_me=False): - """Close the cursor now (rather than whenever __del__ is called). - The cursor will be unusable from this point forward; an Error (or subclass) - exception will be raised if any operation is attempted with the cursor. - """ - if self.connection is None: - return - self.messages = [] - if ( - self.rs and self.rs.State != adc.adStateClosed - ): # rs exists and is open #v2.1 Rose - self.rs.Close() # v2.1 Rose - self.rs = None # let go of the recordset so ADO will let it be disposed #v2.1 Rose - if not dont_tell_me: - self.connection._i_am_closing( - self - ) # take me off the connection's cursors list - self.connection = ( - None # this will make all future method calls on me throw an exception - ) - if verbose: - print("adodbapi Closed cursor at %X" % id(self)) - - def __del__(self): - try: - self.close() - except: - pass - - def _new_command(self, command_type=adc.adCmdText): - self.cmd = None - self.messages = [] - - if self.connection is None: - self._raiseCursorError(api.InterfaceError, None) - return - try: - self.cmd = Dispatch("ADODB.Command") - self.cmd.ActiveConnection = self.connection.connector - self.cmd.CommandTimeout = self.connection.timeout - self.cmd.CommandType = command_type - self.cmd.CommandText = self.commandText - self.cmd.Prepared = bool(self._ado_prepared) - except: - self._raiseCursorError( - api.DatabaseError, - 'Error creating new ADODB.Command object for "%s"' - % repr(self.commandText), - ) - - def _execute_command(self): - # Stored procedures may have an integer return value - self.return_value = None - recordset = None - count = -1 # default value - if verbose: - print('Executing command="%s"' % self.commandText) - try: - # ----- the actual SQL is executed here --- - if api.onIronPython: - ra = Reference[int]() - recordset = self.cmd.Execute(ra) - count = ra.Value - else: # pywin32 - recordset, count = self.cmd.Execute() - # ----- ------------------------------- --- - except Exception as e: - _message = "" - if hasattr(e, "args"): - _message += str(e.args) + "\n" - _message += "Command:\n%s\nParameters:\n%s" % ( - self.commandText, - format_parameters(self.cmd.Parameters, True), - ) - klass = self.connection._suggest_error_class() - self._raiseCursorError(klass, _message) - try: - self.rowcount = recordset.RecordCount - except: - self.rowcount = count - self.build_column_info(recordset) - - # The ADO documentation hints that obtaining the recordcount may be timeconsuming - # "If the Recordset object does not support approximate positioning, this property - # may 
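One common way to consume the 7-item description tuples assembled above; this assumes crsr has just executed a SELECT on an open adodbapi cursor:

```python
import adodbapi.ado_consts as adc

# Each entry: (name, type_code, display_size, internal_size,
#              precision, scale, null_ok)
for name, type_code, disp, internal, precision, scale, null_ok in crsr.description:
    print(
        "%s: %s nullable=%s"
        % (name, adc.adTypeNames.get(type_code, "unknown"), null_ok)
    )
```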
be a significant drain on resources # [ekelund]
-        # Therefore, COM will not return rowcount for server-side cursors. [Cole]
-        # Client-side cursors (the default since v2.8) will force a static
-        # cursor, and rowcount will then be set accurately [Cole]
-
-    def get_rowcount(self):
-        return self.rowcount
-
-    def get_returned_parameters(self):
-        """With some providers, returned parameters and the .return_value are not available until
-        after the last recordset has been read. In that case, you must call nextset() until it
-        returns None, then call this method to get your returned information."""
-
-        retLst = (
-            []
-        )  # stored procedures may return altered parameters, including an added "return value" item
-        for p in tuple(self.cmd.Parameters):
-            if verbose > 2:
-                print(
-                    'Returned=Name: %s, Dir.: %s, Type: %s, Size: %s, Value: "%s",'
-                    " Precision: %s, NumericScale: %s"
-                    % (
-                        p.Name,
-                        adc.directions[p.Direction],
-                        adc.adTypeNames.get(p.Type, str(p.Type) + " (unknown type)"),
-                        p.Size,
-                        p.Value,
-                        p.Precision,
-                        p.NumericScale,
-                    )
-                )
-            pyObject = api.convert_to_python(p.Value, api.variantConversions[p.Type])
-            if p.Direction == adc.adParamReturnValue:
-                self.returnValue = (
-                    pyObject  # also load the undocumented attribute (Vernon's Error!)
-                )
-                self.return_value = pyObject
-            else:
-                retLst.append(pyObject)
-        return retLst  # return the parameter list to the caller
-
-    def callproc(self, procname, parameters=None):
-        """Call a stored database procedure with the given name.
-        The sequence of parameters must contain one entry for each
-        argument that the sproc expects. The result of the
-        call is returned as a modified copy of the input
-        sequence. Input parameters are left untouched; output and
-        input/output parameters are replaced with possibly new values.
-
-        The sproc may also provide a result set as output,
-        which is available through the standard .fetch*() methods.
-        Extension: A "return_value" property may be set on the
-        cursor if the sproc defines an integer return value.
-        """
-        self._parameter_names = []
-        self.commandText = procname
-        self._new_command(command_type=adc.adCmdStoredProc)
-        self._buildADOparameterList(parameters, sproc=True)
-        if verbose > 2:
-            print(
-                "Calling Stored Proc with Params=",
-                format_parameters(self.cmd.Parameters, True),
-            )
-        self._execute_command()
-        return self.get_returned_parameters()
-
-    def _reformat_operation(self, operation, parameters):
-        if self.paramstyle in ("format", "pyformat"):  # convert %s to ?
-            operation, self._parameter_names = api.changeFormatToQmark(operation)
-        elif self.paramstyle == "named" or (
-            self.paramstyle == "dynamic" and isinstance(parameters, Mapping)
-        ):
-            operation, self._parameter_names = api.changeNamedToQmark(
-                operation
-            )  # convert :name to ?
-        return operation
-
-    def _buildADOparameterList(self, parameters, sproc=False):
-        self.parameters = parameters
-        if parameters is None:
-            parameters = []
-
-        # Note: ADO does not preserve the parameter list, even if "Prepared" is True, so we must build every time.
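A hypothetical callproc() example. It assumes a SQL Server procedure add_one that returns its argument plus one, and an open cursor crsr; as the docstring above notes, some providers only populate return_value after all result sets have been drained with nextset():

```python
# Assumed to exist on the server:
#   CREATE PROCEDURE add_one @n int AS RETURN @n + 1
params = crsr.callproc("add_one", [41])
print(params)             # -> [41]  (modified copy of the input sequence)
print(crsr.return_value)  # -> 42   (integer return value, an extension)
```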
- parameters_known = False - if sproc: # needed only if we are calling a stored procedure - try: # attempt to use ADO's parameter list - self.cmd.Parameters.Refresh() - if verbose > 2: - print( - "ADO detected Params=", - format_parameters(self.cmd.Parameters, True), - ) - print("Program Parameters=", repr(parameters)) - parameters_known = True - except api.Error: - if verbose: - print("ADO Parameter Refresh failed") - pass - else: - if len(parameters) != self.cmd.Parameters.Count - 1: - raise api.ProgrammingError( - "You must supply %d parameters for this stored procedure" - % (self.cmd.Parameters.Count - 1) - ) - if sproc or parameters != []: - i = 0 - if parameters_known: # use ado parameter list - if self._parameter_names: # named parameters - for i, pm_name in enumerate(self._parameter_names): - p = getIndexedValue(self.cmd.Parameters, i) - try: - _configure_parameter( - p, parameters[pm_name], p.Type, parameters_known - ) - except Exception as e: - _message = ( - "Error Converting Parameter %s: %s, %s <- %s\n" - % ( - p.Name, - adc.ado_type_name(p.Type), - p.Value, - repr(parameters[pm_name]), - ) - ) - self._raiseCursorError( - api.DataError, _message + "->" + repr(e.args) - ) - else: # regular sequence of parameters - for value in parameters: - p = getIndexedValue(self.cmd.Parameters, i) - if ( - p.Direction == adc.adParamReturnValue - ): # this is an extra parameter added by ADO - i += 1 # skip the extra - p = getIndexedValue(self.cmd.Parameters, i) - try: - _configure_parameter(p, value, p.Type, parameters_known) - except Exception as e: - _message = ( - "Error Converting Parameter %s: %s, %s <- %s\n" - % ( - p.Name, - adc.ado_type_name(p.Type), - p.Value, - repr(value), - ) - ) - self._raiseCursorError( - api.DataError, _message + "->" + repr(e.args) - ) - i += 1 - else: # -- build own parameter list - if ( - self._parameter_names - ): # we expect a dictionary of parameters, this is the list of expected names - for parm_name in self._parameter_names: - elem = parameters[parm_name] - adotype = api.pyTypeToADOType(elem) - p = self.cmd.CreateParameter( - parm_name, adotype, adc.adParamInput - ) - _configure_parameter(p, elem, adotype, parameters_known) - try: - self.cmd.Parameters.Append(p) - except Exception as e: - _message = "Error Building Parameter %s: %s, %s <- %s\n" % ( - p.Name, - adc.ado_type_name(p.Type), - p.Value, - repr(elem), - ) - self._raiseCursorError( - api.DataError, _message + "->" + repr(e.args) - ) - else: # expecting the usual sequence of parameters - if sproc: - p = self.cmd.CreateParameter( - "@RETURN_VALUE", adc.adInteger, adc.adParamReturnValue - ) - self.cmd.Parameters.Append(p) - - for elem in parameters: - name = "p%i" % i - adotype = api.pyTypeToADOType(elem) - p = self.cmd.CreateParameter( - name, adotype, adc.adParamInput - ) # Name, Type, Direction, Size, Value - _configure_parameter(p, elem, adotype, parameters_known) - try: - self.cmd.Parameters.Append(p) - except Exception as e: - _message = "Error Building Parameter %s: %s, %s <- %s\n" % ( - p.Name, - adc.ado_type_name(p.Type), - p.Value, - repr(elem), - ) - self._raiseCursorError( - api.DataError, _message + "->" + repr(e.args) - ) - i += 1 - if self._ado_prepared == "setup": - self._ado_prepared = ( - True # parameters will be "known" by ADO next loop - ) - - def execute(self, operation, parameters=None): - """Prepare and execute a database operation (query or command). - - Parameters may be provided as sequence or mapping and will be bound to variables in the operation. 
- Variables are specified in a database-specific notation - (see the module's paramstyle attribute for details). [5] - A reference to the operation will be retained by the cursor. - If the same operation object is passed in again, then the cursor - can optimize its behavior. This is most effective for algorithms - where the same operation is used, but different parameters are bound to it (many times). - - For maximum efficiency when reusing an operation, it is best to use - the setinputsizes() method to specify the parameter types and sizes ahead of time. - It is legal for a parameter to not match the predefined information; - the implementation should compensate, possibly with a loss of efficiency. - - The parameters may also be specified as list of tuples to e.g. insert multiple rows in - a single operation, but this kind of usage is depreciated: executemany() should be used instead. - - Return value is not defined. - - [5] The module will use the __getitem__ method of the parameters object to map either positions - (integers) or names (strings) to parameter values. This allows for both sequences and mappings - to be used as input. - The term "bound" refers to the process of binding an input value to a database execution buffer. - In practical terms, this means that the input value is directly used as a value in the operation. - The client should not be required to "escape" the value so that it can be used -- the value - should be equal to the actual database value.""" - if ( - self.command is not operation - or self._ado_prepared == "setup" - or not hasattr(self, "commandText") - ): - if self.command is not operation: - self._ado_prepared = False - self.command = operation - self._parameter_names = [] - self.commandText = ( - operation - if (self.paramstyle == "qmark" or not parameters) - else self._reformat_operation(operation, parameters) - ) - self._new_command() - self._buildADOparameterList(parameters) - if verbose > 3: - print("Params=", format_parameters(self.cmd.Parameters, True)) - self._execute_command() - - def executemany(self, operation, seq_of_parameters): - """Prepare a database operation (query or command) - and then execute it against all parameter sequences or mappings found in the sequence seq_of_parameters. - - Return values are not defined. - """ - self.messages = list() - total_recordcount = 0 - - self.prepare(operation) - for params in seq_of_parameters: - self.execute(self.command, params) - if self.rowcount == -1: - total_recordcount = -1 - if total_recordcount != -1: - total_recordcount += self.rowcount - self.rowcount = total_recordcount - - def _fetch(self, limit=None): - """Fetch rows from the current recordset. - - limit -- Number of rows to fetch, or None (default) to fetch all rows. 
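A brief executemany() sketch matching the prepare-once, execute-per-row loop above; the table and data are illustrative:

```python
rows = [("Alice", 30), ("Bob", 25)]
crsr.executemany("insert into people (name, age) values (?, ?)", rows)
print(crsr.rowcount)  # 2, or -1 if any single execution count was unknown
```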
- """ - if self.connection is None or self.rs is None: - self._raiseCursorError( - api.FetchFailedError, "fetch() on closed connection or empty query set" - ) - return - - if self.rs.State == adc.adStateClosed or self.rs.BOF or self.rs.EOF: - return list() - if limit: # limit number of rows retrieved - ado_results = self.rs.GetRows(limit) - else: # get all rows - ado_results = self.rs.GetRows() - if ( - self.recordset_format == api.RS_ARRAY - ): # result of GetRows is a two-dimension array - length = ( - len(ado_results) // self.numberOfColumns - ) # length of first dimension - else: # pywin32 - length = len(ado_results[0]) # result of GetRows is tuples in a tuple - fetchObject = api.SQLrows( - ado_results, length, self - ) # new object to hold the results of the fetch - return fetchObject - - def fetchone(self): - """Fetch the next row of a query result set, returning a single sequence, - or None when no more data is available. - - An Error (or subclass) exception is raised if the previous call to executeXXX() - did not produce any result set or no call was issued yet. - """ - self.messages = [] - result = self._fetch(1) - if result: # return record (not list of records) - return result[0] - return None - - def fetchmany(self, size=None): - """Fetch the next set of rows of a query result, returning a list of tuples. An empty sequence is returned when no more rows are available. - - The number of rows to fetch per call is specified by the parameter. - If it is not given, the cursor's arraysize determines the number of rows to be fetched. - The method should try to fetch as many rows as indicated by the size parameter. - If this is not possible due to the specified number of rows not being available, - fewer rows may be returned. - - An Error (or subclass) exception is raised if the previous call to executeXXX() - did not produce any result set or no call was issued yet. - - Note there are performance considerations involved with the size parameter. - For optimal performance, it is usually best to use the arraysize attribute. - If the size parameter is used, then it is best for it to retain the same value from - one fetchmany() call to the next. - """ - self.messages = [] - if size is None: - size = self.arraysize - return self._fetch(size) - - def fetchall(self): - """Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples). - - Note that the cursor's arraysize attribute - can affect the performance of this operation. - An Error (or subclass) exception is raised if the previous call to executeXXX() - did not produce any result set or no call was issued yet. - """ - self.messages = [] - return self._fetch() - - def nextset(self): - """Skip to the next available recordset, discarding any remaining rows from the current recordset. - - If there are no more sets, the method returns None. Otherwise, it returns a true - value and subsequent calls to the fetch methods will return rows from the next result set. - - An Error (or subclass) exception is raised if the previous call to executeXXX() - did not produce any result set or no call was issued yet. 
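A typical batched-read loop over the fetch methods above, assuming an open cursor and an illustrative query:

```python
crsr.execute("select name, age from people")
while True:
    batch = crsr.fetchmany(100)     # arraysize is used when size is omitted
    if not batch:                   # empty result ends the loop
        break
    for row in batch:
        print(row["name"], row[1])  # SQLrow: access by name or by position
```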
- """ - self.messages = [] - if self.connection is None or self.rs is None: - self._raiseCursorError( - api.OperationalError, - ("nextset() on closed connection or empty query set"), - ) - return None - - if api.onIronPython: - try: - recordset = self.rs.NextRecordset() - except TypeError: - recordset = None - except api.Error as exc: - self._raiseCursorError(api.NotSupportedError, exc.args) - else: # pywin32 - try: # [begin 2.1 ekelund] - rsTuple = self.rs.NextRecordset() # - except pywintypes.com_error as exc: # return appropriate error - self._raiseCursorError( - api.NotSupportedError, exc.args - ) # [end 2.1 ekelund] - recordset = rsTuple[0] - if recordset is None: - return None - self.build_column_info(recordset) - return True - - def setinputsizes(self, sizes): - pass - - def setoutputsize(self, size, column=None): - pass - - def _last_query(self): # let the programmer see what query we actually used - try: - if self.parameters == None: - ret = self.commandText - else: - ret = "%s,parameters=%s" % (self.commandText, repr(self.parameters)) - except: - ret = None - return ret - - query = property(_last_query, None, None, "returns the last query executed") - - -if __name__ == "__main__": - raise api.ProgrammingError(version + " cannot be run as a main program.") diff --git a/server/venv/Lib/site-packages/adodbapi/apibase.py b/server/venv/Lib/site-packages/adodbapi/apibase.py deleted file mode 100644 index 50770c9..0000000 --- a/server/venv/Lib/site-packages/adodbapi/apibase.py +++ /dev/null @@ -1,794 +0,0 @@ -"""adodbapi.apibase - A python DB API 2.0 (PEP 249) interface to Microsoft ADO - -Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole -* http://sourceforge.net/projects/pywin32 -* http://sourceforge.net/projects/adodbapi -""" - -import datetime -import decimal -import numbers -import sys -import time - -# noinspection PyUnresolvedReferences -from . import ado_consts as adc - -verbose = False # debugging flag - -onIronPython = sys.platform == "cli" -if onIronPython: # we need type definitions for odd data we may need to convert - # noinspection PyUnresolvedReferences - from System import DateTime, DBNull - - NullTypes = (type(None), DBNull) -else: - DateTime = type(NotImplemented) # should never be seen on win32 - NullTypes = type(None) - -# --- define objects to smooth out Python3 <-> Python 2.x differences -unicodeType = str -longType = int -StringTypes = str -makeByteBuffer = bytes -memoryViewType = memoryview -_BaseException = Exception - -try: # jdhardy -- handle bytes under IronPython & Py3 - bytes -except NameError: - bytes = str # define it for old Pythons - - -# ------- Error handlers ------ -def standardErrorHandler(connection, cursor, errorclass, errorvalue): - err = (errorclass, errorvalue) - try: - connection.messages.append(err) - except: - pass - if cursor is not None: - try: - cursor.messages.append(err) - except: - pass - raise errorclass(errorvalue) - - -# Note: _BaseException is defined differently between Python 2.x and 3.x -class Error(_BaseException): - pass # Exception that is the base class of all other error - # exceptions. You can use this to catch all errors with one - # single 'except' statement. Warnings are not considered - # errors and thus should not use this class as base. It must - # be a subclass of the Python StandardError (defined in the - # module exceptions). 
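The exception tree rooted at Error above follows PEP 249, so a single handler can catch the whole family. A sketch for a Windows host; the provider name is deliberately bogus so the connect fails:

```python
import adodbapi

try:
    adodbapi.connect("Provider=NoSuchProvider;Data Source=nowhere")
except adodbapi.Error as e:   # root class; DatabaseError etc. sit under it
    print("connect failed:", e)
```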
- - -class Warning(_BaseException): - pass - - -class InterfaceError(Error): - pass - - -class DatabaseError(Error): - pass - - -class InternalError(DatabaseError): - pass - - -class OperationalError(DatabaseError): - pass - - -class ProgrammingError(DatabaseError): - pass - - -class IntegrityError(DatabaseError): - pass - - -class DataError(DatabaseError): - pass - - -class NotSupportedError(DatabaseError): - pass - - -class FetchFailedError(OperationalError): - """ - Error is used by RawStoredProcedureQuerySet to determine when a fetch - failed due to a connection being closed or there is no record set - returned. (Non-standard, added especially for django) - """ - - pass - - -# # # # # ----- Type Objects and Constructors ----- # # # # # -# Many databases need to have the input in a particular format for binding to an operation's input parameters. -# For example, if an input is destined for a DATE column, then it must be bound to the database in a particular -# string format. Similar problems exist for "Row ID" columns or large binary items (e.g. blobs or RAW columns). -# This presents problems for Python since the parameters to the executeXXX() method are untyped. -# When the database module sees a Python string object, it doesn't know if it should be bound as a simple CHAR -# column, as a raw BINARY item, or as a DATE. -# -# To overcome this problem, a module must provide the constructors defined below to create objects that can -# hold special values. When passed to the cursor methods, the module can then detect the proper type of -# the input parameter and bind it accordingly. - -# A Cursor Object's description attribute returns information about each of the result columns of a query. -# The type_code must compare equal to one of Type Objects defined below. Type Objects may be equal to more than -# one type code (e.g. DATETIME could be equal to the type codes for date, time and timestamp columns; -# see the Implementation Hints below for details). - -# SQL NULL values are represented by the Python None singleton on input and output. - -# Note: Usage of Unix ticks for database interfacing can cause troubles because of the limited date range they cover. - - -# def Date(year,month,day): -# "This function constructs an object holding a date value. " -# return dateconverter.date(year,month,day) #dateconverter.Date(year,month,day) -# -# def Time(hour,minute,second): -# "This function constructs an object holding a time value. " -# return dateconverter.time(hour, minute, second) # dateconverter.Time(hour,minute,second) -# -# def Timestamp(year,month,day,hour,minute,second): -# "This function constructs an object holding a time stamp value. " -# return dateconverter.datetime(year,month,day,hour,minute,second) -# -# def DateFromTicks(ticks): -# """This function constructs an object holding a date value from the given ticks value -# (number of seconds since the epoch; see the documentation of the standard Python time module for details). """ -# return Date(*time.gmtime(ticks)[:3]) -# -# def TimeFromTicks(ticks): -# """This function constructs an object holding a time value from the given ticks value -# (number of seconds since the epoch; see the documentation of the standard Python time module for details). """ -# return Time(*time.gmtime(ticks)[3:6]) -# -# def TimestampFromTicks(ticks): -# """This function constructs an object holding a time stamp value from the given -# ticks value (number of seconds since the epoch; -# see the documentation of the standard Python time module for details). 
""" -# return Timestamp(*time.gmtime(ticks)[:6]) -# -# def Binary(aString): -# """This function constructs an object capable of holding a binary (long) string value. """ -# b = makeByteBuffer(aString) -# return b -# ----- Time converters ---------------------------------------------- -class TimeConverter(object): # this is a generic time converter skeleton - def __init__(self): # the details will be filled in by instances - self._ordinal_1899_12_31 = datetime.date(1899, 12, 31).toordinal() - 1 - # Use cls.types to compare if an input parameter is a datetime - self.types = { - type(self.Date(2000, 1, 1)), - type(self.Time(12, 1, 1)), - type(self.Timestamp(2000, 1, 1, 12, 1, 1)), - datetime.datetime, - datetime.time, - datetime.date, - } - - def COMDate(self, obj): - """Returns a ComDate from a date-time""" - try: # most likely a datetime - tt = obj.timetuple() - - try: - ms = obj.microsecond - except: - ms = 0 - return self.ComDateFromTuple(tt, ms) - except: # might be a tuple - try: - return self.ComDateFromTuple(obj) - except: # try an mxdate - try: - return obj.COMDate() - except: - raise ValueError('Cannot convert "%s" to COMdate.' % repr(obj)) - - def ComDateFromTuple(self, t, microseconds=0): - d = datetime.date(t[0], t[1], t[2]) - integerPart = d.toordinal() - self._ordinal_1899_12_31 - ms = (t[3] * 3600 + t[4] * 60 + t[5]) * 1000000 + microseconds - fractPart = float(ms) / 86400000000.0 - return integerPart + fractPart - - def DateObjectFromCOMDate(self, comDate): - "Returns an object of the wanted type from a ComDate" - raise NotImplementedError # "Abstract class" - - def Date(self, year, month, day): - "This function constructs an object holding a date value." - raise NotImplementedError # "Abstract class" - - def Time(self, hour, minute, second): - "This function constructs an object holding a time value." - raise NotImplementedError # "Abstract class" - - def Timestamp(self, year, month, day, hour, minute, second): - "This function constructs an object holding a time stamp value." 
- raise NotImplementedError # "Abstract class" - # all purpose date to ISO format converter - - def DateObjectToIsoFormatString(self, obj): - "This function should return a string in the format 'YYYY-MM-dd HH:MM:SS:ms' (ms optional)" - try: # most likely, a datetime.datetime - s = obj.isoformat(" ") - except (TypeError, AttributeError): - if isinstance(obj, datetime.date): - s = obj.isoformat() + " 00:00:00" # return exact midnight - else: - try: # maybe it has a strftime method, like mx - s = obj.strftime("%Y-%m-%d %H:%M:%S") - except AttributeError: - try: # but may be time.struct_time - s = time.strftime("%Y-%m-%d %H:%M:%S", obj) - except: - raise ValueError('Cannot convert "%s" to isoformat' % repr(obj)) - return s - - -# -- Optional: if mx extensions are installed you may use mxDateTime ---- -try: - import mx.DateTime - - mxDateTime = True -except: - mxDateTime = False -if mxDateTime: - - class mxDateTimeConverter(TimeConverter): # used optionally if installed - def __init__(self): - TimeConverter.__init__(self) - self.types.add(type(mx.DateTime)) - - def DateObjectFromCOMDate(self, comDate): - return mx.DateTime.DateTimeFromCOMDate(comDate) - - def Date(self, year, month, day): - return mx.DateTime.Date(year, month, day) - - def Time(self, hour, minute, second): - return mx.DateTime.Time(hour, minute, second) - - def Timestamp(self, year, month, day, hour, minute, second): - return mx.DateTime.Timestamp(year, month, day, hour, minute, second) - -else: - - class mxDateTimeConverter(TimeConverter): - pass # if no mx is installed - - -class pythonDateTimeConverter(TimeConverter): # standard since Python 2.3 - def __init__(self): - TimeConverter.__init__(self) - - def DateObjectFromCOMDate(self, comDate): - if isinstance(comDate, datetime.datetime): - odn = comDate.toordinal() - tim = comDate.time() - new = datetime.datetime.combine(datetime.datetime.fromordinal(odn), tim) - return new - # return comDate.replace(tzinfo=None) # make non aware - elif isinstance(comDate, DateTime): - fComDate = comDate.ToOADate() # ironPython clr Date/Time - else: - fComDate = float(comDate) # ComDate is number of days since 1899-12-31 - integerPart = int(fComDate) - floatpart = fComDate - integerPart - ##if floatpart == 0.0: - ## return datetime.date.fromordinal(integerPart + self._ordinal_1899_12_31) - dte = datetime.datetime.fromordinal( - integerPart + self._ordinal_1899_12_31 - ) + datetime.timedelta(milliseconds=floatpart * 86400000) - # millisecondsperday=86400000 # 24*60*60*1000 - return dte - - def Date(self, year, month, day): - return datetime.date(year, month, day) - - def Time(self, hour, minute, second): - return datetime.time(hour, minute, second) - - def Timestamp(self, year, month, day, hour, minute, second): - return datetime.datetime(year, month, day, hour, minute, second) - - -class pythonTimeConverter(TimeConverter): # the old, ?nix type date and time - def __init__(self): # caution: this Class gets confised by timezones and DST - TimeConverter.__init__(self) - self.types.add(time.struct_time) - - def DateObjectFromCOMDate(self, comDate): - "Returns ticks since 1970" - if isinstance(comDate, datetime.datetime): - return comDate.timetuple() - elif isinstance(comDate, DateTime): # ironPython clr date/time - fcomDate = comDate.ToOADate() - else: - fcomDate = float(comDate) - secondsperday = 86400 # 24*60*60 - # ComDate is number of days since 1899-12-31, gmtime epoch is 1970-1-1 = 25569 days - t = time.gmtime(secondsperday * (fcomDate - 25569.0)) - return t # 
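A standard-library sanity check of the COM date arithmetic used by ComDateFromTuple and DateObjectFromCOMDate above. COM/OLE "day zero" is 1899-12-30, which is why the code stores the ordinal of 1899-12-31 minus one:

```python
import datetime

day_zero = datetime.date(1899, 12, 31).toordinal() - 1  # ordinal of 1899-12-30
d = datetime.datetime(2000, 1, 1, 12, 0, 0)
integer_part = d.toordinal() - day_zero                 # 36526 whole days
fraction = (d.hour * 3600 + d.minute * 60 + d.second) / 86400.0
print(integer_part + fraction)                          # 36526.5
```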
year,month,day,hour,minute,second,weekday,julianday,daylightsaving=t - - def Date(self, year, month, day): - return self.Timestamp(year, month, day, 0, 0, 0) - - def Time(self, hour, minute, second): - return time.gmtime((hour * 60 + minute) * 60 + second) - - def Timestamp(self, year, month, day, hour, minute, second): - return time.localtime( - time.mktime((year, month, day, hour, minute, second, 0, 0, -1)) - ) - - -base_dateconverter = pythonDateTimeConverter() - -# ------ DB API required module attributes --------------------- -threadsafety = 1 # TODO -- find out whether this module is actually BETTER than 1. - -apilevel = "2.0" # String constant stating the supported DB API level. - -paramstyle = "qmark" # the default parameter style - -# ------ control for an extension which may become part of DB API 3.0 --- -accepted_paramstyles = ("qmark", "named", "format", "pyformat", "dynamic") - -# ------------------------------------------------------------------------------------------ -# define similar types for generic conversion routines -adoIntegerTypes = ( - adc.adInteger, - adc.adSmallInt, - adc.adTinyInt, - adc.adUnsignedInt, - adc.adUnsignedSmallInt, - adc.adUnsignedTinyInt, - adc.adBoolean, - adc.adError, -) # max 32 bits -adoRowIdTypes = (adc.adChapter,) # v2.1 Rose -adoLongTypes = (adc.adBigInt, adc.adFileTime, adc.adUnsignedBigInt) -adoExactNumericTypes = ( - adc.adDecimal, - adc.adNumeric, - adc.adVarNumeric, - adc.adCurrency, -) # v2.3 Cole -adoApproximateNumericTypes = (adc.adDouble, adc.adSingle) # v2.1 Cole -adoStringTypes = ( - adc.adBSTR, - adc.adChar, - adc.adLongVarChar, - adc.adLongVarWChar, - adc.adVarChar, - adc.adVarWChar, - adc.adWChar, -) -adoBinaryTypes = (adc.adBinary, adc.adLongVarBinary, adc.adVarBinary) -adoDateTimeTypes = (adc.adDBTime, adc.adDBTimeStamp, adc.adDate, adc.adDBDate) -adoRemainingTypes = ( - adc.adEmpty, - adc.adIDispatch, - adc.adIUnknown, - adc.adPropVariant, - adc.adArray, - adc.adUserDefined, - adc.adVariant, - adc.adGUID, -) - - -# this class is a trick to determine whether a type is a member of a related group of types. see PEP notes -class DBAPITypeObject(object): - def __init__(self, valuesTuple): - self.values = frozenset(valuesTuple) - - def __eq__(self, other): - return other in self.values - - def __ne__(self, other): - return other not in self.values - - -"""This type object is used to describe columns in a database that are string-based (e.g. CHAR). """ -STRING = DBAPITypeObject(adoStringTypes) - -"""This type object is used to describe (long) binary columns in a database (e.g. LONG, RAW, BLOBs). """ -BINARY = DBAPITypeObject(adoBinaryTypes) - -"""This type object is used to describe numeric columns in a database. """ -NUMBER = DBAPITypeObject( - adoIntegerTypes + adoLongTypes + adoExactNumericTypes + adoApproximateNumericTypes -) - -"""This type object is used to describe date/time columns in a database. """ - -DATETIME = DBAPITypeObject(adoDateTimeTypes) -"""This type object is used to describe the "Row ID" column in a database. 
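The DBAPITypeObject comparison trick defined above in action, assuming adodbapi is importable: a group object compares equal to every ADO type code it contains, so description type_codes can be tested with plain ==:

```python
import adodbapi.ado_consts as adc
from adodbapi.apibase import NUMBER, STRING

print(STRING == adc.adVarWChar)  # True  (member of the string group)
print(NUMBER == adc.adInteger)   # True  (member of the numeric group)
print(NUMBER == adc.adVarWChar)  # False
```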
""" -ROWID = DBAPITypeObject(adoRowIdTypes) - -OTHER = DBAPITypeObject(adoRemainingTypes) - -# ------- utilities for translating python data types to ADO data types --------------------------------- -typeMap = { - memoryViewType: adc.adVarBinary, - float: adc.adDouble, - type(None): adc.adEmpty, - str: adc.adBSTR, - bool: adc.adBoolean, # v2.1 Cole - decimal.Decimal: adc.adDecimal, - int: adc.adBigInt, - bytes: adc.adVarBinary, -} - - -def pyTypeToADOType(d): - tp = type(d) - try: - return typeMap[tp] - except KeyError: # The type was not defined in the pre-computed Type table - from . import dateconverter - - if ( - tp in dateconverter.types - ): # maybe it is one of our supported Date/Time types - return adc.adDate - # otherwise, attempt to discern the type by probing the data object itself -- to handle duck typing - if isinstance(d, StringTypes): - return adc.adBSTR - if isinstance(d, numbers.Integral): - return adc.adBigInt - if isinstance(d, numbers.Real): - return adc.adDouble - raise DataError('cannot convert "%s" (type=%s) to ADO' % (repr(d), tp)) - - -# # # # # # # # # # # # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -# functions to convert database values to Python objects -# ------------------------------------------------------------------------ -# variant type : function converting variant to Python value -def variantConvertDate(v): - from . import dateconverter # this function only called when adodbapi is running - - return dateconverter.DateObjectFromCOMDate(v) - - -def cvtString(variant): # use to get old action of adodbapi v1 if desired - if onIronPython: - try: - return variant.ToString() - except: - pass - return str(variant) - - -def cvtDecimal(variant): # better name - return _convertNumberWithCulture(variant, decimal.Decimal) - - -def cvtNumeric(variant): # older name - don't break old code - return cvtDecimal(variant) - - -def cvtFloat(variant): - return _convertNumberWithCulture(variant, float) - - -def _convertNumberWithCulture(variant, f): - try: - return f(variant) - except (ValueError, TypeError, decimal.InvalidOperation): - try: - europeVsUS = str(variant).replace(",", ".") - return f(europeVsUS) - except (ValueError, TypeError, decimal.InvalidOperation): - pass - - -def cvtInt(variant): - return int(variant) - - -def cvtLong(variant): # only important in old versions where long and int differ - return int(variant) - - -def cvtBuffer(variant): - return bytes(variant) - - -def cvtUnicode(variant): - return str(variant) - - -def identity(x): - return x - - -def cvtUnusual(variant): - if verbose > 1: - sys.stderr.write("Conversion called for Unusual data=%s\n" % repr(variant)) - if isinstance(variant, DateTime): # COMdate or System.Date - from .adodbapi import ( # this will only be called when adodbapi is in use, and very rarely - dateconverter, - ) - - return dateconverter.DateObjectFromCOMDate(variant) - return variant # cannot find conversion function -- just give the data to the user - - -def convert_to_python(variant, func): # convert DB value into Python value - if isinstance(variant, NullTypes): # IronPython Null or None - return None - return func(variant) # call the appropriate conversion function - - -class MultiMap(dict): # builds a dictionary from {(sequence,of,keys) : function} - """A dictionary of ado.type : function -- but you can set multiple items by passing a sequence of keys""" - - # useful for defining conversion functions for groups of similar data types. 
- def __init__(self, aDict): - for k, v in list(aDict.items()): - self[k] = v # we must call __setitem__ - - def __setitem__(self, adoType, cvtFn): - "set a single item, or a whole sequence of items" - try: # user passed us a sequence, set them individually - for type in adoType: - dict.__setitem__(self, type, cvtFn) - except TypeError: # a single value fails attempt to iterate - dict.__setitem__(self, adoType, cvtFn) - - -# initialize variantConversions dictionary used to convert SQL to Python -# this is the dictionary of default conversion functions, built by the class above. -# this becomes a class attribute for the Connection, and that attribute is used -# to build the list of column conversion functions for the Cursor -variantConversions = MultiMap( - { - adoDateTimeTypes: variantConvertDate, - adoApproximateNumericTypes: cvtFloat, - adoExactNumericTypes: cvtDecimal, # use to force decimal rather than unicode - adoLongTypes: cvtLong, - adoIntegerTypes: cvtInt, - adoRowIdTypes: cvtInt, - adoStringTypes: identity, - adoBinaryTypes: cvtBuffer, - adoRemainingTypes: cvtUnusual, - } -) - -# # # # # classes to emulate the result of cursor.fetchxxx() as a sequence of sequences # # # # # -# "an ENUM of how my low level records are laid out" -RS_WIN_32, RS_ARRAY, RS_REMOTE = list(range(1, 4)) - - -class SQLrow(object): # a single database row - # class to emulate a sequence, so that a column may be retrieved by either number or name - def __init__(self, rows, index): # "rows" is an _SQLrows object, index is which row - self.rows = rows # parent 'fetch' container object - self.index = index # my row number within parent - - def __getattr__(self, name): # used for row.columnName type of value access - try: - return self._getValue(self.rows.columnNames[name.lower()]) - except KeyError: - raise AttributeError('Unknown column name "{}"'.format(name)) - - def _getValue(self, key): # key must be an integer - if ( - self.rows.recordset_format == RS_ARRAY - ): # retrieve from two-dimensional array - v = self.rows.ado_results[key, self.index] - elif self.rows.recordset_format == RS_REMOTE: - v = self.rows.ado_results[self.index][key] - else: # pywin32 - retrieve from tuple of tuples - v = self.rows.ado_results[key][self.index] - if self.rows.converters is NotImplemented: - return v - return convert_to_python(v, self.rows.converters[key]) - - def __len__(self): - return self.rows.numberOfColumns - - def __getitem__(self, key): # used for row[key] type of value access - if isinstance(key, int): # normal row[1] designation - try: - return self._getValue(key) - except IndexError: - raise - if isinstance(key, slice): - indices = key.indices(self.rows.numberOfColumns) - vl = [self._getValue(i) for i in range(*indices)] - return tuple(vl) - try: - return self._getValue( - self.rows.columnNames[key.lower()] - ) # extension row[columnName] designation - except (KeyError, TypeError): - er, st, tr = sys.exc_info() - raise er( - 'No such key as "%s" in %s' % (repr(key), self.__repr__()) - ).with_traceback(tr) - - def __iter__(self): - return iter(self.__next__()) - - def __next__(self): - for n in range(self.rows.numberOfColumns): - yield self._getValue(n) - - def __repr__(self): # create a human readable representation - taglist = sorted(list(self.rows.columnNames.items()), key=lambda x: x[1]) - s = "" - - def __str__(self): # create a pretty human readable representation - return str( - tuple(str(self._getValue(i)) for i in range(self.rows.numberOfColumns)) - ) - - # TO-DO implement pickling an SQLrow directly - # 
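MultiMap in miniature, showing the key-broadcasting behavior that lets variantConversions map whole ADO type groups to a single converter function:

```python
from adodbapi.apibase import MultiMap

m = MultiMap({(1, 2, 3): "integer-ish", 8: "string-ish"})
print(m[2])          # 'integer-ish' -- each sequence member got its own key
m[(20, 21)] = "new"  # __setitem__ broadcasts sequence keys too
print(m[21])         # 'new'
```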
def __getstate__(self): return self.__dict__ - # def __setstate__(self, d): self.__dict__.update(d) - # which basically tell pickle to treat your class just like a normal one, - # taking self.__dict__ as representing the whole of the instance state, - # despite the existence of the __getattr__. - # # # # - - -class SQLrows(object): - # class to emulate a sequence for multiple rows using a container object - def __init__(self, ado_results, numberOfRows, cursor): - self.ado_results = ado_results # raw result of SQL get - try: - self.recordset_format = cursor.recordset_format - self.numberOfColumns = cursor.numberOfColumns - self.converters = cursor.converters - self.columnNames = cursor.columnNames - except AttributeError: - self.recordset_format = RS_ARRAY - self.numberOfColumns = 0 - self.converters = [] - self.columnNames = {} - self.numberOfRows = numberOfRows - - def __len__(self): - return self.numberOfRows - - def __getitem__(self, item): # used for row or row,column access - if not self.ado_results: - return [] - if isinstance(item, slice): # will return a list of row objects - indices = item.indices(self.numberOfRows) - return [SQLrow(self, k) for k in range(*indices)] - elif isinstance(item, tuple) and len(item) == 2: - # d = some_rowsObject[i,j] will return a datum from a two-dimension address - i, j = item - if not isinstance(j, int): - try: - j = self.columnNames[j.lower()] # convert named column to numeric - except KeyError: - raise KeyError('adodbapi: no such column name as "%s"' % repr(j)) - if self.recordset_format == RS_ARRAY: # retrieve from two-dimensional array - v = self.ado_results[j, i] - elif self.recordset_format == RS_REMOTE: - v = self.ado_results[i][j] - else: # pywin32 - retrieve from tuple of tuples - v = self.ado_results[j][i] - if self.converters is NotImplemented: - return v - return convert_to_python(v, self.converters[j]) - else: - row = SQLrow(self, item) # new row descriptor - return row - - def __iter__(self): - return iter(self.__next__()) - - def __next__(self): - for n in range(self.numberOfRows): - row = SQLrow(self, n) - yield row - # # # # # - - # # # # # functions to re-format SQL requests to other paramstyle requirements # # # # # # # # # # - - -def changeNamedToQmark( - op, -): # convert from 'named' paramstyle to ADO required '?'mark parameters - outOp = "" - outparms = [] - chunks = op.split( - "'" - ) # quote all literals -- odd numbered list results are literals. - inQuotes = False - for chunk in chunks: - if inQuotes: # this is inside a quote - if chunk == "": # double apostrophe to quote one apostrophe - outOp = outOp[:-1] # so take one away - else: - outOp += "'" + chunk + "'" # else pass the quoted string as is. - else: # is SQL code -- look for a :namedParameter - while chunk: # some SQL string remains - sp = chunk.split(":", 1) - outOp += sp[0] # concat the part up to the : - s = "" - try: - chunk = sp[1] - except IndexError: - chunk = None - if chunk: # there was a parameter - parse it out - i = 0 - c = chunk[0] - while c.isalnum() or c == "_": - i += 1 - try: - c = chunk[i] - except IndexError: - break - s = chunk[:i] - chunk = chunk[i:] - if s: - outparms.append(s) # list the parameters in order - outOp += "?" # put in the Qmark - inQuotes = not inQuotes - return outOp, outparms - - -def changeFormatToQmark( - op, -): # convert from 'format' paramstyle to ADO required '?'mark parameters - outOp = "" - outparams = [] - chunks = op.split( - "'" - ) # quote all literals -- odd numbered list results are literals. 
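changeNamedToQmark traced on a small statement, assuming adodbapi.apibase is importable: ':name' markers become '?' and the names come back in positional order, so a mapping can later be flattened to a sequence:

```python
from adodbapi.apibase import changeNamedToQmark

op, names = changeNamedToQmark(
    "select * from people where name = :name and age > :age"
)
print(op)     # select * from people where name = ? and age > ?
print(names)  # ['name', 'age']
```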
- inQuotes = False - for chunk in chunks: - if inQuotes: - if ( - outOp != "" and chunk == "" - ): # he used a double apostrophe to quote one apostrophe - outOp = outOp[:-1] # so take one away - else: - outOp += "'" + chunk + "'" # else pass the quoted string as is. - else: # is SQL code -- look for a %s parameter - if "%(" in chunk: # ugh! pyformat! - while chunk: # some SQL string remains - sp = chunk.split("%(", 1) - outOp += sp[0] # concat the part up to the % - if len(sp) > 1: - try: - s, chunk = sp[1].split(")s", 1) # find the ')s' - except ValueError: - raise ProgrammingError( - 'Pyformat SQL has incorrect format near "%s"' % chunk - ) - outparams.append(s) - outOp += "?" # put in the Qmark - else: - chunk = None - else: # proper '%s' format - sp = chunk.split("%s") # make each %s - outOp += "?".join(sp) # into ? - inQuotes = not inQuotes # every other chunk is a quoted string - return outOp, outparams diff --git a/server/venv/Lib/site-packages/adodbapi/examples/db_print.py b/server/venv/Lib/site-packages/adodbapi/examples/db_print.py deleted file mode 100644 index c0eb83e..0000000 --- a/server/venv/Lib/site-packages/adodbapi/examples/db_print.py +++ /dev/null @@ -1,72 +0,0 @@ -""" db_print.py -- a simple demo for ADO database reads.""" - -import sys - -import adodbapi.ado_consts as adc - -cmd_args = ("filename", "table_name") -if "help" in sys.argv: - print("possible settings keywords are:", cmd_args) - sys.exit() - -kw_args = {} # pick up filename and proxy address from command line (optionally) -for arg in sys.argv: - s = arg.split("=") - if len(s) > 1: - if s[0] in cmd_args: - kw_args[s[0]] = s[1] - -kw_args.setdefault( - "filename", "test.mdb" -) # assumes server is running from examples folder -kw_args.setdefault("table_name", "Products") # the name of the demo table - -# the server needs to select the provider based on his Python installation -provider_switch = ["provider", "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"] - -# ------------------------ START HERE ------------------------------------- -# create the connection -constr = "Provider=%(provider)s;Data Source=%(filename)s" -import adodbapi as db - -con = db.connect(constr, kw_args, macro_is64bit=provider_switch) - -if kw_args["table_name"] == "?": - print("The tables in your database are:") - for name in con.get_table_names(): - print(name) -else: - # make a cursor on the connection - with con.cursor() as c: - # run an SQL statement on the cursor - sql = "select * from %s" % kw_args["table_name"] - print('performing query="%s"' % sql) - c.execute(sql) - - # check the results - print( - 'result rowcount shows as= %d. 
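And the same for changeFormatToQmark, which handles both plain '%s' and pyformat '%(name)s' placeholders:

```python
from adodbapi.apibase import changeFormatToQmark

print(changeFormatToQmark("update people set age = %s where id = %s"))
# ('update people set age = ? where id = ?', [])
print(changeFormatToQmark("update people set age = %(age)s where id = %(id)s"))
# ('update people set age = ? where id = ?', ['age', 'id'])
```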
(Note: -1 means "not known")' % (c.rowcount,) - ) - print("") - print("result data description is:") - print(" NAME Type DispSize IntrnlSz Prec Scale Null?") - for d in c.description: - print( - ("%16s %-12s %8s %8d %4d %5d %s") - % (d[0], adc.adTypeNames[d[1]], d[2], d[3], d[4], d[5], bool(d[6])) - ) - print("") - print("str() of first five records are...") - - # get the results - db = c.fetchmany(5) - - # print them - for rec in db: - print(rec) - - print("") - print("repr() of next row is...") - print(repr(c.fetchone())) - print("") -con.close() diff --git a/server/venv/Lib/site-packages/adodbapi/examples/db_table_names.py b/server/venv/Lib/site-packages/adodbapi/examples/db_table_names.py deleted file mode 100644 index 2d7bf9d..0000000 --- a/server/venv/Lib/site-packages/adodbapi/examples/db_table_names.py +++ /dev/null @@ -1,20 +0,0 @@ -""" db_table_names.py -- a simple demo for ADO database table listing.""" -import sys - -import adodbapi - -try: - databasename = sys.argv[1] -except IndexError: - databasename = "test.mdb" - -provider = ["prv", "Microsoft.ACE.OLEDB.12.0", "Microsoft.Jet.OLEDB.4.0"] -constr = "Provider=%(prv)s;Data Source=%(db)s" - -# create the connection -con = adodbapi.connect(constr, db=databasename, macro_is64bit=provider) - -print("Table names in= %s" % databasename) - -for table in con.get_table_names(): - print(table) diff --git a/server/venv/Lib/site-packages/adodbapi/examples/xls_read.py b/server/venv/Lib/site-packages/adodbapi/examples/xls_read.py deleted file mode 100644 index 10bcc57..0000000 --- a/server/venv/Lib/site-packages/adodbapi/examples/xls_read.py +++ /dev/null @@ -1,41 +0,0 @@ -import sys - -import adodbapi - -try: - import adodbapi.is64bit as is64bit - - is64 = is64bit.Python() -except ImportError: - is64 = False - -if is64: - driver = "Microsoft.ACE.OLEDB.12.0" -else: - driver = "Microsoft.Jet.OLEDB.4.0" -extended = 'Extended Properties="Excel 8.0;HDR=Yes;IMEX=1;"' - -try: # first command line argument will be xls file name -- default to the one written by xls_write.py - filename = sys.argv[1] -except IndexError: - filename = "xx.xls" - -constr = "Provider=%s;Data Source=%s;%s" % (driver, filename, extended) - -conn = adodbapi.connect(constr) - -try: # second command line argument will be worksheet name -- default to first worksheet - sheet = sys.argv[2] -except IndexError: - # use ADO feature to get the name of the first worksheet - sheet = conn.get_table_names()[0] - -print("Shreadsheet=%s Worksheet=%s" % (filename, sheet)) -print("------------------------------------------------------------") -crsr = conn.cursor() -sql = "SELECT * from [%s]" % sheet -crsr.execute(sql) -for row in crsr.fetchmany(10): - print(repr(row)) -crsr.close() -conn.close() diff --git a/server/venv/Lib/site-packages/adodbapi/examples/xls_write.py b/server/venv/Lib/site-packages/adodbapi/examples/xls_write.py deleted file mode 100644 index 38baefd..0000000 --- a/server/venv/Lib/site-packages/adodbapi/examples/xls_write.py +++ /dev/null @@ -1,41 +0,0 @@ -import datetime - -import adodbapi - -try: - import adodbapi.is64bit as is64bit - - is64 = is64bit.Python() -except ImportError: - is64 = False # in case the user has an old version of adodbapi -if is64: - driver = "Microsoft.ACE.OLEDB.12.0" -else: - driver = "Microsoft.Jet.OLEDB.4.0" -filename = "xx.xls" # file will be created if it does not exist -extended = 'Extended Properties="Excel 8.0;Readonly=False;"' - -constr = "Provider=%s;Data Source=%s;%s" % (driver, filename, extended) - -conn = adodbapi.connect(constr) 
-with conn:  # will auto commit if no errors
-    with conn.cursor() as crsr:
-        try:
-            crsr.execute("drop table SheetOne")
-        except:
-            pass  # just in case there is one already there
-
-        # create the sheet and the header row and set the types for the columns
-        crsr.execute(
-            "create table SheetOne (Name varchar, Rank varchar, SrvcNum integer, Weight float, Birth date)"
-        )
-
-        sql = "INSERT INTO SheetOne (name, rank, srvcnum, weight, birth) values (?,?,?,?,?)"
-
-        data = ("Mike Murphy", "SSG", 123456789, 167.8, datetime.date(1922, 12, 27))
-        crsr.execute(sql, data)  # write the first row of data
-        crsr.execute(
-            sql, ["John Jones", "Pvt", 987654321, 140.0, datetime.date(1921, 7, 4)]
-        )  # another row of data
-conn.close()
-print("Created spreadsheet=%s worksheet=%s" % (filename, "SheetOne"))
diff --git a/server/venv/Lib/site-packages/adodbapi/is64bit.py b/server/venv/Lib/site-packages/adodbapi/is64bit.py
deleted file mode 100644
index 911c619..0000000
--- a/server/venv/Lib/site-packages/adodbapi/is64bit.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version"""
-import sys
-
-
-def Python():
-    if sys.platform == "cli":  # IronPython
-        import System
-
-        return System.IntPtr.Size == 8
-    else:
-        try:
-            return sys.maxsize > 2147483647
-        except AttributeError:
-            return sys.maxint > 2147483647
-
-
-def os():
-    import platform
-
-    pm = platform.machine()
-    if pm != ".." and pm.endswith("64"):  # recent Python (not Iron)
-        return True
-    else:
-        import os
-
-        if "PROCESSOR_ARCHITEW6432" in os.environ:
-            return True  # 32 bit program running on 64 bit Windows
-        try:
-            return os.environ["PROCESSOR_ARCHITECTURE"].endswith(
-                "64"
-            )  # 64 bit Windows 64 bit program
-        except (IndexError, KeyError):
-            pass  # not Windows
-        try:
-            return "64" in platform.architecture()[0]  # this often works in Linux
-        except:
-            return False  # is an older version of Python, assume also an older os (best we can guess)
-
-
-if __name__ == "__main__":
-    print("is64bit.Python() =", Python(), "is64bit.os() =", os())
diff --git a/server/venv/Lib/site-packages/adodbapi/license.txt b/server/venv/Lib/site-packages/adodbapi/license.txt
deleted file mode 100644
index c255f4a..0000000
--- a/server/venv/Lib/site-packages/adodbapi/license.txt
+++ /dev/null
@@ -1,506 +0,0 @@
- GNU LESSER GENERAL PUBLIC LICENSE
- Version 2.1, February 1999
-
- Copyright (C) 1991, 1999 Free Software Foundation, Inc.
- 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
-[This is the first released version of the Lesser GPL.  It also counts
- as the successor of the GNU Library Public License, version 2, hence
- the version number 2.1.]
-
- Preamble
-
-  The licenses for most software are designed to take away your
-freedom to share and change it.  By contrast, the GNU General Public
-Licenses are intended to guarantee your freedom to share and change
-free software--to make sure the software is free for all its users.
-
-  This license, the Lesser General Public License, applies to some
-specially designated software packages--typically libraries--of the
-Free Software Foundation and other authors who decide to use it.  You
-can use it too, but we suggest you first think carefully about whether
-this license or the ordinary General Public License is the better
-strategy to use in any particular case, based on the explanations below.
- - When we speak of free software, we are referring to freedom of use, -not price. Our General Public Licenses are designed to make sure that -you have the freedom to distribute copies of free software (and charge -for this service if you wish); that you receive source code or can get -it if you want it; that you can change the software and use pieces of -it in new free programs; and that you are informed that you can do -these things. - - To protect your rights, we need to make restrictions that forbid -distributors to deny you these rights or to ask you to surrender these -rights. These restrictions translate to certain responsibilities for -you if you distribute copies of the library or if you modify it. - - For example, if you distribute copies of the library, whether gratis -or for a fee, you must give the recipients all the rights that we gave -you. You must make sure that they, too, receive or can get the source -code. If you link other code with the library, you must provide -complete object files to the recipients, so that they can relink them -with the library after making changes to the library and recompiling -it. And you must show them these terms so they know their rights. - - We protect your rights with a two-step method: (1) we copyright the -library, and (2) we offer you this license, which gives you legal -permission to copy, distribute and/or modify the library. - - To protect each distributor, we want to make it very clear that -there is no warranty for the free library. Also, if the library is -modified by someone else and passed on, the recipients should know -that what they have is not the original version, so that the original -author's reputation will not be affected by problems that might be -introduced by others. - - - - Finally, software patents pose a constant threat to the existence of -any free program. We wish to make sure that a company cannot -effectively restrict the users of a free program by obtaining a -restrictive license from a patent holder. Therefore, we insist that -any patent license obtained for a version of the library must be -consistent with the full freedom of use specified in this license. - - Most GNU software, including some libraries, is covered by the -ordinary GNU General Public License. This license, the GNU Lesser -General Public License, applies to certain designated libraries, and -is quite different from the ordinary General Public License. We use -this license for certain libraries in order to permit linking those -libraries into non-free programs. - - When a program is linked with a library, whether statically or using -a shared library, the combination of the two is legally speaking a -combined work, a derivative of the original library. The ordinary -General Public License therefore permits such linking only if the -entire combination fits its criteria of freedom. The Lesser General -Public License permits more lax criteria for linking other code with -the library. - - We call this license the "Lesser" General Public License because it -does Less to protect the user's freedom than the ordinary General -Public License. It also provides other free software developers Less -of an advantage over competing non-free programs. These disadvantages -are the reason we use the ordinary General Public License for many -libraries. However, the Lesser license provides advantages in certain -special circumstances. 
- - For example, on rare occasions, there may be a special need to -encourage the widest possible use of a certain library, so that it becomes -a de-facto standard. To achieve this, non-free programs must be -allowed to use the library. A more frequent case is that a free -library does the same job as widely used non-free libraries. In this -case, there is little to gain by limiting the free library to free -software only, so we use the Lesser General Public License. - - In other cases, permission to use a particular library in non-free -programs enables a greater number of people to use a large body of -free software. For example, permission to use the GNU C Library in -non-free programs enables many more people to use the whole GNU -operating system, as well as its variant, the GNU/Linux operating -system. - - Although the Lesser General Public License is Less protective of the -users' freedom, it does ensure that the user of a program that is -linked with the Library has the freedom and the wherewithal to run -that program using a modified version of the Library. - - The precise terms and conditions for copying, distribution and -modification follow. Pay close attention to the difference between a -"work based on the library" and a "work that uses the library". The -former contains code derived from the library, whereas the latter must -be combined with the library in order to run. - - - - GNU LESSER GENERAL PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. This License Agreement applies to any software library or other -program which contains a notice placed by the copyright holder or -other authorized party saying it may be distributed under the terms of -this Lesser General Public License (also called "this License"). -Each licensee is addressed as "you". - - A "library" means a collection of software functions and/or data -prepared so as to be conveniently linked with application programs -(which use some of those functions and data) to form executables. - - The "Library", below, refers to any such software library or work -which has been distributed under these terms. A "work based on the -Library" means either the Library or any derivative work under -copyright law: that is to say, a work containing the Library or a -portion of it, either verbatim or with modifications and/or translated -straightforwardly into another language. (Hereinafter, translation is -included without limitation in the term "modification".) - - "Source code" for a work means the preferred form of the work for -making modifications to it. For a library, complete source code means -all the source code for all modules it contains, plus any associated -interface definition files, plus the scripts used to control compilation -and installation of the library. - - Activities other than copying, distribution and modification are not -covered by this License; they are outside its scope. The act of -running a program using the Library is not restricted, and output from -such a program is covered only if its contents constitute a work based -on the Library (independent of the use of the Library in a tool for -writing it). Whether that is true depends on what the Library does -and what the program that uses the Library does. - - 1. 
You may copy and distribute verbatim copies of the Library's -complete source code as you receive it, in any medium, provided that -you conspicuously and appropriately publish on each copy an -appropriate copyright notice and disclaimer of warranty; keep intact -all the notices that refer to this License and to the absence of any -warranty; and distribute a copy of this License along with the -Library. - You may charge a fee for the physical act of transferring a copy, -and you may at your option offer warranty protection in exchange for a -fee. - - 2. You may modify your copy or copies of the Library or any portion -of it, thus forming a work based on the Library, and copy and -distribute such modifications or work under the terms of Section 1 -above, provided that you also meet all of these conditions: - - a) The modified work must itself be a software library. - - b) You must cause the files modified to carry prominent notices - stating that you changed the files and the date of any change. - - c) You must cause the whole of the work to be licensed at no - charge to all third parties under the terms of this License. - - d) If a facility in the modified Library refers to a function or a - table of data to be supplied by an application program that uses - the facility, other than as an argument passed when the facility - is invoked, then you must make a good faith effort to ensure that, - in the event an application does not supply such function or - table, the facility still operates, and performs whatever part of - its purpose remains meaningful. - - (For example, a function in a library to compute square roots has - a purpose that is entirely well-defined independent of the - application. Therefore, Subsection 2d requires that any - application-supplied function or table used by this function must - be optional: if the application does not supply it, the square - root function must still compute square roots.) - -These requirements apply to the modified work as a whole. If -identifiable sections of that work are not derived from the Library, -and can be reasonably considered independent and separate works in -themselves, then this License, and its terms, do not apply to those -sections when you distribute them as separate works. But when you -distribute the same sections as part of a whole which is a work based -on the Library, the distribution of the whole must be on the terms of -this License, whose permissions for other licensees extend to the -entire whole, and thus to each and every part regardless of who wrote -it. - -Thus, it is not the intent of this section to claim rights or contest -your rights to work written entirely by you; rather, the intent is to -exercise the right to control the distribution of derivative or -collective works based on the Library. - -In addition, mere aggregation of another work not based on the Library -with the Library (or with a work based on the Library) on a volume of -a storage or distribution medium does not bring the other work under -the scope of this License. - - 3. You may opt to apply the terms of the ordinary GNU General Public -License instead of this License to a given copy of the Library. To do -this, you must alter all the notices that refer to this License, so -that they refer to the ordinary GNU General Public License, version 2, -instead of to this License. (If a newer version than version 2 of the -ordinary GNU General Public License has appeared, then you can specify -that version instead if you wish.) 
Do not make any other change in -these notices. - - Once this change is made in a given copy, it is irreversible for -that copy, so the ordinary GNU General Public License applies to all -subsequent copies and derivative works made from that copy. - - This option is useful when you wish to copy part of the code of -the Library into a program that is not a library. - - 4. You may copy and distribute the Library (or a portion or -derivative of it, under Section 2) in object code or executable form -under the terms of Sections 1 and 2 above provided that you accompany -it with the complete corresponding machine-readable source code, which -must be distributed under the terms of Sections 1 and 2 above on a -medium customarily used for software interchange. - - If distribution of object code is made by offering access to copy -from a designated place, then offering equivalent access to copy the -source code from the same place satisfies the requirement to -distribute the source code, even though third parties are not -compelled to copy the source along with the object code. - - 5. A program that contains no derivative of any portion of the -Library, but is designed to work with the Library by being compiled or -linked with it, is called a "work that uses the Library". Such a -work, in isolation, is not a derivative work of the Library, and -therefore falls outside the scope of this License. - - However, linking a "work that uses the Library" with the Library -creates an executable that is a derivative of the Library (because it -contains portions of the Library), rather than a "work that uses the -library". The executable is therefore covered by this License. -Section 6 states terms for distribution of such executables. - - When a "work that uses the Library" uses material from a header file -that is part of the Library, the object code for the work may be a -derivative work of the Library even though the source code is not. -Whether this is true is especially significant if the work can be -linked without the Library, or if the work is itself a library. The -threshold for this to be true is not precisely defined by law. - - If such an object file uses only numerical parameters, data -structure layouts and accessors, and small macros and small inline -functions (ten lines or less in length), then the use of the object -file is unrestricted, regardless of whether it is legally a derivative -work. (Executables containing this object code plus portions of the -Library will still fall under Section 6.) - - Otherwise, if the work is a derivative of the Library, you may -distribute the object code for the work under the terms of Section 6. -Any executables containing that work also fall under Section 6, -whether or not they are linked directly with the Library itself. - - 6. As an exception to the Sections above, you may also combine or -link a "work that uses the Library" with the Library to produce a -work containing portions of the Library, and distribute that work -under terms of your choice, provided that the terms permit -modification of the work for the customer's own use and reverse -engineering for debugging such modifications. - - You must give prominent notice with each copy of the work that the -Library is used in it and that the Library and its use are covered by -this License. You must supply a copy of this License. 
If the work -during execution displays copyright notices, you must include the -copyright notice for the Library among them, as well as a reference -directing the user to the copy of this License. Also, you must do one -of these things: - - a) Accompany the work with the complete corresponding - machine-readable source code for the Library including whatever - changes were used in the work (which must be distributed under - Sections 1 and 2 above); and, if the work is an executable linked - with the Library, with the complete machine-readable "work that - uses the Library", as object code and/or source code, so that the - user can modify the Library and then relink to produce a modified - executable containing the modified Library. (It is understood - that the user who changes the contents of definitions files in the - Library will not necessarily be able to recompile the application - to use the modified definitions.) - - b) Use a suitable shared library mechanism for linking with the - Library. A suitable mechanism is one that (1) uses at run time a - copy of the library already present on the user's computer system, - rather than copying library functions into the executable, and (2) - will operate properly with a modified version of the library, if - the user installs one, as long as the modified version is - interface-compatible with the version that the work was made with. - - c) Accompany the work with a written offer, valid for at - least three years, to give the same user the materials - specified in Subsection 6a, above, for a charge no more - than the cost of performing this distribution. - - d) If distribution of the work is made by offering access to copy - from a designated place, offer equivalent access to copy the above - specified materials from the same place. - - e) Verify that the user has already received a copy of these - materials or that you have already sent this user a copy. - - For an executable, the required form of the "work that uses the -Library" must include any data and utility programs needed for -reproducing the executable from it. However, as a special exception, -the materials to be distributed need not include anything that is -normally distributed (in either source or binary form) with the major -components (compiler, kernel, and so on) of the operating system on -which the executable runs, unless that component itself accompanies -the executable. - - It may happen that this requirement contradicts the license -restrictions of other proprietary libraries that do not normally -accompany the operating system. Such a contradiction means you cannot -use both them and the Library together in an executable that you -distribute. - - 7. You may place library facilities that are a work based on the -Library side-by-side in a single library together with other library -facilities not covered by this License, and distribute such a combined -library, provided that the separate distribution of the work based on -the Library and of the other library facilities is otherwise -permitted, and provided that you do these two things: - - a) Accompany the combined library with a copy of the same work - based on the Library, uncombined with any other library - facilities. This must be distributed under the terms of the - Sections above. - - b) Give prominent notice with the combined library of the fact - that part of it is a work based on the Library, and explaining - where to find the accompanying uncombined form of the same work. - - 8. 
You may not copy, modify, sublicense, link with, or distribute -the Library except as expressly provided under this License. Any -attempt otherwise to copy, modify, sublicense, link with, or -distribute the Library is void, and will automatically terminate your -rights under this License. However, parties who have received copies, -or rights, from you under this License will not have their licenses -terminated so long as such parties remain in full compliance. - - 9. You are not required to accept this License, since you have not -signed it. However, nothing else grants you permission to modify or -distribute the Library or its derivative works. These actions are -prohibited by law if you do not accept this License. Therefore, by -modifying or distributing the Library (or any work based on the -Library), you indicate your acceptance of this License to do so, and -all its terms and conditions for copying, distributing or modifying -the Library or works based on it. - - 10. Each time you redistribute the Library (or any work based on the -Library), the recipient automatically receives a license from the -original licensor to copy, distribute, link with or modify the Library -subject to these terms and conditions. You may not impose any further -restrictions on the recipients' exercise of the rights granted herein. -You are not responsible for enforcing compliance by third parties with -this License. - - 11. If, as a consequence of a court judgment or allegation of patent -infringement or for any other reason (not limited to patent issues), -conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot -distribute so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you -may not distribute the Library at all. For example, if a patent -license would not permit royalty-free redistribution of the Library by -all those who receive copies directly or indirectly through you, then -the only way you could satisfy both it and this License would be to -refrain entirely from distribution of the Library. - -If any portion of this section is held invalid or unenforceable under any -particular circumstance, the balance of the section is intended to apply, -and the section as a whole is intended to apply in other circumstances. - -It is not the purpose of this section to induce you to infringe any -patents or other property right claims or to contest validity of any -such claims; this section has the sole purpose of protecting the -integrity of the free software distribution system which is -implemented by public license practices. Many people have made -generous contributions to the wide range of software distributed -through that system in reliance on consistent application of that -system; it is up to the author/donor to decide if he or she is willing -to distribute software through any other system and a licensee cannot -impose that choice. - -This section is intended to make thoroughly clear what is believed to -be a consequence of the rest of this License. - - 12. 
If the distribution and/or use of the Library is restricted in -certain countries either by patents or by copyrighted interfaces, the -original copyright holder who places the Library under this License may add -an explicit geographical distribution limitation excluding those countries, -so that distribution is permitted only in or among countries not thus -excluded. In such case, this License incorporates the limitation as if -written in the body of this License. - - 13. The Free Software Foundation may publish revised and/or new -versions of the Lesser General Public License from time to time. -Such new versions will be similar in spirit to the present version, -but may differ in detail to address new problems or concerns. - -Each version is given a distinguishing version number. If the Library -specifies a version number of this License which applies to it and -"any later version", you have the option of following the terms and -conditions either of that version or of any later version published by -the Free Software Foundation. If the Library does not specify a -license version number, you may choose any version ever published by -the Free Software Foundation. - - 14. If you wish to incorporate parts of the Library into other free -programs whose distribution conditions are incompatible with these, -write to the author to ask for permission. For software which is -copyrighted by the Free Software Foundation, write to the Free -Software Foundation; we sometimes make exceptions for this. Our -decision will be guided by the two goals of preserving the free status -of all derivatives of our free software and of promoting the sharing -and reuse of software generally. - - NO WARRANTY - - 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO -WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. -EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR -OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY -KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE -LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME -THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN -WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY -AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU -FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR -CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE -LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING -RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A -FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF -SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH -DAMAGES. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Libraries - - If you develop a new library, and you want it to be of the greatest -possible use to the public, we recommend making it free software that -everyone can redistribute and change. You can do so by permitting -redistribution under these terms (or, alternatively, under the terms of the -ordinary General Public License). - - To apply these terms, attach the following notices to the library. 
It is -safest to attach them to the start of each source file to most effectively -convey the exclusion of warranty; and each file should have at least the -"copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -Also add information on how to contact you by electronic and paper mail. - -You should also get your employer (if you work as a programmer) or your -school, if any, to sign a "copyright disclaimer" for the library, if -necessary. Here is a sample; alter the names: - - Yoyodyne, Inc., hereby disclaims all copyright interest in the - library `Frob' (a library for tweaking knobs) written by James Random Hacker. - - , 1 April 1990 - Ty Coon, President of Vice - -That's all there is to it! - diff --git a/server/venv/Lib/site-packages/adodbapi/process_connect_string.py b/server/venv/Lib/site-packages/adodbapi/process_connect_string.py deleted file mode 100644 index 3c3f9e7..0000000 --- a/server/venv/Lib/site-packages/adodbapi/process_connect_string.py +++ /dev/null @@ -1,144 +0,0 @@ -""" a clumsy attempt at a macro language to let the programmer execute code on the server (ex: determine 64bit)""" -from . import is64bit as is64bit - - -def macro_call(macro_name, args, kwargs): - """allow the programmer to perform limited processing on the server by passing macro names and args - - :new_key - the key name the macro will create - :args[0] - macro name - :args[1:] - any arguments - :code - the value of the keyword item - :kwargs - the connection keyword dictionary. 
??key has been removed
-    --> the value to put in for kwargs['name'] = value
-    """
-    if isinstance(args, str):
-        args = [
-            args
-        ]  # the user forgot to pass a sequence, so make a string into args[0]
-    new_key = args[0]
-    try:
-        if macro_name == "is64bit":
-            if is64bit.Python():  # if on 64 bit Python
-                return new_key, args[1]  # return first argument
-            else:
-                try:
-                    return new_key, args[2]  # else return second argument (if defined)
-                except IndexError:
-                    return new_key, ""  # else return blank
-
-        elif (
-            macro_name == "getuser"
-        ):  # get the name of the user the server is logged in under
-            if not new_key in kwargs:
-                import getpass
-
-                return new_key, getpass.getuser()
-
-        elif macro_name == "getnode":  # get the name of the computer running the server
-            import platform
-
-            try:
-                return new_key, args[1] % platform.node()
-            except IndexError:
-                return new_key, platform.node()
-
-        elif macro_name == "getenv":  # expand the server's environment variable args[1]
-            import os  # os is not imported at module level, so import it here
-
-            try:
-                dflt = args[2]  # if not found, default from args[2]
-            except IndexError:  # or blank
-                dflt = ""
-            return new_key, os.environ.get(args[1], dflt)
-
-        elif macro_name == "auto_security":
-            if (
-                not "user" in kwargs or not kwargs["user"]
-            ):  # missing, blank, or Null username
-                return new_key, "Integrated Security=SSPI"
-            return new_key, "User ID=%(user)s; Password=%(password)s" % kwargs
-
-        elif (
-            macro_name == "find_temp_test_path"
-        ):  # helper function for testing ado operation -- undocumented
-            import os
-            import tempfile
-
-            return new_key, os.path.join(
-                tempfile.gettempdir(), "adodbapi_test", args[1]
-            )
-
-        raise ValueError("Unknown connect string macro=%s" % macro_name)
-    except:
-        raise ValueError("Error in macro processing %s %s" % (macro_name, repr(args)))
-
-
-def process(
-    args, kwargs, expand_macros=False
-):  # --> connection string with keyword arguments processed.
- """attempts to inject arguments into a connection string using Python "%" operator for strings - - co: adodbapi connection object - args: positional parameters from the .connect() call - kvargs: keyword arguments from the .connect() call - """ - try: - dsn = args[0] - except IndexError: - dsn = None - if isinstance( - dsn, dict - ): # as a convenience the first argument may be django settings - kwargs.update(dsn) - elif ( - dsn - ): # the connection string is passed to the connection as part of the keyword dictionary - kwargs["connection_string"] = dsn - try: - a1 = args[1] - except IndexError: - a1 = None - # historically, the second positional argument might be a timeout value - if isinstance(a1, int): - kwargs["timeout"] = a1 - # if the second positional argument is a string, then it is user - elif isinstance(a1, str): - kwargs["user"] = a1 - # if the second positional argument is a dictionary, use it as keyword arguments, too - elif isinstance(a1, dict): - kwargs.update(a1) - try: - kwargs["password"] = args[2] # the third positional argument is password - kwargs["host"] = args[3] # the fourth positional argument is host name - kwargs["database"] = args[4] # the fifth positional argument is database name - except IndexError: - pass - - # make sure connection string is defined somehow - if not "connection_string" in kwargs: - try: # perhaps 'dsn' was defined - kwargs["connection_string"] = kwargs["dsn"] - except KeyError: - try: # as a last effort, use the "host" keyword - kwargs["connection_string"] = kwargs["host"] - except KeyError: - raise TypeError("Must define 'connection_string' for ado connections") - if expand_macros: - for kwarg in list(kwargs.keys()): - if kwarg.startswith("macro_"): # If a key defines a macro - macro_name = kwarg[6:] # name without the "macro_" - macro_code = kwargs.pop( - kwarg - ) # we remove the macro_key and get the code to execute - new_key, rslt = macro_call( - macro_name, macro_code, kwargs - ) # run the code in the local context - kwargs[new_key] = rslt # put the result back in the keywords dict - # special processing for PyRO IPv6 host address - try: - s = kwargs["proxy_host"] - if ":" in s: # it is an IPv6 address - if s[0] != "[": # is not surrounded by brackets - kwargs["proxy_host"] = s.join(("[", "]")) # put it in brackets - except KeyError: - pass - return kwargs diff --git a/server/venv/Lib/site-packages/adodbapi/readme.txt b/server/venv/Lib/site-packages/adodbapi/readme.txt deleted file mode 100644 index 02bb620..0000000 --- a/server/venv/Lib/site-packages/adodbapi/readme.txt +++ /dev/null @@ -1,92 +0,0 @@ -Project -------- -adodbapi - -A Python DB-API 2.0 (PEP-249) module that makes it easy to use Microsoft ADO -for connecting with databases and other data sources -using either CPython or IronPython. - -Home page: - -Features: -* 100% DB-API 2.0 (PEP-249) compliant (including most extensions and recommendations). -* Includes pyunit testcases that describe how to use the module. -* Fully implemented in Python. -- runs in Python 2.5+ Python 3.0+ and IronPython 2.6+ -* Licensed under the LGPL license, which means that it can be used freely even in commercial programs subject to certain restrictions. 
-* The user can choose between paramstyles: 'qmark', 'named', 'format', 'pyformat', 'dynamic'
-* Supports data retrieval by column name e.g.:
-      for row in myCursor.execute("select name,age from students"):
-          print("Student", row.name, "is", row.age, "years old.")
-* Supports user-definable system-to-Python data conversion functions (selected by ADO data type, or by column)
-
-Prerequisites:
-* C Python 2.7 or 3.5 or higher
-      and pywin32 (Mark Hammond's Python for Windows extensions.)
-or
-  Iron Python 2.7 or higher. (works in IPy2.0 for all data types except BUFFER)
-
-Installation:
-* (C-Python on Windows): Install pywin32 ("pip install pywin32") which includes adodbapi.
-* (IronPython on Windows): Download adodbapi from http://sf.net/projects/adodbapi. Unpack the zip.
-  Open a command window as an administrator. CD to the folder containing the unzipped files.
-  Run "setup.py install" using the IronPython of your choice.
-
-NOTE: ...........
-If you do not like the new default operation of returning Numeric columns as decimal.Decimal,
-you can select other options by the user defined conversion feature.
-Try:
-    adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtString
-or:
-    adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = adodbapi.apibase.cvtFloat
-or:
-    adodbapi.apibase.variantConversions[adodbapi.ado_consts.adNumeric] = write_your_own_conversion_function
-    ............
-Notes for 2.6.2:
-    The definitive source has been moved to https://github.com/mhammond/pywin32/tree/master/adodbapi.
-    Remote has proven too hard to configure and test with Pyro4. I am moving it to unsupported status
-    until I can change to a different connection method.
-What's new in version 2.6
-    A cursor.prepare() method and support for prepared SQL statements.
-    Lots of refactoring, especially of the Remote and Server modules (still to be treated as Beta code).
-    The quick start document 'quick_reference.odt' will export as a nice-looking pdf.
-    Added paramstyles 'pyformat' and 'dynamic'. If your 'paramstyle' is 'named' you _must_ pass a dictionary of
-    parameters to your .execute() method. If your 'paramstyle' is 'format', 'pyformat', or 'dynamic', you _may_
-    pass a dictionary of parameters -- provided your SQL operation string is formatted correctly.
-
-What's new in version 2.5
-    Remote module: (works on Linux!) allows a Windows computer to serve ADO databases via PyRO
-    Server module: PyRO server for ADO. Run using a command like= C:>python -m adodbapi.server
-    (server has simple connection string macros: is64bit, getuser, sql_provider, auto_security)
-    Brief documentation included. See adodbapi/examples folder adodbapi.rtf
-    New connection method conn.get_table_names() --> list of names of tables in database
-
-    Vastly refactored. Data conversion things have been moved to the new adodbapi.apibase module.
-    Many former module-level attributes are now class attributes. (Should be more thread-safe)
-    Connection objects are now context managers for transactions and will commit or rollback.
-    Cursor objects are context managers and will automatically close themselves.
-    Autocommit can be switched on and off.
-    Keyword and positional arguments on the connect() method work as documented in PEP 249.
-    Keyword arguments from the connect call can be formatted into the connection string.
-    New keyword arguments defined, such as: autocommit, paramstyle, remote_proxy, remote_port.
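Taken together, the notes above suggest a short, hypothetical session; the connection string and the 'students' table are placeholders, while variantConversions, cvtFloat, cursor context managers, and column-name access are the features this readme names:

    import adodbapi
    import adodbapi.apibase as apibase
    import adodbapi.ado_consts as adc

    # per the NOTE above, convert ADO Numeric columns with cvtFloat
    # instead of the default decimal.Decimal
    apibase.variantConversions[adc.adNumeric] = apibase.cvtFloat

    con = adodbapi.connect("Provider=SQLOLEDB;Data Source=(local);Integrated Security=SSPI")
    with con.cursor() as crsr:  # cursors are context managers (version 2.5 note)
        crsr.execute("select name, age from students")
        for row in crsr.fetchall():
            print("Student", row.name, "is", row.age, "years old.")  # column-name access
    con.close()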
- *** Breaking change: variantConversion lookups are simplified: the following will raise KeyError: - oldconverter=adodbapi.variantConversions[adodbapi.adoStringTypes] - Refactor as: oldconverter=adodbapi.variantConversions[adodbapi.adoStringTypes[0]] - -License -------- -LGPL, see http://www.opensource.org/licenses/lgpl-license.php - -Documentation -------------- - -Look at adodbapi/quick_reference.md -http://www.python.org/topics/database/DatabaseAPI-2.0.html -read the examples in adodbapi/examples -and look at the test cases in adodbapi/test directory. - -Mailing lists -------------- -The adodbapi mailing lists have been deactivated. Submit comments to the -pywin32 or IronPython mailing lists. - -- the bug tracker on sourceforge.net/projects/adodbapi may be checked, (infrequently). - -- please use: https://github.com/mhammond/pywin32/issues diff --git a/server/venv/Lib/site-packages/adodbapi/remote.py b/server/venv/Lib/site-packages/adodbapi/remote.py deleted file mode 100644 index ae22b5a..0000000 --- a/server/venv/Lib/site-packages/adodbapi/remote.py +++ /dev/null @@ -1,634 +0,0 @@ -"""adodbapi.remote - A python DB API 2.0 (PEP 249) interface to Microsoft ADO - -Copyright (C) 2002 Henrik Ekelund, version 2.1 by Vernon Cole -* http://sourceforge.net/projects/pywin32 -* http://sourceforge.net/projects/adodbapi - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - django adaptations and refactoring thanks to Adam Vandenberg - -DB-API 2.0 specification: http://www.python.org/dev/peps/pep-0249/ - -This module source should run correctly in CPython versions 2.5 and later, -or IronPython version 2.7 and later, -or, after running through 2to3.py, CPython 3.0 or later. -""" - -__version__ = "2.6.0.4" -version = "adodbapi.remote v" + __version__ - -import array -import datetime -import os -import sys -import time - -# Pyro4 is required for server and remote operation --> https://pypi.python.org/pypi/Pyro4/ -try: - import Pyro4 -except ImportError: - print('* * * Sorry, server operation requires Pyro4. 
Please "pip import" it.') - exit(11) - -import adodbapi -import adodbapi.apibase as api -import adodbapi.process_connect_string -from adodbapi.apibase import ProgrammingError - -_BaseException = api._BaseException - -sys.excepthook = Pyro4.util.excepthook -Pyro4.config.PREFER_IP_VERSION = 0 # allow system to prefer IPv6 -Pyro4.config.COMMTIMEOUT = 40.0 # a bit longer than the default SQL server Gtimeout -Pyro4.config.SERIALIZER = "pickle" - -try: - verbose = int(os.environ["ADODBAPI_VERBOSE"]) -except: - verbose = False -if verbose: - print(version) - -# --- define objects to smooth out Python3 <-> Python 2.x differences -unicodeType = str # this line will be altered by 2to3.py to '= str' -longType = int # this line will be altered by 2to3.py to '= int' -StringTypes = str -makeByteBuffer = bytes -memoryViewType = memoryview - -# ----------------------------------------------------------- -# conversion functions mandated by PEP 249 -Binary = makeByteBuffer # override the function from apibase.py - - -def Date(year, month, day): - return datetime.date(year, month, day) # dateconverter.Date(year,month,day) - - -def Time(hour, minute, second): - return datetime.time(hour, minute, second) # dateconverter.Time(hour,minute,second) - - -def Timestamp(year, month, day, hour, minute, second): - return datetime.datetime(year, month, day, hour, minute, second) - - -def DateFromTicks(ticks): - return Date(*time.gmtime(ticks)[:3]) - - -def TimeFromTicks(ticks): - return Time(*time.gmtime(ticks)[3:6]) - - -def TimestampFromTicks(ticks): - return Timestamp(*time.gmtime(ticks)[:6]) - - -def connect(*args, **kwargs): # --> a remote db-api connection object - """Create and open a remote db-api database connection object""" - # process the argument list the programmer gave us - kwargs = adodbapi.process_connect_string.process(args, kwargs) - # the "proxy_xxx" keys tell us where to find the PyRO proxy server - kwargs.setdefault( - "pyro_connection", "PYRO:ado.connection@%(proxy_host)s:%(proxy_port)s" - ) - if not "proxy_port" in kwargs: - try: - pport = os.environ["PROXY_PORT"] - except KeyError: - pport = 9099 - kwargs["proxy_port"] = pport - if not "proxy_host" in kwargs or not kwargs["proxy_host"]: - try: - phost = os.environ["PROXY_HOST"] - except KeyError: - phost = "[::1]" # '127.0.0.1' - kwargs["proxy_host"] = phost - ado_uri = kwargs["pyro_connection"] % kwargs - # ask PyRO make us a remote connection object - auto_retry = 3 - while auto_retry: - try: - dispatcher = Pyro4.Proxy(ado_uri) - if "comm_timeout" in kwargs: - dispatcher._pyroTimeout = float(kwargs["comm_timeout"]) - uri = dispatcher.make_connection() - break - except Pyro4.core.errors.PyroError: - auto_retry -= 1 - if auto_retry: - time.sleep(1) - else: - raise api.DatabaseError("Cannot create connection to=%s" % ado_uri) - - conn_uri = fix_uri(uri, kwargs) # get a host connection from the proxy server - while auto_retry: - try: - host_conn = Pyro4.Proxy( - conn_uri - ) # bring up an exclusive Pyro connection for my ADO connection - break - except Pyro4.core.errors.PyroError: - auto_retry -= 1 - if auto_retry: - time.sleep(1) - else: - raise api.DatabaseError( - "Cannot create ADO connection object using=%s" % conn_uri - ) - if "comm_timeout" in kwargs: - host_conn._pyroTimeout = float(kwargs["comm_timeout"]) - # make a local clone - myConn = Connection() - while auto_retry: - try: - myConn.connect( - kwargs, host_conn - ) # call my connect method -- hand him the host connection - break - except Pyro4.core.errors.PyroError: - auto_retry -= 1 
- if auto_retry: - time.sleep(1) - else: - raise api.DatabaseError( - "Pyro error creating connection to/thru=%s" % repr(kwargs) - ) - except _BaseException as e: - raise api.DatabaseError( - "Error creating remote connection to=%s, e=%s, %s" - % (repr(kwargs), repr(e), sys.exc_info()[2]) - ) - return myConn - - -def fix_uri(uri, kwargs): - """convert a generic pyro uri with '0.0.0.0' into the address we actually called""" - u = uri.asString() - s = u.split("[::0]") # IPv6 generic address - if len(s) == 1: # did not find one - s = u.split("0.0.0.0") # IPv4 generic address - if len(s) > 1: # found a generic - return kwargs["proxy_host"].join(s) # fill in our address for the host - return uri - - -# # # # # ----- the Class that defines a connection ----- # # # # # -class Connection(object): - # include connection attributes required by api definition. - Warning = api.Warning - Error = api.Error - InterfaceError = api.InterfaceError - DataError = api.DataError - DatabaseError = api.DatabaseError - OperationalError = api.OperationalError - IntegrityError = api.IntegrityError - InternalError = api.InternalError - NotSupportedError = api.NotSupportedError - ProgrammingError = api.ProgrammingError - # set up some class attributes - paramstyle = api.paramstyle - - @property - def dbapi(self): # a proposed db-api version 3 extension. - "Return a reference to the DBAPI module for this Connection." - return api - - def __init__(self): - self.proxy = None - self.kwargs = {} - self.errorhandler = None - self.supportsTransactions = False - self.paramstyle = api.paramstyle - self.timeout = 30 - self.cursors = {} - - def connect(self, kwargs, connection_maker): - self.kwargs = kwargs - if verbose: - print('%s attempting: "%s"' % (version, repr(kwargs))) - self.proxy = connection_maker - ##try: - ret = self.proxy.connect(kwargs) # ask the server to hook us up - ##except ImportError, e: # Pyro is trying to import pywinTypes.comerrer - ## self._raiseConnectionError(api.DatabaseError, 'Proxy cannot connect using=%s' % repr(kwargs)) - if ret is not True: - self._raiseConnectionError( - api.OperationalError, "Proxy returns error message=%s" % repr(ret) - ) - - self.supportsTransactions = self.getIndexedValue("supportsTransactions") - self.paramstyle = self.getIndexedValue("paramstyle") - self.timeout = self.getIndexedValue("timeout") - if verbose: - print("adodbapi.remote New connection at %X" % id(self)) - - def _raiseConnectionError(self, errorclass, errorvalue): - eh = self.errorhandler - if eh is None: - eh = api.standardErrorHandler - eh(self, None, errorclass, errorvalue) - - def close(self): - """Close the connection now (rather than whenever __del__ is called). - - The connection will be unusable from this point forward; - an Error (or subclass) exception will be raised if any operation is attempted with the connection. - The same applies to all cursor objects trying to use the connection. - """ - for crsr in list(self.cursors.values())[ - : - ]: # copy the list, then close each one - crsr.close() - try: - """close the underlying remote Connection object""" - self.proxy.close() - if verbose: - print("adodbapi.remote Closed connection at %X" % id(self)) - object.__delattr__( - self, "proxy" - ) # future attempts to use closed cursor will be caught by __getattr__ - except Exception: - pass - - def __del__(self): - try: - self.proxy.close() - except: - pass - - def commit(self): - """Commit any pending transaction to the database. 
-
-        Note that if the database supports an auto-commit feature,
-        this must be initially off. An interface method may be provided to turn it back on.
-        Database modules that do not support transactions should implement this method with void functionality.
-        """
-        if not self.supportsTransactions:
-            return
-        result = self.proxy.commit()
-        if result:
-            self._raiseConnectionError(
-                api.OperationalError, "Error during commit: %s" % result
-            )
-
-    def _rollback(self):
-        """In case a database does provide transactions, this method causes the database to roll back to
-        the start of any pending transaction. Closing a connection without committing the changes first will
-        cause an implicit rollback to be performed.
-        """
-        result = self.proxy.rollback()
-        if result:
-            self._raiseConnectionError(
-                api.OperationalError, "Error during rollback: %s" % result
-            )
-
-    def __setattr__(self, name, value):
-        if name in ("paramstyle", "timeout", "autocommit"):
-            if self.proxy:
-                self.proxy.send_attribute_to_host(name, value)
-        object.__setattr__(self, name, value)  # store attribute locally (too)
-
-    def __getattr__(self, item):
-        if (
-            item == "rollback"
-        ):  # the rollback method only appears if the database supports transactions
-            if self.supportsTransactions:
-                return (
-                    self._rollback
-                )  # return the rollback method so the caller can execute it.
-            else:
-                raise self.ProgrammingError(
-                    "this data provider does not support Rollback"
-                )
-        elif item in (
-            "dbms_name",
-            "dbms_version",
-            "connection_string",
-            "autocommit",
-        ):  # 'messages' ):
-            return self.getIndexedValue(item)
-        elif item == "proxy":
-            raise self.ProgrammingError("Attempting to use closed connection")
-        else:
-            raise self.ProgrammingError('No remote access for attribute="%s"' % item)
-
-    def getIndexedValue(self, index):
-        r = self.proxy.get_attribute_for_remote(index)
-        return r
-
-    def cursor(self):
-        "Return a new Cursor Object using the connection."
- myCursor = Cursor(self) - return myCursor - - def _i_am_here(self, crsr): - "message from a new cursor proclaiming its existence" - self.cursors[crsr.id] = crsr - - def _i_am_closing(self, crsr): - "message from a cursor giving connection a chance to clean up" - try: - del self.cursors[crsr.id] - except: - pass - - def __enter__(self): # Connections are context managers - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type: - self._rollback() # automatic rollback on errors - else: - self.commit() - - def get_table_names(self): - return self.proxy.get_table_names() - - -def fixpickle(x): - """pickle barfs on buffer(x) so we pass as array.array(x) then restore to original form for .execute()""" - if x is None: - return None - if isinstance(x, dict): - # for 'named' paramstyle user will pass a mapping - newargs = {} - for arg, val in list(x.items()): - if isinstance(val, memoryViewType): - newval = array.array("B") - newval.fromstring(val) - newargs[arg] = newval - else: - newargs[arg] = val - return newargs - # if not a mapping, then a sequence - newargs = [] - for arg in x: - if isinstance(arg, memoryViewType): - newarg = array.array("B") - newarg.fromstring(arg) - newargs.append(newarg) - else: - newargs.append(arg) - return newargs - - -class Cursor(object): - def __init__(self, connection): - self.command = None - self.errorhandler = None ## was: connection.errorhandler - self.connection = connection - self.proxy = self.connection.proxy - self.rs = None # the fetchable data for this cursor - self.converters = NotImplemented - self.id = connection.proxy.build_cursor() - connection._i_am_here(self) - self.recordset_format = api.RS_REMOTE - if verbose: - print( - "%s New cursor at %X on conn %X" - % (version, id(self), id(self.connection)) - ) - - def prepare(self, operation): - self.command = operation - try: - del self.description - except AttributeError: - pass - self.proxy.crsr_prepare(self.id, operation) - - def __iter__(self): # [2.1 Zamarev] - return iter(self.fetchone, None) # [2.1 Zamarev] - - def __next__(self): - r = self.fetchone() - if r: - return r - raise StopIteration - - def __enter__(self): - "Allow database cursors to be used with context managers." - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - "Allow database cursors to be used with context managers." - self.close() - - def __getattr__(self, key): - if key == "numberOfColumns": - try: - return len(self.rs[0]) - except: - return 0 - if key == "description": - try: - self.description = self.proxy.crsr_get_description(self.id)[:] - return self.description - except TypeError: - return None - if key == "columnNames": - try: - r = dict( - self.proxy.crsr_get_columnNames(self.id) - ) # copy the remote columns - - except TypeError: - r = {} - self.columnNames = r - return r - - if key == "remote_cursor": - raise api.OperationalError - try: - return self.proxy.crsr_get_attribute_for_remote(self.id, key) - except AttributeError: - raise api.InternalError( - 'Failure getting attribute "%s" from proxy cursor.' 
% key - ) - - def __setattr__(self, key, value): - if key == "arraysize": - self.proxy.crsr_set_arraysize(self.id, value) - if key == "paramstyle": - if value in api.accepted_paramstyles: - self.proxy.crsr_set_paramstyle(self.id, value) - else: - self._raiseCursorError( - api.ProgrammingError, 'invalid paramstyle ="%s"' % value - ) - object.__setattr__(self, key, value) - - def _raiseCursorError(self, errorclass, errorvalue): - eh = self.errorhandler - if eh is None: - eh = api.standardErrorHandler - eh(self.connection, self, errorclass, errorvalue) - - def execute(self, operation, parameters=None): - if self.connection is None: - self._raiseCursorError( - ProgrammingError, "Attempted operation on closed cursor" - ) - self.command = operation - try: - del self.description - except AttributeError: - pass - try: - del self.columnNames - except AttributeError: - pass - fp = fixpickle(parameters) - if verbose > 2: - print( - ( - '%s executing "%s" with params=%s' - % (version, operation, repr(parameters)) - ) - ) - result = self.proxy.crsr_execute(self.id, operation, fp) - if result: # an exception was triggered - self._raiseCursorError(result[0], result[1]) - - def executemany(self, operation, seq_of_parameters): - if self.connection is None: - self._raiseCursorError( - ProgrammingError, "Attempted operation on closed cursor" - ) - self.command = operation - try: - del self.description - except AttributeError: - pass - try: - del self.columnNames - except AttributeError: - pass - sq = [fixpickle(x) for x in seq_of_parameters] - if verbose > 2: - print( - ( - '%s executemany "%s" with params=%s' - % (version, operation, repr(seq_of_parameters)) - ) - ) - self.proxy.crsr_executemany(self.id, operation, sq) - - def nextset(self): - try: - del self.description - except AttributeError: - pass - try: - del self.columnNames - except AttributeError: - pass - if verbose > 2: - print(("%s nextset" % version)) - return self.proxy.crsr_nextset(self.id) - - def callproc(self, procname, parameters=None): - if self.connection is None: - self._raiseCursorError( - ProgrammingError, "Attempted operation on closed cursor" - ) - self.command = procname - try: - del self.description - except AttributeError: - pass - try: - del self.columnNames - except AttributeError: - pass - fp = fixpickle(parameters) - if verbose > 2: - print( - ( - '%s callproc "%s" with params=%s' - % (version, procname, repr(parameters)) - ) - ) - return self.proxy.crsr_callproc(self.id, procname, fp) - - def fetchone(self): - try: - f1 = self.proxy.crsr_fetchone(self.id) - except _BaseException as e: - self._raiseCursorError(api.DatabaseError, e) - else: - if f1 is None: - return None - self.rs = [f1] - return api.SQLrows(self.rs, 1, self)[ - 0 - ] # new object to hold the results of the fetch - - def fetchmany(self, size=None): - try: - self.rs = self.proxy.crsr_fetchmany(self.id, size) - if not self.rs: - return [] - r = api.SQLrows(self.rs, len(self.rs), self) - return r - except Exception as e: - self._raiseCursorError(api.DatabaseError, e) - - def fetchall(self): - try: - self.rs = self.proxy.crsr_fetchall(self.id) - if not self.rs: - return [] - return api.SQLrows(self.rs, len(self.rs), self) - except Exception as e: - self._raiseCursorError(api.DatabaseError, e) - - def close(self): - if self.connection is None: - return - self.connection._i_am_closing(self) # take me off the connection's cursors list - try: - self.proxy.crsr_close(self.id) - except: - pass - try: - del self.description - except: - pass - try: - del self.rs # let go 
of the recordset - except: - pass - self.connection = ( - None # this will make all future method calls on me throw an exception - ) - self.proxy = None - if verbose: - print("adodbapi.remote Closed cursor at %X" % id(self)) - - def __del__(self): - try: - self.close() - except: - pass - - def setinputsizes(self, sizes): - pass - - def setoutputsize(self, size, column=None): - pass diff --git a/server/venv/Lib/site-packages/adodbapi/schema_table.py b/server/venv/Lib/site-packages/adodbapi/schema_table.py deleted file mode 100644 index 21ad37e..0000000 --- a/server/venv/Lib/site-packages/adodbapi/schema_table.py +++ /dev/null @@ -1,15 +0,0 @@ -"""call using an open ADO connection --> list of table names""" -from . import adodbapi - - -def names(connection_object): - ado = connection_object.adoConn - schema = ado.OpenSchema(20) # constant = adSchemaTables - - tables = [] - while not schema.EOF: - name = adodbapi.getIndexedValue(schema.Fields, "TABLE_NAME").Value - tables.append(name) - schema.MoveNext() - del schema - return tables diff --git a/server/venv/Lib/site-packages/adodbapi/setup.py b/server/venv/Lib/site-packages/adodbapi/setup.py deleted file mode 100644 index d25869a..0000000 --- a/server/venv/Lib/site-packages/adodbapi/setup.py +++ /dev/null @@ -1,70 +0,0 @@ -"""adodbapi -- a pure Python PEP 249 DB-API package using Microsoft ADO - -Adodbapi can be run on CPython 3.5 and later. -or IronPython version 2.6 and later (in theory, possibly no longer in practice!) -""" -CLASSIFIERS = """\ -Development Status :: 5 - Production/Stable -Intended Audience :: Developers -License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) -Operating System :: Microsoft :: Windows -Operating System :: POSIX :: Linux -Programming Language :: Python -Programming Language :: Python :: 3 -Programming Language :: SQL -Topic :: Software Development -Topic :: Software Development :: Libraries :: Python Modules -Topic :: Database -""" - -NAME = "adodbapi" -MAINTAINER = "Vernon Cole" -MAINTAINER_EMAIL = "vernondcole@gmail.com" -DESCRIPTION = ( - """A pure Python package implementing PEP 249 DB-API using Microsoft ADO.""" -) -URL = "http://sourceforge.net/projects/adodbapi" -LICENSE = "LGPL" -CLASSIFIERS = filter(None, CLASSIFIERS.split("\n")) -AUTHOR = "Henrik Ekelund, Vernon Cole, et.al." 
-AUTHOR_EMAIL = "vernondcole@gmail.com" -PLATFORMS = ["Windows", "Linux"] - -VERSION = None # in case searching for version fails -a = open("adodbapi.py") # find the version string in the source code -for line in a: - if "__version__" in line: - VERSION = line.split("'")[1] - print('adodbapi version="%s"' % VERSION) - break -a.close() - - -def setup_package(): - from distutils.command.build_py import build_py - from distutils.core import setup - - setup( - cmdclass={"build_py": build_py}, - name=NAME, - maintainer=MAINTAINER, - maintainer_email=MAINTAINER_EMAIL, - description=DESCRIPTION, - url=URL, - keywords="database ado odbc dbapi db-api Microsoft SQL", - ## download_url=DOWNLOAD_URL, - long_description=open("README.txt").read(), - license=LICENSE, - classifiers=CLASSIFIERS, - author=AUTHOR, - author_email=AUTHOR_EMAIL, - platforms=PLATFORMS, - version=VERSION, - package_dir={"adodbapi": ""}, - packages=["adodbapi"], - ) - return - - -if __name__ == "__main__": - setup_package() diff --git a/server/venv/Lib/site-packages/adodbapi/test/adodbapitest.py b/server/venv/Lib/site-packages/adodbapi/test/adodbapitest.py deleted file mode 100644 index e5b3dc1..0000000 --- a/server/venv/Lib/site-packages/adodbapi/test/adodbapitest.py +++ /dev/null @@ -1,1692 +0,0 @@ -""" Unit tests version 2.6.1.0 for adodbapi""" -""" - adodbapi - A python DB API 2.0 interface to Microsoft ADO - - Copyright (C) 2002 Henrik Ekelund - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - - Updates by Vernon Cole -""" - -import copy -import datetime -import decimal -import random -import string -import sys -import unittest - -try: - import win32com.client - - win32 = True -except ImportError: - win32 = False - -# run the configuration module. 
-import adodbapitestconfig as config # will set sys.path to find correct version of adodbapi - -# in our code below, all our switches are from config.whatever -import tryconnection - -import adodbapi -import adodbapi.apibase as api - -try: - import adodbapi.ado_consts as ado_consts -except ImportError: # we are doing a shortcut import as a module -- so - try: - import ado_consts - except ImportError: - from adodbapi import ado_consts - - -def str2bytes(sval): - return sval.encode("latin1") - - -long = int - - -def randomstring(length): - return "".join([random.choice(string.ascii_letters) for n in range(32)]) - - -class CommonDBTests(unittest.TestCase): - "Self contained super-simple tests in easy syntax, should work on everything between mySQL and Oracle" - - def setUp(self): - self.engine = "unknown" - - def getEngine(self): - return self.engine - - def getConnection(self): - raise NotImplementedError # "This method must be overriden by a subclass" - - def getCursor(self): - return self.getConnection().cursor() - - def testConnection(self): - crsr = self.getCursor() - assert crsr.__class__.__name__ == "Cursor" - - def testErrorHandlerInherits(self): - if not self.remote: - conn = self.getConnection() - mycallable = lambda connection, cursor, errorclass, errorvalue: 1 - conn.errorhandler = mycallable - crsr = conn.cursor() - assert ( - crsr.errorhandler == mycallable - ), "Error handler on crsr should be same as on connection" - - def testDefaultErrorHandlerConnection(self): - if not self.remote: - conn = self.getConnection() - del conn.messages[:] - try: - conn.close() - conn.commit() # Should not be able to use connection after it is closed - except: - assert len(conn.messages) == 1 - assert len(conn.messages[0]) == 2 - assert conn.messages[0][0] == api.ProgrammingError - - def testOwnErrorHandlerConnection(self): - if self.remote: # ToDo: use "skip" - return - mycallable = ( - lambda connection, cursor, errorclass, errorvalue: 1 - ) # does not raise anything - conn = self.getConnection() - conn.errorhandler = mycallable - conn.close() - conn.commit() # Should not be able to use connection after it is closed - assert len(conn.messages) == 0 - - conn.errorhandler = None # This should bring back the standard error handler - try: - conn.close() - conn.commit() # Should not be able to use connection after it is closed - except: - pass - # The Standard errorhandler appends error to messages attribute - assert ( - len(conn.messages) > 0 - ), "Setting errorhandler to none should bring back the standard error handler" - - def testDefaultErrorHandlerCursor(self): - crsr = self.getConnection().cursor() - if not self.remote: - del crsr.messages[:] - try: - crsr.execute("SELECT abbtytddrf FROM dasdasd") - except: - assert len(crsr.messages) == 1 - assert len(crsr.messages[0]) == 2 - assert crsr.messages[0][0] == api.DatabaseError - - def testOwnErrorHandlerCursor(self): - if self.remote: # ToDo: should be a "skip" - return - mycallable = ( - lambda connection, cursor, errorclass, errorvalue: 1 - ) # does not raise anything - crsr = self.getConnection().cursor() - crsr.errorhandler = mycallable - crsr.execute("SELECT abbtytddrf FROM dasdasd") - assert len(crsr.messages) == 0 - - crsr.errorhandler = None # This should bring back the standard error handler - try: - crsr.execute("SELECT abbtytddrf FROM dasdasd") - except: - pass - # The Standard errorhandler appends error to messages attribute - assert ( - len(crsr.messages) > 0 - ), "Setting errorhandler to none should bring back the standard error 
handler" - - def testUserDefinedConversions(self): - if self.remote: ## Todo: should be a "skip" - return - try: - duplicatingConverter = lambda aStringField: aStringField * 2 - assert duplicatingConverter("gabba") == "gabbagabba" - - self.helpForceDropOnTblTemp() - conn = self.getConnection() - # the variantConversions attribute should not exist on a normal connection object - self.assertRaises(AttributeError, lambda x: conn.variantConversions[x], [2]) - if not self.remote: - # create a variantConversions attribute on the connection - conn.variantConversions = copy.copy(api.variantConversions) - crsr = conn.cursor() - tabdef = ( - "CREATE TABLE xx_%s (fldData VARCHAR(100) NOT NULL, fld2 VARCHAR(20))" - % config.tmp - ) - crsr.execute(tabdef) - crsr.execute( - "INSERT INTO xx_%s(fldData,fld2) VALUES('gabba','booga')" - % config.tmp - ) - crsr.execute( - "INSERT INTO xx_%s(fldData,fld2) VALUES('hey','yo')" % config.tmp - ) - # change converter for ALL adoStringTypes columns - conn.variantConversions[api.adoStringTypes] = duplicatingConverter - crsr.execute( - "SELECT fldData,fld2 FROM xx_%s ORDER BY fldData" % config.tmp - ) - - rows = crsr.fetchall() - row = rows[0] - self.assertEqual(row[0], "gabbagabba") - row = rows[1] - self.assertEqual(row[0], "heyhey") - self.assertEqual(row[1], "yoyo") - - upcaseConverter = lambda aStringField: aStringField.upper() - assert upcaseConverter("upThis") == "UPTHIS" - - # now use a single column converter - rows.converters[1] = upcaseConverter # convert second column - self.assertEqual(row[0], "heyhey") # first will be unchanged - self.assertEqual(row[1], "YO") # second will convert to upper case - - finally: - try: - del conn.variantConversions # Restore the default - except: - pass - self.helpRollbackTblTemp() - - def testUserDefinedConversionForExactNumericTypes(self): - # variantConversions is a dictionary of conversion functions - # held internally in adodbapi.apibase - # - # !!! this test intentionally alters the value of what should be constant in the module - # !!! no new code should use this example, to is only a test to see that the - # !!! deprecated way of doing this still works. (use connection.variantConversions) - # - if not self.remote and sys.version_info < (3, 0): ### Py3 need different test - oldconverter = adodbapi.variantConversions[ - ado_consts.adNumeric - ] # keep old function to restore later - # By default decimal and "numbers" are returned as decimals. - # Instead, make numbers return as floats - try: - adodbapi.variantConversions[ado_consts.adNumeric] = adodbapi.cvtFloat - self.helpTestDataType( - "decimal(18,2)", "NUMBER", 3.45, compareAlmostEqual=1 - ) - self.helpTestDataType( - "numeric(18,2)", "NUMBER", 3.45, compareAlmostEqual=1 - ) - # now return strings - adodbapi.variantConversions[ado_consts.adNumeric] = adodbapi.cvtString - self.helpTestDataType("numeric(18,2)", "NUMBER", "3.45") - # now a completly weird user defined convertion - adodbapi.variantConversions[ado_consts.adNumeric] = ( - lambda x: "!!This function returns a funny unicode string %s!!" % x - ) - self.helpTestDataType( - "numeric(18,2)", - "NUMBER", - "3.45", - allowedReturnValues=[ - "!!This function returns a funny unicode string 3.45!!" 
- ], - ) - finally: - # now reset the converter to its original function - adodbapi.variantConversions[ - ado_consts.adNumeric - ] = oldconverter # Restore the original convertion function - - def helpTestDataType( - self, - sqlDataTypeString, - DBAPIDataTypeString, - pyData, - pyDataInputAlternatives=None, - compareAlmostEqual=None, - allowedReturnValues=None, - ): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - crsr = conn.cursor() - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldData """ - % config.tmp - + sqlDataTypeString - + ")\n" - ) - - crsr.execute(tabdef) - - # Test Null values mapped to None - crsr.execute("INSERT INTO xx_%s (fldId) VALUES (1)" % config.tmp) - - crsr.execute("SELECT fldId,fldData FROM xx_%s" % config.tmp) - rs = crsr.fetchone() - self.assertEqual(rs[1], None) # Null should be mapped to None - assert rs[0] == 1 - - # Test description related - descTuple = crsr.description[1] - assert descTuple[0] in ["fldData", "flddata"], 'was "%s" expected "%s"' % ( - descTuple[0], - "fldData", - ) - - if DBAPIDataTypeString == "STRING": - assert descTuple[1] == api.STRING, 'was "%s" expected "%s"' % ( - descTuple[1], - api.STRING.values, - ) - elif DBAPIDataTypeString == "NUMBER": - assert descTuple[1] == api.NUMBER, 'was "%s" expected "%s"' % ( - descTuple[1], - api.NUMBER.values, - ) - elif DBAPIDataTypeString == "BINARY": - assert descTuple[1] == api.BINARY, 'was "%s" expected "%s"' % ( - descTuple[1], - api.BINARY.values, - ) - elif DBAPIDataTypeString == "DATETIME": - assert descTuple[1] == api.DATETIME, 'was "%s" expected "%s"' % ( - descTuple[1], - api.DATETIME.values, - ) - elif DBAPIDataTypeString == "ROWID": - assert descTuple[1] == api.ROWID, 'was "%s" expected "%s"' % ( - descTuple[1], - api.ROWID.values, - ) - elif DBAPIDataTypeString == "UUID": - assert descTuple[1] == api.OTHER, 'was "%s" expected "%s"' % ( - descTuple[1], - api.OTHER.values, - ) - else: - raise NotImplementedError # "DBAPIDataTypeString not provided" - - # Test data binding - inputs = [pyData] - if pyDataInputAlternatives: - inputs.extend(pyDataInputAlternatives) - inputs = set(inputs) # removes redundant string==unicode tests - fldId = 1 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_%s (fldId,fldData) VALUES (?,?)" % config.tmp, - (fldId, inParam), - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldData FROM xx_%s WHERE ?=fldID" % config.tmp, [fldId] - ) - rs = crsr.fetchone() - if allowedReturnValues: - allowedTypes = tuple([type(aRV) for aRV in allowedReturnValues]) - assert isinstance( - rs[0], allowedTypes - ), 'result type "%s" must be one of %s' % (type(rs[0]), allowedTypes) - else: - assert isinstance( - rs[0], type(pyData) - ), 'result type "%s" must be instance of %s' % ( - type(rs[0]), - type(pyData), - ) - - if compareAlmostEqual and DBAPIDataTypeString == "DATETIME": - iso1 = adodbapi.dateconverter.DateObjectToIsoFormatString(rs[0]) - iso2 = adodbapi.dateconverter.DateObjectToIsoFormatString(pyData) - self.assertEqual(iso1, iso2) - elif compareAlmostEqual: - s = float(pyData) - v = float(rs[0]) - assert ( - abs(v - s) / s < 0.00001 - ), "Values not almost equal recvd=%s, expected=%f" % (rs[0], s) - else: - if allowedReturnValues: - ok = False - self.assertTrue( - rs[0] in allowedReturnValues, - 'Value "%s" not in %s' % (repr(rs[0]), allowedReturnValues), - ) - else: - self.assertEqual( - rs[0], - pyData, - 
'Values are not equal recvd="%s", expected="%s"' - % (rs[0], pyData), - ) - - def testDataTypeFloat(self): - self.helpTestDataType("real", "NUMBER", 3.45, compareAlmostEqual=True) - self.helpTestDataType("float", "NUMBER", 1.79e37, compareAlmostEqual=True) - - def testDataTypeDecmal(self): - self.helpTestDataType( - "decimal(18,2)", - "NUMBER", - 3.45, - allowedReturnValues=["3.45", "3,45", decimal.Decimal("3.45")], - ) - self.helpTestDataType( - "numeric(18,2)", - "NUMBER", - 3.45, - allowedReturnValues=["3.45", "3,45", decimal.Decimal("3.45")], - ) - self.helpTestDataType( - "decimal(20,2)", - "NUMBER", - 444444444444444444, - allowedReturnValues=[ - "444444444444444444.00", - "444444444444444444,00", - decimal.Decimal("444444444444444444"), - ], - ) - if self.getEngine() == "MSSQL": - self.helpTestDataType( - "uniqueidentifier", - "UUID", - "{71A4F49E-39F3-42B1-A41E-48FF154996E6}", - allowedReturnValues=["{71A4F49E-39F3-42B1-A41E-48FF154996E6}"], - ) - - def testDataTypeMoney(self): # v2.1 Cole -- use decimal for money - if self.getEngine() == "MySQL": - self.helpTestDataType( - "DECIMAL(20,4)", "NUMBER", decimal.Decimal("-922337203685477.5808") - ) - elif self.getEngine() == "PostgreSQL": - self.helpTestDataType( - "money", - "NUMBER", - decimal.Decimal("-922337203685477.5808"), - compareAlmostEqual=True, - allowedReturnValues=[ - -922337203685477.5808, - decimal.Decimal("-922337203685477.5808"), - ], - ) - else: - self.helpTestDataType("smallmoney", "NUMBER", decimal.Decimal("214748.02")) - self.helpTestDataType( - "money", "NUMBER", decimal.Decimal("-922337203685477.5808") - ) - - def testDataTypeInt(self): - if self.getEngine() != "PostgreSQL": - self.helpTestDataType("tinyint", "NUMBER", 115) - self.helpTestDataType("smallint", "NUMBER", -32768) - if self.getEngine() not in ["ACCESS", "PostgreSQL"]: - self.helpTestDataType( - "bit", "NUMBER", 1 - ) # Does not work correctly with access - if self.getEngine() in ["MSSQL", "PostgreSQL"]: - self.helpTestDataType( - "bigint", - "NUMBER", - 3000000000, - allowedReturnValues=[3000000000, int(3000000000)], - ) - self.helpTestDataType("int", "NUMBER", 2147483647) - - def testDataTypeChar(self): - for sqlDataType in ("char(6)", "nchar(6)"): - self.helpTestDataType( - sqlDataType, - "STRING", - "spam ", - allowedReturnValues=["spam", "spam", "spam ", "spam "], - ) - - def testDataTypeVarChar(self): - if self.getEngine() == "MySQL": - stringKinds = ["varchar(10)", "text"] - elif self.getEngine() == "PostgreSQL": - stringKinds = ["varchar(10)", "text", "character varying"] - else: - stringKinds = [ - "varchar(10)", - "nvarchar(10)", - "text", - "ntext", - ] # ,"varchar(max)"] - - for sqlDataType in stringKinds: - self.helpTestDataType(sqlDataType, "STRING", "spam", ["spam"]) - - def testDataTypeDate(self): - if self.getEngine() == "PostgreSQL": - dt = "timestamp" - else: - dt = "datetime" - self.helpTestDataType( - dt, "DATETIME", adodbapi.Date(2002, 10, 28), compareAlmostEqual=True - ) - if self.getEngine() not in ["MySQL", "PostgreSQL"]: - self.helpTestDataType( - "smalldatetime", - "DATETIME", - adodbapi.Date(2002, 10, 28), - compareAlmostEqual=True, - ) - if tag != "pythontime" and self.getEngine() not in [ - "MySQL", - "PostgreSQL", - ]: # fails when using pythonTime - self.helpTestDataType( - dt, - "DATETIME", - adodbapi.Timestamp(2002, 10, 28, 12, 15, 1), - compareAlmostEqual=True, - ) - - def testDataTypeBinary(self): - binfld = str2bytes("\x07\x00\xE2\x40*") - arv = [binfld, adodbapi.Binary(binfld), bytes(binfld)] - if self.getEngine() 
== "PostgreSQL": - self.helpTestDataType( - "bytea", "BINARY", adodbapi.Binary(binfld), allowedReturnValues=arv - ) - else: - self.helpTestDataType( - "binary(5)", "BINARY", adodbapi.Binary(binfld), allowedReturnValues=arv - ) - self.helpTestDataType( - "varbinary(100)", - "BINARY", - adodbapi.Binary(binfld), - allowedReturnValues=arv, - ) - if self.getEngine() != "MySQL": - self.helpTestDataType( - "image", "BINARY", adodbapi.Binary(binfld), allowedReturnValues=arv - ) - - def helpRollbackTblTemp(self): - self.helpForceDropOnTblTemp() - - def helpForceDropOnTblTemp(self): - conn = self.getConnection() - with conn.cursor() as crsr: - try: - crsr.execute("DROP TABLE xx_%s" % config.tmp) - if not conn.autocommit: - conn.commit() - except: - pass - - def helpCreateAndPopulateTableTemp(self, crsr): - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldData INTEGER - ) - """ - % config.tmp - ) - try: # EAFP - crsr.execute(tabdef) - except api.DatabaseError: # was not dropped before - self.helpForceDropOnTblTemp() # so drop it now - crsr.execute(tabdef) - for i in range(9): # note: this poor SQL code, but a valid test - crsr.execute("INSERT INTO xx_%s (fldData) VALUES (%i)" % (config.tmp, i)) - # NOTE: building the test table without using parameter substitution - - def testFetchAll(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - rs = crsr.fetchall() - assert len(rs) == 9 - # test slice of rows - i = 3 - for row in rs[3:-2]: # should have rowid 3..6 - assert row[0] == i - i += 1 - self.helpRollbackTblTemp() - - def testPreparedStatement(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.prepare("SELECT fldData FROM xx_%s" % config.tmp) - crsr.execute(crsr.command) # remember the one that was prepared - rs = crsr.fetchall() - assert len(rs) == 9 - assert rs[2][0] == 2 - self.helpRollbackTblTemp() - - def testWrongPreparedStatement(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.prepare("SELECT * FROM nowhere") - crsr.execute( - "SELECT fldData FROM xx_%s" % config.tmp - ) # should execute this one, not the prepared one - rs = crsr.fetchall() - assert len(rs) == 9 - assert rs[2][0] == 2 - self.helpRollbackTblTemp() - - def testIterator(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - for i, row in enumerate( - crsr - ): # using cursor as an iterator, rather than fetchxxx - assert row[0] == i - self.helpRollbackTblTemp() - - def testExecuteMany(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - seq_of_values = [(111,), (222,)] - crsr.executemany( - "INSERT INTO xx_%s (fldData) VALUES (?)" % config.tmp, seq_of_values - ) - if crsr.rowcount == -1: - print( - self.getEngine() - + " Provider does not support rowcount (on .executemany())" - ) - else: - self.assertEqual(crsr.rowcount, 2) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - rs = crsr.fetchall() - assert len(rs) == 11 - self.helpRollbackTblTemp() - - def testRowCount(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - if crsr.rowcount == -1: - # print("provider does not support rowcount on select") - pass - else: - self.assertEqual(crsr.rowcount, 9) - self.helpRollbackTblTemp() - - def testRowCountNoRecordset(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - 
crsr.execute("DELETE FROM xx_%s WHERE fldData >= 5" % config.tmp) - if crsr.rowcount == -1: - print(self.getEngine() + " Provider does not support rowcount (on DELETE)") - else: - self.assertEqual(crsr.rowcount, 4) - self.helpRollbackTblTemp() - - def testFetchMany(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - rs = crsr.fetchmany(3) - assert len(rs) == 3 - rs = crsr.fetchmany(5) - assert len(rs) == 5 - rs = crsr.fetchmany(5) - assert len(rs) == 1 # Asked for five, but there is only one left - self.helpRollbackTblTemp() - - def testFetchManyWithArraySize(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("SELECT fldData FROM xx_%s" % config.tmp) - rs = crsr.fetchmany() - assert len(rs) == 1 # arraysize Defaults to one - crsr.arraysize = 4 - rs = crsr.fetchmany() - assert len(rs) == 4 - rs = crsr.fetchmany() - assert len(rs) == 4 - rs = crsr.fetchmany() - assert len(rs) == 0 - self.helpRollbackTblTemp() - - def testErrorConnect(self): - conn = self.getConnection() - kw = {} - if "proxy_host" in conn.kwargs: - kw["proxy_host"] = conn.kwargs["proxy_host"] - conn.close() - self.assertRaises(api.DatabaseError, self.db, "not a valid connect string", kw) - - def testRowIterator(self): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - crsr = conn.cursor() - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldTwo integer, - fldThree integer, - fldFour integer) - """ - % config.tmp - ) - crsr.execute(tabdef) - - inputs = [(2, 3, 4), (102, 103, 104)] - fldId = 1 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_%s (fldId,fldTwo,fldThree,fldFour) VALUES (?,?,?,?)" - % config.tmp, - (fldId, inParam[0], inParam[1], inParam[2]), - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldTwo,fldThree,fldFour FROM xx_%s WHERE ?=fldID" % config.tmp, - [fldId], - ) - rec = crsr.fetchone() - # check that stepping through an emulated row works - for j in range(len(inParam)): - assert ( - rec[j] == inParam[j] - ), 'returned value:"%s" != test value:"%s"' % (rec[j], inParam[j]) - # check that we can get a complete tuple from a row - assert tuple(rec) == inParam, 'returned value:"%s" != test value:"%s"' % ( - repr(rec), - repr(inParam), - ) - # test that slices of rows work - slice1 = tuple(rec[:-1]) - slice2 = tuple(inParam[0:2]) - assert slice1 == slice2, 'returned value:"%s" != test value:"%s"' % ( - repr(slice1), - repr(slice2), - ) - # now test named column retrieval - assert rec["fldTwo"] == inParam[0] - assert rec.fldThree == inParam[1] - assert rec.fldFour == inParam[2] - # test array operation - # note that the fields vv vv vv are out of order - crsr.execute("select fldThree,fldFour,fldTwo from xx_%s" % config.tmp) - recs = crsr.fetchall() - assert recs[1][0] == 103 - assert recs[0][1] == 4 - assert recs[1]["fldFour"] == 104 - assert recs[0, 0] == 3 - assert recs[0, "fldTwo"] == 2 - assert recs[1, 2] == 102 - for i in range(1): - for j in range(2): - assert recs[i][j] == recs[i, j] - - def testFormatParamstyle(self): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - conn.paramstyle = "format" # test nonstandard use of paramstyle - crsr = conn.cursor() - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldData varchar(10), - fldConst varchar(30)) - """ - % config.tmp - ) - crsr.execute(tabdef) - 
- inputs = ["one", "two", "three"] - fldId = 2 - for inParam in inputs: - fldId += 1 - sql = ( - "INSERT INTO xx_" - + config.tmp - + " (fldId,fldConst,fldData) VALUES (%s,'thi%s :may cause? trouble', %s)" - ) - try: - crsr.execute(sql, (fldId, inParam)) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldData, fldConst FROM xx_" + config.tmp + " WHERE %s=fldID", - [fldId], - ) - rec = crsr.fetchone() - self.assertEqual( - rec[0], - inParam, - 'returned value:"%s" != test value:"%s"' % (rec[0], inParam), - ) - self.assertEqual(rec[1], "thi%s :may cause? trouble") - - # now try an operation with a "%s" as part of a literal - sel = ( - "insert into xx_" + config.tmp + " (fldId,fldData) VALUES (%s,'four%sfive')" - ) - params = (20,) - crsr.execute(sel, params) - - # test the .query implementation - assert "(?," in crsr.query, 'expected:"%s" in "%s"' % ("(?,", crsr.query) - # test the .command attribute - assert crsr.command == sel, 'expected:"%s" but found "%s"' % (sel, crsr.command) - - # test the .parameters attribute - if not self.remote: # parameter list will be altered in transit - self.assertEqual(crsr.parameters, params) - # now make sure the data made it - crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=20" % config.tmp) - rec = crsr.fetchone() - self.assertEqual(rec[0], "four%sfive") - - def testNamedParamstyle(self): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - crsr = conn.cursor() - crsr.paramstyle = "named" # test nonstandard use of paramstyle - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldData varchar(10)) - """ - % config.tmp - ) - crsr.execute(tabdef) - - inputs = ["four", "five", "six"] - fldId = 10 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_%s (fldId,fldData) VALUES (:Id,:f_Val)" - % config.tmp, - {"f_Val": inParam, "Id": fldId}, - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldData FROM xx_%s WHERE fldID=:Id" % config.tmp, {"Id": fldId} - ) - rec = crsr.fetchone() - self.assertEqual( - rec[0], - inParam, - 'returned value:"%s" != test value:"%s"' % (rec[0], inParam), - ) - # now a test with a ":" as part of a literal - crsr.execute( - "insert into xx_%s (fldId,fldData) VALUES (:xyz,'six:five')" % config.tmp, - {"xyz": 30}, - ) - crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp) - rec = crsr.fetchone() - self.assertEqual(rec[0], "six:five") - - def testPyformatParamstyle(self): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - crsr = conn.cursor() - crsr.paramstyle = "pyformat" # test nonstandard use of paramstyle - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldData varchar(10)) - """ - % config.tmp - ) - crsr.execute(tabdef) - - inputs = ["four", "five", "six"] - fldId = 10 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_%s (fldId,fldData) VALUES (%%(Id)s,%%(f_Val)s)" - % config.tmp, - {"f_Val": inParam, "Id": fldId}, - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldData FROM xx_%s WHERE fldID=%%(Id)s" % config.tmp, - {"Id": fldId}, - ) - rec = crsr.fetchone() - self.assertEqual( - rec[0], - inParam, - 'returned value:"%s" != test value:"%s"' % (rec[0], inParam), - ) - # now a test with a "%" as part of a literal 
- crsr.execute( - "insert into xx_%s (fldId,fldData) VALUES (%%(xyz)s,'six%%five')" - % config.tmp, - {"xyz": 30}, - ) - crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp) - rec = crsr.fetchone() - self.assertEqual(rec[0], "six%five") - - def testAutomaticParamstyle(self): - self.helpForceDropOnTblTemp() - conn = self.getConnection() - conn.paramstyle = "dynamic" # test nonstandard use of paramstyle - crsr = conn.cursor() - tabdef = ( - """ - CREATE TABLE xx_%s ( - fldId integer NOT NULL, - fldData varchar(10), - fldConst varchar(30)) - """ - % config.tmp - ) - crsr.execute(tabdef) - inputs = ["one", "two", "three"] - fldId = 2 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_" - + config.tmp - + " (fldId,fldConst,fldData) VALUES (?,'thi%s :may cause? troub:1e', ?)", - (fldId, inParam), - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - trouble = "thi%s :may cause? troub:1e" - crsr.execute( - "SELECT fldData, fldConst FROM xx_" + config.tmp + " WHERE ?=fldID", - [fldId], - ) - rec = crsr.fetchone() - self.assertEqual( - rec[0], - inParam, - 'returned value:"%s" != test value:"%s"' % (rec[0], inParam), - ) - self.assertEqual(rec[1], trouble) - # inputs = [u'four',u'five',u'six'] - fldId = 10 - for inParam in inputs: - fldId += 1 - try: - crsr.execute( - "INSERT INTO xx_%s (fldId,fldData) VALUES (:Id,:f_Val)" - % config.tmp, - {"f_Val": inParam, "Id": fldId}, - ) - except: - if self.remote: - for message in crsr.messages: - print(message) - else: - conn.printADOerrors() - raise - crsr.execute( - "SELECT fldData FROM xx_%s WHERE :Id=fldID" % config.tmp, {"Id": fldId} - ) - rec = crsr.fetchone() - self.assertEqual( - rec[0], - inParam, - 'returned value:"%s" != test value:"%s"' % (rec[0], inParam), - ) - # now a test with a ":" as part of a literal -- and use a prepared query - ppdcmd = ( - "insert into xx_%s (fldId,fldData) VALUES (:xyz,'six:five')" % config.tmp - ) - crsr.prepare(ppdcmd) - crsr.execute(ppdcmd, {"xyz": 30}) - crsr.execute("SELECT fldData FROM xx_%s WHERE fldID=30" % config.tmp) - rec = crsr.fetchone() - self.assertEqual(rec[0], "six:five") - - def testRollBack(self): - conn = self.getConnection() - crsr = conn.cursor() - assert not crsr.connection.autocommit, "Unexpected beginning condition" - self.helpCreateAndPopulateTableTemp(crsr) - crsr.connection.commit() # commit the first bunch - - crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) - - selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % config.tmp - crsr.execute(selectSql) - rs = crsr.fetchall() - assert len(rs) == 1 - self.conn.rollback() - crsr.execute(selectSql) - assert ( - crsr.fetchone() == None - ), "cursor.fetchone should return None if a query retrieves no rows" - crsr.execute("SELECT fldData from xx_%s" % config.tmp) - rs = crsr.fetchall() - assert len(rs) == 9, "the original records should still be present" - self.helpRollbackTblTemp() - - def testCommit(self): - try: - con2 = self.getAnotherConnection() - except NotImplementedError: - return # should be "SKIP" for ACCESS - assert not con2.autocommit, "default should be manual commit" - crsr = con2.cursor() - self.helpCreateAndPopulateTableTemp(crsr) - - crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) - con2.commit() - - selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % config.tmp - crsr.execute(selectSql) - rs = crsr.fetchall() - assert len(rs) == 1 - crsr.close() - con2.close() - 
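# --- illustrative aside (editor's sketch, not from the original test) ---
# The point of testCommit: a row committed on one connection must be visible
# from a second, independent connection. A stdlib sqlite3 sketch using a
# temporary database file (sqlite's :memory: databases are per-connection,
# so a file is required to show cross-connection visibility):
import os, sqlite3, tempfile

path = os.path.join(tempfile.mkdtemp(), "t.db")  # scratch dir, not cleaned up here
con1 = sqlite3.connect(path)
con1.execute("CREATE TABLE t (fldData INTEGER)")
con1.execute("INSERT INTO t VALUES (100)")
con1.commit()
con1.close()
con2 = sqlite3.connect(path)
assert con2.execute("SELECT fldData FROM t").fetchone() == (100,)
con2.close()
# --- end aside ---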
conn = self.getConnection() - crsr = self.getCursor() - with conn.cursor() as crsr: - crsr.execute(selectSql) - rs = crsr.fetchall() - assert len(rs) == 1 - assert rs[0][0] == 100 - self.helpRollbackTblTemp() - - def testAutoRollback(self): - try: - con2 = self.getAnotherConnection() - except NotImplementedError: - return # should be "SKIP" for ACCESS - assert not con2.autocommit, "unexpected beginning condition" - crsr = con2.cursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) - selectSql = "SELECT fldData FROM xx_%s WHERE fldData=100" % config.tmp - crsr.execute(selectSql) - rs = crsr.fetchall() - assert len(rs) == 1 - crsr.close() - con2.close() - crsr = self.getCursor() - try: - crsr.execute( - selectSql - ) # closing the connection should have forced rollback - row = crsr.fetchone() - except api.DatabaseError: - row = None # if the entire table disappeared the rollback was perfect and the test passed - assert row == None, ( - "cursor.fetchone should return None if a query retrieves no rows. Got %s" - % repr(row) - ) - self.helpRollbackTblTemp() - - def testAutoCommit(self): - try: - ac_conn = self.getAnotherConnection({"autocommit": True}) - except NotImplementedError: - return # should be "SKIP" for ACCESS - crsr = ac_conn.cursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) - crsr.close() - with self.getCursor() as crsr: - selectSql = "SELECT fldData from xx_%s" % config.tmp - crsr.execute( - selectSql - ) # closing the connection should _not_ have forced rollback - rs = crsr.fetchall() - assert len(rs) == 10, "all records should still be present" - ac_conn.close() - self.helpRollbackTblTemp() - - def testSwitchedAutoCommit(self): - try: - ac_conn = self.getAnotherConnection() - except NotImplementedError: - return # should be "SKIP" for ACCESS - ac_conn.autocommit = True - crsr = ac_conn.cursor() - self.helpCreateAndPopulateTableTemp(crsr) - crsr.execute("INSERT INTO xx_%s (fldData) VALUES(100)" % config.tmp) - crsr.close() - conn = self.getConnection() - ac_conn.close() - with self.getCursor() as crsr: - selectSql = "SELECT fldData from xx_%s" % config.tmp - crsr.execute( - selectSql - ) # closing the connection should _not_ have forced rollback - rs = crsr.fetchall() - assert len(rs) == 10, "all records should still be present" - self.helpRollbackTblTemp() - - def testExtendedTypeHandling(self): - class XtendString(str): - pass - - class XtendInt(int): - pass - - class XtendFloat(float): - pass - - xs = XtendString(randomstring(30)) - xi = XtendInt(random.randint(-100, 500)) - xf = XtendFloat(random.random()) - self.helpForceDropOnTblTemp() - conn = self.getConnection() - crsr = conn.cursor() - tabdef = ( - """ - CREATE TABLE xx_%s ( - s VARCHAR(40) NOT NULL, - i INTEGER NOT NULL, - f REAL NOT NULL)""" - % config.tmp - ) - crsr.execute(tabdef) - crsr.execute( - "INSERT INTO xx_%s (s, i, f) VALUES (?, ?, ?)" % config.tmp, (xs, xi, xf) - ) - crsr.close() - conn = self.getConnection() - with self.getCursor() as crsr: - selectSql = "SELECT s, i, f from xx_%s" % config.tmp - crsr.execute( - selectSql - ) # closing the connection should _not_ have forced rollback - row = crsr.fetchone() - self.assertEqual(row.s, xs) - self.assertEqual(row.i, xi) - self.assertAlmostEqual(row.f, xf) - self.helpRollbackTblTemp() - - -class TestADOwithSQLServer(CommonDBTests): - def setUp(self): - self.conn = config.dbSqlServerconnect( - *config.connStrSQLServer[0], 
**config.connStrSQLServer[1] - ) - self.conn.timeout = 30 # turn timeout back up - self.engine = "MSSQL" - self.db = config.dbSqlServerconnect - self.remote = config.connStrSQLServer[2] - - def tearDown(self): - try: - self.conn.rollback() - except: - pass - try: - self.conn.close() - except: - pass - self.conn = None - - def getConnection(self): - return self.conn - - def getAnotherConnection(self, addkeys=None): - keys = dict(config.connStrSQLServer[1]) - if addkeys: - keys.update(addkeys) - return config.dbSqlServerconnect(*config.connStrSQLServer[0], **keys) - - def testVariableReturningStoredProcedure(self): - crsr = self.conn.cursor() - spdef = """ - CREATE PROCEDURE sp_DeleteMeOnlyForTesting - @theInput varchar(50), - @theOtherInput varchar(50), - @theOutput varchar(100) OUTPUT - AS - SET @theOutput=@theInput+@theOtherInput - """ - try: - crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting") - self.conn.commit() - except: # Make sure it is empty - pass - crsr.execute(spdef) - - retvalues = crsr.callproc( - "sp_DeleteMeOnlyForTesting", ("Dodsworth", "Anne", " ") - ) - assert retvalues[0] == "Dodsworth", '%s is not "Dodsworth"' % repr(retvalues[0]) - assert retvalues[1] == "Anne", '%s is not "Anne"' % repr(retvalues[1]) - assert retvalues[2] == "DodsworthAnne", '%s is not "DodsworthAnne"' % repr( - retvalues[2] - ) - self.conn.rollback() - - def testMultipleSetReturn(self): - crsr = self.getCursor() - self.helpCreateAndPopulateTableTemp(crsr) - - spdef = """ - CREATE PROCEDURE sp_DeleteMe_OnlyForTesting - AS - SELECT fldData FROM xx_%s ORDER BY fldData ASC - SELECT fldData From xx_%s where fldData = -9999 - SELECT fldData FROM xx_%s ORDER BY fldData DESC - """ % ( - config.tmp, - config.tmp, - config.tmp, - ) - try: - crsr.execute("DROP PROCEDURE sp_DeleteMe_OnlyForTesting") - self.conn.commit() - except: # Make sure it is empty - pass - crsr.execute(spdef) - - retvalues = crsr.callproc("sp_DeleteMe_OnlyForTesting") - row = crsr.fetchone() - self.assertEqual(row[0], 0) - assert crsr.nextset() == True, "Operation should succeed" - assert not crsr.fetchall(), "Should be an empty second set" - assert crsr.nextset() == True, "third set should be present" - rowdesc = crsr.fetchall() - self.assertEqual(rowdesc[0][0], 8) - assert crsr.nextset() == None, "No more return sets, should return None" - - self.helpRollbackTblTemp() - - def testDatetimeProcedureParameter(self): - crsr = self.conn.cursor() - spdef = """ - CREATE PROCEDURE sp_DeleteMeOnlyForTesting - @theInput DATETIME, - @theOtherInput varchar(50), - @theOutput varchar(100) OUTPUT - AS - SET @theOutput = CONVERT(CHARACTER(20), @theInput, 0) + @theOtherInput - """ - try: - crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting") - self.conn.commit() - except: # Make sure it is empty - pass - crsr.execute(spdef) - - result = crsr.callproc( - "sp_DeleteMeOnlyForTesting", - [adodbapi.Timestamp(2014, 12, 25, 0, 1, 0), "Beep", " " * 30], - ) - - assert result[2] == "Dec 25 2014 12:01AM Beep", 'value was="%s"' % result[2] - self.conn.rollback() - - def testIncorrectStoredProcedureParameter(self): - crsr = self.conn.cursor() - spdef = """ - CREATE PROCEDURE sp_DeleteMeOnlyForTesting - @theInput DATETIME, - @theOtherInput varchar(50), - @theOutput varchar(100) OUTPUT - AS - SET @theOutput = CONVERT(CHARACTER(20), @theInput) + @theOtherInput - """ - try: - crsr.execute("DROP PROCEDURE sp_DeleteMeOnlyForTesting") - self.conn.commit() - except: # Make sure it is empty - pass - crsr.execute(spdef) - - # calling the sproc with a string for the 
first parameter where a DateTime is expected - result = tryconnection.try_operation_with_expected_exception( - (api.DataError, api.DatabaseError), - crsr.callproc, - ["sp_DeleteMeOnlyForTesting"], - {"parameters": ["this is wrong", "Anne", "not Alice"]}, - ) - if result[0]: # the expected exception was raised - assert "@theInput" in str(result[1]) or "DatabaseError" in str( - result - ), "Identifies the wrong erroneous parameter" - else: - assert result[0], result[1] # incorrect or no exception - self.conn.rollback() - - -class TestADOwithAccessDB(CommonDBTests): - def setUp(self): - self.conn = config.dbAccessconnect( - *config.connStrAccess[0], **config.connStrAccess[1] - ) - self.conn.timeout = 30 # turn timeout back up - self.engine = "ACCESS" - self.db = config.dbAccessconnect - self.remote = config.connStrAccess[2] - - def tearDown(self): - try: - self.conn.rollback() - except: - pass - try: - self.conn.close() - except: - pass - self.conn = None - - def getConnection(self): - return self.conn - - def getAnotherConnection(self, addkeys=None): - raise NotImplementedError("Jet cannot use a second connection to the database") - - def testOkConnect(self): - c = self.db(*config.connStrAccess[0], **config.connStrAccess[1]) - assert c != None - c.close() - - -class TestADOwithMySql(CommonDBTests): - def setUp(self): - self.conn = config.dbMySqlconnect( - *config.connStrMySql[0], **config.connStrMySql[1] - ) - self.conn.timeout = 30 # turn timeout back up - self.engine = "MySQL" - self.db = config.dbMySqlconnect - self.remote = config.connStrMySql[2] - - def tearDown(self): - try: - self.conn.rollback() - except: - pass - try: - self.conn.close() - except: - pass - self.conn = None - - def getConnection(self): - return self.conn - - def getAnotherConnection(self, addkeys=None): - keys = dict(config.connStrMySql[1]) - if addkeys: - keys.update(addkeys) - return config.dbMySqlconnect(*config.connStrMySql[0], **keys) - - def testOkConnect(self): - c = self.db(*config.connStrMySql[0], **config.connStrMySql[1]) - assert c != None - - # def testStoredProcedure(self): - # crsr=self.conn.cursor() - # try: - # crsr.execute("DROP PROCEDURE DeleteMeOnlyForTesting") - # self.conn.commit() - # except: #Make sure it is empty - # pass - # spdef= """ - # DELIMITER $$ - # CREATE PROCEDURE DeleteMeOnlyForTesting (onein CHAR(10), twoin CHAR(10), OUT theout CHAR(20)) - # DETERMINISTIC - # BEGIN - # SET theout = onein //|| twoin; - # /* (SELECT 'a small string' as result; */ - # END $$ - # """ - # - # crsr.execute(spdef) - # - # retvalues=crsr.callproc('DeleteMeOnlyForTesting',('Dodsworth','Anne',' ')) - # print 'return value (mysql)=',repr(crsr.returnValue) ### - # assert retvalues[0]=='Dodsworth', '%s is not "Dodsworth"'%repr(retvalues[0]) - # assert retvalues[1]=='Anne','%s is not "Anne"'%repr(retvalues[1]) - # assert retvalues[2]=='DodsworthAnne','%s is not "DodsworthAnne"'%repr(retvalues[2]) - # - # try: - # crsr.execute("DROP PROCEDURE, DeleteMeOnlyForTesting") - # self.conn.commit() - # except: #Make sure it is empty - # pass - - -class TestADOwithPostgres(CommonDBTests): - def setUp(self): - self.conn = config.dbPostgresConnect( - *config.connStrPostgres[0], **config.connStrPostgres[1] - ) - self.conn.timeout = 30 # turn timeout back up - self.engine = "PostgreSQL" - self.db = config.dbPostgresConnect - self.remote = config.connStrPostgres[2] - - def tearDown(self): - try: - self.conn.rollback() - except: - pass - try: - self.conn.close() - except: - pass - self.conn = None - - def getConnection(self): - 
return self.conn - - def getAnotherConnection(self, addkeys=None): - keys = dict(config.connStrPostgres[1]) - if addkeys: - keys.update(addkeys) - return config.dbPostgresConnect(*config.connStrPostgres[0], **keys) - - def testOkConnect(self): - c = self.db(*config.connStrPostgres[0], **config.connStrPostgres[1]) - assert c != None - - # def testStoredProcedure(self): - # crsr=self.conn.cursor() - # spdef= """ - # CREATE OR REPLACE FUNCTION DeleteMeOnlyForTesting (text, text) - # RETURNS text AS $funk$ - # BEGIN - # RETURN $1 || $2; - # END; - # $funk$ - # LANGUAGE SQL; - # """ - # - # crsr.execute(spdef) - # retvalues = crsr.callproc('DeleteMeOnlyForTesting',('Dodsworth','Anne',' ')) - # ### print 'return value (pg)=',repr(crsr.returnValue) ### - # assert retvalues[0]=='Dodsworth', '%s is not "Dodsworth"'%repr(retvalues[0]) - # assert retvalues[1]=='Anne','%s is not "Anne"'%repr(retvalues[1]) - # assert retvalues[2]=='Dodsworth Anne','%s is not "Dodsworth Anne"'%repr(retvalues[2]) - # self.conn.rollback() - # try: - # crsr.execute("DROP PROCEDURE, DeleteMeOnlyForTesting") - # self.conn.commit() - # except: #Make sure it is empty - # pass - - -class TimeConverterInterfaceTest(unittest.TestCase): - def testIDate(self): - assert self.tc.Date(1990, 2, 2) - - def testITime(self): - assert self.tc.Time(13, 2, 2) - - def testITimestamp(self): - assert self.tc.Timestamp(1990, 2, 2, 13, 2, 1) - - def testIDateObjectFromCOMDate(self): - assert self.tc.DateObjectFromCOMDate(37435.7604282) - - def testICOMDate(self): - assert hasattr(self.tc, "COMDate") - - def testExactDate(self): - d = self.tc.Date(1994, 11, 15) - comDate = self.tc.COMDate(d) - correct = 34653.0 - assert comDate == correct, comDate - - def testExactTimestamp(self): - d = self.tc.Timestamp(1994, 11, 15, 12, 0, 0) - comDate = self.tc.COMDate(d) - correct = 34653.5 - self.assertEqual(comDate, correct) - - d = self.tc.Timestamp(2003, 5, 6, 14, 15, 17) - comDate = self.tc.COMDate(d) - correct = 37747.593946759262 - self.assertEqual(comDate, correct) - - def testIsoFormat(self): - d = self.tc.Timestamp(1994, 11, 15, 12, 3, 10) - iso = self.tc.DateObjectToIsoFormatString(d) - self.assertEqual(str(iso[:19]), "1994-11-15 12:03:10") - - dt = self.tc.Date(2003, 5, 2) - iso = self.tc.DateObjectToIsoFormatString(dt) - self.assertEqual(str(iso[:10]), "2003-05-02") - - -if config.doMxDateTimeTest: - import mx.DateTime - - -class TestMXDateTimeConverter(TimeConverterInterfaceTest): - def setUp(self): - self.tc = api.mxDateTimeConverter() - - def testCOMDate(self): - t = mx.DateTime.DateTime(2002, 6, 28, 18, 15, 2) - cmd = self.tc.COMDate(t) - assert cmd == t.COMDate() - - def testDateObjectFromCOMDate(self): - cmd = self.tc.DateObjectFromCOMDate(37435.7604282) - t = mx.DateTime.DateTime(2002, 6, 28, 18, 15, 0) - t2 = mx.DateTime.DateTime(2002, 6, 28, 18, 15, 2) - assert t2 > cmd > t - - def testDate(self): - assert mx.DateTime.Date(1980, 11, 4) == self.tc.Date(1980, 11, 4) - - def testTime(self): - assert mx.DateTime.Time(13, 11, 4) == self.tc.Time(13, 11, 4) - - def testTimestamp(self): - t = mx.DateTime.DateTime(2002, 6, 28, 18, 15, 1) - obj = self.tc.Timestamp(2002, 6, 28, 18, 15, 1) - assert t == obj - - -import time - - -class TestPythonTimeConverter(TimeConverterInterfaceTest): - def setUp(self): - self.tc = api.pythonTimeConverter() - - def testCOMDate(self): - mk = time.mktime((2002, 6, 28, 18, 15, 1, 4, 31 + 28 + 31 + 30 + 31 + 28, -1)) - t = time.localtime(mk) - # Fri, 28 Jun 2002 18:15:01 +0000 - cmd = self.tc.COMDate(t) - assert 
abs(cmd - 37435.7604282) < 1.0 / 24, "%f more than an hour wrong" % cmd - - def testDateObjectFromCOMDate(self): - cmd = self.tc.DateObjectFromCOMDate(37435.7604282) - t1 = time.gmtime( - time.mktime((2002, 6, 28, 0, 14, 1, 4, 31 + 28 + 31 + 30 + 31 + 28, -1)) - ) - # there are errors in the implementation of gmtime which we ignore - t2 = time.gmtime( - time.mktime((2002, 6, 29, 12, 14, 2, 4, 31 + 28 + 31 + 30 + 31 + 28, -1)) - ) - assert t1 < cmd < t2, '"%s" should be about 2002-6-28 12:15:01' % repr(cmd) - - def testDate(self): - t1 = time.mktime((2002, 6, 28, 18, 15, 1, 4, 31 + 28 + 31 + 30 + 31 + 30, 0)) - t2 = time.mktime((2002, 6, 30, 18, 15, 1, 4, 31 + 28 + 31 + 30 + 31 + 28, 0)) - obj = self.tc.Date(2002, 6, 29) - assert t1 < time.mktime(obj) < t2, obj - - def testTime(self): - self.assertEqual( - self.tc.Time(18, 15, 2), time.gmtime(18 * 60 * 60 + 15 * 60 + 2) - ) - - def testTimestamp(self): - t1 = time.localtime( - time.mktime((2002, 6, 28, 18, 14, 1, 4, 31 + 28 + 31 + 30 + 31 + 28, -1)) - ) - t2 = time.localtime( - time.mktime((2002, 6, 28, 18, 16, 1, 4, 31 + 28 + 31 + 30 + 31 + 28, -1)) - ) - obj = self.tc.Timestamp(2002, 6, 28, 18, 15, 2) - assert t1 < obj < t2, obj - - -class TestPythonDateTimeConverter(TimeConverterInterfaceTest): - def setUp(self): - self.tc = api.pythonDateTimeConverter() - - def testCOMDate(self): - t = datetime.datetime(2002, 6, 28, 18, 15, 1) - # Fri, 28 Jun 2002 18:15:01 +0000 - cmd = self.tc.COMDate(t) - assert abs(cmd - 37435.7604282) < 1.0 / 24, "more than an hour wrong" - - def testDateObjectFromCOMDate(self): - cmd = self.tc.DateObjectFromCOMDate(37435.7604282) - t1 = datetime.datetime(2002, 6, 28, 18, 14, 1) - t2 = datetime.datetime(2002, 6, 28, 18, 16, 1) - assert t1 < cmd < t2, cmd - - tx = datetime.datetime( - 2002, 6, 28, 18, 14, 1, 900000 - ) # testing that microseconds don't become milliseconds - c1 = self.tc.DateObjectFromCOMDate(self.tc.COMDate(tx)) - assert t1 < c1 < t2, c1 - - def testDate(self): - t1 = datetime.date(2002, 6, 28) - t2 = datetime.date(2002, 6, 30) - obj = self.tc.Date(2002, 6, 29) - assert t1 < obj < t2, obj - - def testTime(self): - self.assertEqual(self.tc.Time(18, 15, 2).isoformat()[:8], "18:15:02") - - def testTimestamp(self): - t1 = datetime.datetime(2002, 6, 28, 18, 14, 1) - t2 = datetime.datetime(2002, 6, 28, 18, 16, 1) - obj = self.tc.Timestamp(2002, 6, 28, 18, 15, 2) - assert t1 < obj < t2, obj - - -suites = [] -suites.append(unittest.makeSuite(TestPythonDateTimeConverter, "test")) -if config.doMxDateTimeTest: - suites.append(unittest.makeSuite(TestMXDateTimeConverter, "test")) -if config.doTimeTest: - suites.append(unittest.makeSuite(TestPythonTimeConverter, "test")) - -if config.doAccessTest: - suites.append(unittest.makeSuite(TestADOwithAccessDB, "test")) -if config.doSqlServerTest: - suites.append(unittest.makeSuite(TestADOwithSQLServer, "test")) -if config.doMySqlTest: - suites.append(unittest.makeSuite(TestADOwithMySql, "test")) -if config.doPostgresTest: - suites.append(unittest.makeSuite(TestADOwithPostgres, "test")) - - -class cleanup_manager(object): - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_val, exc_tb): - config.cleanup(config.testfolder, config.mdb_name) - - -suite = unittest.TestSuite(suites) -if __name__ == "__main__": - mysuite = copy.deepcopy(suite) - with cleanup_manager(): - defaultDateConverter = adodbapi.dateconverter - print(__doc__) - print("Default Date Converter is %s" % (defaultDateConverter,)) - dateconverter = defaultDateConverter - tag = "datetime" - 
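# --- illustrative aside (editor's sketch of the COM date convention) ---
# ADO "COM dates" count days since 1899-12-30, with the time of day stored
# as the fractional part. The converters tested above all round-trip this
# representation; the constants below come from testExactDate/testExactTimestamp.
# (Real COM dates encode pre-epoch values oddly; this covers the common case.)
from datetime import datetime, timedelta

EPOCH = datetime(1899, 12, 30)

def to_comdate(dt):
    return (dt - EPOCH).total_seconds() / 86400.0

def from_comdate(value):
    return EPOCH + timedelta(days=value)

assert to_comdate(datetime(1994, 11, 15)) == 34653.0
assert from_comdate(34653.5) == datetime(1994, 11, 15, 12, 0)
# --- end aside ---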
unittest.TextTestRunner().run(mysuite) - - if config.iterateOverTimeTests: - for test, dateconverter, tag in ( - (config.doTimeTest, api.pythonTimeConverter, "pythontime"), - (config.doMxDateTimeTest, api.mxDateTimeConverter, "mx"), - ): - if test: - mysuite = copy.deepcopy( - suite - ) # work around a side effect of unittest.TextTestRunner - adodbapi.adodbapi.dateconverter = dateconverter() - print("Changed dateconverter to ") - print(adodbapi.adodbapi.dateconverter) - unittest.TextTestRunner().run(mysuite) diff --git a/server/venv/Lib/site-packages/adodbapi/test/adodbapitestconfig.py b/server/venv/Lib/site-packages/adodbapi/test/adodbapitestconfig.py deleted file mode 100644 index 98f2544..0000000 --- a/server/venv/Lib/site-packages/adodbapi/test/adodbapitestconfig.py +++ /dev/null @@ -1,221 +0,0 @@ -# Configure this to _YOUR_ environment in order to run the testcases. -"testADOdbapiConfig.py v 2.6.2.B00" - -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# # -# # TESTERS: -# # -# # You will need to make numerous modifications to this file -# # to adapt it to your own testing environment. -# # -# # Skip down to the next "# #" line -- -# # -- the things you need to change are below it. -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -import platform -import random -import sys - -import is64bit -import setuptestframework -import tryconnection - -print("\nPython", sys.version) -node = platform.node() -try: - print( - "node=%s, is64bit.os()= %s, is64bit.Python()= %s" - % (node, is64bit.os(), is64bit.Python()) - ) -except: - pass - -if "--help" in sys.argv: - print( - """Valid command-line switches are: - --package - create a temporary test package, run 2to3 if needed. - --all - run all possible tests - --time - loop over time format tests (including mxdatetime if present) - --nojet - do not test against an ACCESS database file - --mssql - test against Microsoft SQL server - --pg - test against PostgreSQL - --mysql - test against MariaDB - --remote= - test unsing remote server at= (experimental) - """ - ) - exit() -try: - onWindows = bool(sys.getwindowsversion()) # seems to work on all versions of Python -except: - onWindows = False - -# create a random name for temporary table names -_alphabet = ( - "PYFGCRLAOEUIDHTNSQJKXBMWVZ" # why, yes, I do happen to use a dvorak keyboard -) -tmp = "".join([random.choice(_alphabet) for x in range(9)]) -mdb_name = "xx_" + tmp + ".mdb" # generate a non-colliding name for the temporary .mdb -testfolder = setuptestframework.maketemp() - -if "--package" in sys.argv: - # create a new adodbapi module -- running 2to3 if needed. - pth = setuptestframework.makeadopackage(testfolder) -else: - # use the adodbapi module in which this file appears - pth = setuptestframework.find_ado_path() -if pth not in sys.path: - # look here _first_ to find modules - sys.path.insert(1, pth) - -proxy_host = None -for arg in sys.argv: - if arg.startswith("--remote="): - proxy_host = arg.split("=")[1] - import adodbapi.remote as remote - - break - - -# function to clean up the temporary folder -- calling program must run this function before exit. -cleanup = setuptestframework.getcleanupfunction() -try: - import adodbapi # will (hopefully) be imported using the "pth" discovered above -except SyntaxError: - print( - '\n* * * Are you trying to run Python2 code using Python3? Re-run this test using the "--package" switch.' 
- ) - sys.exit(11) -try: - print(adodbapi.version) # show version -except: - print('"adodbapi.version" not present or not working.') -print(__doc__) - -verbose = False -for a in sys.argv: - if a.startswith("--verbose"): - arg = True - try: - arg = int(a.split("=")[1]) - except IndexError: - pass - adodbapi.adodbapi.verbose = arg - verbose = arg - -doAllTests = "--all" in sys.argv -doAccessTest = not ("--nojet" in sys.argv) -doSqlServerTest = "--mssql" in sys.argv or doAllTests -doMySqlTest = "--mysql" in sys.argv or doAllTests -doPostgresTest = "--pg" in sys.argv or doAllTests -iterateOverTimeTests = ("--time" in sys.argv or doAllTests) and onWindows - -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# # start your environment setup here v v v -SQL_HOST_NODE = "testsql.2txt.us,1430" - -try: # If mx extensions are installed, use mxDateTime - import mx.DateTime - - doMxDateTimeTest = True -except: - doMxDateTimeTest = False # Requires eGenixMXExtensions - -doTimeTest = True # obsolete python time format - -if doAccessTest: - if proxy_host: # determine the (probably remote) database file folder - c = {"macro_find_temp_test_path": ["mdb", mdb_name], "proxy_host": proxy_host} - else: - c = {"mdb": setuptestframework.makemdb(testfolder, mdb_name)} - - # macro definition for keyword "provider" using macro "is64bit" -- see documentation - # is64bit will return true for 64 bit versions of Python, so the macro will select the ACE provider - # (If running a remote ADO service, this will test the 64-bitedness of the ADO server.) - c["macro_is64bit"] = [ - "provider", - "Microsoft.ACE.OLEDB.12.0", # 64 bit provider - "Microsoft.Jet.OLEDB.4.0", - ] # 32 bit provider - connStrAccess = "Provider=%(provider)s;Data Source=%(mdb)s" # ;Mode=ReadWrite;Persist Security Info=False;Jet OLEDB:Bypass UserInfo Validation=True" - print( - " ...Testing ACCESS connection to {} file...".format( - c.get("mdb", "remote .mdb") - ) - ) - doAccessTest, connStrAccess, dbAccessconnect = tryconnection.try_connection( - verbose, connStrAccess, 10, **c - ) - -if doSqlServerTest: - c = { - "host": SQL_HOST_NODE, # name of computer with SQL Server - "database": "adotest", - "user": "adotestuser", # None implies Windows security - "password": "Sq1234567", - # macro definition for keyword "security" using macro "auto_security" - "macro_auto_security": "security", - "provider": "MSOLEDBSQL; MARS Connection=True", - } - if proxy_host: - c["proxy_host"] = proxy_host - connStr = "Provider=%(provider)s; Initial Catalog=%(database)s; Data Source=%(host)s; %(security)s;" - print(" ...Testing MS-SQL login to {}...".format(c["host"])) - ( - doSqlServerTest, - connStrSQLServer, - dbSqlServerconnect, - ) = tryconnection.try_connection(verbose, connStr, 30, **c) - -if doMySqlTest: - c = { - "host": "testmysql.2txt.us", - "database": "adodbapitest", - "user": "adotest", - "password": "12345678", - "port": "3330", # note the nonstandard port for obfuscation - "driver": "MySQL ODBC 5.1 Driver", - } # or _driver="MySQL ODBC 3.51 Driver - if proxy_host: - c["proxy_host"] = proxy_host - c["macro_is64bit"] = [ - "provider", - "Provider=MSDASQL;", - ] # turn on the 64 bit ODBC adapter only if needed - cs = ( - "%(provider)sDriver={%(driver)s};Server=%(host)s;Port=3330;" - + "Database=%(database)s;user=%(user)s;password=%(password)s;Option=3;" - ) - print(" ...Testing MySql login to {}...".format(c["host"])) - doMySqlTest, connStrMySql, dbMySqlconnect = tryconnection.try_connection( - verbose, cs, 5, **c 
- ) - - -if doPostgresTest: - _computername = "testpg.2txt.us" - _databasename = "adotest" - _username = "adotestuser" - _password = "12345678" - kws = {"timeout": 4} - kws["macro_is64bit"] = [ - "prov_drv", - "Provider=MSDASQL;Driver={PostgreSQL Unicode(x64)}", - "Driver=PostgreSQL Unicode", - ] - # get driver from http://www.postgresql.org/ftp/odbc/versions/ - # test using positional and keyword arguments (bad example for real code) - if proxy_host: - kws["proxy_host"] = proxy_host - print(" ...Testing PostgreSQL login to {}...".format(_computername)) - doPostgresTest, connStrPostgres, dbPostgresConnect = tryconnection.try_connection( - verbose, - "%(prov_drv)s;Server=%(host)s;Database=%(database)s;uid=%(user)s;pwd=%(password)s;port=5430;", # note nonstandard port - _username, - _password, - _computername, - _databasename, - **kws - ) - -assert ( - doAccessTest or doSqlServerTest or doMySqlTest or doPostgresTest -), "No database engine found for testing" diff --git a/server/venv/Lib/site-packages/adodbapi/test/dbapi20.py b/server/venv/Lib/site-packages/adodbapi/test/dbapi20.py deleted file mode 100644 index e378b19..0000000 --- a/server/venv/Lib/site-packages/adodbapi/test/dbapi20.py +++ /dev/null @@ -1,939 +0,0 @@ -#!/usr/bin/env python -""" Python DB API 2.0 driver compliance unit test suite. - - This software is Public Domain and may be used without restrictions. - - "Now we have booze and barflies entering the discussion, plus rumours of - DBAs on drugs... and I won't tell you what flashes through my mind each - time I read the subject line with 'Anal Compliance' in it. All around - this is turning out to be a thoroughly unwholesome unit test." - - -- Ian Bicking -""" - -__version__ = "$Revision: 1.15.0 $"[11:-2] -__author__ = "Stuart Bishop " - -import sys -import time -import unittest - -if sys.version[0] >= "3": # python 3.x - _BaseException = Exception - - def _failUnless(self, expr, msg=None): - self.assertTrue(expr, msg) - -else: # python 2.x - from exceptions import Exception as _BaseException - - def _failUnless(self, expr, msg=None): - self.failUnless(expr, msg) ## deprecated since Python 2.6 - - -# set this to "True" to follow API 2.0 to the letter -TEST_FOR_NON_IDEMPOTENT_CLOSE = False - -# Revision 1.15 2019/11/22 00:50:00 kf7xm -# Make Turn off IDEMPOTENT_CLOSE a proper skipTest - -# Revision 1.14 2013/05/20 11:02:05 kf7xm -# Add a literal string to the format insertion test to catch trivial re-format algorithms - -# Revision 1.13 2013/05/08 14:31:50 kf7xm -# Quick switch to Turn off IDEMPOTENT_CLOSE test. Also: Silence teardown failure - - -# Revision 1.12 2009/02/06 03:35:11 kf7xm -# Tested okay with Python 3.0, includes last minute patches from Mark H. -# -# Revision 1.1.1.1.2.1 2008/09/20 19:54:59 rupole -# Include latest changes from main branch -# Updates for py3k -# -# Revision 1.11 2005/01/02 02:41:01 zenzen -# Update author email address -# -# Revision 1.10 2003/10/09 03:14:14 zenzen -# Add test for DB API 2.0 optional extension, where database exceptions -# are exposed as attributes on the Connection object. -# -# Revision 1.9 2003/08/13 01:16:36 zenzen -# Minor tweak from Stefan Fleiter -# -# Revision 1.8 2003/04/10 00:13:25 zenzen -# Changes, as per suggestions by M.-A. 
Lemburg -# - Add a table prefix, to ensure namespace collisions can always be avoided -# -# Revision 1.7 2003/02/26 23:33:37 zenzen -# Break out DDL into helper functions, as per request by David Rushby -# -# Revision 1.6 2003/02/21 03:04:33 zenzen -# Stuff from Henrik Ekelund: -# added test_None -# added test_nextset & hooks -# -# Revision 1.5 2003/02/17 22:08:43 zenzen -# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize -# defaults to 1 & generic cursor.callproc test added -# -# Revision 1.4 2003/02/15 00:16:33 zenzen -# Changes, as per suggestions and bug reports by M.-A. Lemburg, -# Matthew T. Kromer, Federico Di Gregorio and Daniel Dittmar -# - Class renamed -# - Now a subclass of TestCase, to avoid requiring the driver stub -# to use multiple inheritance -# - Reversed the polarity of buggy test in test_description -# - Test exception heirarchy correctly -# - self.populate is now self._populate(), so if a driver stub -# overrides self.ddl1 this change propogates -# - VARCHAR columns now have a width, which will hopefully make the -# DDL even more portible (this will be reversed if it causes more problems) -# - cursor.rowcount being checked after various execute and fetchXXX methods -# - Check for fetchall and fetchmany returning empty lists after results -# are exhausted (already checking for empty lists if select retrieved -# nothing -# - Fix bugs in test_setoutputsize_basic and test_setinputsizes -# -def str2bytes(sval): - if sys.version_info < (3, 0) and isinstance(sval, str): - sval = sval.decode("latin1") - return sval.encode("latin1") # python 3 make unicode into bytes - - -class DatabaseAPI20Test(unittest.TestCase): - """Test a database self.driver for DB API 2.0 compatibility. - This implementation tests Gadfly, but the TestCase - is structured so that other self.drivers can subclass this - test case to ensure compiliance with the DB-API. It is - expected that this TestCase may be expanded in the future - if ambiguities or edge conditions are discovered. - - The 'Optional Extensions' are not yet being tested. - - self.drivers should subclass this test, overriding setUp, tearDown, - self.driver, connect_args and connect_kw_args. Class specification - should be as follows: - - import dbapi20 - class mytest(dbapi20.DatabaseAPI20Test): - [...] - - Don't 'import DatabaseAPI20Test from dbapi20', or you will - confuse the unit tester - just 'import dbapi20'. - """ - - # The self.driver module. This should be the module where the 'connect' - # method is to be found - driver = None - connect_args = () # List of arguments to pass to connect - connect_kw_args = {} # Keyword arguments for connect - table_prefix = "dbapi20test_" # If you need to specify a prefix for tables - - ddl1 = "create table %sbooze (name varchar(20))" % table_prefix - ddl2 = "create table %sbarflys (name varchar(20), drink varchar(30))" % table_prefix - xddl1 = "drop table %sbooze" % table_prefix - xddl2 = "drop table %sbarflys" % table_prefix - - lowerfunc = "lower" # Name of stored procedure to convert string->lowercase - - # Some drivers may need to override these helpers, for example adding - # a 'commit' after the execute. - def executeDDL1(self, cursor): - cursor.execute(self.ddl1) - - def executeDDL2(self, cursor): - cursor.execute(self.ddl2) - - def setUp(self): - """self.drivers should override this method to perform required setup - if any is necessary, such as creating the database. 
- """ - pass - - def tearDown(self): - """self.drivers should override this method to perform required cleanup - if any is necessary, such as deleting the test database. - The default drops the tables that may be created. - """ - try: - con = self._connect() - try: - cur = con.cursor() - for ddl in (self.xddl1, self.xddl2): - try: - cur.execute(ddl) - con.commit() - except self.driver.Error: - # Assume table didn't exist. Other tests will check if - # execute is busted. - pass - finally: - con.close() - except _BaseException: - pass - - def _connect(self): - try: - r = self.driver.connect(*self.connect_args, **self.connect_kw_args) - except AttributeError: - self.fail("No connect method found in self.driver module") - return r - - def test_connect(self): - con = self._connect() - con.close() - - def test_apilevel(self): - try: - # Must exist - apilevel = self.driver.apilevel - # Must equal 2.0 - self.assertEqual(apilevel, "2.0") - except AttributeError: - self.fail("Driver doesn't define apilevel") - - def test_threadsafety(self): - try: - # Must exist - threadsafety = self.driver.threadsafety - # Must be a valid value - _failUnless(self, threadsafety in (0, 1, 2, 3)) - except AttributeError: - self.fail("Driver doesn't define threadsafety") - - def test_paramstyle(self): - try: - # Must exist - paramstyle = self.driver.paramstyle - # Must be a valid value - _failUnless( - self, paramstyle in ("qmark", "numeric", "named", "format", "pyformat") - ) - except AttributeError: - self.fail("Driver doesn't define paramstyle") - - def test_Exceptions(self): - # Make sure required exceptions exist, and are in the - # defined heirarchy. - if sys.version[0] == "3": # under Python 3 StardardError no longer exists - self.assertTrue(issubclass(self.driver.Warning, Exception)) - self.assertTrue(issubclass(self.driver.Error, Exception)) - else: - self.failUnless(issubclass(self.driver.Warning, Exception)) - self.failUnless(issubclass(self.driver.Error, Exception)) - - _failUnless(self, issubclass(self.driver.InterfaceError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.DatabaseError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.OperationalError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.IntegrityError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.InternalError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.ProgrammingError, self.driver.Error)) - _failUnless(self, issubclass(self.driver.NotSupportedError, self.driver.Error)) - - def test_ExceptionsAsConnectionAttributes(self): - # OPTIONAL EXTENSION - # Test for the optional DB API 2.0 extension, where the exceptions - # are exposed as attributes on the Connection object - # I figure this optional extension will be implemented by any - # driver author who is using this test suite, so it is enabled - # by default. 
- con = self._connect() - drv = self.driver - _failUnless(self, con.Warning is drv.Warning) - _failUnless(self, con.Error is drv.Error) - _failUnless(self, con.InterfaceError is drv.InterfaceError) - _failUnless(self, con.DatabaseError is drv.DatabaseError) - _failUnless(self, con.OperationalError is drv.OperationalError) - _failUnless(self, con.IntegrityError is drv.IntegrityError) - _failUnless(self, con.InternalError is drv.InternalError) - _failUnless(self, con.ProgrammingError is drv.ProgrammingError) - _failUnless(self, con.NotSupportedError is drv.NotSupportedError) - - def test_commit(self): - con = self._connect() - try: - # Commit must work, even if it doesn't do anything - con.commit() - finally: - con.close() - - def test_rollback(self): - con = self._connect() - # If rollback is defined, it should either work or throw - # the documented exception - if hasattr(con, "rollback"): - try: - con.rollback() - except self.driver.NotSupportedError: - pass - - def test_cursor(self): - con = self._connect() - try: - cur = con.cursor() - finally: - con.close() - - def test_cursor_isolation(self): - con = self._connect() - try: - # Make sure cursors created from the same connection have - # the documented transaction isolation level - cur1 = con.cursor() - cur2 = con.cursor() - self.executeDDL1(cur1) - cur1.execute( - "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix) - ) - cur2.execute("select name from %sbooze" % self.table_prefix) - booze = cur2.fetchall() - self.assertEqual(len(booze), 1) - self.assertEqual(len(booze[0]), 1) - self.assertEqual(booze[0][0], "Victoria Bitter") - finally: - con.close() - - def test_description(self): - con = self._connect() - try: - cur = con.cursor() - self.executeDDL1(cur) - self.assertEqual( - cur.description, - None, - "cursor.description should be none after executing a " - "statement that can return no rows (such as DDL)", - ) - cur.execute("select name from %sbooze" % self.table_prefix) - self.assertEqual( - len(cur.description), 1, "cursor.description describes too many columns" - ) - self.assertEqual( - len(cur.description[0]), - 7, - "cursor.description[x] tuples must have 7 elements", - ) - self.assertEqual( - cur.description[0][0].lower(), - "name", - "cursor.description[x][0] must return column name", - ) - self.assertEqual( - cur.description[0][1], - self.driver.STRING, - "cursor.description[x][1] must return column type. Got %r" - % cur.description[0][1], - ) - - # Make sure self.description gets reset - self.executeDDL2(cur) - self.assertEqual( - cur.description, - None, - "cursor.description not being set to None when executing " - "no-result statements (eg. 
DDL)", - ) - finally: - con.close() - - def test_rowcount(self): - con = self._connect() - try: - cur = con.cursor() - self.executeDDL1(cur) - _failUnless( - self, - cur.rowcount in (-1, 0), # Bug #543885 - "cursor.rowcount should be -1 or 0 after executing no-result " - "statements", - ) - cur.execute( - "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix) - ) - _failUnless( - self, - cur.rowcount in (-1, 1), - "cursor.rowcount should == number or rows inserted, or " - "set to -1 after executing an insert statement", - ) - cur.execute("select name from %sbooze" % self.table_prefix) - _failUnless( - self, - cur.rowcount in (-1, 1), - "cursor.rowcount should == number of rows returned, or " - "set to -1 after executing a select statement", - ) - self.executeDDL2(cur) - self.assertEqual( - cur.rowcount, - -1, - "cursor.rowcount not being reset to -1 after executing " - "no-result statements", - ) - finally: - con.close() - - lower_func = "lower" - - def test_callproc(self): - con = self._connect() - try: - cur = con.cursor() - if self.lower_func and hasattr(cur, "callproc"): - r = cur.callproc(self.lower_func, ("FOO",)) - self.assertEqual(len(r), 1) - self.assertEqual(r[0], "FOO") - r = cur.fetchall() - self.assertEqual(len(r), 1, "callproc produced no result set") - self.assertEqual(len(r[0]), 1, "callproc produced invalid result set") - self.assertEqual(r[0][0], "foo", "callproc produced invalid results") - finally: - con.close() - - def test_close(self): - con = self._connect() - try: - cur = con.cursor() - finally: - con.close() - - # cursor.execute should raise an Error if called after connection - # closed - self.assertRaises(self.driver.Error, self.executeDDL1, cur) - - # connection.commit should raise an Error if called after connection' - # closed.' - self.assertRaises(self.driver.Error, con.commit) - - # connection.close should raise an Error if called more than once - #!!! reasonable persons differ about the usefulness of this test and this feature !!! - if TEST_FOR_NON_IDEMPOTENT_CLOSE: - self.assertRaises(self.driver.Error, con.close) - else: - self.skipTest( - "Non-idempotent close is considered a bad thing by some people." - ) - - def test_execute(self): - con = self._connect() - try: - cur = con.cursor() - self._paraminsert(cur) - finally: - con.close() - - def _paraminsert(self, cur): - self.executeDDL2(cur) - cur.execute( - "insert into %sbarflys values ('Victoria Bitter', 'thi%%s :may ca%%(u)se? troub:1e')" - % (self.table_prefix) - ) - _failUnless(self, cur.rowcount in (-1, 1)) - - if self.driver.paramstyle == "qmark": - cur.execute( - "insert into %sbarflys values (?, 'thi%%s :may ca%%(u)se? troub:1e')" - % self.table_prefix, - ("Cooper's",), - ) - elif self.driver.paramstyle == "numeric": - cur.execute( - "insert into %sbarflys values (:1, 'thi%%s :may ca%%(u)se? troub:1e')" - % self.table_prefix, - ("Cooper's",), - ) - elif self.driver.paramstyle == "named": - cur.execute( - "insert into %sbarflys values (:beer, 'thi%%s :may ca%%(u)se? troub:1e')" - % self.table_prefix, - {"beer": "Cooper's"}, - ) - elif self.driver.paramstyle == "format": - cur.execute( - "insert into %sbarflys values (%%s, 'thi%%s :may ca%%(u)se? troub:1e')" - % self.table_prefix, - ("Cooper's",), - ) - elif self.driver.paramstyle == "pyformat": - cur.execute( - "insert into %sbarflys values (%%(beer)s, 'thi%%s :may ca%%(u)se? 
troub:1e')" - % self.table_prefix, - {"beer": "Cooper's"}, - ) - else: - self.fail("Invalid paramstyle") - _failUnless(self, cur.rowcount in (-1, 1)) - - cur.execute("select name, drink from %sbarflys" % self.table_prefix) - res = cur.fetchall() - self.assertEqual(len(res), 2, "cursor.fetchall returned too few rows") - beers = [res[0][0], res[1][0]] - beers.sort() - self.assertEqual( - beers[0], - "Cooper's", - "cursor.fetchall retrieved incorrect data, or data inserted " "incorrectly", - ) - self.assertEqual( - beers[1], - "Victoria Bitter", - "cursor.fetchall retrieved incorrect data, or data inserted " "incorrectly", - ) - trouble = "thi%s :may ca%(u)se? troub:1e" - self.assertEqual( - res[0][1], - trouble, - "cursor.fetchall retrieved incorrect data, or data inserted " - "incorrectly. Got=%s, Expected=%s" % (repr(res[0][1]), repr(trouble)), - ) - self.assertEqual( - res[1][1], - trouble, - "cursor.fetchall retrieved incorrect data, or data inserted " - "incorrectly. Got=%s, Expected=%s" % (repr(res[1][1]), repr(trouble)), - ) - - def test_executemany(self): - con = self._connect() - try: - cur = con.cursor() - self.executeDDL1(cur) - largs = [("Cooper's",), ("Boag's",)] - margs = [{"beer": "Cooper's"}, {"beer": "Boag's"}] - if self.driver.paramstyle == "qmark": - cur.executemany( - "insert into %sbooze values (?)" % self.table_prefix, largs - ) - elif self.driver.paramstyle == "numeric": - cur.executemany( - "insert into %sbooze values (:1)" % self.table_prefix, largs - ) - elif self.driver.paramstyle == "named": - cur.executemany( - "insert into %sbooze values (:beer)" % self.table_prefix, margs - ) - elif self.driver.paramstyle == "format": - cur.executemany( - "insert into %sbooze values (%%s)" % self.table_prefix, largs - ) - elif self.driver.paramstyle == "pyformat": - cur.executemany( - "insert into %sbooze values (%%(beer)s)" % (self.table_prefix), - margs, - ) - else: - self.fail("Unknown paramstyle") - _failUnless( - self, - cur.rowcount in (-1, 2), - "insert using cursor.executemany set cursor.rowcount to " - "incorrect value %r" % cur.rowcount, - ) - cur.execute("select name from %sbooze" % self.table_prefix) - res = cur.fetchall() - self.assertEqual( - len(res), 2, "cursor.fetchall retrieved incorrect number of rows" - ) - beers = [res[0][0], res[1][0]] - beers.sort() - self.assertEqual( - beers[0], "Boag's", 'incorrect data "%s" retrieved' % beers[0] - ) - self.assertEqual(beers[1], "Cooper's", "incorrect data retrieved") - finally: - con.close() - - def test_fetchone(self): - con = self._connect() - try: - cur = con.cursor() - - # cursor.fetchone should raise an Error if called before - # executing a select-type query - self.assertRaises(self.driver.Error, cur.fetchone) - - # cursor.fetchone should raise an Error if called after - # executing a query that cannnot return rows - self.executeDDL1(cur) - self.assertRaises(self.driver.Error, cur.fetchone) - - cur.execute("select name from %sbooze" % self.table_prefix) - self.assertEqual( - cur.fetchone(), - None, - "cursor.fetchone should return None if a query retrieves " "no rows", - ) - _failUnless(self, cur.rowcount in (-1, 0)) - - # cursor.fetchone should raise an Error if called after - # executing a query that cannnot return rows - cur.execute( - "insert into %sbooze values ('Victoria Bitter')" % (self.table_prefix) - ) - self.assertRaises(self.driver.Error, cur.fetchone) - - cur.execute("select name from %sbooze" % self.table_prefix) - r = cur.fetchone() - self.assertEqual( - len(r), 1, "cursor.fetchone should have 
retrieved a single row" - ) - self.assertEqual( - r[0], "Victoria Bitter", "cursor.fetchone retrieved incorrect data" - ) - self.assertEqual( - cur.fetchone(), - None, - "cursor.fetchone should return None if no more rows available", - ) - _failUnless(self, cur.rowcount in (-1, 1)) - finally: - con.close() - - samples = [ - "Carlton Cold", - "Carlton Draft", - "Mountain Goat", - "Redback", - "Victoria Bitter", - "XXXX", - ] - - def _populate(self): - """Return a list of sql commands to setup the DB for the fetch - tests. - """ - populate = [ - "insert into %sbooze values ('%s')" % (self.table_prefix, s) - for s in self.samples - ] - return populate - - def test_fetchmany(self): - con = self._connect() - try: - cur = con.cursor() - - # cursor.fetchmany should raise an Error if called without - # issuing a query - self.assertRaises(self.driver.Error, cur.fetchmany, 4) - - self.executeDDL1(cur) - for sql in self._populate(): - cur.execute(sql) - - cur.execute("select name from %sbooze" % self.table_prefix) - r = cur.fetchmany() - self.assertEqual( - len(r), - 1, - "cursor.fetchmany retrieved incorrect number of rows, " - "default of arraysize is one.", - ) - cur.arraysize = 10 - r = cur.fetchmany(3) # Should get 3 rows - self.assertEqual( - len(r), 3, "cursor.fetchmany retrieved incorrect number of rows" - ) - r = cur.fetchmany(4) # Should get 2 more - self.assertEqual( - len(r), 2, "cursor.fetchmany retrieved incorrect number of rows" - ) - r = cur.fetchmany(4) # Should be an empty sequence - self.assertEqual( - len(r), - 0, - "cursor.fetchmany should return an empty sequence after " - "results are exhausted", - ) - _failUnless(self, cur.rowcount in (-1, 6)) - - # Same as above, using cursor.arraysize - cur.arraysize = 4 - cur.execute("select name from %sbooze" % self.table_prefix) - r = cur.fetchmany() # Should get 4 rows - self.assertEqual( - len(r), 4, "cursor.arraysize not being honoured by fetchmany" - ) - r = cur.fetchmany() # Should get 2 more - self.assertEqual(len(r), 2) - r = cur.fetchmany() # Should be an empty sequence - self.assertEqual(len(r), 0) - _failUnless(self, cur.rowcount in (-1, 6)) - - cur.arraysize = 6 - cur.execute("select name from %sbooze" % self.table_prefix) - rows = cur.fetchmany() # Should get all rows - _failUnless(self, cur.rowcount in (-1, 6)) - self.assertEqual(len(rows), 6) - self.assertEqual(len(rows), 6) - rows = [r[0] for r in rows] - rows.sort() - - # Make sure we get the right data back out - for i in range(0, 6): - self.assertEqual( - rows[i], - self.samples[i], - "incorrect data retrieved by cursor.fetchmany", - ) - - rows = cur.fetchmany() # Should return an empty list - self.assertEqual( - len(rows), - 0, - "cursor.fetchmany should return an empty sequence if " - "called after the whole result set has been fetched", - ) - _failUnless(self, cur.rowcount in (-1, 6)) - - self.executeDDL2(cur) - cur.execute("select name from %sbarflys" % self.table_prefix) - r = cur.fetchmany() # Should get empty sequence - self.assertEqual( - len(r), - 0, - "cursor.fetchmany should return an empty sequence if " - "query retrieved no rows", - ) - _failUnless(self, cur.rowcount in (-1, 0)) - - finally: - con.close() - - def test_fetchall(self): - con = self._connect() - try: - cur = con.cursor() - # cursor.fetchall should raise an Error if called - # without executing a query that may return rows (such - # as a select) - self.assertRaises(self.driver.Error, cur.fetchall) - - self.executeDDL1(cur) - for sql in self._populate(): - cur.execute(sql) - - # cursor.fetchall 
should raise an Error if called - # after executing a a statement that cannot return rows - self.assertRaises(self.driver.Error, cur.fetchall) - - cur.execute("select name from %sbooze" % self.table_prefix) - rows = cur.fetchall() - _failUnless(self, cur.rowcount in (-1, len(self.samples))) - self.assertEqual( - len(rows), - len(self.samples), - "cursor.fetchall did not retrieve all rows", - ) - rows = [r[0] for r in rows] - rows.sort() - for i in range(0, len(self.samples)): - self.assertEqual( - rows[i], self.samples[i], "cursor.fetchall retrieved incorrect rows" - ) - rows = cur.fetchall() - self.assertEqual( - len(rows), - 0, - "cursor.fetchall should return an empty list if called " - "after the whole result set has been fetched", - ) - _failUnless(self, cur.rowcount in (-1, len(self.samples))) - - self.executeDDL2(cur) - cur.execute("select name from %sbarflys" % self.table_prefix) - rows = cur.fetchall() - _failUnless(self, cur.rowcount in (-1, 0)) - self.assertEqual( - len(rows), - 0, - "cursor.fetchall should return an empty list if " - "a select query returns no rows", - ) - - finally: - con.close() - - def test_mixedfetch(self): - con = self._connect() - try: - cur = con.cursor() - self.executeDDL1(cur) - for sql in self._populate(): - cur.execute(sql) - - cur.execute("select name from %sbooze" % self.table_prefix) - rows1 = cur.fetchone() - rows23 = cur.fetchmany(2) - rows4 = cur.fetchone() - rows56 = cur.fetchall() - _failUnless(self, cur.rowcount in (-1, 6)) - self.assertEqual( - len(rows23), 2, "fetchmany returned incorrect number of rows" - ) - self.assertEqual( - len(rows56), 2, "fetchall returned incorrect number of rows" - ) - - rows = [rows1[0]] - rows.extend([rows23[0][0], rows23[1][0]]) - rows.append(rows4[0]) - rows.extend([rows56[0][0], rows56[1][0]]) - rows.sort() - for i in range(0, len(self.samples)): - self.assertEqual( - rows[i], self.samples[i], "incorrect data retrieved or inserted" - ) - finally: - con.close() - - def help_nextset_setUp(self, cur): - """Should create a procedure called deleteme - that returns two result sets, first the - number of rows in booze then "name from booze" - """ - raise NotImplementedError("Helper not implemented") - # sql=""" - # create procedure deleteme as - # begin - # select count(*) from booze - # select name from booze - # end - # """ - # cur.execute(sql) - - def help_nextset_tearDown(self, cur): - "If cleaning up is needed after nextSetTest" - raise NotImplementedError("Helper not implemented") - # cur.execute("drop procedure deleteme") - - def test_nextset(self): - con = self._connect() - try: - cur = con.cursor() - if not hasattr(cur, "nextset"): - return - - try: - self.executeDDL1(cur) - sql = self._populate() - for sql in self._populate(): - cur.execute(sql) - - self.help_nextset_setUp(cur) - - cur.callproc("deleteme") - numberofrows = cur.fetchone() - assert numberofrows[0] == len(self.samples) - assert cur.nextset() - names = cur.fetchall() - assert len(names) == len(self.samples) - s = cur.nextset() - assert s == None, "No more return sets, should return None" - finally: - self.help_nextset_tearDown(cur) - - finally: - con.close() - - def test_nextset(self): - raise NotImplementedError("Drivers need to override this test") - - def test_arraysize(self): - # Not much here - rest of the tests for this are in test_fetchmany - con = self._connect() - try: - cur = con.cursor() - _failUnless( - self, hasattr(cur, "arraysize"), "cursor.arraysize must be defined" - ) - finally: - con.close() - - def 
test_setinputsizes(self): - con = self._connect() - try: - cur = con.cursor() - cur.setinputsizes((25,)) - self._paraminsert(cur) # Make sure cursor still works - finally: - con.close() - - def test_setoutputsize_basic(self): - # Basic test is to make sure setoutputsize doesn't blow up - con = self._connect() - try: - cur = con.cursor() - cur.setoutputsize(1000) - cur.setoutputsize(2000, 0) - self._paraminsert(cur) # Make sure the cursor still works - finally: - con.close() - - def test_setoutputsize(self): - # Real test for setoutputsize is driver dependant - raise NotImplementedError("Driver needed to override this test") - - def test_None(self): - con = self._connect() - try: - cur = con.cursor() - self.executeDDL1(cur) - cur.execute("insert into %sbooze values (NULL)" % self.table_prefix) - cur.execute("select name from %sbooze" % self.table_prefix) - r = cur.fetchall() - self.assertEqual(len(r), 1) - self.assertEqual(len(r[0]), 1) - self.assertEqual(r[0][0], None, "NULL value not returned as None") - finally: - con.close() - - def test_Date(self): - d1 = self.driver.Date(2002, 12, 25) - d2 = self.driver.DateFromTicks(time.mktime((2002, 12, 25, 0, 0, 0, 0, 0, 0))) - # Can we assume this? API doesn't specify, but it seems implied - # self.assertEqual(str(d1),str(d2)) - - def test_Time(self): - t1 = self.driver.Time(13, 45, 30) - t2 = self.driver.TimeFromTicks(time.mktime((2001, 1, 1, 13, 45, 30, 0, 0, 0))) - # Can we assume this? API doesn't specify, but it seems implied - # self.assertEqual(str(t1),str(t2)) - - def test_Timestamp(self): - t1 = self.driver.Timestamp(2002, 12, 25, 13, 45, 30) - t2 = self.driver.TimestampFromTicks( - time.mktime((2002, 12, 25, 13, 45, 30, 0, 0, 0)) - ) - # Can we assume this? API doesn't specify, but it seems implied - # self.assertEqual(str(t1),str(t2)) - - def test_Binary(self): - b = self.driver.Binary(str2bytes("Something")) - b = self.driver.Binary(str2bytes("")) - - def test_STRING(self): - _failUnless( - self, hasattr(self.driver, "STRING"), "module.STRING must be defined" - ) - - def test_BINARY(self): - _failUnless( - self, hasattr(self.driver, "BINARY"), "module.BINARY must be defined." - ) - - def test_NUMBER(self): - _failUnless( - self, hasattr(self.driver, "NUMBER"), "module.NUMBER must be defined." - ) - - def test_DATETIME(self): - _failUnless( - self, hasattr(self.driver, "DATETIME"), "module.DATETIME must be defined." - ) - - def test_ROWID(self): - _failUnless( - self, hasattr(self.driver, "ROWID"), "module.ROWID must be defined." - ) diff --git a/server/venv/Lib/site-packages/adodbapi/test/is64bit.py b/server/venv/Lib/site-packages/adodbapi/test/is64bit.py deleted file mode 100644 index ed390fa..0000000 --- a/server/venv/Lib/site-packages/adodbapi/test/is64bit.py +++ /dev/null @@ -1,41 +0,0 @@ -"""is64bit.Python() --> boolean value of detected Python word size. is64bit.os() --> os build version""" -import sys - - -def Python(): - if sys.platform == "cli": # IronPython - import System - - return System.IntPtr.Size == 8 - else: - try: - return sys.maxsize > 2147483647 - except AttributeError: - return sys.maxint > 2147483647 - - -def os(): - import platform - - pm = platform.machine() - if pm != ".." 
and pm.endswith("64"): # recent Python (not Iron) - return True - else: - import os - - if "PROCESSOR_ARCHITEW6432" in os.environ: - return True # 32 bit program running on 64 bit Windows - try: - return os.environ["PROCESSOR_ARCHITECTURE"].endswith( - "64" - ) # 64 bit Windows 64 bit program - except IndexError: - pass # not Windows - try: - return "64" in platform.architecture()[0] # this often works in Linux - except: - return False # is an older version of Python, assume also an older os (best we can guess) - - -if __name__ == "__main__": - print("is64bit.Python() =", Python(), "is64bit.os() =", os()) diff --git a/server/venv/Lib/site-packages/adodbapi/test/setuptestframework.py b/server/venv/Lib/site-packages/adodbapi/test/setuptestframework.py deleted file mode 100644 index 83fe5ca..0000000 --- a/server/venv/Lib/site-packages/adodbapi/test/setuptestframework.py +++ /dev/null @@ -1,134 +0,0 @@ -#!/usr/bin/python2 -# Configure this in order to run the testcases. -"setuptestframework.py v 2.6.0.8" -import os -import shutil -import sys -import tempfile - -try: - OSErrors = (WindowsError, OSError) -except NameError: # not running on Windows - OSErrors = OSError - - -def maketemp(): - temphome = tempfile.gettempdir() - tempdir = os.path.join(temphome, "adodbapi_test") - try: - os.mkdir(tempdir) - except: - pass - return tempdir - - -def _cleanup_function(testfolder, mdb_name): - try: - os.unlink(os.path.join(testfolder, mdb_name)) - except: - pass # mdb database not present - try: - shutil.rmtree(testfolder) - print(" cleaned up folder", testfolder) - except: - pass # test package not present - - -def getcleanupfunction(): - return _cleanup_function - - -def find_ado_path(): - adoName = os.path.normpath(os.getcwd() + "/../../adodbapi.py") - adoPackage = os.path.dirname(adoName) - return adoPackage - - -# make a new package directory for the test copy of ado -def makeadopackage(testfolder): - adoName = os.path.normpath(os.getcwd() + "/../adodbapi.py") - adoPath = os.path.dirname(adoName) - if os.path.exists(adoName): - newpackage = os.path.join(testfolder, "adodbapi") - try: - os.mkdir(newpackage) - except OSErrors: - print( - "*Note: temporary adodbapi package already exists: may be two versions running?" - ) - for f in os.listdir(adoPath): - if f.endswith(".py"): - shutil.copy(os.path.join(adoPath, f), newpackage) - if sys.version_info >= (3, 0): # only when running Py3.n - save = sys.stdout - sys.stdout = None - from lib2to3.main import main # use 2to3 to make test package - - main("lib2to3.fixes", args=["-n", "-w", newpackage]) - sys.stdout = save - return testfolder - else: - raise EnvironmentError("Connot find source of adodbapi to test.") - - -def makemdb(testfolder, mdb_name): - # following setup code borrowed from pywin32 odbc test suite - # kindly contributed by Frank Millman. - import os - - _accessdatasource = os.path.join(testfolder, mdb_name) - if os.path.isfile(_accessdatasource): - print("using JET database=", _accessdatasource) - else: - try: - from win32com.client import constants - from win32com.client.gencache import EnsureDispatch - - win32 = True - except ImportError: # perhaps we are running IronPython - win32 = False # iron Python - try: - from System import Activator, Type - except: - pass - - # Create a brand-new database - what is the story with these? 
- dbe = None - for suffix in (".36", ".35", ".30"): - try: - if win32: - dbe = EnsureDispatch("DAO.DBEngine" + suffix) - else: - type = Type.GetTypeFromProgID("DAO.DBEngine" + suffix) - dbe = Activator.CreateInstance(type) - break - except: - pass - if dbe: - print(" ...Creating ACCESS db at " + _accessdatasource) - if win32: - workspace = dbe.Workspaces(0) - newdb = workspace.CreateDatabase( - _accessdatasource, constants.dbLangGeneral, constants.dbVersion40 - ) - else: - newdb = dbe.CreateDatabase( - _accessdatasource, ";LANGID=0x0409;CP=1252;COUNTRY=0" - ) - newdb.Close() - else: - print(" ...copying test ACCESS db to " + _accessdatasource) - mdbName = os.path.abspath( - os.path.join(os.path.dirname(__file__), "..", "examples", "test.mdb") - ) - import shutil - - shutil.copy(mdbName, _accessdatasource) - - return _accessdatasource - - -if __name__ == "__main__": - print("Setting up a Jet database for server to use for remote testing...") - temp = maketemp() - makemdb(temp, "server_test.mdb") diff --git a/server/venv/Lib/site-packages/adodbapi/test/test_adodbapi_dbapi20.py b/server/venv/Lib/site-packages/adodbapi/test/test_adodbapi_dbapi20.py deleted file mode 100644 index 31bbae4..0000000 --- a/server/venv/Lib/site-packages/adodbapi/test/test_adodbapi_dbapi20.py +++ /dev/null @@ -1,200 +0,0 @@ -print("This module depends on the dbapi20 compliance tests created by Stuart Bishop") -print("(see db-sig mailing list history for info)") -import platform -import sys -import unittest - -import dbapi20 -import setuptestframework - -testfolder = setuptestframework.maketemp() -if "--package" in sys.argv: - pth = setuptestframework.makeadopackage(testfolder) - sys.argv.remove("--package") -else: - pth = setuptestframework.find_ado_path() -if pth not in sys.path: - sys.path.insert(1, pth) -# function to clean up the temporary folder -- calling program must run this function before exit. -cleanup = setuptestframework.getcleanupfunction() - -import adodbapi -import adodbapi.is64bit as is64bit - -db = adodbapi - -if "--verbose" in sys.argv: - db.adodbapi.verbose = 3 - -print(adodbapi.version) -print("Tested with dbapi20 %s" % dbapi20.__version__) - -try: - onWindows = bool(sys.getwindowsversion()) # seems to work on all versions of Python -except: - onWindows = False - -node = platform.node() - -conn_kws = {} -host = "testsql.2txt.us,1430" # if None, will use macro to fill in node name -instance = r"%s\SQLEXPRESS" -conn_kws["name"] = "adotest" - -conn_kws["user"] = "adotestuser" # None implies Windows security -conn_kws["password"] = "Sq1234567" -# macro definition for keyword "security" using macro "auto_security" -conn_kws["macro_auto_security"] = "security" - -if host is None: - conn_kws["macro_getnode"] = ["host", instance] -else: - conn_kws["host"] = host - -conn_kws[ - "provider" -] = "Provider=MSOLEDBSQL;DataTypeCompatibility=80;MARS Connection=True;" -connStr = "%(provider)s; %(security)s; Initial Catalog=%(name)s;Data Source=%(host)s" - -if onWindows and node != "z-PC": - pass # default should make a local SQL Server connection -elif node == "xxx": # try Postgres database - _computername = "25.223.161.222" - _databasename = "adotest" - _username = "adotestuser" - _password = "12345678" - _driver = "PostgreSQL Unicode" - _provider = "" - connStr = "%sDriver={%s};Server=%s;Database=%s;uid=%s;pwd=%s;" % ( - _provider, - _driver, - _computername, - _databasename, - _username, - _password, - ) -elif node == "yyy": # ACCESS data base is known to fail some tests. 
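-    # Match the OLEDB provider to the interpreter's word size: 64-bit Python
-    # needs the ACE provider, while 32-bit Python can use the older Jet one.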
- if is64bit.Python(): - driver = "Microsoft.ACE.OLEDB.12.0" - else: - driver = "Microsoft.Jet.OLEDB.4.0" - testmdb = setuptestframework.makemdb(testfolder) - connStr = r"Provider=%s;Data Source=%s" % (driver, testmdb) -else: # try a remote connection to an SQL server - conn_kws["proxy_host"] = "25.44.77.176" - import adodbapi.remote - - db = adodbapi.remote - -print("Using Connection String like=%s" % connStr) -print("Keywords=%s" % repr(conn_kws)) - - -class test_adodbapi(dbapi20.DatabaseAPI20Test): - driver = db - connect_args = (connStr,) - connect_kw_args = conn_kws - - def __init__(self, arg): - dbapi20.DatabaseAPI20Test.__init__(self, arg) - - def getTestMethodName(self): - return self.id().split(".")[-1] - - def setUp(self): - # Call superclass setUp In case this does something in the - # future - dbapi20.DatabaseAPI20Test.setUp(self) - if self.getTestMethodName() == "test_callproc": - con = self._connect() - engine = con.dbms_name - ## print('Using database Engine=%s' % engine) ## - if engine != "MS Jet": - sql = """ - create procedure templower - @theData varchar(50) - as - select lower(@theData) - """ - else: # Jet - sql = """ - create procedure templower - (theData varchar(50)) - as - select lower(theData); - """ - cur = con.cursor() - try: - cur.execute(sql) - con.commit() - except: - pass - cur.close() - con.close() - self.lower_func = "templower" - - def tearDown(self): - if self.getTestMethodName() == "test_callproc": - con = self._connect() - cur = con.cursor() - try: - cur.execute("drop procedure templower") - except: - pass - con.commit() - dbapi20.DatabaseAPI20Test.tearDown(self) - - def help_nextset_setUp(self, cur): - "Should create a procedure called deleteme" - 'that returns two result sets, first the number of rows in booze then "name from booze"' - sql = """ - create procedure deleteme as - begin - select count(*) from %sbooze - select name from %sbooze - end - """ % ( - self.table_prefix, - self.table_prefix, - ) - cur.execute(sql) - - def help_nextset_tearDown(self, cur): - "If cleaning up is needed after nextSetTest" - try: - cur.execute("drop procedure deleteme") - except: - pass - - def test_nextset(self): - con = self._connect() - try: - cur = con.cursor() - - stmts = [self.ddl1] + self._populate() - for sql in stmts: - cur.execute(sql) - - self.help_nextset_setUp(cur) - - cur.callproc("deleteme") - numberofrows = cur.fetchone() - assert numberofrows[0] == 6 - assert cur.nextset() - names = cur.fetchall() - assert len(names) == len(self.samples) - s = cur.nextset() - assert s == None, "No more return sets, should return None" - finally: - try: - self.help_nextset_tearDown(cur) - finally: - con.close() - - def test_setoutputsize(self): - pass - - -if __name__ == "__main__": - unittest.main() - cleanup(testfolder, None) diff --git a/server/venv/Lib/site-packages/adodbapi/test/tryconnection.py b/server/venv/Lib/site-packages/adodbapi/test/tryconnection.py deleted file mode 100644 index 9d3901a..0000000 --- a/server/venv/Lib/site-packages/adodbapi/test/tryconnection.py +++ /dev/null @@ -1,33 +0,0 @@ -remote = False # automatic testing of remote access has been removed here - - -def try_connection(verbose, *args, **kwargs): - import adodbapi - - dbconnect = adodbapi.connect - try: - s = dbconnect(*args, **kwargs) # connect to server - if verbose: - print("Connected to:", s.connection_string) - print("which has tables:", s.get_table_names()) - s.close() # thanks, it worked, goodbye - except adodbapi.DatabaseError as inst: - print(inst.args[0]) # should be the 
error message - print("***Failed getting connection using=", repr(args), repr(kwargs)) - return False, (args, kwargs), None - - print(" (successful)") - - return True, (args, kwargs, remote), dbconnect - - -def try_operation_with_expected_exception( - expected_exception_list, some_function, *args, **kwargs -): - try: - some_function(*args, **kwargs) - except expected_exception_list as e: - return True, e - except: - raise # an exception other than the expected occurred - return False, "The expected exception did not occur" diff --git a/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/INSTALLER b/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/METADATA b/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/METADATA deleted file mode 100644 index 36ec9b0..0000000 --- a/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/METADATA +++ /dev/null @@ -1,242 +0,0 @@ -Metadata-Version: 2.1 -Name: annotated-types -Version: 0.6.0 -Summary: Reusable constraint types to use with typing.Annotated -Author-email: Samuel Colvin , Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Zac Hatfield-Dodds -License-File: LICENSE -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Console -Classifier: Environment :: MacOS X -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Information Technology -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: POSIX :: Linux -Classifier: Operating System :: Unix -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Typing :: Typed -Requires-Python: >=3.8 -Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9' -Description-Content-Type: text/markdown - -# annotated-types - -[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI) -[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types) -[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types) -[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE) - -[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of -adding context-specific metadata to existing types, and specifies that -`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special -logic for `x`. - -This package provides metadata objects which can be used to represent common -constraints such as upper and lower bounds on scalar values and collection sizes, -a `Predicate` marker for runtime checks, and -descriptions of how we intend these metadata to be interpreted. 
In some cases,
-we also note alternative representations which do not require this package.
-
-## Install
-
-```bash
-pip install annotated-types
-```
-
-## Examples
-
-```python
-from typing import Annotated
-from annotated_types import Gt, Len, Predicate
-
-class MyClass:
-    age: Annotated[int, Gt(18)]  # Valid: 19, 20, ...
-                                 # Invalid: 17, 18, "19", 19.0, ...
-    factors: list[Annotated[int, Predicate(is_prime)]]  # Valid: 2, 3, 5, 7, 11, ...
-                                                        # Invalid: 4, 8, -2, 5.0, "prime", ...
-
-    my_list: Annotated[list[int], Len(0, 10)]  # Valid: [], [10, 20, 30, 40, 50]
-                                               # Invalid: (1, 2), ["abc"], [0] * 20
-```
-
-## Documentation
-
-_While `annotated-types` avoids runtime checks for performance, users should not
-construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`.
-Downstream implementors may choose to raise an error, emit a warning, silently ignore
-a metadata item, etc., if the metadata objects described below are used with an
-incompatible type - or for any other reason!_
-
-### Gt, Ge, Lt, Le
-
-Express inclusive and/or exclusive bounds on orderable values - which may be numbers,
-dates, times, strings, sets, etc. Note that the boundary value need not be of the
-same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]`
-is fine, for example, and implies that the value is an integer x such that `x > 1.5`.
-
-We suggest that implementors may also interpret `functools.partial(operator.lt, 1.5)`
-as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on
-the `annotated-types` package.
-
-To be explicit, these types have the following meanings:
-
-* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
-* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
-* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
-* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
-
-### Interval
-
-`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
-metadata object. `None` attributes should be ignored, and non-`None` attributes
-treated as per the single bounds above.
-
-### MultipleOf
-
-`MultipleOf(multiple_of=x)` might be interpreted in two ways:
-
-1. Python semantics, implying `value % multiple_of == 0`, or
-2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
-   where `int(value / multiple_of) == value / multiple_of`.
-
-We encourage users to be aware of these two common interpretations and their
-distinct behaviours, especially since very large or non-integer numbers make
-it easy to cause silent data corruption due to floating-point imprecision.
-
-We encourage libraries to carefully document which interpretation they implement.
-
-### MinLen, MaxLen, Len
-
-`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.
-
-As well as `Len()` which can optionally include upper and lower bounds, we also
-provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
-and `Len(max_length=y)` respectively.
-
-`Len`, `MinLen`, and `MaxLen` may be used with any type which supports ``len(value)``.
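-For instance, a consuming library might enforce them roughly as follows (a
-minimal sketch, not part of this package):
-
-```python
-from annotated_types import MaxLen, MinLen
-
-def check_len(value, metadata):
-    # Enforce any MinLen/MaxLen objects found in Annotated[...] metadata.
-    for m in metadata:
-        if isinstance(m, MinLen) and len(value) < m.min_length:
-            raise ValueError(f"too short: {len(value)} < {m.min_length}")
-        if isinstance(m, MaxLen) and len(value) > m.max_length:
-            raise ValueError(f"too long: {len(value)} > {m.max_length}")
-```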
-
-Examples of usage:
-
-* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
-* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
-* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
-* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
-* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
-
-#### Changed in v0.4.0
-
-* `min_inclusive` has been renamed to `min_length`; no change in meaning
-* `max_exclusive` has been renamed to `max_length`; the upper bound is now **inclusive** instead of **exclusive**
-* The recommendation that slices be interpreted as `Len` has been removed due to ambiguity and the different semantic
-  meaning of the upper bound in slices vs. `Len`
-
-See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.
-
-### Timezone
-
-`Timezone` can be used with a `datetime` or a `time` to express which timezones
-are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
-`Timezone[...]` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
-expresses that any timezone-aware datetime is allowed. You may also pass a specific
-timezone string or `timezone` object such as `Timezone(timezone.utc)` or
-`Timezone("Africa/Abidjan")` to express that you only allow a specific timezone,
-though we note that this is often a symptom of fragile design.
-
-### Predicate
-
-`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
-Users should prefer the statically inspectable metadata above, but if you need
-the full power and flexibility of arbitrary runtime predicates... here it is.
-
-We provide a few predefined predicates for common string constraints:
-
-* `IsLower = Predicate(str.islower)`
-* `IsUpper = Predicate(str.isupper)`
-* `IsDigit = Predicate(str.isdigit)`
-* `IsFinite = Predicate(math.isfinite)`
-* `IsNotFinite = Predicate(Not(math.isfinite))`
-* `IsNan = Predicate(math.isnan)`
-* `IsNotNan = Predicate(Not(math.isnan))`
-* `IsInfinite = Predicate(math.isinf)`
-* `IsNotInfinite = Predicate(Not(math.isinf))`
-
-Some libraries might have special logic to handle known or understandable predicates,
-for example by checking for `str.isdigit` and using its presence to both call custom
-logic to enforce digit-only strings, and customise some generated external schema.
-Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
-favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
-
-To enable basic negation of commonly used predicates like `math.isnan` without
-resorting to lambdas or other callables that implementers cannot introspect, we
-provide a `Not` wrapper that simply negates the predicate in an introspectable
-manner. Several of the predicates listed above are created in this way.
-
-We do not specify what behaviour should be expected for predicates that raise
-an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
-skip invalid constraints, or statically raise an error; or it might try calling it
-and then propagate or discard the resulting
-`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
-exception. We encourage libraries to document the behaviour they choose.
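-Because `Not` is a plain introspectable wrapper, a negated predicate can still
-be special-cased by implementers (a small sketch, not from these docs):
-
-```python
-import math
-
-from annotated_types import Not, Predicate
-
-IsNotNan = Predicate(Not(math.isnan))
-# The wrapped callable stays visible, so libraries can still recognize it:
-assert isinstance(IsNotNan.func, Not) and IsNotNan.func.func is math.isnan
-assert IsNotNan.func(1.0)  # not math.isnan(1.0) -> True
-```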
-
-### Doc
-
-`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
-
-It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
-
-It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`.
-
-This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
-
-### Integrating downstream types with `GroupedMetadata`
-
-Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
-This can help reduce verbosity and cognitive overhead for users.
-For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
-
-```python
-from dataclasses import dataclass
-from typing import Iterator
-from annotated_types import GroupedMetadata, Ge
-
-@dataclass
-class Field(GroupedMetadata):
-    ge: int | None = None
-    description: str | None = None
-
-    def __iter__(self) -> Iterator[object]:
-        # Iterating over a GroupedMetadata object should yield annotated-types
-        # constraint metadata objects which describe it as fully as possible,
-        # and may include other unknown objects too.
-        if self.ge is not None:
-            yield Ge(self.ge)
-        if self.description is not None:
-            yield Description(self.description)
-```
-
-Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
-
-Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
-
-Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
-
-### Consuming metadata
-
-We do not intend to be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from type annotations see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).
-
-It is up to the implementer to determine how this metadata is used.
-You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
-
-## Design & History
-
-This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
-and Hypothesis, with the goal of making it as easy as possible for end-users to
-provide more informative annotations for use by runtime libraries.
-
-It is deliberately minimal, and following PEP-593 allows considerable downstream
-discretion in what (if anything!) they choose to support.
Nonetheless, we expect -that staying simple and covering _only_ the most common use-cases will give users -and maintainers the best experience we can. If you'd like more constraints for your -types - follow our lead, by defining them and documenting them downstream! diff --git a/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/RECORD b/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/RECORD deleted file mode 100644 index 2fcef41..0000000 --- a/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -annotated_types-0.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -annotated_types-0.6.0.dist-info/METADATA,sha256=78YeruT3b_doh8TbsK7qcbqY4cvCMz_QQRCGUbyAo6M,12879 -annotated_types-0.6.0.dist-info/RECORD,, -annotated_types-0.6.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -annotated_types-0.6.0.dist-info/WHEEL,sha256=9QBuHhg6FNW7lppboF2vKVbCGTVzsFykgRQjjlajrhA,87 -annotated_types-0.6.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083 -annotated_types/__init__.py,sha256=txLoRt8iSiAc-Su37esWpMI0wFjN92V7gE5HdFIeeKI,12151 -annotated_types/__pycache__/__init__.cpython-311.pyc,, -annotated_types/__pycache__/test_cases.cpython-311.pyc,, -annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -annotated_types/test_cases.py,sha256=LfFyURZwr7X3NVfoCrSXSMMxTxJD4o8Xz-Y8qbmY7JU,6327 diff --git a/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/REQUESTED b/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/WHEEL b/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/WHEEL deleted file mode 100644 index ba1a8af..0000000 --- a/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.18.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/licenses/LICENSE b/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/licenses/LICENSE deleted file mode 100644 index d99323a..0000000 --- a/server/venv/Lib/site-packages/annotated_types-0.6.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2022 the contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/server/venv/Lib/site-packages/annotated_types/__init__.py b/server/venv/Lib/site-packages/annotated_types/__init__.py deleted file mode 100644 index 2f98950..0000000 --- a/server/venv/Lib/site-packages/annotated_types/__init__.py +++ /dev/null @@ -1,396 +0,0 @@ -import math -import sys -from dataclasses import dataclass -from datetime import timezone -from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union - -if sys.version_info < (3, 8): - from typing_extensions import Protocol, runtime_checkable -else: - from typing import Protocol, runtime_checkable - -if sys.version_info < (3, 9): - from typing_extensions import Annotated, Literal -else: - from typing import Annotated, Literal - -if sys.version_info < (3, 10): - EllipsisType = type(Ellipsis) - KW_ONLY = {} - SLOTS = {} -else: - from types import EllipsisType - - KW_ONLY = {"kw_only": True} - SLOTS = {"slots": True} - - -__all__ = ( - 'BaseMetadata', - 'GroupedMetadata', - 'Gt', - 'Ge', - 'Lt', - 'Le', - 'Interval', - 'MultipleOf', - 'MinLen', - 'MaxLen', - 'Len', - 'Timezone', - 'Predicate', - 'LowerCase', - 'UpperCase', - 'IsDigits', - 'IsFinite', - 'IsNotFinite', - 'IsNan', - 'IsNotNan', - 'IsInfinite', - 'IsNotInfinite', - 'doc', - 'DocInfo', - '__version__', -) - -__version__ = '0.6.0' - - -T = TypeVar('T') - - -# arguments that start with __ are considered -# positional only -# see https://peps.python.org/pep-0484/#positional-only-arguments - - -class SupportsGt(Protocol): - def __gt__(self: T, __other: T) -> bool: - ... - - -class SupportsGe(Protocol): - def __ge__(self: T, __other: T) -> bool: - ... - - -class SupportsLt(Protocol): - def __lt__(self: T, __other: T) -> bool: - ... - - -class SupportsLe(Protocol): - def __le__(self: T, __other: T) -> bool: - ... - - -class SupportsMod(Protocol): - def __mod__(self: T, __other: T) -> T: - ... - - -class SupportsDiv(Protocol): - def __div__(self: T, __other: T) -> T: - ... - - -class BaseMetadata: - """Base class for all metadata. - - This exists mainly so that implementers - can do `isinstance(..., BaseMetadata)` while traversing field annotations. - """ - - __slots__ = () - - -@dataclass(frozen=True, **SLOTS) -class Gt(BaseMetadata): - """Gt(gt=x) implies that the value must be greater than x. - - It can be used with any type that supports the ``>`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - gt: SupportsGt - - -@dataclass(frozen=True, **SLOTS) -class Ge(BaseMetadata): - """Ge(ge=x) implies that the value must be greater than or equal to x. - - It can be used with any type that supports the ``>=`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - ge: SupportsGe - - -@dataclass(frozen=True, **SLOTS) -class Lt(BaseMetadata): - """Lt(lt=x) implies that the value must be less than x. - - It can be used with any type that supports the ``<`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - lt: SupportsLt - - -@dataclass(frozen=True, **SLOTS) -class Le(BaseMetadata): - """Le(le=x) implies that the value must be less than or equal to x. - - It can be used with any type that supports the ``<=`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - le: SupportsLe - - -@runtime_checkable -class GroupedMetadata(Protocol): - """A grouping of multiple BaseMetadata objects. - - `GroupedMetadata` on its own is not metadata and has no meaning. 
- All of the constraints and metadata should be fully expressible - in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`. - - Concrete implementations should override `GroupedMetadata.__iter__()` - to add their own metadata. - For example: - - >>> @dataclass - >>> class Field(GroupedMetadata): - >>> gt: float | None = None - >>> description: str | None = None - ... - >>> def __iter__(self) -> Iterable[BaseMetadata]: - >>> if self.gt is not None: - >>> yield Gt(self.gt) - >>> if self.description is not None: - >>> yield Description(self.description) - - Also see the implementation of `Interval` below for an example. - - Parsers should recognize this and unpack it so that it can be used - both with and without unpacking: - - - `Annotated[int, Field(...)]` (parser must unpack Field) - - `Annotated[int, *Field(...)]` (PEP-646) - """ # noqa: trailing-whitespace - - @property - def __is_annotated_types_grouped_metadata__(self) -> Literal[True]: - return True - - def __iter__(self) -> Iterator[BaseMetadata]: - ... - - if not TYPE_CHECKING: - __slots__ = () # allow subclasses to use slots - - def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None: - # Basic ABC like functionality without the complexity of an ABC - super().__init_subclass__(*args, **kwargs) - if cls.__iter__ is GroupedMetadata.__iter__: - raise TypeError("Can't subclass GroupedMetadata without implementing __iter__") - - def __iter__(self) -> Iterator[BaseMetadata]: # noqa: F811 - raise NotImplementedError # more helpful than "None has no attribute..." type errors - - -@dataclass(frozen=True, **KW_ONLY, **SLOTS) -class Interval(GroupedMetadata): - """Interval can express inclusive or exclusive bounds with a single object. - - It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which - are interpreted the same way as the single-bound constraints. - """ - - gt: Union[SupportsGt, None] = None - ge: Union[SupportsGe, None] = None - lt: Union[SupportsLt, None] = None - le: Union[SupportsLe, None] = None - - def __iter__(self) -> Iterator[BaseMetadata]: - """Unpack an Interval into zero or more single-bounds.""" - if self.gt is not None: - yield Gt(self.gt) - if self.ge is not None: - yield Ge(self.ge) - if self.lt is not None: - yield Lt(self.lt) - if self.le is not None: - yield Le(self.le) - - -@dataclass(frozen=True, **SLOTS) -class MultipleOf(BaseMetadata): - """MultipleOf(multiple_of=x) might be interpreted in two ways: - - 1. Python semantics, implying ``value % multiple_of == 0``, or - 2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of`` - - We encourage users to be aware of these two common interpretations, - and libraries to carefully document which they implement. - """ - - multiple_of: Union[SupportsDiv, SupportsMod] - - -@dataclass(frozen=True, **SLOTS) -class MinLen(BaseMetadata): - """ - MinLen() implies minimum inclusive length, - e.g. ``len(value) >= min_length``. - """ - - min_length: Annotated[int, Ge(0)] - - -@dataclass(frozen=True, **SLOTS) -class MaxLen(BaseMetadata): - """ - MaxLen() implies maximum inclusive length, - e.g. ``len(value) <= max_length``. - """ - - max_length: Annotated[int, Ge(0)] - - -@dataclass(frozen=True, **SLOTS) -class Len(GroupedMetadata): - """ - Len() implies that ``min_length <= len(value) <= max_length``. - - Upper bound may be omitted or ``None`` to indicate no upper length bound.
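- - For example, iterating unpacks a ``Len`` into its component bounds: - - >>> list(Len(2, 10)) - [MinLen(min_length=2), MaxLen(max_length=10)]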
- """ - - min_length: Annotated[int, Ge(0)] = 0 - max_length: Optional[Annotated[int, Ge(0)]] = None - - def __iter__(self) -> Iterator[BaseMetadata]: - """Unpack a Len into zone or more single-bounds.""" - if self.min_length > 0: - yield MinLen(self.min_length) - if self.max_length is not None: - yield MaxLen(self.max_length) - - -@dataclass(frozen=True, **SLOTS) -class Timezone(BaseMetadata): - """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive). - - ``Annotated[datetime, Timezone(None)]`` must be a naive datetime. - ``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be - tz-aware but any timezone is allowed. - - You may also pass a specific timezone string or timezone object such as - ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that - you only allow a specific timezone, though we note that this is often - a symptom of poor design. - """ - - tz: Union[str, timezone, EllipsisType, None] - - -@dataclass(frozen=True, **SLOTS) -class Predicate(BaseMetadata): - """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values. - - Users should prefer statically inspectable metadata, but if you need the full - power and flexibility of arbitrary runtime predicates... here it is. - - We provide a few predefined predicates for common string constraints: - ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and - ``IsDigit = Predicate(str.isdigit)``. Users are encouraged to use methods which - can be given special handling, and avoid indirection like ``lambda s: s.lower()``. - - Some libraries might have special logic to handle certain predicates, e.g. by - checking for `str.isdigit` and using its presence to both call custom logic to - enforce digit-only strings, and customise some generated external schema. - - We do not specify what behaviour should be expected for predicates that raise - an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently - skip invalid constraints, or statically raise an error; or it might try calling it - and then propogate or discard the resulting exception. - """ - - func: Callable[[Any], bool] - - -@dataclass -class Not: - func: Callable[[Any], bool] - - def __call__(self, __v: Any) -> bool: - return not self.func(__v) - - -_StrType = TypeVar("_StrType", bound=str) - -LowerCase = Annotated[_StrType, Predicate(str.islower)] -""" -Return True if the string is a lowercase string, False otherwise. - -A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string. -""" # noqa: E501 -UpperCase = Annotated[_StrType, Predicate(str.isupper)] -""" -Return True if the string is an uppercase string, False otherwise. - -A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string. -""" # noqa: E501 -IsDigits = Annotated[_StrType, Predicate(str.isdigit)] -""" -Return True if the string is a digit string, False otherwise. - -A string is a digit string if all characters in the string are digits and there is at least one character in the string. -""" # noqa: E501 -IsAscii = Annotated[_StrType, Predicate(str.isascii)] -""" -Return True if all characters in the string are ASCII, False otherwise. - -ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too. 
-""" - -_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex]) -IsFinite = Annotated[_NumericType, Predicate(math.isfinite)] -"""Return True if x is neither an infinity nor a NaN, and False otherwise.""" -IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))] -"""Return True if x is one of infinity or NaN, and False otherwise""" -IsNan = Annotated[_NumericType, Predicate(math.isnan)] -"""Return True if x is a NaN (not a number), and False otherwise.""" -IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))] -"""Return True if x is anything but NaN (not a number), and False otherwise.""" -IsInfinite = Annotated[_NumericType, Predicate(math.isinf)] -"""Return True if x is a positive or negative infinity, and False otherwise.""" -IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))] -"""Return True if x is neither a positive or negative infinity, and False otherwise.""" - -try: - from typing_extensions import DocInfo, doc # type: ignore [attr-defined] -except ImportError: - - @dataclass(frozen=True, **SLOTS) - class DocInfo: # type: ignore [no-redef] - """ " - The return value of doc(), mainly to be used by tools that want to extract the - Annotated documentation at runtime. - """ - - documentation: str - """The documentation string passed to doc().""" - - def doc( - documentation: str, - ) -> DocInfo: - """ - Add documentation to a type annotation inside of Annotated. - - For example: - - >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ... - """ - return DocInfo(documentation) diff --git a/server/venv/Lib/site-packages/annotated_types/py.typed b/server/venv/Lib/site-packages/annotated_types/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/annotated_types/test_cases.py b/server/venv/Lib/site-packages/annotated_types/test_cases.py deleted file mode 100644 index f54df70..0000000 --- a/server/venv/Lib/site-packages/annotated_types/test_cases.py +++ /dev/null @@ -1,147 +0,0 @@ -import math -import sys -from datetime import date, datetime, timedelta, timezone -from decimal import Decimal -from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple - -if sys.version_info < (3, 9): - from typing_extensions import Annotated -else: - from typing import Annotated - -import annotated_types as at - - -class Case(NamedTuple): - """ - A test case for `annotated_types`. 
- """ - - annotation: Any - valid_cases: Iterable[Any] - invalid_cases: Iterable[Any] - - -def cases() -> Iterable[Case]: - # Gt, Ge, Lt, Le - yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1)) - yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1)) - yield Case( - Annotated[datetime, at.Gt(datetime(2000, 1, 1))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(2000, 1, 1), datetime(1999, 12, 31)], - ) - yield Case( - Annotated[datetime, at.Gt(date(2000, 1, 1))], - [date(2000, 1, 2), date(2000, 1, 3)], - [date(2000, 1, 1), date(1999, 12, 31)], - ) - yield Case( - Annotated[datetime, at.Gt(Decimal('1.123'))], - [Decimal('1.1231'), Decimal('123')], - [Decimal('1.123'), Decimal('0')], - ) - - yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1)) - yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1)) - yield Case( - Annotated[datetime, at.Ge(datetime(2000, 1, 1))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(1998, 1, 1), datetime(1999, 12, 31)], - ) - - yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4)) - yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9)) - yield Case( - Annotated[datetime, at.Lt(datetime(2000, 1, 1))], - [datetime(1999, 12, 31), datetime(1999, 12, 31)], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - ) - - yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000)) - yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9)) - yield Case( - Annotated[datetime, at.Le(datetime(2000, 1, 1))], - [datetime(2000, 1, 1), datetime(1999, 12, 31)], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - ) - - # Interval - yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1)) - yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1)) - yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1)) - yield Case( - Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(2000, 1, 1), datetime(2000, 1, 4)], - ) - - yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4)) - yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1)) - - # lengths - - yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) - yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) - yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) - yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) - - yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10)) - yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10)) - yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) - yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) - - yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10)) - yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234')) - - yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}]) - yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4})) - yield Case(Annotated[Tuple[int, ...], 
at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4))) - - # Timezone - - yield Case( - Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)] - ) - yield Case( - Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)] - ) - yield Case( - Annotated[datetime, at.Timezone(timezone.utc)], - [datetime(2000, 1, 1, tzinfo=timezone.utc)], - [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], - ) - yield Case( - Annotated[datetime, at.Timezone('Europe/London')], - [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))], - [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], - ) - - # predicate types - - yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom']) - yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC']) - yield Case(at.IsDigits[str], ['123'], ['', 'ab', 'a1b2']) - yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀']) - - yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5]) - - yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf]) - yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23]) - yield Case(at.IsNan[float], [math.nan], [1.23, math.inf]) - yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan]) - yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23]) - yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf]) - - # check stacked predicates - yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan]) - - # doc - yield Case(Annotated[int, at.doc("A number")], [1, 2], []) - - # custom GroupedMetadata - class MyCustomGroupedMetadata(at.GroupedMetadata): - def __iter__(self) -> Iterator[at.Predicate]: - yield at.Predicate(lambda x: float(x).is_integer()) - - yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5]) diff --git a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/INSTALLER b/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/LICENSE b/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/LICENSE deleted file mode 100644 index 104eebf..0000000 --- a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2018 Alex Grönholm - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/METADATA b/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/METADATA deleted file mode 100644 index 5e46476..0000000 --- a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/METADATA +++ /dev/null @@ -1,105 +0,0 @@ -Metadata-Version: 2.1 -Name: anyio -Version: 3.7.1 -Summary: High level compatibility layer for multiple asynchronous event loop implementations -Author-email: Alex Grönholm -License: MIT -Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ -Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html -Project-URL: Source code, https://github.com/agronholm/anyio -Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Framework :: AnyIO -Classifier: Typing :: Typed -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: idna (>=2.8) -Requires-Dist: sniffio (>=1.1) -Requires-Dist: exceptiongroup ; python_version < "3.11" -Requires-Dist: typing-extensions ; python_version < "3.8" -Provides-Extra: doc -Requires-Dist: packaging ; extra == 'doc' -Requires-Dist: Sphinx ; extra == 'doc' -Requires-Dist: sphinx-rtd-theme (>=1.2.2) ; extra == 'doc' -Requires-Dist: sphinxcontrib-jquery ; extra == 'doc' -Requires-Dist: sphinx-autodoc-typehints (>=1.2.0) ; extra == 'doc' -Provides-Extra: test -Requires-Dist: anyio[trio] ; extra == 'test' -Requires-Dist: coverage[toml] (>=4.5) ; extra == 'test' -Requires-Dist: hypothesis (>=4.0) ; extra == 'test' -Requires-Dist: psutil (>=5.9) ; extra == 'test' -Requires-Dist: pytest (>=7.0) ; extra == 'test' -Requires-Dist: pytest-mock (>=3.6.1) ; extra == 'test' -Requires-Dist: trustme ; extra == 'test' -Requires-Dist: uvloop (>=0.17) ; (python_version < "3.12" and platform_python_implementation == "CPython" and platform_system != "Windows") and extra == 'test' -Requires-Dist: mock (>=4) ; (python_version < "3.8") and extra == 'test' -Provides-Extra: trio -Requires-Dist: trio (<0.22) ; extra == 'trio' - -.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg - :target: https://github.com/agronholm/anyio/actions/workflows/test.yml - :alt: Build Status -.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master - :target: https://coveralls.io/github/agronholm/anyio?branch=master - :alt: Code Coverage -.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest - :target: https://anyio.readthedocs.io/en/latest/?badge=latest - :alt: Documentation -.. 
image:: https://badges.gitter.im/gitterHQ/gitter.svg - :target: https://gitter.im/python-trio/AnyIO - :alt: Gitter chat - -AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or -trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony -with the native SC of trio itself. - -Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or -trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full -refactoring necessary. It will blend in with the native libraries of your chosen backend. - -Documentation -------------- - -View full documentation at: https://anyio.readthedocs.io/ - -Features --------- - -AnyIO offers the following functionality: - -* Task groups (nurseries_ in trio terminology) -* High-level networking (TCP, UDP and UNIX sockets) - - * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python - 3.8) - * async/await style UDP sockets (unlike asyncio where you still have to use Transports and - Protocols) - -* A versatile API for byte streams and object streams -* Inter-task synchronization and communication (locks, conditions, events, semaphores, object - streams) -* Worker threads -* Subprocesses -* Asynchronous file I/O (using worker threads) -* Signal handling - -AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. -It even works with the popular Hypothesis_ library. - -.. _asyncio: https://docs.python.org/3/library/asyncio.html -.. _trio: https://github.com/python-trio/trio -.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency -.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning -.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs -.. _pytest: https://docs.pytest.org/en/latest/ -.. 
_Hypothesis: https://hypothesis.works/ diff --git a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/RECORD b/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/RECORD deleted file mode 100644 index ed78f26..0000000 --- a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/RECORD +++ /dev/null @@ -1,83 +0,0 @@ -anyio-3.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -anyio-3.7.1.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 -anyio-3.7.1.dist-info/METADATA,sha256=mOhfXPB7qKVQh3dUtp2NgLysa10jHWeDBNnRg-93A_c,4708 -anyio-3.7.1.dist-info/RECORD,, -anyio-3.7.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio-3.7.1.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 -anyio-3.7.1.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 -anyio-3.7.1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 -anyio/__init__.py,sha256=Pq9lO03Zm5ynIPlhkquaOuIc1dTTeLGNUQ5HT5qwYMI,4073 -anyio/__pycache__/__init__.cpython-311.pyc,, -anyio/__pycache__/from_thread.cpython-311.pyc,, -anyio/__pycache__/lowlevel.cpython-311.pyc,, -anyio/__pycache__/pytest_plugin.cpython-311.pyc,, -anyio/__pycache__/to_process.cpython-311.pyc,, -anyio/__pycache__/to_thread.cpython-311.pyc,, -anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/_backends/__pycache__/__init__.cpython-311.pyc,, -anyio/_backends/__pycache__/_asyncio.cpython-311.pyc,, -anyio/_backends/__pycache__/_trio.cpython-311.pyc,, -anyio/_backends/_asyncio.py,sha256=fgwZmYnGOxT_pX0OZTPPgRdFqKLjnKvQUk7tsfuNmfM,67056 -anyio/_backends/_trio.py,sha256=EJAj0tNi0JRM2y3QWP7oS4ct7wnjMSYDG8IZUWMta-E,30035 -anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/_core/__pycache__/__init__.cpython-311.pyc,, -anyio/_core/__pycache__/_compat.cpython-311.pyc,, -anyio/_core/__pycache__/_eventloop.cpython-311.pyc,, -anyio/_core/__pycache__/_exceptions.cpython-311.pyc,, -anyio/_core/__pycache__/_fileio.cpython-311.pyc,, -anyio/_core/__pycache__/_resources.cpython-311.pyc,, -anyio/_core/__pycache__/_signals.cpython-311.pyc,, -anyio/_core/__pycache__/_sockets.cpython-311.pyc,, -anyio/_core/__pycache__/_streams.cpython-311.pyc,, -anyio/_core/__pycache__/_subprocesses.cpython-311.pyc,, -anyio/_core/__pycache__/_synchronization.cpython-311.pyc,, -anyio/_core/__pycache__/_tasks.cpython-311.pyc,, -anyio/_core/__pycache__/_testing.cpython-311.pyc,, -anyio/_core/__pycache__/_typedattr.cpython-311.pyc,, -anyio/_core/_compat.py,sha256=XZfBUInEt7jaiTBI2Qbul7EpJdngbwTtG4Qj26un1YE,5726 -anyio/_core/_eventloop.py,sha256=xJ8KflV1bJ9GAuQRr4o1ojv8wWya4nt_XARta8uLPwc,4083 -anyio/_core/_exceptions.py,sha256=uOrN5l98o6UrOU6O3kPf0VCDl_zPP-kgZs4IyaLVgwU,2916 -anyio/_core/_fileio.py,sha256=DWuIul5izCocmJpgqDDNKc_GhMUwayHKdM5R-sbT_A8,18026 -anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435 -anyio/_core/_signals.py,sha256=KKkZAYL08auydjZnK9S4FQsxx555jT4gXAMcTXdNaok,863 -anyio/_core/_sockets.py,sha256=szcPd7kKBmlHnx8g_KJWZo2k6syouRNF2614ZrtqiV0,20667 -anyio/_core/_streams.py,sha256=5gryxQiUisED8uFUAHje5O44RL9wyndNMANzzQWUn1U,1518 -anyio/_core/_subprocesses.py,sha256=OSAcLAsjfCplXlRyTjWonfS1xU8d5MaZblXYqqY-BM4,4977 -anyio/_core/_synchronization.py,sha256=Uquo_52vZ7iZzDDoaN_j-N7jeyAlefzOZ8Pxt9mU6gY,16747 -anyio/_core/_tasks.py,sha256=1wZZWlpDkr6w3kMD629vzJDkPselDvx4XVElgTCVwyM,5316 -anyio/_core/_testing.py,sha256=7Yll-DOI0uIlIF5VHLUpGGyDPWtDEjFZ85-6ZniwIJU,2217 
-anyio/_core/_typedattr.py,sha256=8o0gwQYSl04zlO9uHqcHu1T6hOw7peY9NW1mOX5DKnY,2551 -anyio/abc/__init__.py,sha256=UkC-KDbyIoKeDUDhJciwANSoyzz_qaFh4Fb7_AvwjZc,2159 -anyio/abc/__pycache__/__init__.cpython-311.pyc,, -anyio/abc/__pycache__/_resources.cpython-311.pyc,, -anyio/abc/__pycache__/_sockets.cpython-311.pyc,, -anyio/abc/__pycache__/_streams.cpython-311.pyc,, -anyio/abc/__pycache__/_subprocesses.cpython-311.pyc,, -anyio/abc/__pycache__/_tasks.cpython-311.pyc,, -anyio/abc/__pycache__/_testing.cpython-311.pyc,, -anyio/abc/_resources.py,sha256=h1rkzr3E0MFqdXLh9aLLXe-A5W7k_Jc-5XzNr6SJ4w4,763 -anyio/abc/_sockets.py,sha256=WWYJ6HndKCEuvobAPDkmX0tjwN2FOxf3eTGb1DB7wHE,5243 -anyio/abc/_streams.py,sha256=yGhOmlVI3W9whmzPuewwYQ2BrKhrUFuWZ4zpVLWOK84,6584 -anyio/abc/_subprocesses.py,sha256=r-totaRbFX6kKV-4WTeuswz8n01aap8cvkYVQCRKN0M,2067 -anyio/abc/_tasks.py,sha256=a_5DLyiCbp0K57LJPOyF-PZyXmUcv_p9VRXPFj_K03M,3413 -anyio/abc/_testing.py,sha256=Eub7gXJ0tVPo_WN5iJAw10FrvC7C1uaL3b2neGr_pfs,1924 -anyio/from_thread.py,sha256=aUVKXctPgZ5wK3p5VTyrtjDj9tSQSrH6xCjBuo-hv3A,16563 -anyio/lowlevel.py,sha256=cOTncxRW5KeswqYQQdp0pfAw6OFWXius1SPhCYwHZL4,4647 -anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/pytest_plugin.py,sha256=_Txgl0-I3kO1rk_KATXmIUV57C34hajcJCGcgV26CU0,5022 -anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/streams/__pycache__/__init__.cpython-311.pyc,, -anyio/streams/__pycache__/buffered.cpython-311.pyc,, -anyio/streams/__pycache__/file.cpython-311.pyc,, -anyio/streams/__pycache__/memory.cpython-311.pyc,, -anyio/streams/__pycache__/stapled.cpython-311.pyc,, -anyio/streams/__pycache__/text.cpython-311.pyc,, -anyio/streams/__pycache__/tls.cpython-311.pyc,, -anyio/streams/buffered.py,sha256=2ifplNLwT73d1UKBxrkFdlC9wTAze9LhPL7pt_7cYgY,4473 -anyio/streams/file.py,sha256=-NP6jMcUd2f1VJwgcxgiRHdEsNnhE0lANl0ov_i7FrE,4356 -anyio/streams/memory.py,sha256=QZhc5qdomBpGCgrUVWAaqEBxI0oklVxK_62atW6tnNk,9274 -anyio/streams/stapled.py,sha256=9u2GxpiOPsGtgO1qsj2tVoW4b8bgiwp5rSDs1BFKkLM,4275 -anyio/streams/text.py,sha256=1K4ZCLKl2b7yywrW6wKEeMu3xyQHE_T0aU5_oC9GPTE,5043 -anyio/streams/tls.py,sha256=TbdCz1KtfEnp3mxHvkROXRefhE6S1LHiwgWiJX8zYaU,12099 -anyio/to_process.py,sha256=_RSsG8UME2nGxeFEdg3OEfv9XshSQwrMU7DAbwWGx9U,9242 -anyio/to_thread.py,sha256=HVpTvBei2sSXgJJeNKdwhJwQaW76LDbb1htQ-Mc6zDs,2146 diff --git a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/REQUESTED b/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/WHEEL b/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/WHEEL deleted file mode 100644 index 1f37c02..0000000 --- a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/entry_points.txt b/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/entry_points.txt deleted file mode 100644 index 44dd9bd..0000000 --- a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[pytest11] -anyio = anyio.pytest_plugin diff --git a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/top_level.txt b/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/top_level.txt deleted file mode 100644 index c77c069..0000000 --- a/server/venv/Lib/site-packages/anyio-3.7.1.dist-info/top_level.txt +++ 
/dev/null @@ -1 +0,0 @@ -anyio diff --git a/server/venv/Lib/site-packages/anyio/__init__.py b/server/venv/Lib/site-packages/anyio/__init__.py deleted file mode 100644 index 29fb356..0000000 --- a/server/venv/Lib/site-packages/anyio/__init__.py +++ /dev/null @@ -1,169 +0,0 @@ -from __future__ import annotations - -__all__ = ( - "maybe_async", - "maybe_async_cm", - "run", - "sleep", - "sleep_forever", - "sleep_until", - "current_time", - "get_all_backends", - "get_cancelled_exc_class", - "BrokenResourceError", - "BrokenWorkerProcess", - "BusyResourceError", - "ClosedResourceError", - "DelimiterNotFound", - "EndOfStream", - "ExceptionGroup", - "IncompleteRead", - "TypedAttributeLookupError", - "WouldBlock", - "AsyncFile", - "Path", - "open_file", - "wrap_file", - "aclose_forcefully", - "open_signal_receiver", - "connect_tcp", - "connect_unix", - "create_tcp_listener", - "create_unix_listener", - "create_udp_socket", - "create_connected_udp_socket", - "getaddrinfo", - "getnameinfo", - "wait_socket_readable", - "wait_socket_writable", - "create_memory_object_stream", - "run_process", - "open_process", - "create_lock", - "CapacityLimiter", - "CapacityLimiterStatistics", - "Condition", - "ConditionStatistics", - "Event", - "EventStatistics", - "Lock", - "LockStatistics", - "Semaphore", - "SemaphoreStatistics", - "create_condition", - "create_event", - "create_semaphore", - "create_capacity_limiter", - "open_cancel_scope", - "fail_after", - "move_on_after", - "current_effective_deadline", - "TASK_STATUS_IGNORED", - "CancelScope", - "create_task_group", - "TaskInfo", - "get_current_task", - "get_running_tasks", - "wait_all_tasks_blocked", - "run_sync_in_worker_thread", - "run_async_from_thread", - "run_sync_from_thread", - "current_default_worker_thread_limiter", - "create_blocking_portal", - "start_blocking_portal", - "typed_attribute", - "TypedAttributeSet", - "TypedAttributeProvider", -) - -from typing import Any - -from ._core._compat import maybe_async, maybe_async_cm -from ._core._eventloop import ( - current_time, - get_all_backends, - get_cancelled_exc_class, - run, - sleep, - sleep_forever, - sleep_until, -) -from ._core._exceptions import ( - BrokenResourceError, - BrokenWorkerProcess, - BusyResourceError, - ClosedResourceError, - DelimiterNotFound, - EndOfStream, - ExceptionGroup, - IncompleteRead, - TypedAttributeLookupError, - WouldBlock, -) -from ._core._fileio import AsyncFile, Path, open_file, wrap_file -from ._core._resources import aclose_forcefully -from ._core._signals import open_signal_receiver -from ._core._sockets import ( - connect_tcp, - connect_unix, - create_connected_udp_socket, - create_tcp_listener, - create_udp_socket, - create_unix_listener, - getaddrinfo, - getnameinfo, - wait_socket_readable, - wait_socket_writable, -) -from ._core._streams import create_memory_object_stream -from ._core._subprocesses import open_process, run_process -from ._core._synchronization import ( - CapacityLimiter, - CapacityLimiterStatistics, - Condition, - ConditionStatistics, - Event, - EventStatistics, - Lock, - LockStatistics, - Semaphore, - SemaphoreStatistics, - create_capacity_limiter, - create_condition, - create_event, - create_lock, - create_semaphore, -) -from ._core._tasks import ( - TASK_STATUS_IGNORED, - CancelScope, - create_task_group, - current_effective_deadline, - fail_after, - move_on_after, - open_cancel_scope, -) -from ._core._testing import ( - TaskInfo, - get_current_task, - get_running_tasks, - wait_all_tasks_blocked, -) -from ._core._typedattr import 
TypedAttributeProvider, TypedAttributeSet, typed_attribute - -# Re-exported here, for backwards compatibility -# isort: off -from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread -from .from_thread import ( - create_blocking_portal, - run_async_from_thread, - run_sync_from_thread, - start_blocking_portal, -) - -# Re-export imports so they look like they live directly in this package -key: str -value: Any -for key, value in list(locals().items()): - if getattr(value, "__module__", "").startswith("anyio."): - value.__module__ = __name__ diff --git a/server/venv/Lib/site-packages/anyio/_backends/__init__.py b/server/venv/Lib/site-packages/anyio/_backends/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/anyio/_backends/_asyncio.py b/server/venv/Lib/site-packages/anyio/_backends/_asyncio.py deleted file mode 100644 index bfdb4ea..0000000 --- a/server/venv/Lib/site-packages/anyio/_backends/_asyncio.py +++ /dev/null @@ -1,2117 +0,0 @@ -from __future__ import annotations - -import array -import asyncio -import concurrent.futures -import math -import socket -import sys -from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] -from collections import OrderedDict, deque -from concurrent.futures import Future -from contextvars import Context, copy_context -from dataclasses import dataclass -from functools import partial, wraps -from inspect import ( - CORO_RUNNING, - CORO_SUSPENDED, - GEN_RUNNING, - GEN_SUSPENDED, - getcoroutinestate, - getgeneratorstate, -) -from io import IOBase -from os import PathLike -from queue import Queue -from socket import AddressFamily, SocketKind -from threading import Thread -from types import TracebackType -from typing import ( - IO, - Any, - AsyncGenerator, - Awaitable, - Callable, - Collection, - Coroutine, - Generator, - Iterable, - Mapping, - Optional, - Sequence, - Tuple, - TypeVar, - Union, - cast, -) -from weakref import WeakKeyDictionary - -import sniffio - -from .. 
import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc -from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable -from .._core._eventloop import claim_worker_thread, threadlocals -from .._core._exceptions import ( - BrokenResourceError, - BusyResourceError, - ClosedResourceError, - EndOfStream, - WouldBlock, -) -from .._core._exceptions import ExceptionGroup as BaseExceptionGroup -from .._core._sockets import GetAddrInfoReturnType, convert_ipv6_sockaddr -from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter -from .._core._synchronization import Event as BaseEvent -from .._core._synchronization import ResourceGuard -from .._core._tasks import CancelScope as BaseCancelScope -from ..abc import IPSockAddrType, UDPPacketType -from ..lowlevel import RunVar - -if sys.version_info >= (3, 8): - - def get_coro(task: asyncio.Task) -> Generator | Awaitable[Any]: - return task.get_coro() - -else: - - def get_coro(task: asyncio.Task) -> Generator | Awaitable[Any]: - return task._coro - - -from asyncio import all_tasks, create_task, current_task, get_running_loop -from asyncio import run as native_run - - -def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]: - return [cb for cb, context in task._callbacks] - - -T_Retval = TypeVar("T_Retval") -T_contra = TypeVar("T_contra", contravariant=True) - -# Check whether there is native support for task names in asyncio (3.8+) -_native_task_names = hasattr(asyncio.Task, "get_name") - - -_root_task: RunVar[asyncio.Task | None] = RunVar("_root_task") - - -def find_root_task() -> asyncio.Task: - root_task = _root_task.get(None) - if root_task is not None and not root_task.done(): - return root_task - - # Look for a task that has been started via run_until_complete() - for task in all_tasks(): - if task._callbacks and not task.done(): - for cb in _get_task_callbacks(task): - if ( - cb is _run_until_complete_cb - or getattr(cb, "__module__", None) == "uvloop.loop" - ): - _root_task.set(task) - return task - - # Look up the topmost task in the AnyIO task tree, if possible - task = cast(asyncio.Task, current_task()) - state = _task_states.get(task) - if state: - cancel_scope = state.cancel_scope - while cancel_scope and cancel_scope._parent_scope is not None: - cancel_scope = cancel_scope._parent_scope - - if cancel_scope is not None: - return cast(asyncio.Task, cancel_scope._host_task) - - return task - - -def get_callable_name(func: Callable) -> str: - module = getattr(func, "__module__", None) - qualname = getattr(func, "__qualname__", None) - return ".".join([x for x in (module, qualname) if x]) - - -# -# Event loop -# - -_run_vars = ( - WeakKeyDictionary() -) # type: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] - -current_token = get_running_loop - - -def _task_started(task: asyncio.Task) -> bool: - """Return ``True`` if the task has been started and has not finished.""" - coro = cast(Coroutine[Any, Any, Any], get_coro(task)) - try: - return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) - except AttributeError: - try: - return getgeneratorstate(cast(Generator, coro)) in ( - GEN_RUNNING, - GEN_SUSPENDED, - ) - except AttributeError: - # task coro is async_genenerator_asend https://bugs.python.org/issue37771 - raise Exception(f"Cannot determine if task {task} has started or not") - - -def _maybe_set_event_loop_policy( - policy: asyncio.AbstractEventLoopPolicy | None, use_uvloop: bool -) -> None: - # On CPython, use uvloop when possible if no other policy has been given and if not - # 
explicitly disabled - if policy is None and use_uvloop and sys.implementation.name == "cpython": - try: - import uvloop - except ImportError: - pass - else: - # Test for missing shutdown_default_executor() (uvloop 0.14.0 and earlier) - if not hasattr( - asyncio.AbstractEventLoop, "shutdown_default_executor" - ) or hasattr(uvloop.loop.Loop, "shutdown_default_executor"): - policy = uvloop.EventLoopPolicy() - - if policy is not None: - asyncio.set_event_loop_policy(policy) - - -def run( - func: Callable[..., Awaitable[T_Retval]], - *args: object, - debug: bool = False, - use_uvloop: bool = False, - policy: asyncio.AbstractEventLoopPolicy | None = None, -) -> T_Retval: - @wraps(func) - async def wrapper() -> T_Retval: - task = cast(asyncio.Task, current_task()) - task_state = TaskState(None, get_callable_name(func), None) - _task_states[task] = task_state - if _native_task_names: - task.set_name(task_state.name) - - try: - return await func(*args) - finally: - del _task_states[task] - - _maybe_set_event_loop_policy(policy, use_uvloop) - return native_run(wrapper(), debug=debug) - - -# -# Miscellaneous -# - -sleep = asyncio.sleep - - -# -# Timeouts and cancellation -# - -CancelledError = asyncio.CancelledError - - -class CancelScope(BaseCancelScope): - def __new__( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> CancelScope: - return object.__new__(cls) - - def __init__(self, deadline: float = math.inf, shield: bool = False): - self._deadline = deadline - self._shield = shield - self._parent_scope: CancelScope | None = None - self._cancel_called = False - self._active = False - self._timeout_handle: asyncio.TimerHandle | None = None - self._cancel_handle: asyncio.Handle | None = None - self._tasks: set[asyncio.Task] = set() - self._host_task: asyncio.Task | None = None - self._timeout_expired = False - self._cancel_calls: int = 0 - - def __enter__(self) -> CancelScope: - if self._active: - raise RuntimeError( - "Each CancelScope may only be used for a single 'with' block" - ) - - self._host_task = host_task = cast(asyncio.Task, current_task()) - self._tasks.add(host_task) - try: - task_state = _task_states[host_task] - except KeyError: - task_name = host_task.get_name() if _native_task_names else None - task_state = TaskState(None, task_name, self) - _task_states[host_task] = task_state - else: - self._parent_scope = task_state.cancel_scope - task_state.cancel_scope = self - - self._timeout() - self._active = True - - # Start cancelling the host task if the scope was cancelled before entering - if self._cancel_called: - self._deliver_cancellation() - - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - if not self._active: - raise RuntimeError("This cancel scope is not active") - if current_task() is not self._host_task: - raise RuntimeError( - "Attempted to exit cancel scope in a different task than it was " - "entered in" - ) - - assert self._host_task is not None - host_task_state = _task_states.get(self._host_task) - if host_task_state is None or host_task_state.cancel_scope is not self: - raise RuntimeError( - "Attempted to exit a cancel scope that isn't the current tasks's " - "current cancel scope" - ) - - self._active = False - if self._timeout_handle: - self._timeout_handle.cancel() - self._timeout_handle = None - - self._tasks.remove(self._host_task) - - host_task_state.cancel_scope = self._parent_scope - - # Restart the cancellation effort in the 
farthest directly cancelled parent scope if this - # one was shielded - if self._shield: - self._deliver_cancellation_to_parent() - - if exc_val is not None: - exceptions = ( - exc_val.exceptions if isinstance(exc_val, ExceptionGroup) else [exc_val] - ) - if all(isinstance(exc, CancelledError) for exc in exceptions): - if self._timeout_expired: - return self._uncancel() - elif not self._cancel_called: - # Task was cancelled natively - return None - elif not self._parent_cancelled(): - # This scope was directly cancelled - return self._uncancel() - - return None - - def _uncancel(self) -> bool: - if sys.version_info < (3, 11) or self._host_task is None: - self._cancel_calls = 0 - return True - - # Uncancel all AnyIO cancellations - for i in range(self._cancel_calls): - self._host_task.uncancel() - - self._cancel_calls = 0 - return not self._host_task.cancelling() - - def _timeout(self) -> None: - if self._deadline != math.inf: - loop = get_running_loop() - if loop.time() >= self._deadline: - self._timeout_expired = True - self.cancel() - else: - self._timeout_handle = loop.call_at(self._deadline, self._timeout) - - def _deliver_cancellation(self) -> None: - """ - Deliver cancellation to directly contained tasks and nested cancel scopes. - - Schedule another run at the end if we still have tasks eligible for cancellation. - """ - should_retry = False - current = current_task() - for task in self._tasks: - if task._must_cancel: # type: ignore[attr-defined] - continue - - # The task is eligible for cancellation if it has started and is not in a cancel - # scope shielded from this one - cancel_scope = _task_states[task].cancel_scope - while cancel_scope is not self: - if cancel_scope is None or cancel_scope._shield: - break - else: - cancel_scope = cancel_scope._parent_scope - else: - should_retry = True - if task is not current and ( - task is self._host_task or _task_started(task) - ): - self._cancel_calls += 1 - task.cancel() - - # Schedule another callback if there are still tasks left - if should_retry: - self._cancel_handle = get_running_loop().call_soon( - self._deliver_cancellation - ) - else: - self._cancel_handle = None - - def _deliver_cancellation_to_parent(self) -> None: - """Start cancellation effort in the farthest directly cancelled parent scope""" - scope = self._parent_scope - scope_to_cancel: CancelScope | None = None - while scope is not None: - if scope._cancel_called and scope._cancel_handle is None: - scope_to_cancel = scope - - # No point in looking beyond any shielded scope - if scope._shield: - break - - scope = scope._parent_scope - - if scope_to_cancel is not None: - scope_to_cancel._deliver_cancellation() - - def _parent_cancelled(self) -> bool: - # Check whether any parent has been cancelled - cancel_scope = self._parent_scope - while cancel_scope is not None and not cancel_scope._shield: - if cancel_scope._cancel_called: - return True - else: - cancel_scope = cancel_scope._parent_scope - - return False - - def cancel(self) -> DeprecatedAwaitable: - if not self._cancel_called: - if self._timeout_handle: - self._timeout_handle.cancel() - self._timeout_handle = None - - self._cancel_called = True - if self._host_task is not None: - self._deliver_cancellation() - - return DeprecatedAwaitable(self.cancel) - - @property - def deadline(self) -> float: - return self._deadline - - @deadline.setter - def deadline(self, value: float) -> None: - self._deadline = float(value) - if self._timeout_handle is not None: - self._timeout_handle.cancel() - self._timeout_handle = None - 
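# Descriptive note: setting a new deadline re-arms the timeout callback below, - # but only while the scope is active and not already cancelled.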
- if self._active and not self._cancel_called: - self._timeout() - - @property - def cancel_called(self) -> bool: - return self._cancel_called - - @property - def shield(self) -> bool: - return self._shield - - @shield.setter - def shield(self, value: bool) -> None: - if self._shield != value: - self._shield = value - if not value: - self._deliver_cancellation_to_parent() - - -async def checkpoint() -> None: - await sleep(0) - - -async def checkpoint_if_cancelled() -> None: - task = current_task() - if task is None: - return - - try: - cancel_scope = _task_states[task].cancel_scope - except KeyError: - return - - while cancel_scope: - if cancel_scope.cancel_called: - await sleep(0) - elif cancel_scope.shield: - break - else: - cancel_scope = cancel_scope._parent_scope - - -async def cancel_shielded_checkpoint() -> None: - with CancelScope(shield=True): - await sleep(0) - - -def current_effective_deadline() -> float: - try: - cancel_scope = _task_states[current_task()].cancel_scope # type: ignore[index] - except KeyError: - return math.inf - - deadline = math.inf - while cancel_scope: - deadline = min(deadline, cancel_scope.deadline) - if cancel_scope._cancel_called: - deadline = -math.inf - break - elif cancel_scope.shield: - break - else: - cancel_scope = cancel_scope._parent_scope - - return deadline - - -def current_time() -> float: - return get_running_loop().time() - - -# -# Task states -# - - -class TaskState: - """ - Encapsulates auxiliary task information that cannot be added to the Task instance itself - because there are no guarantees about its implementation. - """ - - __slots__ = "parent_id", "name", "cancel_scope" - - def __init__( - self, - parent_id: int | None, - name: str | None, - cancel_scope: CancelScope | None, - ): - self.parent_id = parent_id - self.name = name - self.cancel_scope = cancel_scope - - -_task_states = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.Task, TaskState] - - -# -# Task groups -# - - -class ExceptionGroup(BaseExceptionGroup): - def __init__(self, exceptions: list[BaseException]): - super().__init__() - self.exceptions = exceptions - - -class _AsyncioTaskStatus(abc.TaskStatus): - def __init__(self, future: asyncio.Future, parent_id: int): - self._future = future - self._parent_id = parent_id - - def started(self, value: T_contra | None = None) -> None: - try: - self._future.set_result(value) - except asyncio.InvalidStateError: - raise RuntimeError( - "called 'started' twice on the same task status" - ) from None - - task = cast(asyncio.Task, current_task()) - _task_states[task].parent_id = self._parent_id - - -class TaskGroup(abc.TaskGroup): - def __init__(self) -> None: - self.cancel_scope: CancelScope = CancelScope() - self._active = False - self._exceptions: list[BaseException] = [] - - async def __aenter__(self) -> TaskGroup: - self.cancel_scope.__enter__() - self._active = True - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) - if exc_val is not None: - self.cancel_scope.cancel() - self._exceptions.append(exc_val) - - while self.cancel_scope._tasks: - try: - await asyncio.wait(self.cancel_scope._tasks) - except asyncio.CancelledError: - self.cancel_scope.cancel() - - self._active = False - if not self.cancel_scope._parent_cancelled(): - exceptions = self._filter_cancellation_errors(self._exceptions) - else: - exceptions = self._exceptions 
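- # Descriptive note: the block below re-raises what was collected: several - # exceptions are wrapped in an ExceptionGroup (or collapsed to a single - # CancelledError if all of them are bare cancellations), while a single - # new exception is raised as-is.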
- - try: - if len(exceptions) > 1: - if all( - isinstance(e, CancelledError) and not e.args for e in exceptions - ): - # Tasks were cancelled natively, without a cancellation message - raise CancelledError - else: - raise ExceptionGroup(exceptions) - elif exceptions and exceptions[0] is not exc_val: - raise exceptions[0] - except BaseException as exc: - # Clear the context here, as it can only be done in-flight. - # If the context is not cleared, it can result in recursive tracebacks (see #145). - exc.__context__ = None - raise - - return ignore_exception - - @staticmethod - def _filter_cancellation_errors( - exceptions: Sequence[BaseException], - ) -> list[BaseException]: - filtered_exceptions: list[BaseException] = [] - for exc in exceptions: - if isinstance(exc, ExceptionGroup): - new_exceptions = TaskGroup._filter_cancellation_errors(exc.exceptions) - if len(new_exceptions) > 1: - filtered_exceptions.append(exc) - elif len(new_exceptions) == 1: - filtered_exceptions.append(new_exceptions[0]) - elif new_exceptions: - new_exc = ExceptionGroup(new_exceptions) - new_exc.__cause__ = exc.__cause__ - new_exc.__context__ = exc.__context__ - new_exc.__traceback__ = exc.__traceback__ - filtered_exceptions.append(new_exc) - elif not isinstance(exc, CancelledError) or exc.args: - filtered_exceptions.append(exc) - - return filtered_exceptions - - async def _run_wrapped_task( - self, coro: Coroutine, task_status_future: asyncio.Future | None - ) -> None: - # This is the code path for Python 3.7 on which asyncio freaks out if a task - # raises a BaseException. - __traceback_hide__ = __tracebackhide__ = True # noqa: F841 - task = cast(asyncio.Task, current_task()) - try: - await coro - except BaseException as exc: - if task_status_future is None or task_status_future.done(): - self._exceptions.append(exc) - self.cancel_scope.cancel() - else: - task_status_future.set_exception(exc) - else: - if task_status_future is not None and not task_status_future.done(): - task_status_future.set_exception( - RuntimeError("Child exited without calling task_status.started()") - ) - finally: - if task in self.cancel_scope._tasks: - self.cancel_scope._tasks.remove(task) - del _task_states[task] - - def _spawn( - self, - func: Callable[..., Awaitable[Any]], - args: tuple, - name: object, - task_status_future: asyncio.Future | None = None, - ) -> asyncio.Task: - def task_done(_task: asyncio.Task) -> None: - # This is the code path for Python 3.8+ - assert _task in self.cancel_scope._tasks - self.cancel_scope._tasks.remove(_task) - del _task_states[_task] - - try: - exc = _task.exception() - except CancelledError as e: - while isinstance(e.__context__, CancelledError): - e = e.__context__ - - exc = e - - if exc is not None: - if task_status_future is None or task_status_future.done(): - self._exceptions.append(exc) - self.cancel_scope.cancel() - else: - task_status_future.set_exception(exc) - elif task_status_future is not None and not task_status_future.done(): - task_status_future.set_exception( - RuntimeError("Child exited without calling task_status.started()") - ) - - if not self._active: - raise RuntimeError( - "This task group is not active; no new tasks can be started." 
- ) - - options: dict[str, Any] = {} - name = get_callable_name(func) if name is None else str(name) - if _native_task_names: - options["name"] = name - - kwargs = {} - if task_status_future: - parent_id = id(current_task()) - kwargs["task_status"] = _AsyncioTaskStatus( - task_status_future, id(self.cancel_scope._host_task) - ) - else: - parent_id = id(self.cancel_scope._host_task) - - coro = func(*args, **kwargs) - if not asyncio.iscoroutine(coro): - raise TypeError( - f"Expected an async function, but {func} appears to be synchronous" - ) - - foreign_coro = not hasattr(coro, "cr_frame") and not hasattr(coro, "gi_frame") - if foreign_coro or sys.version_info < (3, 8): - coro = self._run_wrapped_task(coro, task_status_future) - - task = create_task(coro, **options) - if not foreign_coro and sys.version_info >= (3, 8): - task.add_done_callback(task_done) - - # Make the spawned task inherit the task group's cancel scope - _task_states[task] = TaskState( - parent_id=parent_id, name=name, cancel_scope=self.cancel_scope - ) - self.cancel_scope._tasks.add(task) - return task - - def start_soon( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> None: - self._spawn(func, args, name) - - async def start( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> None: - future: asyncio.Future = asyncio.Future() - task = self._spawn(func, args, name, future) - - # If the task raises an exception after sending a start value without a switch point - # between, the task group is cancelled and this method never proceeds to process the - # completed future. That's why we have to have a shielded cancel scope here. - with CancelScope(shield=True): - try: - return await future - except CancelledError: - task.cancel() - raise - - -# -# Threads -# - -_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]] - - -class WorkerThread(Thread): - MAX_IDLE_TIME = 10 # seconds - - def __init__( - self, - root_task: asyncio.Task, - workers: set[WorkerThread], - idle_workers: deque[WorkerThread], - ): - super().__init__(name="AnyIO worker thread") - self.root_task = root_task - self.workers = workers - self.idle_workers = idle_workers - self.loop = root_task._loop - self.queue: Queue[ - tuple[Context, Callable, tuple, asyncio.Future] | None - ] = Queue(2) - self.idle_since = current_time() - self.stopping = False - - def _report_result( - self, future: asyncio.Future, result: Any, exc: BaseException | None - ) -> None: - self.idle_since = current_time() - if not self.stopping: - self.idle_workers.append(self) - - if not future.cancelled(): - if exc is not None: - if isinstance(exc, StopIteration): - new_exc = RuntimeError("coroutine raised StopIteration") - new_exc.__cause__ = exc - exc = new_exc - - future.set_exception(exc) - else: - future.set_result(result) - - def run(self) -> None: - with claim_worker_thread("asyncio"): - threadlocals.loop = self.loop - while True: - item = self.queue.get() - if item is None: - # Shutdown command received - return - - context, func, args, future = item - if not future.cancelled(): - result = None - exception: BaseException | None = None - try: - result = context.run(func, *args) - except BaseException as exc: - exception = exc - - if not self.loop.is_closed(): - self.loop.call_soon_threadsafe( - self._report_result, future, result, exception - ) - - self.queue.task_done() - - def stop(self, f: asyncio.Task | None = None) -> None: - self.stopping = True - self.queue.put_nowait(None) - 
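# Descriptive note: the None item tells run() to exit its loop; the worker - # then deregisters itself from the workers and idle_workers sets below. -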
self.workers.discard(self) - try: - self.idle_workers.remove(self) - except ValueError: - pass - - -_threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar( - "_threadpool_idle_workers" -) -_threadpool_workers: RunVar[set[WorkerThread]] = RunVar("_threadpool_workers") - - -async def run_sync_in_worker_thread( - func: Callable[..., T_Retval], - *args: object, - cancellable: bool = False, - limiter: CapacityLimiter | None = None, -) -> T_Retval: - await checkpoint() - - # If this is the first run in this event loop thread, set up the necessary variables - try: - idle_workers = _threadpool_idle_workers.get() - workers = _threadpool_workers.get() - except LookupError: - idle_workers = deque() - workers = set() - _threadpool_idle_workers.set(idle_workers) - _threadpool_workers.set(workers) - - async with (limiter or current_default_thread_limiter()): - with CancelScope(shield=not cancellable): - future: asyncio.Future = asyncio.Future() - root_task = find_root_task() - if not idle_workers: - worker = WorkerThread(root_task, workers, idle_workers) - worker.start() - workers.add(worker) - root_task.add_done_callback(worker.stop) - else: - worker = idle_workers.pop() - - # Prune any other workers that have been idle for MAX_IDLE_TIME seconds or longer - now = current_time() - while idle_workers: - if now - idle_workers[0].idle_since < WorkerThread.MAX_IDLE_TIME: - break - - expired_worker = idle_workers.popleft() - expired_worker.root_task.remove_done_callback(expired_worker.stop) - expired_worker.stop() - - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, None) - worker.queue.put_nowait((context, func, args, future)) - return await future - - -def run_sync_from_thread( - func: Callable[..., T_Retval], - *args: object, - loop: asyncio.AbstractEventLoop | None = None, -) -> T_Retval: - @wraps(func) - def wrapper() -> None: - try: - f.set_result(func(*args)) - except BaseException as exc: - f.set_exception(exc) - if not isinstance(exc, Exception): - raise - - f: concurrent.futures.Future[T_Retval] = Future() - loop = loop or threadlocals.loop - loop.call_soon_threadsafe(wrapper) - return f.result() - - -def run_async_from_thread( - func: Callable[..., Awaitable[T_Retval]], *args: object -) -> T_Retval: - f: concurrent.futures.Future[T_Retval] = asyncio.run_coroutine_threadsafe( - func(*args), threadlocals.loop - ) - return f.result() - - -class BlockingPortal(abc.BlockingPortal): - def __new__(cls) -> BlockingPortal: - return object.__new__(cls) - - def __init__(self) -> None: - super().__init__() - self._loop = get_running_loop() - - def _spawn_task_from_thread( - self, - func: Callable, - args: tuple, - kwargs: dict[str, Any], - name: object, - future: Future, - ) -> None: - run_sync_from_thread( - partial(self._task_group.start_soon, name=name), - self._call_func, - func, - args, - kwargs, - future, - loop=self._loop, - ) - - -# -# Subprocesses -# - - -@dataclass(eq=False) -class StreamReaderWrapper(abc.ByteReceiveStream): - _stream: asyncio.StreamReader - - async def receive(self, max_bytes: int = 65536) -> bytes: - data = await self._stream.read(max_bytes) - if data: - return data - else: - raise EndOfStream - - async def aclose(self) -> None: - self._stream.feed_eof() - - -@dataclass(eq=False) -class StreamWriterWrapper(abc.ByteSendStream): - _stream: asyncio.StreamWriter - - async def send(self, item: bytes) -> None: - self._stream.write(item) - await self._stream.drain() - - async def aclose(self) -> None: - self._stream.close() - - -@dataclass(eq=False) 
-class Process(abc.Process): - _process: asyncio.subprocess.Process - _stdin: StreamWriterWrapper | None - _stdout: StreamReaderWrapper | None - _stderr: StreamReaderWrapper | None - - async def aclose(self) -> None: - if self._stdin: - await self._stdin.aclose() - if self._stdout: - await self._stdout.aclose() - if self._stderr: - await self._stderr.aclose() - - await self.wait() - - async def wait(self) -> int: - return await self._process.wait() - - def terminate(self) -> None: - self._process.terminate() - - def kill(self) -> None: - self._process.kill() - - def send_signal(self, signal: int) -> None: - self._process.send_signal(signal) - - @property - def pid(self) -> int: - return self._process.pid - - @property - def returncode(self) -> int | None: - return self._process.returncode - - @property - def stdin(self) -> abc.ByteSendStream | None: - return self._stdin - - @property - def stdout(self) -> abc.ByteReceiveStream | None: - return self._stdout - - @property - def stderr(self) -> abc.ByteReceiveStream | None: - return self._stderr - - -async def open_process( - command: str | bytes | Sequence[str | bytes], - *, - shell: bool, - stdin: int | IO[Any] | None, - stdout: int | IO[Any] | None, - stderr: int | IO[Any] | None, - cwd: str | bytes | PathLike | None = None, - env: Mapping[str, str] | None = None, - start_new_session: bool = False, -) -> Process: - await checkpoint() - if shell: - process = await asyncio.create_subprocess_shell( - cast(Union[str, bytes], command), - stdin=stdin, - stdout=stdout, - stderr=stderr, - cwd=cwd, - env=env, - start_new_session=start_new_session, - ) - else: - process = await asyncio.create_subprocess_exec( - *command, - stdin=stdin, - stdout=stdout, - stderr=stderr, - cwd=cwd, - env=env, - start_new_session=start_new_session, - ) - - stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None - stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None - stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None - return Process(process, stdin_stream, stdout_stream, stderr_stream) - - -def _forcibly_shutdown_process_pool_on_exit( - workers: set[Process], _task: object -) -> None: - """ - Forcibly shuts down worker processes belonging to this event loop.""" - child_watcher: asyncio.AbstractChildWatcher | None - try: - child_watcher = asyncio.get_event_loop_policy().get_child_watcher() - except NotImplementedError: - child_watcher = None - - # Close as much as possible (w/o async/await) to avoid warnings - for process in workers: - if process.returncode is None: - continue - - process._stdin._stream._transport.close() # type: ignore[union-attr] - process._stdout._stream._transport.close() # type: ignore[union-attr] - process._stderr._stream._transport.close() # type: ignore[union-attr] - process.kill() - if child_watcher: - child_watcher.remove_child_handler(process.pid) - - -async def _shutdown_process_pool_on_exit(workers: set[Process]) -> None: - """ - Shuts down worker processes belonging to this event loop. - - NOTE: this only works when the event loop was started using asyncio.run() or anyio.run(). 
- - """ - process: Process - try: - await sleep(math.inf) - except asyncio.CancelledError: - for process in workers: - if process.returncode is None: - process.kill() - - for process in workers: - await process.aclose() - - -def setup_process_pool_exit_at_shutdown(workers: set[Process]) -> None: - kwargs: dict[str, Any] = ( - {"name": "AnyIO process pool shutdown task"} if _native_task_names else {} - ) - create_task(_shutdown_process_pool_on_exit(workers), **kwargs) - find_root_task().add_done_callback( - partial(_forcibly_shutdown_process_pool_on_exit, workers) - ) - - -# -# Sockets and networking -# - - -class StreamProtocol(asyncio.Protocol): - read_queue: deque[bytes] - read_event: asyncio.Event - write_event: asyncio.Event - exception: Exception | None = None - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - self.read_queue = deque() - self.read_event = asyncio.Event() - self.write_event = asyncio.Event() - self.write_event.set() - cast(asyncio.Transport, transport).set_write_buffer_limits(0) - - def connection_lost(self, exc: Exception | None) -> None: - if exc: - self.exception = BrokenResourceError() - self.exception.__cause__ = exc - - self.read_event.set() - self.write_event.set() - - def data_received(self, data: bytes) -> None: - self.read_queue.append(data) - self.read_event.set() - - def eof_received(self) -> bool | None: - self.read_event.set() - return True - - def pause_writing(self) -> None: - self.write_event = asyncio.Event() - - def resume_writing(self) -> None: - self.write_event.set() - - -class DatagramProtocol(asyncio.DatagramProtocol): - read_queue: deque[tuple[bytes, IPSockAddrType]] - read_event: asyncio.Event - write_event: asyncio.Event - exception: Exception | None = None - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - self.read_queue = deque(maxlen=100) # arbitrary value - self.read_event = asyncio.Event() - self.write_event = asyncio.Event() - self.write_event.set() - - def connection_lost(self, exc: Exception | None) -> None: - self.read_event.set() - self.write_event.set() - - def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: - addr = convert_ipv6_sockaddr(addr) - self.read_queue.append((data, addr)) - self.read_event.set() - - def error_received(self, exc: Exception) -> None: - self.exception = exc - - def pause_writing(self) -> None: - self.write_event.clear() - - def resume_writing(self) -> None: - self.write_event.set() - - -class SocketStream(abc.SocketStream): - def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): - self._transport = transport - self._protocol = protocol - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - self._closed = False - - @property - def _raw_socket(self) -> socket.socket: - return self._transport.get_extra_info("socket") - - async def receive(self, max_bytes: int = 65536) -> bytes: - with self._receive_guard: - await checkpoint() - - if ( - not self._protocol.read_event.is_set() - and not self._transport.is_closing() - ): - self._transport.resume_reading() - await self._protocol.read_event.wait() - self._transport.pause_reading() - - try: - chunk = self._protocol.read_queue.popleft() - except IndexError: - if self._closed: - raise ClosedResourceError from None - elif self._protocol.exception: - raise self._protocol.exception - else: - raise EndOfStream from None - - if len(chunk) > max_bytes: - # Split the oversized chunk - chunk, leftover = chunk[:max_bytes], 
chunk[max_bytes:] - self._protocol.read_queue.appendleft(leftover) - - # If the read queue is empty, clear the flag so that the next call will block until - # data is available - if not self._protocol.read_queue: - self._protocol.read_event.clear() - - return chunk - - async def send(self, item: bytes) -> None: - with self._send_guard: - await checkpoint() - - if self._closed: - raise ClosedResourceError - elif self._protocol.exception is not None: - raise self._protocol.exception - - try: - self._transport.write(item) - except RuntimeError as exc: - if self._transport.is_closing(): - raise BrokenResourceError from exc - else: - raise - - await self._protocol.write_event.wait() - - async def send_eof(self) -> None: - try: - self._transport.write_eof() - except OSError: - pass - - async def aclose(self) -> None: - if not self._transport.is_closing(): - self._closed = True - try: - self._transport.write_eof() - except OSError: - pass - - self._transport.close() - await sleep(0) - self._transport.abort() - - -class UNIXSocketStream(abc.SocketStream): - _receive_future: asyncio.Future | None = None - _send_future: asyncio.Future | None = None - _closing = False - - def __init__(self, raw_socket: socket.socket): - self.__raw_socket = raw_socket - self._loop = get_running_loop() - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - @property - def _raw_socket(self) -> socket.socket: - return self.__raw_socket - - def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: - def callback(f: object) -> None: - del self._receive_future - loop.remove_reader(self.__raw_socket) - - f = self._receive_future = asyncio.Future() - self._loop.add_reader(self.__raw_socket, f.set_result, None) - f.add_done_callback(callback) - return f - - def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: - def callback(f: object) -> None: - del self._send_future - loop.remove_writer(self.__raw_socket) - - f = self._send_future = asyncio.Future() - self._loop.add_writer(self.__raw_socket, f.set_result, None) - f.add_done_callback(callback) - return f - - async def send_eof(self) -> None: - with self._send_guard: - self._raw_socket.shutdown(socket.SHUT_WR) - - async def receive(self, max_bytes: int = 65536) -> bytes: - loop = get_running_loop() - await checkpoint() - with self._receive_guard: - while True: - try: - data = self.__raw_socket.recv(max_bytes) - except BlockingIOError: - await self._wait_until_readable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - if not data: - raise EndOfStream - - return data - - async def send(self, item: bytes) -> None: - loop = get_running_loop() - await checkpoint() - with self._send_guard: - view = memoryview(item) - while view: - try: - bytes_sent = self.__raw_socket.send(view) - except BlockingIOError: - await self._wait_until_writable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - view = view[bytes_sent:] - - async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: - if not isinstance(msglen, int) or msglen < 0: - raise ValueError("msglen must be a non-negative integer") - if not isinstance(maxfds, int) or maxfds < 1: - raise ValueError("maxfds must be a positive integer") - - loop = get_running_loop() - fds = array.array("i") - await checkpoint() - with 
self._receive_guard: - while True: - try: - message, ancdata, flags, addr = self.__raw_socket.recvmsg( - msglen, socket.CMSG_LEN(maxfds * fds.itemsize) - ) - except BlockingIOError: - await self._wait_until_readable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - if not message and not ancdata: - raise EndOfStream - - break - - for cmsg_level, cmsg_type, cmsg_data in ancdata: - if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: - raise RuntimeError( - f"Received unexpected ancillary data; message = {message!r}, " - f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" - ) - - fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) - - return message, list(fds) - - async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: - if not message: - raise ValueError("message must not be empty") - if not fds: - raise ValueError("fds must not be empty") - - loop = get_running_loop() - filenos: list[int] = [] - for fd in fds: - if isinstance(fd, int): - filenos.append(fd) - elif isinstance(fd, IOBase): - filenos.append(fd.fileno()) - - fdarray = array.array("i", filenos) - await checkpoint() - with self._send_guard: - while True: - try: - # The ignore can be removed after mypy picks up - # https://github.com/python/typeshed/pull/5545 - self.__raw_socket.sendmsg( - [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] - ) - break - except BlockingIOError: - await self._wait_until_writable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - - async def aclose(self) -> None: - if not self._closing: - self._closing = True - if self.__raw_socket.fileno() != -1: - self.__raw_socket.close() - - if self._receive_future: - self._receive_future.set_result(None) - if self._send_future: - self._send_future.set_result(None) - - -class TCPSocketListener(abc.SocketListener): - _accept_scope: CancelScope | None = None - _closed = False - - def __init__(self, raw_socket: socket.socket): - self.__raw_socket = raw_socket - self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) - self._accept_guard = ResourceGuard("accepting connections from") - - @property - def _raw_socket(self) -> socket.socket: - return self.__raw_socket - - async def accept(self) -> abc.SocketStream: - if self._closed: - raise ClosedResourceError - - with self._accept_guard: - await checkpoint() - with CancelScope() as self._accept_scope: - try: - client_sock, _addr = await self._loop.sock_accept(self._raw_socket) - except asyncio.CancelledError: - # Workaround for https://bugs.python.org/issue41317 - try: - self._loop.remove_reader(self._raw_socket) - except (ValueError, NotImplementedError): - pass - - if self._closed: - raise ClosedResourceError from None - - raise - finally: - self._accept_scope = None - - client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - transport, protocol = await self._loop.connect_accepted_socket( - StreamProtocol, client_sock - ) - return SocketStream(transport, protocol) - - async def aclose(self) -> None: - if self._closed: - return - - self._closed = True - if self._accept_scope: - # Workaround for https://bugs.python.org/issue41317 - try: - self._loop.remove_reader(self._raw_socket) - except (ValueError, NotImplementedError): - pass - - self._accept_scope.cancel() - await sleep(0) - - self._raw_socket.close() - - -class 
UNIXSocketListener(abc.SocketListener): - def __init__(self, raw_socket: socket.socket): - self.__raw_socket = raw_socket - self._loop = get_running_loop() - self._accept_guard = ResourceGuard("accepting connections from") - self._closed = False - - async def accept(self) -> abc.SocketStream: - await checkpoint() - with self._accept_guard: - while True: - try: - client_sock, _ = self.__raw_socket.accept() - client_sock.setblocking(False) - return UNIXSocketStream(client_sock) - except BlockingIOError: - f: asyncio.Future = asyncio.Future() - self._loop.add_reader(self.__raw_socket, f.set_result, None) - f.add_done_callback( - lambda _: self._loop.remove_reader(self.__raw_socket) - ) - await f - except OSError as exc: - if self._closed: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - - async def aclose(self) -> None: - self._closed = True - self.__raw_socket.close() - - @property - def _raw_socket(self) -> socket.socket: - return self.__raw_socket - - -class UDPSocket(abc.UDPSocket): - def __init__( - self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol - ): - self._transport = transport - self._protocol = protocol - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - self._closed = False - - @property - def _raw_socket(self) -> socket.socket: - return self._transport.get_extra_info("socket") - - async def aclose(self) -> None: - if not self._transport.is_closing(): - self._closed = True - self._transport.close() - - async def receive(self) -> tuple[bytes, IPSockAddrType]: - with self._receive_guard: - await checkpoint() - - # If the buffer is empty, ask for more data - if not self._protocol.read_queue and not self._transport.is_closing(): - self._protocol.read_event.clear() - await self._protocol.read_event.wait() - - try: - return self._protocol.read_queue.popleft() - except IndexError: - if self._closed: - raise ClosedResourceError from None - else: - raise BrokenResourceError from None - - async def send(self, item: UDPPacketType) -> None: - with self._send_guard: - await checkpoint() - await self._protocol.write_event.wait() - if self._closed: - raise ClosedResourceError - elif self._transport.is_closing(): - raise BrokenResourceError - else: - self._transport.sendto(*item) - - -class ConnectedUDPSocket(abc.ConnectedUDPSocket): - def __init__( - self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol - ): - self._transport = transport - self._protocol = protocol - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - self._closed = False - - @property - def _raw_socket(self) -> socket.socket: - return self._transport.get_extra_info("socket") - - async def aclose(self) -> None: - if not self._transport.is_closing(): - self._closed = True - self._transport.close() - - async def receive(self) -> bytes: - with self._receive_guard: - await checkpoint() - - # If the buffer is empty, ask for more data - if not self._protocol.read_queue and not self._transport.is_closing(): - self._protocol.read_event.clear() - await self._protocol.read_event.wait() - - try: - packet = self._protocol.read_queue.popleft() - except IndexError: - if self._closed: - raise ClosedResourceError from None - else: - raise BrokenResourceError from None - - return packet[0] - - async def send(self, item: bytes) -> None: - with self._send_guard: - await checkpoint() - await self._protocol.write_event.wait() - if self._closed: - raise ClosedResourceError - 
elif self._transport.is_closing(): - raise BrokenResourceError - else: - self._transport.sendto(item) - - -async def connect_tcp( - host: str, port: int, local_addr: tuple[str, int] | None = None -) -> SocketStream: - transport, protocol = cast( - Tuple[asyncio.Transport, StreamProtocol], - await get_running_loop().create_connection( - StreamProtocol, host, port, local_addr=local_addr - ), - ) - transport.pause_reading() - return SocketStream(transport, protocol) - - -async def connect_unix(path: str) -> UNIXSocketStream: - await checkpoint() - loop = get_running_loop() - raw_socket = socket.socket(socket.AF_UNIX) - raw_socket.setblocking(False) - while True: - try: - raw_socket.connect(path) - except BlockingIOError: - f: asyncio.Future = asyncio.Future() - loop.add_writer(raw_socket, f.set_result, None) - f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) - await f - except BaseException: - raw_socket.close() - raise - else: - return UNIXSocketStream(raw_socket) - - -async def create_udp_socket( - family: socket.AddressFamily, - local_address: IPSockAddrType | None, - remote_address: IPSockAddrType | None, - reuse_port: bool, -) -> UDPSocket | ConnectedUDPSocket: - result = await get_running_loop().create_datagram_endpoint( - DatagramProtocol, - local_addr=local_address, - remote_addr=remote_address, - family=family, - reuse_port=reuse_port, - ) - transport = result[0] - protocol = result[1] - if protocol.exception: - transport.close() - raise protocol.exception - - if not remote_address: - return UDPSocket(transport, protocol) - else: - return ConnectedUDPSocket(transport, protocol) - - -async def getaddrinfo( - host: bytes | str, - port: str | int | None, - *, - family: int | AddressFamily = 0, - type: int | SocketKind = 0, - proto: int = 0, - flags: int = 0, -) -> GetAddrInfoReturnType: - # https://github.com/python/typeshed/pull/4304 - result = await get_running_loop().getaddrinfo( - host, port, family=family, type=type, proto=proto, flags=flags - ) - return cast(GetAddrInfoReturnType, result) - - -async def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> tuple[str, str]: - return await get_running_loop().getnameinfo(sockaddr, flags) - - -_read_events: RunVar[dict[Any, asyncio.Event]] = RunVar("read_events") -_write_events: RunVar[dict[Any, asyncio.Event]] = RunVar("write_events") - - -async def wait_socket_readable(sock: socket.socket) -> None: - await checkpoint() - try: - read_events = _read_events.get() - except LookupError: - read_events = {} - _read_events.set(read_events) - - if read_events.get(sock): - raise BusyResourceError("reading from") from None - - loop = get_running_loop() - event = read_events[sock] = asyncio.Event() - loop.add_reader(sock, event.set) - try: - await event.wait() - finally: - if read_events.pop(sock, None) is not None: - loop.remove_reader(sock) - readable = True - else: - readable = False - - if not readable: - raise ClosedResourceError - - -async def wait_socket_writable(sock: socket.socket) -> None: - await checkpoint() - try: - write_events = _write_events.get() - except LookupError: - write_events = {} - _write_events.set(write_events) - - if write_events.get(sock): - raise BusyResourceError("writing to") from None - - loop = get_running_loop() - event = write_events[sock] = asyncio.Event() - loop.add_writer(sock.fileno(), event.set) - try: - await event.wait() - finally: - if write_events.pop(sock, None) is not None: - loop.remove_writer(sock) - writable = True - else: - writable = False - - if not writable: - raise 
ClosedResourceError - - -# -# Synchronization -# - - -class Event(BaseEvent): - def __new__(cls) -> Event: - return object.__new__(cls) - - def __init__(self) -> None: - self._event = asyncio.Event() - - def set(self) -> DeprecatedAwaitable: - self._event.set() - return DeprecatedAwaitable(self.set) - - def is_set(self) -> bool: - return self._event.is_set() - - async def wait(self) -> None: - if await self._event.wait(): - await checkpoint() - - def statistics(self) -> EventStatistics: - return EventStatistics(len(self._event._waiters)) # type: ignore[attr-defined] - - -class CapacityLimiter(BaseCapacityLimiter): - _total_tokens: float = 0 - - def __new__(cls, total_tokens: float) -> CapacityLimiter: - return object.__new__(cls) - - def __init__(self, total_tokens: float): - self._borrowers: set[Any] = set() - self._wait_queue: OrderedDict[Any, asyncio.Event] = OrderedDict() - self.total_tokens = total_tokens - - async def __aenter__(self) -> None: - await self.acquire() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - @property - def total_tokens(self) -> float: - return self._total_tokens - - @total_tokens.setter - def total_tokens(self, value: float) -> None: - if not isinstance(value, int) and not math.isinf(value): - raise TypeError("total_tokens must be an int or math.inf") - if value < 1: - raise ValueError("total_tokens must be >= 1") - - old_value = self._total_tokens - self._total_tokens = value - events = [] - for event in self._wait_queue.values(): - if value <= old_value: - break - - if not event.is_set(): - events.append(event) - old_value += 1 - - for event in events: - event.set() - - @property - def borrowed_tokens(self) -> int: - return len(self._borrowers) - - @property - def available_tokens(self) -> float: - return self._total_tokens - len(self._borrowers) - - def acquire_nowait(self) -> DeprecatedAwaitable: - self.acquire_on_behalf_of_nowait(current_task()) - return DeprecatedAwaitable(self.acquire_nowait) - - def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: - if borrower in self._borrowers: - raise RuntimeError( - "this borrower is already holding one of this CapacityLimiter's " - "tokens" - ) - - if self._wait_queue or len(self._borrowers) >= self._total_tokens: - raise WouldBlock - - self._borrowers.add(borrower) - return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) - - async def acquire(self) -> None: - return await self.acquire_on_behalf_of(current_task()) - - async def acquire_on_behalf_of(self, borrower: object) -> None: - await checkpoint_if_cancelled() - try: - self.acquire_on_behalf_of_nowait(borrower) - except WouldBlock: - event = asyncio.Event() - self._wait_queue[borrower] = event - try: - await event.wait() - except BaseException: - self._wait_queue.pop(borrower, None) - raise - - self._borrowers.add(borrower) - else: - try: - await cancel_shielded_checkpoint() - except BaseException: - self.release() - raise - - def release(self) -> None: - self.release_on_behalf_of(current_task()) - - def release_on_behalf_of(self, borrower: object) -> None: - try: - self._borrowers.remove(borrower) - except KeyError: - raise RuntimeError( - "this borrower isn't holding any of this CapacityLimiter's " "tokens" - ) from None - - # Notify the next task in line if this limiter has free capacity now - if self._wait_queue and len(self._borrowers) < self._total_tokens: - event = 
self._wait_queue.popitem(last=False)[1] - event.set() - - def statistics(self) -> CapacityLimiterStatistics: - return CapacityLimiterStatistics( - self.borrowed_tokens, - self.total_tokens, - tuple(self._borrowers), - len(self._wait_queue), - ) - - -_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") - - -def current_default_thread_limiter() -> CapacityLimiter: - try: - return _default_thread_limiter.get() - except LookupError: - limiter = CapacityLimiter(40) - _default_thread_limiter.set(limiter) - return limiter - - -# -# Operating system signals -# - - -class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): - def __init__(self, signals: tuple[int, ...]): - self._signals = signals - self._loop = get_running_loop() - self._signal_queue: deque[int] = deque() - self._future: asyncio.Future = asyncio.Future() - self._handled_signals: set[int] = set() - - def _deliver(self, signum: int) -> None: - self._signal_queue.append(signum) - if not self._future.done(): - self._future.set_result(None) - - def __enter__(self) -> _SignalReceiver: - for sig in set(self._signals): - self._loop.add_signal_handler(sig, self._deliver, sig) - self._handled_signals.add(sig) - - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - for sig in self._handled_signals: - self._loop.remove_signal_handler(sig) - return None - - def __aiter__(self) -> _SignalReceiver: - return self - - async def __anext__(self) -> int: - await checkpoint() - if not self._signal_queue: - self._future = asyncio.Future() - await self._future - - return self._signal_queue.popleft() - - -def open_signal_receiver(*signals: int) -> _SignalReceiver: - return _SignalReceiver(signals) - - -# -# Testing and debugging -# - - -def _create_task_info(task: asyncio.Task) -> TaskInfo: - task_state = _task_states.get(task) - if task_state is None: - name = task.get_name() if _native_task_names else None - parent_id = None - else: - name = task_state.name - parent_id = task_state.parent_id - - return TaskInfo(id(task), parent_id, name, get_coro(task)) - - -def get_current_task() -> TaskInfo: - return _create_task_info(current_task()) # type: ignore[arg-type] - - -def get_running_tasks() -> list[TaskInfo]: - return [_create_task_info(task) for task in all_tasks() if not task.done()] - - -async def wait_all_tasks_blocked() -> None: - await checkpoint() - this_task = current_task() - while True: - for task in all_tasks(): - if task is this_task: - continue - - if task._fut_waiter is None or task._fut_waiter.done(): # type: ignore[attr-defined] - await sleep(0.1) - break - else: - return - - -class TestRunner(abc.TestRunner): - def __init__( - self, - debug: bool = False, - use_uvloop: bool = False, - policy: asyncio.AbstractEventLoopPolicy | None = None, - ): - self._exceptions: list[BaseException] = [] - _maybe_set_event_loop_policy(policy, use_uvloop) - self._loop = asyncio.new_event_loop() - self._loop.set_debug(debug) - self._loop.set_exception_handler(self._exception_handler) - asyncio.set_event_loop(self._loop) - - def _cancel_all_tasks(self) -> None: - to_cancel = all_tasks(self._loop) - if not to_cancel: - return - - for task in to_cancel: - task.cancel() - - self._loop.run_until_complete( - asyncio.gather(*to_cancel, return_exceptions=True) - ) - - for task in to_cancel: - if task.cancelled(): - continue - if task.exception() is not None: - raise cast(BaseException, task.exception()) - - 
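# [Editor's aside, not part of the deleted anyio file] _cancel_all_tasks()
# above follows the same teardown pattern asyncio.run() uses: cancel every
# task still pending, gather them with return_exceptions=True so their
# CancelledErrors are collected rather than raised, then re-raise any real
# failure. A minimal standalone sketch of that pattern, assuming a loop that
# is no longer running and is being shut down; drain_pending_tasks is a
# hypothetical helper name, not an anyio or asyncio API:

import asyncio

def drain_pending_tasks(loop: asyncio.AbstractEventLoop) -> None:
    # Snapshot the tasks that never finished and request cancellation
    pending = asyncio.all_tasks(loop)
    for task in pending:
        task.cancel()

    # Let every task process its CancelledError; return_exceptions=True
    # keeps one failure from swallowing the others
    loop.run_until_complete(asyncio.gather(*pending, return_exceptions=True))

    # Surface any exception that was not a plain cancellation
    # (cancelled() must be checked first: exception() raises on a
    # cancelled task)
    for task in pending:
        if task.cancelled():
            continue
        exc = task.exception()
        if exc is not None:
            raise exc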
def _exception_handler( - self, loop: asyncio.AbstractEventLoop, context: dict[str, Any] - ) -> None: - if isinstance(context.get("exception"), Exception): - self._exceptions.append(context["exception"]) - else: - loop.default_exception_handler(context) - - def _raise_async_exceptions(self) -> None: - # Re-raise any exceptions raised in asynchronous callbacks - if self._exceptions: - exceptions, self._exceptions = self._exceptions, [] - if len(exceptions) == 1: - raise exceptions[0] - elif exceptions: - raise ExceptionGroup(exceptions) - - def close(self) -> None: - try: - self._cancel_all_tasks() - self._loop.run_until_complete(self._loop.shutdown_asyncgens()) - finally: - asyncio.set_event_loop(None) - self._loop.close() - - def run_asyncgen_fixture( - self, - fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], - kwargs: dict[str, Any], - ) -> Iterable[T_Retval]: - async def fixture_runner() -> None: - agen = fixture_func(**kwargs) - try: - retval = await agen.asend(None) - self._raise_async_exceptions() - except BaseException as exc: - f.set_exception(exc) - return - else: - f.set_result(retval) - - await event.wait() - try: - await agen.asend(None) - except StopAsyncIteration: - pass - else: - await agen.aclose() - raise RuntimeError("Async generator fixture did not stop") - - f = self._loop.create_future() - event = asyncio.Event() - fixture_task = self._loop.create_task(fixture_runner()) - self._loop.run_until_complete(f) - yield f.result() - event.set() - self._loop.run_until_complete(fixture_task) - self._raise_async_exceptions() - - def run_fixture( - self, - fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], - kwargs: dict[str, Any], - ) -> T_Retval: - retval = self._loop.run_until_complete(fixture_func(**kwargs)) - self._raise_async_exceptions() - return retval - - def run_test( - self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] - ) -> None: - try: - self._loop.run_until_complete(test_func(**kwargs)) - except Exception as exc: - self._exceptions.append(exc) - - self._raise_async_exceptions() diff --git a/server/venv/Lib/site-packages/anyio/_backends/_trio.py b/server/venv/Lib/site-packages/anyio/_backends/_trio.py deleted file mode 100644 index cf28943..0000000 --- a/server/venv/Lib/site-packages/anyio/_backends/_trio.py +++ /dev/null @@ -1,996 +0,0 @@ -from __future__ import annotations - -import array -import math -import socket -from concurrent.futures import Future -from contextvars import copy_context -from dataclasses import dataclass -from functools import partial -from io import IOBase -from os import PathLike -from signal import Signals -from types import TracebackType -from typing import ( - IO, - TYPE_CHECKING, - Any, - AsyncGenerator, - AsyncIterator, - Awaitable, - Callable, - Collection, - Coroutine, - Generic, - Iterable, - Mapping, - NoReturn, - Sequence, - TypeVar, - cast, -) - -import sniffio -import trio.from_thread -from outcome import Error, Outcome, Value -from trio.socket import SocketType as TrioSocketType -from trio.to_thread import run_sync - -from .. 
import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc -from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable -from .._core._eventloop import claim_worker_thread -from .._core._exceptions import ( - BrokenResourceError, - BusyResourceError, - ClosedResourceError, - EndOfStream, -) -from .._core._exceptions import ExceptionGroup as BaseExceptionGroup -from .._core._sockets import convert_ipv6_sockaddr -from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter -from .._core._synchronization import Event as BaseEvent -from .._core._synchronization import ResourceGuard -from .._core._tasks import CancelScope as BaseCancelScope -from ..abc import IPSockAddrType, UDPPacketType - -if TYPE_CHECKING: - from trio_typing import TaskStatus - -try: - from trio import lowlevel as trio_lowlevel -except ImportError: - from trio import hazmat as trio_lowlevel # type: ignore[no-redef] - from trio.hazmat import wait_readable, wait_writable -else: - from trio.lowlevel import wait_readable, wait_writable - -try: - trio_open_process = trio_lowlevel.open_process -except AttributeError: - # isort: off - from trio import ( # type: ignore[attr-defined, no-redef] - open_process as trio_open_process, - ) - -T_Retval = TypeVar("T_Retval") -T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) - - -# -# Event loop -# - -run = trio.run -current_token = trio.lowlevel.current_trio_token -RunVar = trio.lowlevel.RunVar - - -# -# Miscellaneous -# - -sleep = trio.sleep - - -# -# Timeouts and cancellation -# - - -class CancelScope(BaseCancelScope): - def __new__( - cls, original: trio.CancelScope | None = None, **kwargs: object - ) -> CancelScope: - return object.__new__(cls) - - def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None: - self.__original = original or trio.CancelScope(**kwargs) - - def __enter__(self) -> CancelScope: - self.__original.__enter__() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - # https://github.com/python-trio/trio-typing/pull/79 - return self.__original.__exit__( # type: ignore[func-returns-value] - exc_type, exc_val, exc_tb - ) - - def cancel(self) -> DeprecatedAwaitable: - self.__original.cancel() - return DeprecatedAwaitable(self.cancel) - - @property - def deadline(self) -> float: - return self.__original.deadline - - @deadline.setter - def deadline(self, value: float) -> None: - self.__original.deadline = value - - @property - def cancel_called(self) -> bool: - return self.__original.cancel_called - - @property - def shield(self) -> bool: - return self.__original.shield - - @shield.setter - def shield(self, value: bool) -> None: - self.__original.shield = value - - -CancelledError = trio.Cancelled -checkpoint = trio.lowlevel.checkpoint -checkpoint_if_cancelled = trio.lowlevel.checkpoint_if_cancelled -cancel_shielded_checkpoint = trio.lowlevel.cancel_shielded_checkpoint -current_effective_deadline = trio.current_effective_deadline -current_time = trio.current_time - - -# -# Task groups -# - - -class ExceptionGroup(BaseExceptionGroup, trio.MultiError): - pass - - -class TaskGroup(abc.TaskGroup): - def __init__(self) -> None: - self._active = False - self._nursery_manager = trio.open_nursery() - self.cancel_scope = None # type: ignore[assignment] - - async def __aenter__(self) -> TaskGroup: - self._active = True - self._nursery = await self._nursery_manager.__aenter__() - self.cancel_scope = 
CancelScope(self._nursery.cancel_scope) - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - try: - return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) - except trio.MultiError as exc: - raise ExceptionGroup(exc.exceptions) from None - finally: - self._active = False - - def start_soon( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> None: - if not self._active: - raise RuntimeError( - "This task group is not active; no new tasks can be started." - ) - - self._nursery.start_soon(func, *args, name=name) - - async def start( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> object: - if not self._active: - raise RuntimeError( - "This task group is not active; no new tasks can be started." - ) - - return await self._nursery.start(func, *args, name=name) - - -# -# Threads -# - - -async def run_sync_in_worker_thread( - func: Callable[..., T_Retval], - *args: object, - cancellable: bool = False, - limiter: trio.CapacityLimiter | None = None, -) -> T_Retval: - def wrapper() -> T_Retval: - with claim_worker_thread("trio"): - return func(*args) - - # TODO: remove explicit context copying when trio 0.20 is the minimum requirement - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, None) - return await run_sync( - context.run, wrapper, cancellable=cancellable, limiter=limiter - ) - - -# TODO: remove this workaround when trio 0.20 is the minimum requirement -def run_async_from_thread( - fn: Callable[..., Awaitable[T_Retval]], *args: Any -) -> T_Retval: - async def wrapper() -> T_Retval: - retval: T_Retval - - async def inner() -> None: - nonlocal retval - __tracebackhide__ = True - retval = await fn(*args) - - async with trio.open_nursery() as n: - context.run(n.start_soon, inner) - - __tracebackhide__ = True - return retval # noqa: F821 - - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, "trio") - return trio.from_thread.run(wrapper) - - -def run_sync_from_thread(fn: Callable[..., T_Retval], *args: Any) -> T_Retval: - # TODO: remove explicit context copying when trio 0.20 is the minimum requirement - retval = trio.from_thread.run_sync(copy_context().run, fn, *args) - return cast(T_Retval, retval) - - -class BlockingPortal(abc.BlockingPortal): - def __new__(cls) -> BlockingPortal: - return object.__new__(cls) - - def __init__(self) -> None: - super().__init__() - self._token = trio.lowlevel.current_trio_token() - - def _spawn_task_from_thread( - self, - func: Callable, - args: tuple, - kwargs: dict[str, Any], - name: object, - future: Future, - ) -> None: - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, "trio") - trio.from_thread.run_sync( - context.run, - partial(self._task_group.start_soon, name=name), - self._call_func, - func, - args, - kwargs, - future, - trio_token=self._token, - ) - - -# -# Subprocesses -# - - -@dataclass(eq=False) -class ReceiveStreamWrapper(abc.ByteReceiveStream): - _stream: trio.abc.ReceiveStream - - async def receive(self, max_bytes: int | None = None) -> bytes: - try: - data = await self._stream.receive_some(max_bytes) - except trio.ClosedResourceError as exc: - raise ClosedResourceError from exc.__cause__ - except trio.BrokenResourceError as exc: - raise BrokenResourceError from exc.__cause__ - - if data: - return data - else: - raise EndOfStream - - async def 
aclose(self) -> None: - await self._stream.aclose() - - -@dataclass(eq=False) -class SendStreamWrapper(abc.ByteSendStream): - _stream: trio.abc.SendStream - - async def send(self, item: bytes) -> None: - try: - await self._stream.send_all(item) - except trio.ClosedResourceError as exc: - raise ClosedResourceError from exc.__cause__ - except trio.BrokenResourceError as exc: - raise BrokenResourceError from exc.__cause__ - - async def aclose(self) -> None: - await self._stream.aclose() - - -@dataclass(eq=False) -class Process(abc.Process): - _process: trio.Process - _stdin: abc.ByteSendStream | None - _stdout: abc.ByteReceiveStream | None - _stderr: abc.ByteReceiveStream | None - - async def aclose(self) -> None: - if self._stdin: - await self._stdin.aclose() - if self._stdout: - await self._stdout.aclose() - if self._stderr: - await self._stderr.aclose() - - await self.wait() - - async def wait(self) -> int: - return await self._process.wait() - - def terminate(self) -> None: - self._process.terminate() - - def kill(self) -> None: - self._process.kill() - - def send_signal(self, signal: Signals) -> None: - self._process.send_signal(signal) - - @property - def pid(self) -> int: - return self._process.pid - - @property - def returncode(self) -> int | None: - return self._process.returncode - - @property - def stdin(self) -> abc.ByteSendStream | None: - return self._stdin - - @property - def stdout(self) -> abc.ByteReceiveStream | None: - return self._stdout - - @property - def stderr(self) -> abc.ByteReceiveStream | None: - return self._stderr - - -async def open_process( - command: str | bytes | Sequence[str | bytes], - *, - shell: bool, - stdin: int | IO[Any] | None, - stdout: int | IO[Any] | None, - stderr: int | IO[Any] | None, - cwd: str | bytes | PathLike | None = None, - env: Mapping[str, str] | None = None, - start_new_session: bool = False, -) -> Process: - process = await trio_open_process( # type: ignore[misc] - command, # type: ignore[arg-type] - stdin=stdin, - stdout=stdout, - stderr=stderr, - shell=shell, - cwd=cwd, - env=env, - start_new_session=start_new_session, - ) - stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None - stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None - stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None - return Process(process, stdin_stream, stdout_stream, stderr_stream) - - -class _ProcessPoolShutdownInstrument(trio.abc.Instrument): - def after_run(self) -> None: - super().after_run() - - -current_default_worker_process_limiter: RunVar = RunVar( - "current_default_worker_process_limiter" -) - - -async def _shutdown_process_pool(workers: set[Process]) -> None: - process: Process - try: - await sleep(math.inf) - except trio.Cancelled: - for process in workers: - if process.returncode is None: - process.kill() - - with CancelScope(shield=True): - for process in workers: - await process.aclose() - - -def setup_process_pool_exit_at_shutdown(workers: set[Process]) -> None: - trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) - - -# -# Sockets and networking -# - - -class _TrioSocketMixin(Generic[T_SockAddr]): - def __init__(self, trio_socket: TrioSocketType) -> None: - self._trio_socket = trio_socket - self._closed = False - - def _check_closed(self) -> None: - if self._closed: - raise ClosedResourceError - if self._trio_socket.fileno() < 0: - raise BrokenResourceError - - @property - def _raw_socket(self) -> socket.socket: - return self._trio_socket._sock # type: 
ignore[attr-defined] - - async def aclose(self) -> None: - if self._trio_socket.fileno() >= 0: - self._closed = True - self._trio_socket.close() - - def _convert_socket_error(self, exc: BaseException) -> NoReturn: - if isinstance(exc, trio.ClosedResourceError): - raise ClosedResourceError from exc - elif self._trio_socket.fileno() < 0 and self._closed: - raise ClosedResourceError from None - elif isinstance(exc, OSError): - raise BrokenResourceError from exc - else: - raise exc - - -class SocketStream(_TrioSocketMixin, abc.SocketStream): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self, max_bytes: int = 65536) -> bytes: - with self._receive_guard: - try: - data = await self._trio_socket.recv(max_bytes) - except BaseException as exc: - self._convert_socket_error(exc) - - if data: - return data - else: - raise EndOfStream - - async def send(self, item: bytes) -> None: - with self._send_guard: - view = memoryview(item) - while view: - try: - bytes_sent = await self._trio_socket.send(view) - except BaseException as exc: - self._convert_socket_error(exc) - - view = view[bytes_sent:] - - async def send_eof(self) -> None: - self._trio_socket.shutdown(socket.SHUT_WR) - - -class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): - async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: - if not isinstance(msglen, int) or msglen < 0: - raise ValueError("msglen must be a non-negative integer") - if not isinstance(maxfds, int) or maxfds < 1: - raise ValueError("maxfds must be a positive integer") - - fds = array.array("i") - await checkpoint() - with self._receive_guard: - while True: - try: - message, ancdata, flags, addr = await self._trio_socket.recvmsg( - msglen, socket.CMSG_LEN(maxfds * fds.itemsize) - ) - except BaseException as exc: - self._convert_socket_error(exc) - else: - if not message and not ancdata: - raise EndOfStream - - break - - for cmsg_level, cmsg_type, cmsg_data in ancdata: - if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: - raise RuntimeError( - f"Received unexpected ancillary data; message = {message!r}, " - f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" - ) - - fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) - - return message, list(fds) - - async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: - if not message: - raise ValueError("message must not be empty") - if not fds: - raise ValueError("fds must not be empty") - - filenos: list[int] = [] - for fd in fds: - if isinstance(fd, int): - filenos.append(fd) - elif isinstance(fd, IOBase): - filenos.append(fd.fileno()) - - fdarray = array.array("i", filenos) - await checkpoint() - with self._send_guard: - while True: - try: - await self._trio_socket.sendmsg( - [message], - [ - ( - socket.SOL_SOCKET, - socket.SCM_RIGHTS, # type: ignore[list-item] - fdarray, - ) - ], - ) - break - except BaseException as exc: - self._convert_socket_error(exc) - - -class TCPSocketListener(_TrioSocketMixin, abc.SocketListener): - def __init__(self, raw_socket: socket.socket): - super().__init__(trio.socket.from_stdlib_socket(raw_socket)) - self._accept_guard = ResourceGuard("accepting connections from") - - async def accept(self) -> SocketStream: - with self._accept_guard: - try: - trio_socket, _addr = await self._trio_socket.accept() - except BaseException as exc: - 
self._convert_socket_error(exc) - - trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - return SocketStream(trio_socket) - - -class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): - def __init__(self, raw_socket: socket.socket): - super().__init__(trio.socket.from_stdlib_socket(raw_socket)) - self._accept_guard = ResourceGuard("accepting connections from") - - async def accept(self) -> UNIXSocketStream: - with self._accept_guard: - try: - trio_socket, _addr = await self._trio_socket.accept() - except BaseException as exc: - self._convert_socket_error(exc) - - return UNIXSocketStream(trio_socket) - - -class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self) -> tuple[bytes, IPSockAddrType]: - with self._receive_guard: - try: - data, addr = await self._trio_socket.recvfrom(65536) - return data, convert_ipv6_sockaddr(addr) - except BaseException as exc: - self._convert_socket_error(exc) - - async def send(self, item: UDPPacketType) -> None: - with self._send_guard: - try: - await self._trio_socket.sendto(*item) - except BaseException as exc: - self._convert_socket_error(exc) - - -class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self) -> bytes: - with self._receive_guard: - try: - return await self._trio_socket.recv(65536) - except BaseException as exc: - self._convert_socket_error(exc) - - async def send(self, item: bytes) -> None: - with self._send_guard: - try: - await self._trio_socket.send(item) - except BaseException as exc: - self._convert_socket_error(exc) - - -async def connect_tcp( - host: str, port: int, local_address: IPSockAddrType | None = None -) -> SocketStream: - family = socket.AF_INET6 if ":" in host else socket.AF_INET - trio_socket = trio.socket.socket(family) - trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - if local_address: - await trio_socket.bind(local_address) - - try: - await trio_socket.connect((host, port)) - except BaseException: - trio_socket.close() - raise - - return SocketStream(trio_socket) - - -async def connect_unix(path: str) -> UNIXSocketStream: - trio_socket = trio.socket.socket(socket.AF_UNIX) - try: - await trio_socket.connect(path) - except BaseException: - trio_socket.close() - raise - - return UNIXSocketStream(trio_socket) - - -async def create_udp_socket( - family: socket.AddressFamily, - local_address: IPSockAddrType | None, - remote_address: IPSockAddrType | None, - reuse_port: bool, -) -> UDPSocket | ConnectedUDPSocket: - trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) - - if reuse_port: - trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - - if local_address: - await trio_socket.bind(local_address) - - if remote_address: - await trio_socket.connect(remote_address) - return ConnectedUDPSocket(trio_socket) - else: - return UDPSocket(trio_socket) - - -getaddrinfo = trio.socket.getaddrinfo -getnameinfo = trio.socket.getnameinfo - - -async def wait_socket_readable(sock: socket.socket) -> None: - try: - await wait_readable(sock) - except trio.ClosedResourceError as exc: - raise 
ClosedResourceError().with_traceback(exc.__traceback__) from None - except trio.BusyResourceError: - raise BusyResourceError("reading from") from None - - -async def wait_socket_writable(sock: socket.socket) -> None: - try: - await wait_writable(sock) - except trio.ClosedResourceError as exc: - raise ClosedResourceError().with_traceback(exc.__traceback__) from None - except trio.BusyResourceError: - raise BusyResourceError("writing to") from None - - -# -# Synchronization -# - - -class Event(BaseEvent): - def __new__(cls) -> Event: - return object.__new__(cls) - - def __init__(self) -> None: - self.__original = trio.Event() - - def is_set(self) -> bool: - return self.__original.is_set() - - async def wait(self) -> None: - return await self.__original.wait() - - def statistics(self) -> EventStatistics: - orig_statistics = self.__original.statistics() - return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) - - def set(self) -> DeprecatedAwaitable: - self.__original.set() - return DeprecatedAwaitable(self.set) - - -class CapacityLimiter(BaseCapacityLimiter): - def __new__(cls, *args: object, **kwargs: object) -> CapacityLimiter: - return object.__new__(cls) - - def __init__( - self, *args: Any, original: trio.CapacityLimiter | None = None - ) -> None: - self.__original = original or trio.CapacityLimiter(*args) - - async def __aenter__(self) -> None: - return await self.__original.__aenter__() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - await self.__original.__aexit__(exc_type, exc_val, exc_tb) - - @property - def total_tokens(self) -> float: - return self.__original.total_tokens - - @total_tokens.setter - def total_tokens(self, value: float) -> None: - self.__original.total_tokens = value - - @property - def borrowed_tokens(self) -> int: - return self.__original.borrowed_tokens - - @property - def available_tokens(self) -> float: - return self.__original.available_tokens - - def acquire_nowait(self) -> DeprecatedAwaitable: - self.__original.acquire_nowait() - return DeprecatedAwaitable(self.acquire_nowait) - - def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: - self.__original.acquire_on_behalf_of_nowait(borrower) - return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait) - - async def acquire(self) -> None: - await self.__original.acquire() - - async def acquire_on_behalf_of(self, borrower: object) -> None: - await self.__original.acquire_on_behalf_of(borrower) - - def release(self) -> None: - return self.__original.release() - - def release_on_behalf_of(self, borrower: object) -> None: - return self.__original.release_on_behalf_of(borrower) - - def statistics(self) -> CapacityLimiterStatistics: - orig = self.__original.statistics() - return CapacityLimiterStatistics( - borrowed_tokens=orig.borrowed_tokens, - total_tokens=orig.total_tokens, - borrowers=orig.borrowers, - tasks_waiting=orig.tasks_waiting, - ) - - -_capacity_limiter_wrapper: RunVar = RunVar("_capacity_limiter_wrapper") - - -def current_default_thread_limiter() -> CapacityLimiter: - try: - return _capacity_limiter_wrapper.get() - except LookupError: - limiter = CapacityLimiter( - original=trio.to_thread.current_default_thread_limiter() - ) - _capacity_limiter_wrapper.set(limiter) - return limiter - - -# -# Signal handling -# - - -class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]): - _iterator: AsyncIterator[int] - - def __init__(self, signals: 
tuple[Signals, ...]): - self._signals = signals - - def __enter__(self) -> _SignalReceiver: - self._cm = trio.open_signal_receiver(*self._signals) - self._iterator = self._cm.__enter__() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - return self._cm.__exit__(exc_type, exc_val, exc_tb) - - def __aiter__(self) -> _SignalReceiver: - return self - - async def __anext__(self) -> Signals: - signum = await self._iterator.__anext__() - return Signals(signum) - - -def open_signal_receiver(*signals: Signals) -> _SignalReceiver: - return _SignalReceiver(signals) - - -# -# Testing and debugging -# - - -def get_current_task() -> TaskInfo: - task = trio_lowlevel.current_task() - - parent_id = None - if task.parent_nursery and task.parent_nursery.parent_task: - parent_id = id(task.parent_nursery.parent_task) - - return TaskInfo(id(task), parent_id, task.name, task.coro) - - -def get_running_tasks() -> list[TaskInfo]: - root_task = trio_lowlevel.current_root_task() - task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)] - nurseries = root_task.child_nurseries - while nurseries: - new_nurseries: list[trio.Nursery] = [] - for nursery in nurseries: - for task in nursery.child_tasks: - task_infos.append( - TaskInfo(id(task), id(nursery.parent_task), task.name, task.coro) - ) - new_nurseries.extend(task.child_nurseries) - - nurseries = new_nurseries - - return task_infos - - -def wait_all_tasks_blocked() -> Awaitable[None]: - import trio.testing - - return trio.testing.wait_all_tasks_blocked() - - -class TestRunner(abc.TestRunner): - def __init__(self, **options: Any) -> None: - from collections import deque - from queue import Queue - - self._call_queue: Queue[Callable[..., object]] = Queue() - self._result_queue: deque[Outcome] = deque() - self._stop_event: trio.Event | None = None - self._nursery: trio.Nursery | None = None - self._options = options - - async def _trio_main(self) -> None: - self._stop_event = trio.Event() - async with trio.open_nursery() as self._nursery: - await self._stop_event.wait() - - async def _call_func( - self, func: Callable[..., Awaitable[object]], args: tuple, kwargs: dict - ) -> None: - try: - retval = await func(*args, **kwargs) - except BaseException as exc: - self._result_queue.append(Error(exc)) - else: - self._result_queue.append(Value(retval)) - - def _main_task_finished(self, outcome: object) -> None: - self._nursery = None - - def _get_nursery(self) -> trio.Nursery: - if self._nursery is None: - trio.lowlevel.start_guest_run( - self._trio_main, - run_sync_soon_threadsafe=self._call_queue.put, - done_callback=self._main_task_finished, - **self._options, - ) - while self._nursery is None: - self._call_queue.get()() - - return self._nursery - - def _call( - self, func: Callable[..., Awaitable[T_Retval]], *args: object, **kwargs: object - ) -> T_Retval: - self._get_nursery().start_soon(self._call_func, func, args, kwargs) - while not self._result_queue: - self._call_queue.get()() - - outcome = self._result_queue.pop() - return outcome.unwrap() - - def close(self) -> None: - if self._stop_event: - self._stop_event.set() - while self._nursery is not None: - self._call_queue.get()() - - def run_asyncgen_fixture( - self, - fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], - kwargs: dict[str, Any], - ) -> Iterable[T_Retval]: - async def fixture_runner(*, task_status: TaskStatus[T_Retval]) -> None: - agen = fixture_func(**kwargs) 
- retval = await agen.asend(None) - task_status.started(retval) - await teardown_event.wait() - try: - await agen.asend(None) - except StopAsyncIteration: - pass - else: - await agen.aclose() - raise RuntimeError("Async generator fixture did not stop") - - teardown_event = trio.Event() - fixture_value = self._call(lambda: self._get_nursery().start(fixture_runner)) - yield fixture_value - teardown_event.set() - - def run_fixture( - self, - fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], - kwargs: dict[str, Any], - ) -> T_Retval: - return self._call(fixture_func, **kwargs) - - def run_test( - self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] - ) -> None: - self._call(test_func, **kwargs) diff --git a/server/venv/Lib/site-packages/anyio/_core/__init__.py b/server/venv/Lib/site-packages/anyio/_core/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/anyio/_core/_compat.py b/server/venv/Lib/site-packages/anyio/_core/_compat.py deleted file mode 100644 index 22d29ab..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_compat.py +++ /dev/null @@ -1,217 +0,0 @@ -from __future__ import annotations - -from abc import ABCMeta, abstractmethod -from contextlib import AbstractContextManager -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - AsyncContextManager, - Callable, - ContextManager, - Generator, - Generic, - Iterable, - List, - TypeVar, - Union, - overload, -) -from warnings import warn - -if TYPE_CHECKING: - from ._testing import TaskInfo -else: - TaskInfo = object - -T = TypeVar("T") -AnyDeprecatedAwaitable = Union[ - "DeprecatedAwaitable", - "DeprecatedAwaitableFloat", - "DeprecatedAwaitableList[T]", - TaskInfo, -] - - -@overload -async def maybe_async(__obj: TaskInfo) -> TaskInfo: - ... - - -@overload -async def maybe_async(__obj: DeprecatedAwaitableFloat) -> float: - ... - - -@overload -async def maybe_async(__obj: DeprecatedAwaitableList[T]) -> list[T]: - ... - - -@overload -async def maybe_async(__obj: DeprecatedAwaitable) -> None: - ... - - -async def maybe_async( - __obj: AnyDeprecatedAwaitable[T], -) -> TaskInfo | float | list[T] | None: - """ - Await on the given object if necessary. - - This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and - methods were converted from coroutine functions into regular functions. - - Do **not** try to use this for any other purpose! - - :return: the result of awaiting on the object if coroutine, or the object itself otherwise - - .. versionadded:: 2.2 - - """ - return __obj._unwrap() - - -class _ContextManagerWrapper: - def __init__(self, cm: ContextManager[T]): - self._cm = cm - - async def __aenter__(self) -> T: - return self._cm.__enter__() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - return self._cm.__exit__(exc_type, exc_val, exc_tb) - - -def maybe_async_cm( - cm: ContextManager[T] | AsyncContextManager[T], -) -> AsyncContextManager[T]: - """ - Wrap a regular context manager as an async one if necessary. - - This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and - methods were changed to return regular context managers instead of async ones. - - :param cm: a regular or async context manager - :return: an async context manager - - .. 
versionadded:: 2.2 - - """ - if not isinstance(cm, AbstractContextManager): - raise TypeError("Given object is not a context manager") - - return _ContextManagerWrapper(cm) - - -def _warn_deprecation( - awaitable: AnyDeprecatedAwaitable[Any], stacklevel: int = 1 -) -> None: - warn( - f'Awaiting on {awaitable._name}() is deprecated. Use "await ' - f'anyio.maybe_async({awaitable._name}(...))" if you have to support both AnyIO 2.x ' - f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.', - DeprecationWarning, - stacklevel=stacklevel + 1, - ) - - -class DeprecatedAwaitable: - def __init__(self, func: Callable[..., DeprecatedAwaitable]): - self._name = f"{func.__module__}.{func.__qualname__}" - - def __await__(self) -> Generator[None, None, None]: - _warn_deprecation(self) - if False: - yield - - def __reduce__(self) -> tuple[type[None], tuple[()]]: - return type(None), () - - def _unwrap(self) -> None: - return None - - -class DeprecatedAwaitableFloat(float): - def __new__( - cls, x: float, func: Callable[..., DeprecatedAwaitableFloat] - ) -> DeprecatedAwaitableFloat: - return super().__new__(cls, x) - - def __init__(self, x: float, func: Callable[..., DeprecatedAwaitableFloat]): - self._name = f"{func.__module__}.{func.__qualname__}" - - def __await__(self) -> Generator[None, None, float]: - _warn_deprecation(self) - if False: - yield - - return float(self) - - def __reduce__(self) -> tuple[type[float], tuple[float]]: - return float, (float(self),) - - def _unwrap(self) -> float: - return float(self) - - -class DeprecatedAwaitableList(List[T]): - def __init__( - self, - iterable: Iterable[T] = (), - *, - func: Callable[..., DeprecatedAwaitableList[T]], - ): - super().__init__(iterable) - self._name = f"{func.__module__}.{func.__qualname__}" - - def __await__(self) -> Generator[None, None, list[T]]: - _warn_deprecation(self) - if False: - yield - - return list(self) - - def __reduce__(self) -> tuple[type[list[T]], tuple[list[T]]]: - return list, (list(self),) - - def _unwrap(self) -> list[T]: - return list(self) - - -class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta): - @abstractmethod - def __enter__(self) -> T: - pass - - @abstractmethod - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - pass - - async def __aenter__(self) -> T: - warn( - f"Using {self.__class__.__name__} as an async context manager has been deprecated. 
" - f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to ' - f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if ' - f"you are completely migrating to AnyIO 3+.", - DeprecationWarning, - ) - return self.__enter__() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - return self.__exit__(exc_type, exc_val, exc_tb) diff --git a/server/venv/Lib/site-packages/anyio/_core/_eventloop.py b/server/venv/Lib/site-packages/anyio/_core/_eventloop.py deleted file mode 100644 index ae98648..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_eventloop.py +++ /dev/null @@ -1,153 +0,0 @@ -from __future__ import annotations - -import math -import sys -import threading -from contextlib import contextmanager -from importlib import import_module -from typing import ( - Any, - Awaitable, - Callable, - Generator, - TypeVar, -) - -import sniffio - -# This must be updated when new backends are introduced -from ._compat import DeprecatedAwaitableFloat - -BACKENDS = "asyncio", "trio" - -T_Retval = TypeVar("T_Retval") -threadlocals = threading.local() - - -def run( - func: Callable[..., Awaitable[T_Retval]], - *args: object, - backend: str = "asyncio", - backend_options: dict[str, Any] | None = None, -) -> T_Retval: - """ - Run the given coroutine function in an asynchronous event loop. - - The current thread must not be already running an event loop. - - :param func: a coroutine function - :param args: positional arguments to ``func`` - :param backend: name of the asynchronous event loop implementation – currently either - ``asyncio`` or ``trio`` - :param backend_options: keyword arguments to call the backend ``run()`` implementation with - (documented :ref:`here `) - :return: the return value of the coroutine function - :raises RuntimeError: if an asynchronous event loop is already running in this thread - :raises LookupError: if the named backend is not found - - """ - try: - asynclib_name = sniffio.current_async_library() - except sniffio.AsyncLibraryNotFoundError: - pass - else: - raise RuntimeError(f"Already running {asynclib_name} in this thread") - - try: - asynclib = import_module(f"..._backends._{backend}", package=__name__) - except ImportError as exc: - raise LookupError(f"No such backend: {backend}") from exc - - token = None - if sniffio.current_async_library_cvar.get(None) is None: - # Since we're in control of the event loop, we can cache the name of the async library - token = sniffio.current_async_library_cvar.set(backend) - - try: - backend_options = backend_options or {} - return asynclib.run(func, *args, **backend_options) - finally: - if token: - sniffio.current_async_library_cvar.reset(token) - - -async def sleep(delay: float) -> None: - """ - Pause the current task for the specified duration. - - :param delay: the duration, in seconds - - """ - return await get_asynclib().sleep(delay) - - -async def sleep_forever() -> None: - """ - Pause the current task until it's cancelled. - - This is a shortcut for ``sleep(math.inf)``. - - .. versionadded:: 3.1 - - """ - await sleep(math.inf) - - -async def sleep_until(deadline: float) -> None: - """ - Pause the current task until the given time. - - :param deadline: the absolute time to wake up at (according to the internal monotonic clock of - the event loop) - - .. 
versionadded:: 3.1 - - """ - now = current_time() - await sleep(max(deadline - now, 0)) - - -def current_time() -> DeprecatedAwaitableFloat: - """ - Return the current value of the event loop's internal clock. - - :return: the clock value (seconds) - - """ - return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time) - - -def get_all_backends() -> tuple[str, ...]: - """Return a tuple of the names of all built-in backends.""" - return BACKENDS - - -def get_cancelled_exc_class() -> type[BaseException]: - """Return the current async library's cancellation exception class.""" - return get_asynclib().CancelledError - - -# -# Private API -# - - -@contextmanager -def claim_worker_thread(backend: str) -> Generator[Any, None, None]: - module = sys.modules["anyio._backends._" + backend] - threadlocals.current_async_module = module - try: - yield - finally: - del threadlocals.current_async_module - - -def get_asynclib(asynclib_name: str | None = None) -> Any: - if asynclib_name is None: - asynclib_name = sniffio.current_async_library() - - modulename = "anyio._backends._" + asynclib_name - try: - return sys.modules[modulename] - except KeyError: - return import_module(modulename) diff --git a/server/venv/Lib/site-packages/anyio/_core/_exceptions.py b/server/venv/Lib/site-packages/anyio/_core/_exceptions.py deleted file mode 100644 index 92ccd77..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_exceptions.py +++ /dev/null @@ -1,94 +0,0 @@ -from __future__ import annotations - -from traceback import format_exception - - -class BrokenResourceError(Exception): - """ - Raised when trying to use a resource that has been rendered unusable due to external causes - (e.g. a send stream whose peer has disconnected). - """ - - -class BrokenWorkerProcess(Exception): - """ - Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise - misbehaves. - """ - - -class BusyResourceError(Exception): - """Raised when two tasks are trying to read from or write to the same resource concurrently.""" - - def __init__(self, action: str): - super().__init__(f"Another task is already {action} this resource") - - -class ClosedResourceError(Exception): - """Raised when trying to use a resource that has been closed.""" - - -class DelimiterNotFound(Exception): - """ - Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the - maximum number of bytes has been read without the delimiter being found. - """ - - def __init__(self, max_bytes: int) -> None: - super().__init__( - f"The delimiter was not found among the first {max_bytes} bytes" - ) - - -class EndOfStream(Exception): - """Raised when trying to read from a stream that has been closed from the other end.""" - - -class ExceptionGroup(BaseException): - """ - Raised when multiple exceptions have been raised in a task group. 
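As a usage illustration for the event loop helpers deleted above (``run()``, ``sleep_until()``, ``current_time()``), a short self-contained sketch — hypothetical application code, not taken from the diff::

    import anyio

    async def main() -> None:
        # Deadlines are expressed on the event loop's internal monotonic
        # clock, so derive them from current_time() rather than time.time().
        deadline = anyio.current_time() + 0.5
        await anyio.sleep_until(deadline)

    # run() raises RuntimeError if this thread already hosts an event loop.
    anyio.run(main, backend="asyncio")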
- - :var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together - """ - - SEPARATOR = "----------------------------\n" - - exceptions: list[BaseException] - - def __str__(self) -> str: - tracebacks = [ - "".join(format_exception(type(exc), exc, exc.__traceback__)) - for exc in self.exceptions - ] - return ( - f"{len(self.exceptions)} exceptions were raised in the task group:\n" - f"{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}" - ) - - def __repr__(self) -> str: - exception_reprs = ", ".join(repr(exc) for exc in self.exceptions) - return f"<{self.__class__.__name__}: {exception_reprs}>" - - -class IncompleteRead(Exception): - """ - Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or - :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the - connection is closed before the requested amount of bytes has been read. - """ - - def __init__(self) -> None: - super().__init__( - "The stream was closed before the read operation could be completed" - ) - - -class TypedAttributeLookupError(LookupError): - """ - Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not - found and no default value has been given. - """ - - -class WouldBlock(Exception): - """Raised by ``X_nowait`` functions if ``X()`` would block.""" diff --git a/server/venv/Lib/site-packages/anyio/_core/_fileio.py b/server/venv/Lib/site-packages/anyio/_core/_fileio.py deleted file mode 100644 index 35e8e8a..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_fileio.py +++ /dev/null @@ -1,603 +0,0 @@ -from __future__ import annotations - -import os -import pathlib -import sys -from dataclasses import dataclass -from functools import partial -from os import PathLike -from typing import ( - IO, - TYPE_CHECKING, - Any, - AnyStr, - AsyncIterator, - Callable, - Generic, - Iterable, - Iterator, - Sequence, - cast, - overload, -) - -from .. import to_thread -from ..abc import AsyncResource - -if sys.version_info >= (3, 8): - from typing import Final -else: - from typing_extensions import Final - -if TYPE_CHECKING: - from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer -else: - ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object - - -class AsyncFile(AsyncResource, Generic[AnyStr]): - """ - An asynchronous file object. - - This class wraps a standard file object and provides async friendly versions of the following - blocking methods (where available on the original file object): - - * read - * read1 - * readline - * readlines - * readinto - * readinto1 - * write - * writelines - * truncate - * seek - * tell - * flush - - All other methods are directly passed through. - - This class supports the asynchronous context manager protocol which closes the underlying file - at the end of the context block. - - This class also supports asynchronous iteration:: - - async with await open_file(...) 
as f: - async for line in f: - print(line) - """ - - def __init__(self, fp: IO[AnyStr]) -> None: - self._fp: Any = fp - - def __getattr__(self, name: str) -> object: - return getattr(self._fp, name) - - @property - def wrapped(self) -> IO[AnyStr]: - """The wrapped file object.""" - return self._fp - - async def __aiter__(self) -> AsyncIterator[AnyStr]: - while True: - line = await self.readline() - if line: - yield line - else: - break - - async def aclose(self) -> None: - return await to_thread.run_sync(self._fp.close) - - async def read(self, size: int = -1) -> AnyStr: - return await to_thread.run_sync(self._fp.read, size) - - async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes: - return await to_thread.run_sync(self._fp.read1, size) - - async def readline(self) -> AnyStr: - return await to_thread.run_sync(self._fp.readline) - - async def readlines(self) -> list[AnyStr]: - return await to_thread.run_sync(self._fp.readlines) - - async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> bytes: - return await to_thread.run_sync(self._fp.readinto, b) - - async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> bytes: - return await to_thread.run_sync(self._fp.readinto1, b) - - @overload - async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: - ... - - @overload - async def write(self: AsyncFile[str], b: str) -> int: - ... - - async def write(self, b: ReadableBuffer | str) -> int: - return await to_thread.run_sync(self._fp.write, b) - - @overload - async def writelines( - self: AsyncFile[bytes], lines: Iterable[ReadableBuffer] - ) -> None: - ... - - @overload - async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: - ... - - async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None: - return await to_thread.run_sync(self._fp.writelines, lines) - - async def truncate(self, size: int | None = None) -> int: - return await to_thread.run_sync(self._fp.truncate, size) - - async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: - return await to_thread.run_sync(self._fp.seek, offset, whence) - - async def tell(self) -> int: - return await to_thread.run_sync(self._fp.tell) - - async def flush(self) -> None: - return await to_thread.run_sync(self._fp.flush) - - -@overload -async def open_file( - file: str | PathLike[str] | int, - mode: OpenBinaryMode, - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - closefd: bool = ..., - opener: Callable[[str, int], int] | None = ..., -) -> AsyncFile[bytes]: - ... - - -@overload -async def open_file( - file: str | PathLike[str] | int, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - closefd: bool = ..., - opener: Callable[[str, int], int] | None = ..., -) -> AsyncFile[str]: - ... - - -async def open_file( - file: str | PathLike[str] | int, - mode: str = "r", - buffering: int = -1, - encoding: str | None = None, - errors: str | None = None, - newline: str | None = None, - closefd: bool = True, - opener: Callable[[str, int], int] | None = None, -) -> AsyncFile[Any]: - """ - Open a file asynchronously. - - The arguments are exactly the same as for the builtin :func:`open`. 
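For example (hypothetical caller code, assuming the public ``anyio.open_file`` export)::

    import anyio

    async def main() -> None:
        # Every blocking file operation is delegated to a worker thread.
        async with await anyio.open_file("example.txt", "w") as f:
            await f.write("hello\n")

        async with await anyio.open_file("example.txt") as f:
            async for line in f:  # AsyncFile supports async iteration
                print(line, end="")

    anyio.run(main)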
- - :return: an asynchronous file object - - """ - fp = await to_thread.run_sync( - open, file, mode, buffering, encoding, errors, newline, closefd, opener - ) - return AsyncFile(fp) - - -def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: - """ - Wrap an existing file as an asynchronous file. - - :param file: an existing file-like object - :return: an asynchronous file object - - """ - return AsyncFile(file) - - -@dataclass(eq=False) -class _PathIterator(AsyncIterator["Path"]): - iterator: Iterator[PathLike[str]] - - async def __anext__(self) -> Path: - nextval = await to_thread.run_sync(next, self.iterator, None, cancellable=True) - if nextval is None: - raise StopAsyncIteration from None - - return Path(cast("PathLike[str]", nextval)) - - -class Path: - """ - An asynchronous version of :class:`pathlib.Path`. - - This class cannot be substituted for :class:`pathlib.Path` or :class:`pathlib.PurePath`, but - it is compatible with the :class:`os.PathLike` interface. - - It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for the - deprecated :meth:`~pathlib.Path.link_to` method. - - Any methods that do disk I/O need to be awaited on. These methods are: - - * :meth:`~pathlib.Path.absolute` - * :meth:`~pathlib.Path.chmod` - * :meth:`~pathlib.Path.cwd` - * :meth:`~pathlib.Path.exists` - * :meth:`~pathlib.Path.expanduser` - * :meth:`~pathlib.Path.group` - * :meth:`~pathlib.Path.hardlink_to` - * :meth:`~pathlib.Path.home` - * :meth:`~pathlib.Path.is_block_device` - * :meth:`~pathlib.Path.is_char_device` - * :meth:`~pathlib.Path.is_dir` - * :meth:`~pathlib.Path.is_fifo` - * :meth:`~pathlib.Path.is_file` - * :meth:`~pathlib.Path.is_mount` - * :meth:`~pathlib.Path.lchmod` - * :meth:`~pathlib.Path.lstat` - * :meth:`~pathlib.Path.mkdir` - * :meth:`~pathlib.Path.open` - * :meth:`~pathlib.Path.owner` - * :meth:`~pathlib.Path.read_bytes` - * :meth:`~pathlib.Path.read_text` - * :meth:`~pathlib.Path.readlink` - * :meth:`~pathlib.Path.rename` - * :meth:`~pathlib.Path.replace` - * :meth:`~pathlib.Path.rmdir` - * :meth:`~pathlib.Path.samefile` - * :meth:`~pathlib.Path.stat` - * :meth:`~pathlib.Path.touch` - * :meth:`~pathlib.Path.unlink` - * :meth:`~pathlib.Path.write_bytes` - * :meth:`~pathlib.Path.write_text` - - Additionally, the following methods return an async iterator yielding :class:`~.Path` objects: - - * :meth:`~pathlib.Path.glob` - * :meth:`~pathlib.Path.iterdir` - * :meth:`~pathlib.Path.rglob` - """ - - __slots__ = "_path", "__weakref__" - - __weakref__: Any - - def __init__(self, *args: str | PathLike[str]) -> None: - self._path: Final[pathlib.Path] = pathlib.Path(*args) - - def __fspath__(self) -> str: - return self._path.__fspath__() - - def __str__(self) -> str: - return self._path.__str__() - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.as_posix()!r})" - - def __bytes__(self) -> bytes: - return self._path.__bytes__() - - def __hash__(self) -> int: - return self._path.__hash__() - - def __eq__(self, other: object) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__eq__(target) - - def __lt__(self, other: Path) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__lt__(target) - - def __le__(self, other: Path) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__le__(target) - - def __gt__(self, other: Path) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__gt__(target) - - def 
__ge__(self, other: Path) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__ge__(target) - - def __truediv__(self, other: Any) -> Path: - return Path(self._path / other) - - def __rtruediv__(self, other: Any) -> Path: - return Path(other) / self - - @property - def parts(self) -> tuple[str, ...]: - return self._path.parts - - @property - def drive(self) -> str: - return self._path.drive - - @property - def root(self) -> str: - return self._path.root - - @property - def anchor(self) -> str: - return self._path.anchor - - @property - def parents(self) -> Sequence[Path]: - return tuple(Path(p) for p in self._path.parents) - - @property - def parent(self) -> Path: - return Path(self._path.parent) - - @property - def name(self) -> str: - return self._path.name - - @property - def suffix(self) -> str: - return self._path.suffix - - @property - def suffixes(self) -> list[str]: - return self._path.suffixes - - @property - def stem(self) -> str: - return self._path.stem - - async def absolute(self) -> Path: - path = await to_thread.run_sync(self._path.absolute) - return Path(path) - - def as_posix(self) -> str: - return self._path.as_posix() - - def as_uri(self) -> str: - return self._path.as_uri() - - def match(self, path_pattern: str) -> bool: - return self._path.match(path_pattern) - - def is_relative_to(self, *other: str | PathLike[str]) -> bool: - try: - self.relative_to(*other) - return True - except ValueError: - return False - - async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: - func = partial(os.chmod, follow_symlinks=follow_symlinks) - return await to_thread.run_sync(func, self._path, mode) - - @classmethod - async def cwd(cls) -> Path: - path = await to_thread.run_sync(pathlib.Path.cwd) - return cls(path) - - async def exists(self) -> bool: - return await to_thread.run_sync(self._path.exists, cancellable=True) - - async def expanduser(self) -> Path: - return Path(await to_thread.run_sync(self._path.expanduser, cancellable=True)) - - def glob(self, pattern: str) -> AsyncIterator[Path]: - gen = self._path.glob(pattern) - return _PathIterator(gen) - - async def group(self) -> str: - return await to_thread.run_sync(self._path.group, cancellable=True) - - async def hardlink_to(self, target: str | pathlib.Path | Path) -> None: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(os.link, target, self) - - @classmethod - async def home(cls) -> Path: - home_path = await to_thread.run_sync(pathlib.Path.home) - return cls(home_path) - - def is_absolute(self) -> bool: - return self._path.is_absolute() - - async def is_block_device(self) -> bool: - return await to_thread.run_sync(self._path.is_block_device, cancellable=True) - - async def is_char_device(self) -> bool: - return await to_thread.run_sync(self._path.is_char_device, cancellable=True) - - async def is_dir(self) -> bool: - return await to_thread.run_sync(self._path.is_dir, cancellable=True) - - async def is_fifo(self) -> bool: - return await to_thread.run_sync(self._path.is_fifo, cancellable=True) - - async def is_file(self) -> bool: - return await to_thread.run_sync(self._path.is_file, cancellable=True) - - async def is_mount(self) -> bool: - return await to_thread.run_sync(os.path.ismount, self._path, cancellable=True) - - def is_reserved(self) -> bool: - return self._path.is_reserved() - - async def is_socket(self) -> bool: - return await to_thread.run_sync(self._path.is_socket, cancellable=True) - - async def is_symlink(self) -> bool: - 
return await to_thread.run_sync(self._path.is_symlink, cancellable=True) - - def iterdir(self) -> AsyncIterator[Path]: - gen = self._path.iterdir() - return _PathIterator(gen) - - def joinpath(self, *args: str | PathLike[str]) -> Path: - return Path(self._path.joinpath(*args)) - - async def lchmod(self, mode: int) -> None: - await to_thread.run_sync(self._path.lchmod, mode) - - async def lstat(self) -> os.stat_result: - return await to_thread.run_sync(self._path.lstat, cancellable=True) - - async def mkdir( - self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False - ) -> None: - await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) - - @overload - async def open( - self, - mode: OpenBinaryMode, - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - ) -> AsyncFile[bytes]: - ... - - @overload - async def open( - self, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - ) -> AsyncFile[str]: - ... - - async def open( - self, - mode: str = "r", - buffering: int = -1, - encoding: str | None = None, - errors: str | None = None, - newline: str | None = None, - ) -> AsyncFile[Any]: - fp = await to_thread.run_sync( - self._path.open, mode, buffering, encoding, errors, newline - ) - return AsyncFile(fp) - - async def owner(self) -> str: - return await to_thread.run_sync(self._path.owner, cancellable=True) - - async def read_bytes(self) -> bytes: - return await to_thread.run_sync(self._path.read_bytes) - - async def read_text( - self, encoding: str | None = None, errors: str | None = None - ) -> str: - return await to_thread.run_sync(self._path.read_text, encoding, errors) - - def relative_to(self, *other: str | PathLike[str]) -> Path: - return Path(self._path.relative_to(*other)) - - async def readlink(self) -> Path: - target = await to_thread.run_sync(os.readlink, self._path) - return Path(cast(str, target)) - - async def rename(self, target: str | pathlib.PurePath | Path) -> Path: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(self._path.rename, target) - return Path(target) - - async def replace(self, target: str | pathlib.PurePath | Path) -> Path: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(self._path.replace, target) - return Path(target) - - async def resolve(self, strict: bool = False) -> Path: - func = partial(self._path.resolve, strict=strict) - return Path(await to_thread.run_sync(func, cancellable=True)) - - def rglob(self, pattern: str) -> AsyncIterator[Path]: - gen = self._path.rglob(pattern) - return _PathIterator(gen) - - async def rmdir(self) -> None: - await to_thread.run_sync(self._path.rmdir) - - async def samefile( - self, other_path: str | bytes | int | pathlib.Path | Path - ) -> bool: - if isinstance(other_path, Path): - other_path = other_path._path - - return await to_thread.run_sync( - self._path.samefile, other_path, cancellable=True - ) - - async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: - func = partial(os.stat, follow_symlinks=follow_symlinks) - return await to_thread.run_sync(func, self._path, cancellable=True) - - async def symlink_to( - self, - target: str | pathlib.Path | Path, - target_is_directory: bool = False, - ) -> None: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) - - async def touch(self, mode: int 
= 0o666, exist_ok: bool = True) -> None: - await to_thread.run_sync(self._path.touch, mode, exist_ok) - - async def unlink(self, missing_ok: bool = False) -> None: - try: - await to_thread.run_sync(self._path.unlink) - except FileNotFoundError: - if not missing_ok: - raise - - def with_name(self, name: str) -> Path: - return Path(self._path.with_name(name)) - - def with_stem(self, stem: str) -> Path: - return Path(self._path.with_name(stem + self._path.suffix)) - - def with_suffix(self, suffix: str) -> Path: - return Path(self._path.with_suffix(suffix)) - - async def write_bytes(self, data: bytes) -> int: - return await to_thread.run_sync(self._path.write_bytes, data) - - async def write_text( - self, - data: str, - encoding: str | None = None, - errors: str | None = None, - newline: str | None = None, - ) -> int: - # Path.write_text() does not support the "newline" parameter before Python 3.10 - def sync_write_text() -> int: - with self._path.open( - "w", encoding=encoding, errors=errors, newline=newline - ) as fp: - return fp.write(data) - - return await to_thread.run_sync(sync_write_text) - - -PathLike.register(Path) diff --git a/server/venv/Lib/site-packages/anyio/_core/_resources.py b/server/venv/Lib/site-packages/anyio/_core/_resources.py deleted file mode 100644 index b9a5344..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_resources.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations - -from ..abc import AsyncResource -from ._tasks import CancelScope - - -async def aclose_forcefully(resource: AsyncResource) -> None: - """ - Close an asynchronous resource in a cancelled scope. - - Doing this closes the resource without waiting on anything. - - :param resource: the resource to close - - """ - with CancelScope() as scope: - scope.cancel() - await resource.aclose() diff --git a/server/venv/Lib/site-packages/anyio/_core/_signals.py b/server/venv/Lib/site-packages/anyio/_core/_signals.py deleted file mode 100644 index 8ea54af..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_signals.py +++ /dev/null @@ -1,26 +0,0 @@ -from __future__ import annotations - -from typing import AsyncIterator - -from ._compat import DeprecatedAsyncContextManager -from ._eventloop import get_asynclib - - -def open_signal_receiver( - *signals: int, -) -> DeprecatedAsyncContextManager[AsyncIterator[int]]: - """ - Start receiving operating system signals. - - :param signals: signals to receive (e.g. ``signal.SIGINT``) - :return: an asynchronous context manager for an asynchronous iterator which yields signal - numbers - - .. warning:: Windows does not support signals natively so it is best to avoid relying on this - in cross-platform applications. - - .. warning:: On asyncio, this permanently replaces any previous signal handler for the given - signals, as set via :meth:`~asyncio.loop.add_signal_handler`. - - """ - return get_asynclib().open_signal_receiver(*signals) diff --git a/server/venv/Lib/site-packages/anyio/_core/_sockets.py b/server/venv/Lib/site-packages/anyio/_core/_sockets.py deleted file mode 100644 index e6970be..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_sockets.py +++ /dev/null @@ -1,607 +0,0 @@ -from __future__ import annotations - -import socket -import ssl -import sys -from ipaddress import IPv6Address, ip_address -from os import PathLike, chmod -from pathlib import Path -from socket import AddressFamily, SocketKind -from typing import Awaitable, List, Tuple, cast, overload - -from .. 
import to_thread -from ..abc import ( - ConnectedUDPSocket, - IPAddressType, - IPSockAddrType, - SocketListener, - SocketStream, - UDPSocket, - UNIXSocketStream, -) -from ..streams.stapled import MultiListener -from ..streams.tls import TLSStream -from ._eventloop import get_asynclib -from ._resources import aclose_forcefully -from ._synchronization import Event -from ._tasks import create_task_group, move_on_after - -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal - -IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515 - -GetAddrInfoReturnType = List[ - Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]] -] -AnyIPAddressFamily = Literal[ - AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6 -] -IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6] - - -# tls_hostname given -@overload -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = ..., - ssl_context: ssl.SSLContext | None = ..., - tls_standard_compatible: bool = ..., - tls_hostname: str, - happy_eyeballs_delay: float = ..., -) -> TLSStream: - ... - - -# ssl_context given -@overload -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = ..., - ssl_context: ssl.SSLContext, - tls_standard_compatible: bool = ..., - tls_hostname: str | None = ..., - happy_eyeballs_delay: float = ..., -) -> TLSStream: - ... - - -# tls=True -@overload -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = ..., - tls: Literal[True], - ssl_context: ssl.SSLContext | None = ..., - tls_standard_compatible: bool = ..., - tls_hostname: str | None = ..., - happy_eyeballs_delay: float = ..., -) -> TLSStream: - ... - - -# tls=False -@overload -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = ..., - tls: Literal[False], - ssl_context: ssl.SSLContext | None = ..., - tls_standard_compatible: bool = ..., - tls_hostname: str | None = ..., - happy_eyeballs_delay: float = ..., -) -> SocketStream: - ... - - -# No TLS arguments -@overload -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = ..., - happy_eyeballs_delay: float = ..., -) -> SocketStream: - ... - - -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = None, - tls: bool = False, - ssl_context: ssl.SSLContext | None = None, - tls_standard_compatible: bool = True, - tls_hostname: str | None = None, - happy_eyeballs_delay: float = 0.25, -) -> SocketStream | TLSStream: - """ - Connect to a host using the TCP protocol. - - This function implements the stateless version of the Happy Eyeballs algorithm (RFC - 6555). If ``remote_host`` is a host name that resolves to multiple IP addresses, - each one is tried until one connection attempt succeeds. If the first attempt does - not connect within 250 milliseconds, a second attempt is started using the next - address in the list, and so on. On IPv6-enabled systems, an IPv6 address (if - available) is tried first. - - When the connection has been established, a TLS handshake will be done if either - ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``. 
- - :param remote_host: the IP address or host name to connect to - :param remote_port: port on the target host to connect to - :param local_host: the interface address or name to bind the socket to before connecting - :param tls: ``True`` to do a TLS handshake with the connected stream and return a - :class:`~anyio.streams.tls.TLSStream` instead - :param ssl_context: the SSL context object to use (if omitted, a default context is created) - :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake before closing - the stream and requires that the server does this as well. Otherwise, - :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. - Some protocols, such as HTTP, require this option to be ``False``. - See :meth:`~ssl.SSLContext.wrap_socket` for details. - :param tls_hostname: host name to check the server certificate against (defaults to the value - of ``remote_host``) - :param happy_eyeballs_delay: delay (in seconds) before starting the next connection attempt - :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream - :raises OSError: if the connection attempt fails - - """ - # Placed here due to https://github.com/python/mypy/issues/7057 - connected_stream: SocketStream | None = None - - async def try_connect(remote_host: str, event: Event) -> None: - nonlocal connected_stream - try: - stream = await asynclib.connect_tcp(remote_host, remote_port, local_address) - except OSError as exc: - oserrors.append(exc) - return - else: - if connected_stream is None: - connected_stream = stream - tg.cancel_scope.cancel() - else: - await stream.aclose() - finally: - event.set() - - asynclib = get_asynclib() - local_address: IPSockAddrType | None = None - family = socket.AF_UNSPEC - if local_host: - gai_res = await getaddrinfo(str(local_host), None) - family, *_, local_address = gai_res[0] - - target_host = str(remote_host) - try: - addr_obj = ip_address(remote_host) - except ValueError: - # getaddrinfo() will raise an exception if name resolution fails - gai_res = await getaddrinfo( - target_host, remote_port, family=family, type=socket.SOCK_STREAM - ) - - # Organize the list so that the first address is an IPv6 address (if available) and the - # second one is an IPv4 address. The rest can be in whatever order. 
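From the caller's side, the address racing above is invisible; a minimal (hypothetical) sketch of using ``connect_tcp`` with TLS::

    import anyio

    async def main() -> None:
        # connect_tcp() races the resolved addresses, starting another
        # attempt every happy_eyeballs_delay (0.25 s) until one succeeds.
        stream = await anyio.connect_tcp(
            "example.com", 443, tls=True, tls_standard_compatible=False
        )
        await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
        print(await stream.receive())
        await stream.aclose()

    anyio.run(main)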
- v6_found = v4_found = False - target_addrs: list[tuple[socket.AddressFamily, str]] = [] - for af, *rest, sa in gai_res: - if af == socket.AF_INET6 and not v6_found: - v6_found = True - target_addrs.insert(0, (af, sa[0])) - elif af == socket.AF_INET and not v4_found and v6_found: - v4_found = True - target_addrs.insert(1, (af, sa[0])) - else: - target_addrs.append((af, sa[0])) - else: - if isinstance(addr_obj, IPv6Address): - target_addrs = [(socket.AF_INET6, addr_obj.compressed)] - else: - target_addrs = [(socket.AF_INET, addr_obj.compressed)] - - oserrors: list[OSError] = [] - async with create_task_group() as tg: - for i, (af, addr) in enumerate(target_addrs): - event = Event() - tg.start_soon(try_connect, addr, event) - with move_on_after(happy_eyeballs_delay): - await event.wait() - - if connected_stream is None: - cause = oserrors[0] if len(oserrors) == 1 else asynclib.ExceptionGroup(oserrors) - raise OSError("All connection attempts failed") from cause - - if tls or tls_hostname or ssl_context: - try: - return await TLSStream.wrap( - connected_stream, - server_side=False, - hostname=tls_hostname or str(remote_host), - ssl_context=ssl_context, - standard_compatible=tls_standard_compatible, - ) - except BaseException: - await aclose_forcefully(connected_stream) - raise - - return connected_stream - - -async def connect_unix(path: str | PathLike[str]) -> UNIXSocketStream: - """ - Connect to the given UNIX socket. - - Not available on Windows. - - :param path: path to the socket - :return: a socket stream object - - """ - path = str(Path(path)) - return await get_asynclib().connect_unix(path) - - -async def create_tcp_listener( - *, - local_host: IPAddressType | None = None, - local_port: int = 0, - family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, - backlog: int = 65536, - reuse_port: bool = False, -) -> MultiListener[SocketStream]: - """ - Create a TCP socket listener. - - :param local_port: port number to listen on - :param local_host: IP address of the interface to listen on. If omitted, listen on - all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address - family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6. - :param family: address family (used if ``local_host`` was omitted) - :param backlog: maximum number of queued incoming connections (up to a maximum of - 2**16, or 65536) - :param reuse_port: ``True`` to allow multiple sockets to bind to the same - address/port (not supported on Windows) - :return: a list of listener objects - - """ - asynclib = get_asynclib() - backlog = min(backlog, 65536) - local_host = str(local_host) if local_host is not None else None - gai_res = await getaddrinfo( - local_host, # type: ignore[arg-type] - local_port, - family=family, - type=socket.SocketKind.SOCK_STREAM if sys.platform == "win32" else 0, - flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, - ) - listeners: list[SocketListener] = [] - try: - # The set() is here to work around a glibc bug: - # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 - sockaddr: tuple[str, int] | tuple[str, int, int, int] - for fam, kind, *_, sockaddr in sorted(set(gai_res)): - # Workaround for an uvloop bug where we don't get the correct scope ID for - # IPv6 link-local addresses when passing type=socket.SOCK_STREAM to - # getaddrinfo(): https://github.com/MagicStack/uvloop/issues/539 - if sys.platform != "win32" and kind is not SocketKind.SOCK_STREAM: - continue - - raw_socket = socket.socket(fam) - raw_socket.setblocking(False) - - # For Windows, enable exclusive address use. 
For others, enable address reuse. - if sys.platform == "win32": - raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) - else: - raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - - if reuse_port: - raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - - # If only IPv6 was requested, disable dual stack operation - if fam == socket.AF_INET6: - raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) - - # Workaround for #554 - if "%" in sockaddr[0]: - addr, scope_id = sockaddr[0].split("%", 1) - sockaddr = (addr, sockaddr[1], 0, int(scope_id)) - - raw_socket.bind(sockaddr) - raw_socket.listen(backlog) - listener = asynclib.TCPSocketListener(raw_socket) - listeners.append(listener) - except BaseException: - for listener in listeners: - await listener.aclose() - - raise - - return MultiListener(listeners) - - -async def create_unix_listener( - path: str | PathLike[str], - *, - mode: int | None = None, - backlog: int = 65536, -) -> SocketListener: - """ - Create a UNIX socket listener. - - Not available on Windows. - - :param path: path of the socket - :param mode: permissions to set on the socket - :param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or - 65536) - :return: a listener object - - .. versionchanged:: 3.0 - If a socket already exists on the file system in the given path, it will be removed first. - - """ - path_str = str(path) - path = Path(path) - if path.is_socket(): - path.unlink() - - backlog = min(backlog, 65536) - raw_socket = socket.socket(socket.AF_UNIX) - raw_socket.setblocking(False) - try: - await to_thread.run_sync(raw_socket.bind, path_str, cancellable=True) - if mode is not None: - await to_thread.run_sync(chmod, path_str, mode, cancellable=True) - - raw_socket.listen(backlog) - return get_asynclib().UNIXSocketListener(raw_socket) - except BaseException: - raw_socket.close() - raise - - -async def create_udp_socket( - family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, - *, - local_host: IPAddressType | None = None, - local_port: int = 0, - reuse_port: bool = False, -) -> UDPSocket: - """ - Create a UDP socket. - - If ``local_port`` has been given, the socket will be bound to this port on the local - machine, making this socket suitable for providing UDP based services. 
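As an illustration (hypothetical service code, not part of the deleted module), a small UDP echo loop built on this function::

    import anyio

    async def serve() -> None:
        async with await anyio.create_udp_socket(
            local_host="127.0.0.1", local_port=9999
        ) as udp:
            async for packet, (host, port) in udp:
                # Echo each datagram back to its sender.
                await udp.sendto(packet, host, port)

    anyio.run(serve)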
- - :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from - ``local_host`` if omitted - :param local_host: IP address or host name of the local interface to bind to - :param local_port: local port to bind to - :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port - (not supported on Windows) - :return: a UDP socket - - """ - if family is AddressFamily.AF_UNSPEC and not local_host: - raise ValueError('Either "family" or "local_host" must be given') - - if local_host: - gai_res = await getaddrinfo( - str(local_host), - local_port, - family=family, - type=socket.SOCK_DGRAM, - flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, - ) - family = cast(AnyIPAddressFamily, gai_res[0][0]) - local_address = gai_res[0][-1] - elif family is AddressFamily.AF_INET6: - local_address = ("::", 0) - else: - local_address = ("0.0.0.0", 0) - - return await get_asynclib().create_udp_socket( - family, local_address, None, reuse_port - ) - - -async def create_connected_udp_socket( - remote_host: IPAddressType, - remote_port: int, - *, - family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, - local_host: IPAddressType | None = None, - local_port: int = 0, - reuse_port: bool = False, -) -> ConnectedUDPSocket: - """ - Create a connected UDP socket. - - Connected UDP sockets can only communicate with the specified remote host/port, and any packets - sent from other sources are dropped. - - :param remote_host: remote host to set as the default target - :param remote_port: port on the remote host to set as the default target - :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from - ``local_host`` or ``remote_host`` if omitted - :param local_host: IP address or host name of the local interface to bind to - :param local_port: local port to bind to - :param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port - (not supported on Windows) - :return: a connected UDP socket - - """ - local_address = None - if local_host: - gai_res = await getaddrinfo( - str(local_host), - local_port, - family=family, - type=socket.SOCK_DGRAM, - flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, - ) - family = cast(AnyIPAddressFamily, gai_res[0][0]) - local_address = gai_res[0][-1] - - gai_res = await getaddrinfo( - str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM - ) - family = cast(AnyIPAddressFamily, gai_res[0][0]) - remote_address = gai_res[0][-1] - - return await get_asynclib().create_udp_socket( - family, local_address, remote_address, reuse_port - ) - - -async def getaddrinfo( - host: bytearray | bytes | str, - port: str | int | None, - *, - family: int | AddressFamily = 0, - type: int | SocketKind = 0, - proto: int = 0, - flags: int = 0, -) -> GetAddrInfoReturnType: - """ - Look up a numeric IP address given a host name. - - Internationalized domain names are translated according to the (non-transitional) IDNA 2008 - standard. - - .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of - (host, port), unlike what :func:`socket.getaddrinfo` does. - - :param host: host name - :param port: port number - :param family: socket family (``AF_INET``, ...) - :param type: socket type (``SOCK_STREAM``, ...) - :param proto: protocol number - :param flags: flags to pass to upstream ``getaddrinfo()`` - :return: list of tuples containing (family, type, proto, canonname, sockaddr) - - .. 
seealso:: :func:`socket.getaddrinfo` - - """ - # Handle unicode hostnames - if isinstance(host, str): - try: - encoded_host = host.encode("ascii") - except UnicodeEncodeError: - import idna - - encoded_host = idna.encode(host, uts46=True) - else: - encoded_host = host - - gai_res = await get_asynclib().getaddrinfo( - encoded_host, port, family=family, type=type, proto=proto, flags=flags - ) - return [ - (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr)) - for family, type, proto, canonname, sockaddr in gai_res - ] - - -def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]: - """ - Look up the host name of an IP address. - - :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4) - :param flags: flags to pass to upstream ``getnameinfo()`` - :return: a tuple of (host name, service name) - - .. seealso:: :func:`socket.getnameinfo` - - """ - return get_asynclib().getnameinfo(sockaddr, flags) - - -def wait_socket_readable(sock: socket.socket) -> Awaitable[None]: - """ - Wait until the given socket has data to be read. - - This does **NOT** work on Windows when using the asyncio backend with a proactor event loop - (default on py3.8+). - - .. warning:: Only use this on raw sockets that have not been wrapped by any higher level - constructs like socket streams! - - :param sock: a socket object - :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the - socket to become readable - :raises ~anyio.BusyResourceError: if another task is already waiting for the socket - to become readable - - """ - return get_asynclib().wait_socket_readable(sock) - - -def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: - """ - Wait until the given socket can be written to. - - This does **NOT** work on Windows when using the asyncio backend with a proactor event loop - (default on py3.8+). - - .. warning:: Only use this on raw sockets that have not been wrapped by any higher level - constructs like socket streams! - - :param sock: a socket object - :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the - socket to become writable - :raises ~anyio.BusyResourceError: if another task is already waiting for the socket - to become writable - - """ - return get_asynclib().wait_socket_writable(sock) - - -# -# Private API -# - - -def convert_ipv6_sockaddr( - sockaddr: tuple[str, int, int, int] | tuple[str, int] -) -> tuple[str, int]: - """ - Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. - - If the scope ID is nonzero, it is added to the address, separated with ``%``. - Otherwise the flow id and scope id are simply cut off from the tuple. - Any other kinds of socket addresses are returned as-is. 
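For instance (values hypothetical)::

    from anyio._core._sockets import convert_ipv6_sockaddr

    # Nonzero scope ID: appended to the host with '%'
    convert_ipv6_sockaddr(("fe80::1", 80, 0, 3))      # -> ("fe80::1%3", 80)
    # Zero scope ID: flow info and scope ID are dropped
    convert_ipv6_sockaddr(("2001:db8::1", 80, 0, 0))  # -> ("2001:db8::1", 80)
    # 2-tuples (e.g. IPv4) pass through unchanged
    convert_ipv6_sockaddr(("127.0.0.1", 8080))        # -> ("127.0.0.1", 8080)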
- - :param sockaddr: the result of :meth:`~socket.socket.getsockname` - :return: the converted socket address - - """ - # This is more complicated than it should be because of MyPy - if isinstance(sockaddr, tuple) and len(sockaddr) == 4: - host, port, flowinfo, scope_id = cast(Tuple[str, int, int, int], sockaddr) - if scope_id: - # PyPy (as of v7.3.11) leaves the interface name in the result, so - # we discard it and only get the scope ID from the end - # (https://foss.heptapod.net/pypy/pypy/-/issues/3938) - host = host.split("%")[0] - - # Add scope_id to the address - return f"{host}%{scope_id}", port - else: - return host, port - else: - return cast(Tuple[str, int], sockaddr) diff --git a/server/venv/Lib/site-packages/anyio/_core/_streams.py b/server/venv/Lib/site-packages/anyio/_core/_streams.py deleted file mode 100644 index 54ea2b2..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_streams.py +++ /dev/null @@ -1,47 +0,0 @@ -from __future__ import annotations - -import math -from typing import Any, TypeVar, overload - -from ..streams.memory import ( - MemoryObjectReceiveStream, - MemoryObjectSendStream, - MemoryObjectStreamState, -) - -T_Item = TypeVar("T_Item") - - -@overload -def create_memory_object_stream( - max_buffer_size: float = ..., -) -> tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]: - ... - - -@overload -def create_memory_object_stream( - max_buffer_size: float = ..., item_type: type[T_Item] = ... -) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: - ... - - -def create_memory_object_stream( - max_buffer_size: float = 0, item_type: type[T_Item] | None = None -) -> tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]: - """ - Create a memory object stream. - - :param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking - :param item_type: type of item, for marking the streams with the right generic type for - static typing (not used at run time) - :return: a tuple of (send stream, receive stream) - - """ - if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): - raise ValueError("max_buffer_size must be either an integer or math.inf") - if max_buffer_size < 0: - raise ValueError("max_buffer_size cannot be negative") - - state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size) - return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state) diff --git a/server/venv/Lib/site-packages/anyio/_core/_subprocesses.py b/server/venv/Lib/site-packages/anyio/_core/_subprocesses.py deleted file mode 100644 index 1a26ac8..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_subprocesses.py +++ /dev/null @@ -1,135 +0,0 @@ -from __future__ import annotations - -from io import BytesIO -from os import PathLike -from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess -from typing import ( - IO, - Any, - AsyncIterable, - Mapping, - Sequence, - cast, -) - -from ..abc import Process -from ._eventloop import get_asynclib -from ._tasks import create_task_group - - -async def run_process( - command: str | bytes | Sequence[str | bytes], - *, - input: bytes | None = None, - stdout: int | IO[Any] | None = PIPE, - stderr: int | IO[Any] | None = PIPE, - check: bool = True, - cwd: str | bytes | PathLike[str] | None = None, - env: Mapping[str, str] | None = None, - start_new_session: bool = False, -) -> CompletedProcess[bytes]: - """ - Run an external command in a subprocess and wait until it completes. - - .. 
seealso:: :func:`subprocess.run` - - :param command: either a string to pass to the shell, or an iterable of strings containing the - executable name or path and its arguments - :param input: bytes passed to the standard input of the subprocess - :param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL` - :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or - :data:`subprocess.STDOUT` - :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process - terminates with a return code other than 0 - :param cwd: If not ``None``, change the working directory to this before running the command - :param env: if not ``None``, this mapping replaces the inherited environment variables from the - parent process - :param start_new_session: if ``true`` the setsid() system call will be made in the child - process prior to the execution of the subprocess. (POSIX only) - :return: an object representing the completed process - :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a - nonzero return code - - """ - - async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: - buffer = BytesIO() - async for chunk in stream: - buffer.write(chunk) - - stream_contents[index] = buffer.getvalue() - - async with await open_process( - command, - stdin=PIPE if input else DEVNULL, - stdout=stdout, - stderr=stderr, - cwd=cwd, - env=env, - start_new_session=start_new_session, - ) as process: - stream_contents: list[bytes | None] = [None, None] - try: - async with create_task_group() as tg: - if process.stdout: - tg.start_soon(drain_stream, process.stdout, 0) - if process.stderr: - tg.start_soon(drain_stream, process.stderr, 1) - if process.stdin and input: - await process.stdin.send(input) - await process.stdin.aclose() - - await process.wait() - except BaseException: - process.kill() - raise - - output, errors = stream_contents - if check and process.returncode != 0: - raise CalledProcessError(cast(int, process.returncode), command, output, errors) - - return CompletedProcess(command, cast(int, process.returncode), output, errors) - - -async def open_process( - command: str | bytes | Sequence[str | bytes], - *, - stdin: int | IO[Any] | None = PIPE, - stdout: int | IO[Any] | None = PIPE, - stderr: int | IO[Any] | None = PIPE, - cwd: str | bytes | PathLike[str] | None = None, - env: Mapping[str, str] | None = None, - start_new_session: bool = False, -) -> Process: - """ - Start an external command in a subprocess. - - .. seealso:: :class:`subprocess.Popen` - - :param command: either a string to pass to the shell, or an iterable of strings containing the - executable name or path and its arguments - :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a - file-like object, or ``None`` - :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, - a file-like object, or ``None`` - :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, - :data:`subprocess.STDOUT`, a file-like object, or ``None`` - :param cwd: If not ``None``, the working directory is changed before executing - :param env: If env is not ``None``, it must be a mapping that defines the environment - variables for the new process - :param start_new_session: if ``true`` the setsid() system call will be made in the child - process prior to the execution of the subprocess. 
(POSIX only) - :return: an asynchronous process object - - """ - shell = isinstance(command, str) - return await get_asynclib().open_process( - command, - shell=shell, - stdin=stdin, - stdout=stdout, - stderr=stderr, - cwd=cwd, - env=env, - start_new_session=start_new_session, - ) diff --git a/server/venv/Lib/site-packages/anyio/_core/_synchronization.py b/server/venv/Lib/site-packages/anyio/_core/_synchronization.py deleted file mode 100644 index 783570c..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_synchronization.py +++ /dev/null @@ -1,596 +0,0 @@ -from __future__ import annotations - -from collections import deque -from dataclasses import dataclass -from types import TracebackType -from warnings import warn - -from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled -from ._compat import DeprecatedAwaitable -from ._eventloop import get_asynclib -from ._exceptions import BusyResourceError, WouldBlock -from ._tasks import CancelScope -from ._testing import TaskInfo, get_current_task - - -@dataclass(frozen=True) -class EventStatistics: - """ - :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait` - """ - - tasks_waiting: int - - -@dataclass(frozen=True) -class CapacityLimiterStatistics: - """ - :ivar int borrowed_tokens: number of tokens currently borrowed by tasks - :ivar float total_tokens: total number of available tokens - :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from this - limiter - :ivar int tasks_waiting: number of tasks waiting on :meth:`~.CapacityLimiter.acquire` or - :meth:`~.CapacityLimiter.acquire_on_behalf_of` - """ - - borrowed_tokens: int - total_tokens: float - borrowers: tuple[object, ...] - tasks_waiting: int - - -@dataclass(frozen=True) -class LockStatistics: - """ - :ivar bool locked: flag indicating if this lock is locked or not - :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the lock is not - held by any task) - :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` - """ - - locked: bool - owner: TaskInfo | None - tasks_waiting: int - - -@dataclass(frozen=True) -class ConditionStatistics: - """ - :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` - :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying :class:`~.Lock` - """ - - tasks_waiting: int - lock_statistics: LockStatistics - - -@dataclass(frozen=True) -class SemaphoreStatistics: - """ - :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` - - """ - - tasks_waiting: int - - -class Event: - def __new__(cls) -> Event: - return get_asynclib().Event() - - def set(self) -> DeprecatedAwaitable: - """Set the flag, notifying all listeners.""" - raise NotImplementedError - - def is_set(self) -> bool: - """Return ``True`` if the flag is set, ``False`` if not.""" - raise NotImplementedError - - async def wait(self) -> None: - """ - Wait until the flag has been set. - - If the flag has already been set when this method is called, it returns immediately. 
- - """ - raise NotImplementedError - - def statistics(self) -> EventStatistics: - """Return statistics about the current state of this event.""" - raise NotImplementedError - - -class Lock: - _owner_task: TaskInfo | None = None - - def __init__(self) -> None: - self._waiters: deque[tuple[TaskInfo, Event]] = deque() - - async def __aenter__(self) -> None: - await self.acquire() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - async def acquire(self) -> None: - """Acquire the lock.""" - await checkpoint_if_cancelled() - try: - self.acquire_nowait() - except WouldBlock: - task = get_current_task() - event = Event() - token = task, event - self._waiters.append(token) - try: - await event.wait() - except BaseException: - if not event.is_set(): - self._waiters.remove(token) - elif self._owner_task == task: - self.release() - - raise - - assert self._owner_task == task - else: - try: - await cancel_shielded_checkpoint() - except BaseException: - self.release() - raise - - def acquire_nowait(self) -> None: - """ - Acquire the lock, without blocking. - - :raises ~anyio.WouldBlock: if the operation would block - - """ - task = get_current_task() - if self._owner_task == task: - raise RuntimeError("Attempted to acquire an already held Lock") - - if self._owner_task is not None: - raise WouldBlock - - self._owner_task = task - - def release(self) -> DeprecatedAwaitable: - """Release the lock.""" - if self._owner_task != get_current_task(): - raise RuntimeError("The current task is not holding this lock") - - if self._waiters: - self._owner_task, event = self._waiters.popleft() - event.set() - else: - del self._owner_task - - return DeprecatedAwaitable(self.release) - - def locked(self) -> bool: - """Return True if the lock is currently held.""" - return self._owner_task is not None - - def statistics(self) -> LockStatistics: - """ - Return statistics about the current state of this lock. - - .. versionadded:: 3.0 - """ - return LockStatistics(self.locked(), self._owner_task, len(self._waiters)) - - -class Condition: - _owner_task: TaskInfo | None = None - - def __init__(self, lock: Lock | None = None): - self._lock = lock or Lock() - self._waiters: deque[Event] = deque() - - async def __aenter__(self) -> None: - await self.acquire() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - def _check_acquired(self) -> None: - if self._owner_task != get_current_task(): - raise RuntimeError("The current task is not holding the underlying lock") - - async def acquire(self) -> None: - """Acquire the underlying lock.""" - await self._lock.acquire() - self._owner_task = get_current_task() - - def acquire_nowait(self) -> None: - """ - Acquire the underlying lock, without blocking. 
- - :raises ~anyio.WouldBlock: if the operation would block - - """ - self._lock.acquire_nowait() - self._owner_task = get_current_task() - - def release(self) -> DeprecatedAwaitable: - """Release the underlying lock.""" - self._lock.release() - return DeprecatedAwaitable(self.release) - - def locked(self) -> bool: - """Return True if the lock is set.""" - return self._lock.locked() - - def notify(self, n: int = 1) -> None: - """Notify exactly n listeners.""" - self._check_acquired() - for _ in range(n): - try: - event = self._waiters.popleft() - except IndexError: - break - - event.set() - - def notify_all(self) -> None: - """Notify all the listeners.""" - self._check_acquired() - for event in self._waiters: - event.set() - - self._waiters.clear() - - async def wait(self) -> None: - """Wait for a notification.""" - await checkpoint() - event = Event() - self._waiters.append(event) - self.release() - try: - await event.wait() - except BaseException: - if not event.is_set(): - self._waiters.remove(event) - - raise - finally: - with CancelScope(shield=True): - await self.acquire() - - def statistics(self) -> ConditionStatistics: - """ - Return statistics about the current state of this condition. - - .. versionadded:: 3.0 - """ - return ConditionStatistics(len(self._waiters), self._lock.statistics()) - - -class Semaphore: - def __init__(self, initial_value: int, *, max_value: int | None = None): - if not isinstance(initial_value, int): - raise TypeError("initial_value must be an integer") - if initial_value < 0: - raise ValueError("initial_value must be >= 0") - if max_value is not None: - if not isinstance(max_value, int): - raise TypeError("max_value must be an integer or None") - if max_value < initial_value: - raise ValueError( - "max_value must be equal to or higher than initial_value" - ) - - self._value = initial_value - self._max_value = max_value - self._waiters: deque[Event] = deque() - - async def __aenter__(self) -> Semaphore: - await self.acquire() - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - async def acquire(self) -> None: - """Decrement the semaphore value, blocking if necessary.""" - await checkpoint_if_cancelled() - try: - self.acquire_nowait() - except WouldBlock: - event = Event() - self._waiters.append(event) - try: - await event.wait() - except BaseException: - if not event.is_set(): - self._waiters.remove(event) - else: - self.release() - - raise - else: - try: - await cancel_shielded_checkpoint() - except BaseException: - self.release() - raise - - def acquire_nowait(self) -> None: - """ - Acquire the underlying lock, without blocking. - - :raises ~anyio.WouldBlock: if the operation would block - - """ - if self._value == 0: - raise WouldBlock - - self._value -= 1 - - def release(self) -> DeprecatedAwaitable: - """Increment the semaphore value.""" - if self._max_value is not None and self._value == self._max_value: - raise ValueError("semaphore released too many times") - - if self._waiters: - self._waiters.popleft().set() - else: - self._value += 1 - - return DeprecatedAwaitable(self.release) - - @property - def value(self) -> int: - """The current value of the semaphore.""" - return self._value - - @property - def max_value(self) -> int | None: - """The maximum value of the semaphore.""" - return self._max_value - - def statistics(self) -> SemaphoreStatistics: - """ - Return statistics about the current state of this semaphore. 
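A minimal usage sketch for ``Semaphore`` (illustrative, assuming the public anyio API)::

    import anyio

    async def worker(num: int, sem: anyio.Semaphore) -> None:
        async with sem:               # value decremented on entry, restored on exit
            print(f"worker {num} is inside")
            await anyio.sleep(0.1)

    async def main() -> None:
        sem = anyio.Semaphore(2)      # at most two concurrent holders
        async with anyio.create_task_group() as tg:
            for num in range(4):
                tg.start_soon(worker, num, sem)

    anyio.run(main)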
- - .. versionadded:: 3.0 - """ - return SemaphoreStatistics(len(self._waiters)) - - -class CapacityLimiter: - def __new__(cls, total_tokens: float) -> CapacityLimiter: - return get_asynclib().CapacityLimiter(total_tokens) - - async def __aenter__(self) -> None: - raise NotImplementedError - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - raise NotImplementedError - - @property - def total_tokens(self) -> float: - """ - The total number of tokens available for borrowing. - - This is a read-write property. If the total number of tokens is increased, the - proportionate number of tasks waiting on this limiter will be granted their tokens. - - .. versionchanged:: 3.0 - The property is now writable. - - """ - raise NotImplementedError - - @total_tokens.setter - def total_tokens(self, value: float) -> None: - raise NotImplementedError - - async def set_total_tokens(self, value: float) -> None: - warn( - "CapacityLimiter.set_total_tokens has been deprecated. Set the value of the" - '"total_tokens" attribute directly.', - DeprecationWarning, - ) - self.total_tokens = value - - @property - def borrowed_tokens(self) -> int: - """The number of tokens that have currently been borrowed.""" - raise NotImplementedError - - @property - def available_tokens(self) -> float: - """The number of tokens currently available to be borrowed""" - raise NotImplementedError - - def acquire_nowait(self) -> DeprecatedAwaitable: - """ - Acquire a token for the current task without waiting for one to become available. - - :raises ~anyio.WouldBlock: if there are no tokens available for borrowing - - """ - raise NotImplementedError - - def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable: - """ - Acquire a token without waiting for one to become available. - - :param borrower: the entity borrowing a token - :raises ~anyio.WouldBlock: if there are no tokens available for borrowing - - """ - raise NotImplementedError - - async def acquire(self) -> None: - """ - Acquire a token for the current task, waiting if necessary for one to become available. - - """ - raise NotImplementedError - - async def acquire_on_behalf_of(self, borrower: object) -> None: - """ - Acquire a token, waiting if necessary for one to become available. - - :param borrower: the entity borrowing a token - - """ - raise NotImplementedError - - def release(self) -> None: - """ - Release the token held by the current task. - :raises RuntimeError: if the current task has not borrowed a token from this limiter. - - """ - raise NotImplementedError - - def release_on_behalf_of(self, borrower: object) -> None: - """ - Release the token held by the given borrower. - - :raises RuntimeError: if the borrower has not borrowed a token from this limiter. - - """ - raise NotImplementedError - - def statistics(self) -> CapacityLimiterStatistics: - """ - Return statistics about the current state of this limiter. - - .. versionadded:: 3.0 - - """ - raise NotImplementedError - - -def create_lock() -> Lock: - """ - Create an asynchronous lock. - - :return: a lock object - - .. deprecated:: 3.0 - Use :class:`~Lock` directly. - - """ - warn("create_lock() is deprecated -- use Lock() directly", DeprecationWarning) - return Lock() - - -def create_condition(lock: Lock | None = None) -> Condition: - """ - Create an asynchronous condition. - - :param lock: the lock to base the condition object on - :return: a condition object - - .. 
deprecated:: 3.0 - Use :class:`~Condition` directly. - - """ - warn( - "create_condition() is deprecated -- use Condition() directly", - DeprecationWarning, - ) - return Condition(lock=lock) - - -def create_event() -> Event: - """ - Create an asynchronous event object. - - :return: an event object - - .. deprecated:: 3.0 - Use :class:`~Event` directly. - - """ - warn("create_event() is deprecated -- use Event() directly", DeprecationWarning) - return get_asynclib().Event() - - -def create_semaphore(value: int, *, max_value: int | None = None) -> Semaphore: - """ - Create an asynchronous semaphore. - - :param value: the semaphore's initial value - :param max_value: if set, makes this a "bounded" semaphore that raises :exc:`ValueError` if the - semaphore's value would exceed this number - :return: a semaphore object - - .. deprecated:: 3.0 - Use :class:`~Semaphore` directly. - - """ - warn( - "create_semaphore() is deprecated -- use Semaphore() directly", - DeprecationWarning, - ) - return Semaphore(value, max_value=max_value) - - -def create_capacity_limiter(total_tokens: float) -> CapacityLimiter: - """ - Create a capacity limiter. - - :param total_tokens: the total number of tokens available for borrowing (can be an integer or - :data:`math.inf`) - :return: a capacity limiter object - - .. deprecated:: 3.0 - Use :class:`~CapacityLimiter` directly. - - """ - warn( - "create_capacity_limiter() is deprecated -- use CapacityLimiter() directly", - DeprecationWarning, - ) - return get_asynclib().CapacityLimiter(total_tokens) - - -class ResourceGuard: - __slots__ = "action", "_guarded" - - def __init__(self, action: str): - self.action = action - self._guarded = False - - def __enter__(self) -> None: - if self._guarded: - raise BusyResourceError(self.action) - - self._guarded = True - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - self._guarded = False - return None diff --git a/server/venv/Lib/site-packages/anyio/_core/_tasks.py b/server/venv/Lib/site-packages/anyio/_core/_tasks.py deleted file mode 100644 index e9d9c2b..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_tasks.py +++ /dev/null @@ -1,180 +0,0 @@ -from __future__ import annotations - -import math -from types import TracebackType -from warnings import warn - -from ..abc._tasks import TaskGroup, TaskStatus -from ._compat import ( - DeprecatedAsyncContextManager, - DeprecatedAwaitable, - DeprecatedAwaitableFloat, -) -from ._eventloop import get_asynclib - - -class _IgnoredTaskStatus(TaskStatus[object]): - def started(self, value: object = None) -> None: - pass - - -TASK_STATUS_IGNORED = _IgnoredTaskStatus() - - -class CancelScope(DeprecatedAsyncContextManager["CancelScope"]): - """ - Wraps a unit of work that can be made separately cancellable. - - :param deadline: The time (clock value) when this scope is cancelled automatically - :param shield: ``True`` to shield the cancel scope from external cancellation - """ - - def __new__( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> CancelScope: - return get_asynclib().CancelScope(shield=shield, deadline=deadline) - - def cancel(self) -> DeprecatedAwaitable: - """Cancel this scope immediately.""" - raise NotImplementedError - - @property - def deadline(self) -> float: - """ - The time (clock value) when this scope is cancelled automatically. - - Will be ``float('inf')`` if no timeout has been set. 
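A minimal sketch of direct ``CancelScope`` usage (illustrative only)::

    import anyio

    async def main() -> None:
        with anyio.CancelScope() as scope:
            scope.cancel()            # cancels all work inside the scope
            await anyio.sleep(1)      # never completes
        print("cancel_called =", scope.cancel_called)  # True

    anyio.run(main)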
- - """ - raise NotImplementedError - - @deadline.setter - def deadline(self, value: float) -> None: - raise NotImplementedError - - @property - def cancel_called(self) -> bool: - """``True`` if :meth:`cancel` has been called.""" - raise NotImplementedError - - @property - def shield(self) -> bool: - """ - ``True`` if this scope is shielded from external cancellation. - - While a scope is shielded, it will not receive cancellations from outside. - - """ - raise NotImplementedError - - @shield.setter - def shield(self, value: bool) -> None: - raise NotImplementedError - - def __enter__(self) -> CancelScope: - raise NotImplementedError - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - raise NotImplementedError - - -def open_cancel_scope(*, shield: bool = False) -> CancelScope: - """ - Open a cancel scope. - - :param shield: ``True`` to shield the cancel scope from external cancellation - :return: a cancel scope - - .. deprecated:: 3.0 - Use :class:`~CancelScope` directly. - - """ - warn( - "open_cancel_scope() is deprecated -- use CancelScope() directly", - DeprecationWarning, - ) - return get_asynclib().CancelScope(shield=shield) - - -class FailAfterContextManager(DeprecatedAsyncContextManager[CancelScope]): - def __init__(self, cancel_scope: CancelScope): - self._cancel_scope = cancel_scope - - def __enter__(self) -> CancelScope: - return self._cancel_scope.__enter__() - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - retval = self._cancel_scope.__exit__(exc_type, exc_val, exc_tb) - if self._cancel_scope.cancel_called: - raise TimeoutError - - return retval - - -def fail_after(delay: float | None, shield: bool = False) -> FailAfterContextManager: - """ - Create a context manager which raises a :class:`TimeoutError` if does not finish in time. - - :param delay: maximum allowed time (in seconds) before raising the exception, or ``None`` to - disable the timeout - :param shield: ``True`` to shield the cancel scope from external cancellation - :return: a context manager that yields a cancel scope - :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\] - - """ - deadline = ( - (get_asynclib().current_time() + delay) if delay is not None else math.inf - ) - cancel_scope = get_asynclib().CancelScope(deadline=deadline, shield=shield) - return FailAfterContextManager(cancel_scope) - - -def move_on_after(delay: float | None, shield: bool = False) -> CancelScope: - """ - Create a cancel scope with a deadline that expires after the given delay. - - :param delay: maximum allowed time (in seconds) before exiting the context block, or ``None`` - to disable the timeout - :param shield: ``True`` to shield the cancel scope from external cancellation - :return: a cancel scope - - """ - deadline = ( - (get_asynclib().current_time() + delay) if delay is not None else math.inf - ) - return get_asynclib().CancelScope(deadline=deadline, shield=shield) - - -def current_effective_deadline() -> DeprecatedAwaitableFloat: - """ - Return the nearest deadline among all the cancel scopes effective for the current task. 
- - :return: a clock value from the event loop's internal clock (or ``float('inf')`` if - there is no deadline in effect, or ``float('-inf')`` if the current scope has - been cancelled) - :rtype: float - - """ - return DeprecatedAwaitableFloat( - get_asynclib().current_effective_deadline(), current_effective_deadline - ) - - -def create_task_group() -> TaskGroup: - """ - Create a task group. - - :return: a task group - - """ - return get_asynclib().TaskGroup() diff --git a/server/venv/Lib/site-packages/anyio/_core/_testing.py b/server/venv/Lib/site-packages/anyio/_core/_testing.py deleted file mode 100644 index c8191b3..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_testing.py +++ /dev/null @@ -1,82 +0,0 @@ -from __future__ import annotations - -from typing import Any, Awaitable, Generator - -from ._compat import DeprecatedAwaitableList, _warn_deprecation -from ._eventloop import get_asynclib - - -class TaskInfo: - """ - Represents an asynchronous task. - - :ivar int id: the unique identifier of the task - :ivar parent_id: the identifier of the parent task, if any - :vartype parent_id: Optional[int] - :ivar str name: the description of the task (if any) - :ivar ~collections.abc.Coroutine coro: the coroutine object of the task - """ - - __slots__ = "_name", "id", "parent_id", "name", "coro" - - def __init__( - self, - id: int, - parent_id: int | None, - name: str | None, - coro: Generator[Any, Any, Any] | Awaitable[Any], - ): - func = get_current_task - self._name = f"{func.__module__}.{func.__qualname__}" - self.id: int = id - self.parent_id: int | None = parent_id - self.name: str | None = name - self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro - - def __eq__(self, other: object) -> bool: - if isinstance(other, TaskInfo): - return self.id == other.id - - return NotImplemented - - def __hash__(self) -> int: - return hash(self.id) - - def __repr__(self) -> str: - return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" - - def __await__(self) -> Generator[None, None, TaskInfo]: - _warn_deprecation(self) - if False: - yield - - return self - - def _unwrap(self) -> TaskInfo: - return self - - -def get_current_task() -> TaskInfo: - """ - Return the current task. - - :return: a representation of the current task - - """ - return get_asynclib().get_current_task() - - -def get_running_tasks() -> DeprecatedAwaitableList[TaskInfo]: - """ - Return a list of running tasks in the current event loop. 
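A minimal introspection sketch using the helpers above (illustrative only)::

    import anyio
    from anyio import get_current_task, get_running_tasks

    async def main() -> None:
        print("current task:", get_current_task().name)
        for task in get_running_tasks():       # TaskInfo objects
            print(task.id, task.name)

    anyio.run(main)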
- - :return: a list of task info objects - - """ - tasks = get_asynclib().get_running_tasks() - return DeprecatedAwaitableList(tasks, func=get_running_tasks) - - -async def wait_all_tasks_blocked() -> None: - """Wait until all other tasks are waiting for something.""" - await get_asynclib().wait_all_tasks_blocked() diff --git a/server/venv/Lib/site-packages/anyio/_core/_typedattr.py b/server/venv/Lib/site-packages/anyio/_core/_typedattr.py deleted file mode 100644 index bf9202e..0000000 --- a/server/venv/Lib/site-packages/anyio/_core/_typedattr.py +++ /dev/null @@ -1,83 +0,0 @@ -from __future__ import annotations - -import sys -from typing import Any, Callable, Mapping, TypeVar, overload - -from ._exceptions import TypedAttributeLookupError - -if sys.version_info >= (3, 8): - from typing import final -else: - from typing_extensions import final - -T_Attr = TypeVar("T_Attr") -T_Default = TypeVar("T_Default") -undefined = object() - - -def typed_attribute() -> Any: - """Return a unique object, used to mark typed attributes.""" - return object() - - -class TypedAttributeSet: - """ - Superclass for typed attribute collections. - - Checks that every public attribute of every subclass has a type annotation. - """ - - def __init_subclass__(cls) -> None: - annotations: dict[str, Any] = getattr(cls, "__annotations__", {}) - for attrname in dir(cls): - if not attrname.startswith("_") and attrname not in annotations: - raise TypeError( - f"Attribute {attrname!r} is missing its type annotation" - ) - - super().__init_subclass__() - - -class TypedAttributeProvider: - """Base class for classes that wish to provide typed extra attributes.""" - - @property - def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: - """ - A mapping of the extra attributes to callables that return the corresponding values. - - If the provider wraps another provider, the attributes from that wrapper should also be - included in the returned mapping (but the wrapper may override the callables from the - wrapped instance). - - """ - return {} - - @overload - def extra(self, attribute: T_Attr) -> T_Attr: - ... - - @overload - def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: - ... - - @final - def extra(self, attribute: Any, default: object = undefined) -> object: - """ - extra(attribute, default=undefined) - - Return the value of the given typed extra attribute. 
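A sketch of how a provider exposes typed attributes (the ``MyAttributes`` and ``MyProvider`` names are hypothetical)::

    from anyio import TypedAttributeProvider, TypedAttributeSet, typed_attribute

    class MyAttributes(TypedAttributeSet):
        #: a sample typed attribute
        label: str = typed_attribute()

    class MyProvider(TypedAttributeProvider):
        @property
        def extra_attributes(self):
            return {MyAttributes.label: lambda: "hello"}

    provider = MyProvider()
    print(provider.extra(MyAttributes.label))              # "hello"
    print(provider.extra(MyAttributes.label, "fallback"))  # default used only if missing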
- - :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to look for - :param default: the value that should be returned if no value is found for the attribute - :raises ~anyio.TypedAttributeLookupError: if the search failed and no default value was - given - - """ - try: - return self.extra_attributes[attribute]() - except KeyError: - if default is undefined: - raise TypedAttributeLookupError("Attribute not found") from None - else: - return default diff --git a/server/venv/Lib/site-packages/anyio/abc/__init__.py b/server/venv/Lib/site-packages/anyio/abc/__init__.py deleted file mode 100644 index 72c34e5..0000000 --- a/server/venv/Lib/site-packages/anyio/abc/__init__.py +++ /dev/null @@ -1,90 +0,0 @@ -from __future__ import annotations - -__all__ = ( - "AsyncResource", - "IPAddressType", - "IPSockAddrType", - "SocketAttribute", - "SocketStream", - "SocketListener", - "UDPSocket", - "UNIXSocketStream", - "UDPPacketType", - "ConnectedUDPSocket", - "UnreliableObjectReceiveStream", - "UnreliableObjectSendStream", - "UnreliableObjectStream", - "ObjectReceiveStream", - "ObjectSendStream", - "ObjectStream", - "ByteReceiveStream", - "ByteSendStream", - "ByteStream", - "AnyUnreliableByteReceiveStream", - "AnyUnreliableByteSendStream", - "AnyUnreliableByteStream", - "AnyByteReceiveStream", - "AnyByteSendStream", - "AnyByteStream", - "Listener", - "Process", - "Event", - "Condition", - "Lock", - "Semaphore", - "CapacityLimiter", - "CancelScope", - "TaskGroup", - "TaskStatus", - "TestRunner", - "BlockingPortal", -) - -from typing import Any - -from ._resources import AsyncResource -from ._sockets import ( - ConnectedUDPSocket, - IPAddressType, - IPSockAddrType, - SocketAttribute, - SocketListener, - SocketStream, - UDPPacketType, - UDPSocket, - UNIXSocketStream, -) -from ._streams import ( - AnyByteReceiveStream, - AnyByteSendStream, - AnyByteStream, - AnyUnreliableByteReceiveStream, - AnyUnreliableByteSendStream, - AnyUnreliableByteStream, - ByteReceiveStream, - ByteSendStream, - ByteStream, - Listener, - ObjectReceiveStream, - ObjectSendStream, - ObjectStream, - UnreliableObjectReceiveStream, - UnreliableObjectSendStream, - UnreliableObjectStream, -) -from ._subprocesses import Process -from ._tasks import TaskGroup, TaskStatus -from ._testing import TestRunner - -# Re-exported here, for backwards compatibility -# isort: off -from .._core._synchronization import CapacityLimiter, Condition, Event, Lock, Semaphore -from .._core._tasks import CancelScope -from ..from_thread import BlockingPortal - -# Re-export imports so they look like they live directly in this package -key: str -value: Any -for key, value in list(locals().items()): - if getattr(value, "__module__", "").startswith("anyio.abc."): - value.__module__ = __name__ diff --git a/server/venv/Lib/site-packages/anyio/abc/_resources.py b/server/venv/Lib/site-packages/anyio/abc/_resources.py deleted file mode 100644 index e0a283f..0000000 --- a/server/venv/Lib/site-packages/anyio/abc/_resources.py +++ /dev/null @@ -1,31 +0,0 @@ -from __future__ import annotations - -from abc import ABCMeta, abstractmethod -from types import TracebackType -from typing import TypeVar - -T = TypeVar("T") - - -class AsyncResource(metaclass=ABCMeta): - """ - Abstract base class for all closeable asynchronous resources. - - Works as an asynchronous context manager which returns the instance itself on enter, and calls - :meth:`aclose` on exit. 
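A minimal subclass sketch (the ``MyResource`` name is hypothetical)::

    import anyio
    from anyio.abc import AsyncResource

    class MyResource(AsyncResource):
        async def aclose(self) -> None:
            print("resource closed")

    async def main() -> None:
        async with MyResource() as resource:   # __aenter__ returns the instance
            print("using", resource)           # aclose() runs on exit

    anyio.run(main)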
- """ - - async def __aenter__(self: T) -> T: - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - await self.aclose() - - @abstractmethod - async def aclose(self) -> None: - """Close the resource.""" diff --git a/server/venv/Lib/site-packages/anyio/abc/_sockets.py b/server/venv/Lib/site-packages/anyio/abc/_sockets.py deleted file mode 100644 index 6aac5f7..0000000 --- a/server/venv/Lib/site-packages/anyio/abc/_sockets.py +++ /dev/null @@ -1,160 +0,0 @@ -from __future__ import annotations - -import socket -from abc import abstractmethod -from contextlib import AsyncExitStack -from io import IOBase -from ipaddress import IPv4Address, IPv6Address -from socket import AddressFamily -from typing import ( - Any, - Callable, - Collection, - Mapping, - Tuple, - TypeVar, - Union, -) - -from .._core._tasks import create_task_group -from .._core._typedattr import ( - TypedAttributeProvider, - TypedAttributeSet, - typed_attribute, -) -from ._streams import ByteStream, Listener, UnreliableObjectStream -from ._tasks import TaskGroup - -IPAddressType = Union[str, IPv4Address, IPv6Address] -IPSockAddrType = Tuple[str, int] -SockAddrType = Union[IPSockAddrType, str] -UDPPacketType = Tuple[bytes, IPSockAddrType] -T_Retval = TypeVar("T_Retval") - - -class SocketAttribute(TypedAttributeSet): - #: the address family of the underlying socket - family: AddressFamily = typed_attribute() - #: the local socket address of the underlying socket - local_address: SockAddrType = typed_attribute() - #: for IP addresses, the local port the underlying socket is bound to - local_port: int = typed_attribute() - #: the underlying stdlib socket object - raw_socket: socket.socket = typed_attribute() - #: the remote address the underlying socket is connected to - remote_address: SockAddrType = typed_attribute() - #: for IP addresses, the remote port the underlying socket is connected to - remote_port: int = typed_attribute() - - -class _SocketProvider(TypedAttributeProvider): - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - from .._core._sockets import convert_ipv6_sockaddr as convert - - attributes: dict[Any, Callable[[], Any]] = { - SocketAttribute.family: lambda: self._raw_socket.family, - SocketAttribute.local_address: lambda: convert( - self._raw_socket.getsockname() - ), - SocketAttribute.raw_socket: lambda: self._raw_socket, - } - try: - peername: tuple[str, int] | None = convert(self._raw_socket.getpeername()) - except OSError: - peername = None - - # Provide the remote address for connected sockets - if peername is not None: - attributes[SocketAttribute.remote_address] = lambda: peername - - # Provide local and remote ports for IP based sockets - if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): - attributes[ - SocketAttribute.local_port - ] = lambda: self._raw_socket.getsockname()[1] - if peername is not None: - remote_port = peername[1] - attributes[SocketAttribute.remote_port] = lambda: remote_port - - return attributes - - @property - @abstractmethod - def _raw_socket(self) -> socket.socket: - pass - - -class SocketStream(ByteStream, _SocketProvider): - """ - Transports bytes over a socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. 
- """ - - -class UNIXSocketStream(SocketStream): - @abstractmethod - async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: - """ - Send file descriptors along with a message to the peer. - - :param message: a non-empty bytestring - :param fds: a collection of files (either numeric file descriptors or open file or socket - objects) - """ - - @abstractmethod - async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: - """ - Receive file descriptors along with a message from the peer. - - :param msglen: length of the message to expect from the peer - :param maxfds: maximum number of file descriptors to expect from the peer - :return: a tuple of (message, file descriptors) - """ - - -class SocketListener(Listener[SocketStream], _SocketProvider): - """ - Listens to incoming socket connections. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - @abstractmethod - async def accept(self) -> SocketStream: - """Accept an incoming connection.""" - - async def serve( - self, - handler: Callable[[SocketStream], Any], - task_group: TaskGroup | None = None, - ) -> None: - async with AsyncExitStack() as exit_stack: - if task_group is None: - task_group = await exit_stack.enter_async_context(create_task_group()) - - while True: - stream = await self.accept() - task_group.start_soon(handler, stream) - - -class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider): - """ - Represents an unconnected UDP socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - async def sendto(self, data: bytes, host: str, port: int) -> None: - """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))).""" - return await self.send((data, (host, port))) - - -class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider): - """ - Represents an connected UDP socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ diff --git a/server/venv/Lib/site-packages/anyio/abc/_streams.py b/server/venv/Lib/site-packages/anyio/abc/_streams.py deleted file mode 100644 index 4fa7ccc..0000000 --- a/server/venv/Lib/site-packages/anyio/abc/_streams.py +++ /dev/null @@ -1,203 +0,0 @@ -from __future__ import annotations - -from abc import abstractmethod -from typing import Any, Callable, Generic, TypeVar, Union - -from .._core._exceptions import EndOfStream -from .._core._typedattr import TypedAttributeProvider -from ._resources import AsyncResource -from ._tasks import TaskGroup - -T_Item = TypeVar("T_Item") -T_co = TypeVar("T_co", covariant=True) -T_contra = TypeVar("T_contra", contravariant=True) - - -class UnreliableObjectReceiveStream( - Generic[T_co], AsyncResource, TypedAttributeProvider -): - """ - An interface for receiving objects. - - This interface makes no guarantees that the received messages arrive in the order in which they - were sent, or that no messages are missed. - - Asynchronously iterating over objects of this type will yield objects matching the given type - parameter. - """ - - def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]: - return self - - async def __anext__(self) -> T_co: - try: - return await self.receive() - except EndOfStream: - raise StopAsyncIteration - - @abstractmethod - async def receive(self) -> T_co: - """ - Receive the next item. 
- - :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly - closed - :raises ~anyio.EndOfStream: if this stream has been closed from the other end - :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable - due to external causes - """ - - -class UnreliableObjectSendStream( - Generic[T_contra], AsyncResource, TypedAttributeProvider -): - """ - An interface for sending objects. - - This interface makes no guarantees that the messages sent will reach the recipient(s) in the - same order in which they were sent, or at all. - """ - - @abstractmethod - async def send(self, item: T_contra) -> None: - """ - Send an item to the peer(s). - - :param item: the item to send - :raises ~anyio.ClosedResourceError: if the send stream has been explicitly - closed - :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable - due to external causes - """ - - -class UnreliableObjectStream( - UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] -): - """ - A bidirectional message stream which does not guarantee the order or reliability of message - delivery. - """ - - -class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]): - """ - A receive message stream which guarantees that messages are received in the same order in - which they were sent, and that no messages are missed. - """ - - -class ObjectSendStream(UnreliableObjectSendStream[T_contra]): - """ - A send message stream which guarantees that messages are delivered in the same order in which - they were sent, without missing any messages in the middle. - """ - - -class ObjectStream( - ObjectReceiveStream[T_Item], - ObjectSendStream[T_Item], - UnreliableObjectStream[T_Item], -): - """ - A bidirectional message stream which guarantees the order and reliability of message delivery. - """ - - @abstractmethod - async def send_eof(self) -> None: - """ - Send an end-of-file indication to the peer. - - You should not try to send any further data to this stream after calling this method. - This method is idempotent (does nothing on successive calls). - """ - - -class ByteReceiveStream(AsyncResource, TypedAttributeProvider): - """ - An interface for receiving bytes from a single peer. - - Iterating this byte stream will yield a byte string of arbitrary length, but no more than - 65536 bytes. - """ - - def __aiter__(self) -> ByteReceiveStream: - return self - - async def __anext__(self) -> bytes: - try: - return await self.receive() - except EndOfStream: - raise StopAsyncIteration - - @abstractmethod - async def receive(self, max_bytes: int = 65536) -> bytes: - """ - Receive at most ``max_bytes`` bytes from the peer. - - .. note:: Implementors of this interface should not return an empty :class:`bytes` object, - and users should ignore them. - - :param max_bytes: maximum number of bytes to receive - :return: the received bytes - :raises ~anyio.EndOfStream: if this stream has been closed from the other end - """ - - -class ByteSendStream(AsyncResource, TypedAttributeProvider): - """An interface for sending bytes to a single peer.""" - - @abstractmethod - async def send(self, item: bytes) -> None: - """ - Send the given bytes to the peer. - - :param item: the bytes to send - """ - - -class ByteStream(ByteReceiveStream, ByteSendStream): - """A bidirectional byte stream.""" - - @abstractmethod - async def send_eof(self) -> None: - """ - Send an end-of-file indication to the peer. - - You should not try to send any further data to this stream after calling this method. 
- This method is idempotent (does nothing on successive calls). - """ - - -#: Type alias for all unreliable bytes-oriented receive streams. -AnyUnreliableByteReceiveStream = Union[ - UnreliableObjectReceiveStream[bytes], ByteReceiveStream -] -#: Type alias for all unreliable bytes-oriented send streams. -AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] -#: Type alias for all unreliable bytes-oriented streams. -AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] -#: Type alias for all bytes-oriented receive streams. -AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] -#: Type alias for all bytes-oriented send streams. -AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] -#: Type alias for all bytes-oriented streams. -AnyByteStream = Union[ObjectStream[bytes], ByteStream] - - -class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider): - """An interface for objects that let you accept incoming connections.""" - - @abstractmethod - async def serve( - self, - handler: Callable[[T_co], Any], - task_group: TaskGroup | None = None, - ) -> None: - """ - Accept incoming connections as they come in and start tasks to handle them. - - :param handler: a callable that will be used to handle each accepted connection - :param task_group: the task group that will be used to start tasks for handling each - accepted connection (if omitted, an ad-hoc task group will be created) - """ diff --git a/server/venv/Lib/site-packages/anyio/abc/_subprocesses.py b/server/venv/Lib/site-packages/anyio/abc/_subprocesses.py deleted file mode 100644 index 704b44a..0000000 --- a/server/venv/Lib/site-packages/anyio/abc/_subprocesses.py +++ /dev/null @@ -1,79 +0,0 @@ -from __future__ import annotations - -from abc import abstractmethod -from signal import Signals - -from ._resources import AsyncResource -from ._streams import ByteReceiveStream, ByteSendStream - - -class Process(AsyncResource): - """An asynchronous version of :class:`subprocess.Popen`.""" - - @abstractmethod - async def wait(self) -> int: - """ - Wait until the process exits. - - :return: the exit code of the process - """ - - @abstractmethod - def terminate(self) -> None: - """ - Terminates the process, gracefully if possible. - - On Windows, this calls ``TerminateProcess()``. - On POSIX systems, this sends ``SIGTERM`` to the process. - - .. seealso:: :meth:`subprocess.Popen.terminate` - """ - - @abstractmethod - def kill(self) -> None: - """ - Kills the process. - - On Windows, this calls ``TerminateProcess()``. - On POSIX systems, this sends ``SIGKILL`` to the process. - - .. seealso:: :meth:`subprocess.Popen.kill` - """ - - @abstractmethod - def send_signal(self, signal: Signals) -> None: - """ - Send a signal to the subprocess. - - .. seealso:: :meth:`subprocess.Popen.send_signal` - - :param signal: the signal number (e.g. :data:`signal.SIGHUP`) - """ - - @property - @abstractmethod - def pid(self) -> int: - """The process ID of the process.""" - - @property - @abstractmethod - def returncode(self) -> int | None: - """ - The return code of the process. If the process has not yet terminated, this will be - ``None``. 
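A short sketch tying this interface to :func:`run_process` (illustrative; assumes a POSIX ``echo`` binary on the path)::

    import anyio
    from anyio import run_process

    async def main() -> None:
        result = await run_process(["echo", "hello"])
        print(result.returncode)        # 0 once the process has exited
        print(result.stdout.decode())   # "hello\n"

    anyio.run(main)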
- """ - - @property - @abstractmethod - def stdin(self) -> ByteSendStream | None: - """The stream for the standard input of the process.""" - - @property - @abstractmethod - def stdout(self) -> ByteReceiveStream | None: - """The stream for the standard output of the process.""" - - @property - @abstractmethod - def stderr(self) -> ByteReceiveStream | None: - """The stream for the standard error output of the process.""" diff --git a/server/venv/Lib/site-packages/anyio/abc/_tasks.py b/server/venv/Lib/site-packages/anyio/abc/_tasks.py deleted file mode 100644 index e48d3c1..0000000 --- a/server/venv/Lib/site-packages/anyio/abc/_tasks.py +++ /dev/null @@ -1,119 +0,0 @@ -from __future__ import annotations - -import sys -from abc import ABCMeta, abstractmethod -from types import TracebackType -from typing import TYPE_CHECKING, Any, Awaitable, Callable, TypeVar, overload -from warnings import warn - -if sys.version_info >= (3, 8): - from typing import Protocol -else: - from typing_extensions import Protocol - -if TYPE_CHECKING: - from anyio._core._tasks import CancelScope - -T_Retval = TypeVar("T_Retval") -T_contra = TypeVar("T_contra", contravariant=True) - - -class TaskStatus(Protocol[T_contra]): - @overload - def started(self: TaskStatus[None]) -> None: - ... - - @overload - def started(self, value: T_contra) -> None: - ... - - def started(self, value: T_contra | None = None) -> None: - """ - Signal that the task has started. - - :param value: object passed back to the starter of the task - """ - - -class TaskGroup(metaclass=ABCMeta): - """ - Groups several asynchronous tasks together. - - :ivar cancel_scope: the cancel scope inherited by all child tasks - :vartype cancel_scope: CancelScope - """ - - cancel_scope: CancelScope - - async def spawn( - self, - func: Callable[..., Awaitable[Any]], - *args: object, - name: object = None, - ) -> None: - """ - Start a new task in this task group. - - :param func: a coroutine function - :param args: positional arguments to call the function with - :param name: name of the task, for the purposes of introspection and debugging - - .. deprecated:: 3.0 - Use :meth:`start_soon` instead. If your code needs AnyIO 2 compatibility, you - can keep using this until AnyIO 4. - - """ - warn( - 'spawn() is deprecated -- use start_soon() (without the "await") instead', - DeprecationWarning, - ) - self.start_soon(func, *args, name=name) - - @abstractmethod - def start_soon( - self, - func: Callable[..., Awaitable[Any]], - *args: object, - name: object = None, - ) -> None: - """ - Start a new task in this task group. - - :param func: a coroutine function - :param args: positional arguments to call the function with - :param name: name of the task, for the purposes of introspection and debugging - - .. versionadded:: 3.0 - """ - - @abstractmethod - async def start( - self, - func: Callable[..., Awaitable[Any]], - *args: object, - name: object = None, - ) -> Any: - """ - Start a new task and wait until it signals for readiness. - - :param func: a coroutine function - :param args: positional arguments to call the function with - :param name: name of the task, for the purposes of introspection and debugging - :return: the value passed to ``task_status.started()`` - :raises RuntimeError: if the task finishes without calling ``task_status.started()`` - - .. 
versionadded:: 3.0 - """ - - @abstractmethod - async def __aenter__(self) -> TaskGroup: - """Enter the task group context and allow starting new tasks.""" - - @abstractmethod - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - """Exit the task group context waiting for all tasks to finish.""" diff --git a/server/venv/Lib/site-packages/anyio/abc/_testing.py b/server/venv/Lib/site-packages/anyio/abc/_testing.py deleted file mode 100644 index ee2cff5..0000000 --- a/server/venv/Lib/site-packages/anyio/abc/_testing.py +++ /dev/null @@ -1,70 +0,0 @@ -from __future__ import annotations - -import types -from abc import ABCMeta, abstractmethod -from collections.abc import AsyncGenerator, Iterable -from typing import Any, Callable, Coroutine, TypeVar - -_T = TypeVar("_T") - - -class TestRunner(metaclass=ABCMeta): - """ - Encapsulates a running event loop. Every call made through this object will use the same event - loop. - """ - - def __enter__(self) -> TestRunner: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: types.TracebackType | None, - ) -> bool | None: - self.close() - return None - - @abstractmethod - def close(self) -> None: - """Close the event loop.""" - - @abstractmethod - def run_asyncgen_fixture( - self, - fixture_func: Callable[..., AsyncGenerator[_T, Any]], - kwargs: dict[str, Any], - ) -> Iterable[_T]: - """ - Run an async generator fixture. - - :param fixture_func: the fixture function - :param kwargs: keyword arguments to call the fixture function with - :return: an iterator yielding the value yielded from the async generator - """ - - @abstractmethod - def run_fixture( - self, - fixture_func: Callable[..., Coroutine[Any, Any, _T]], - kwargs: dict[str, Any], - ) -> _T: - """ - Run an async fixture. - - :param fixture_func: the fixture function - :param kwargs: keyword arguments to call the fixture function with - :return: the return value of the fixture function - """ - - @abstractmethod - def run_test( - self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] - ) -> None: - """ - Run an async test function. - - :param test_func: the test function - :param kwargs: keyword arguments to call the test function with - """ diff --git a/server/venv/Lib/site-packages/anyio/from_thread.py b/server/venv/Lib/site-packages/anyio/from_thread.py deleted file mode 100644 index 6b76861..0000000 --- a/server/venv/Lib/site-packages/anyio/from_thread.py +++ /dev/null @@ -1,500 +0,0 @@ -from __future__ import annotations - -import threading -from asyncio import iscoroutine -from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait -from contextlib import AbstractContextManager, contextmanager -from types import TracebackType -from typing import ( - Any, - AsyncContextManager, - Awaitable, - Callable, - ContextManager, - Generator, - Generic, - Iterable, - TypeVar, - cast, - overload, -) -from warnings import warn - -from ._core import _eventloop -from ._core._eventloop import get_asynclib, get_cancelled_exc_class, threadlocals -from ._core._synchronization import Event -from ._core._tasks import CancelScope, create_task_group -from .abc._tasks import TaskStatus - -T_Retval = TypeVar("T_Retval") -T_co = TypeVar("T_co") - - -def run(func: Callable[..., Awaitable[T_Retval]], *args: object) -> T_Retval: - """ - Call a coroutine function from a worker thread. 
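A minimal round-trip sketch (illustrative; uses the companion ``anyio.to_thread`` module to get into a worker thread first)::

    import anyio
    from anyio import from_thread, to_thread

    def blocking_work() -> str:
        # runs in a worker thread; hop back into the event loop from here
        from_thread.run(anyio.sleep, 0.1)
        return "done"

    async def main() -> None:
        print(await to_thread.run_sync(blocking_work))

    anyio.run(main)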
- - :param func: a coroutine function - :param args: positional arguments for the callable - :return: the return value of the coroutine function - - """ - try: - asynclib = threadlocals.current_async_module - except AttributeError: - raise RuntimeError("This function can only be run from an AnyIO worker thread") - - return asynclib.run_async_from_thread(func, *args) - - -def run_async_from_thread( - func: Callable[..., Awaitable[T_Retval]], *args: object -) -> T_Retval: - warn( - "run_async_from_thread() has been deprecated, use anyio.from_thread.run() instead", - DeprecationWarning, - ) - return run(func, *args) - - -def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval: - """ - Call a function in the event loop thread from a worker thread. - - :param func: a callable - :param args: positional arguments for the callable - :return: the return value of the callable - - """ - try: - asynclib = threadlocals.current_async_module - except AttributeError: - raise RuntimeError("This function can only be run from an AnyIO worker thread") - - return asynclib.run_sync_from_thread(func, *args) - - -def run_sync_from_thread(func: Callable[..., T_Retval], *args: object) -> T_Retval: - warn( - "run_sync_from_thread() has been deprecated, use anyio.from_thread.run_sync() instead", - DeprecationWarning, - ) - return run_sync(func, *args) - - -class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager): - _enter_future: Future - _exit_future: Future - _exit_event: Event - _exit_exc_info: tuple[ - type[BaseException] | None, BaseException | None, TracebackType | None - ] = (None, None, None) - - def __init__(self, async_cm: AsyncContextManager[T_co], portal: BlockingPortal): - self._async_cm = async_cm - self._portal = portal - - async def run_async_cm(self) -> bool | None: - try: - self._exit_event = Event() - value = await self._async_cm.__aenter__() - except BaseException as exc: - self._enter_future.set_exception(exc) - raise - else: - self._enter_future.set_result(value) - - try: - # Wait for the sync context manager to exit. - # This next statement can raise `get_cancelled_exc_class()` if - # something went wrong in a task group in this async context - # manager. - await self._exit_event.wait() - finally: - # In case of cancellation, it could be that we end up here before - # `_BlockingAsyncContextManager.__exit__` is called, and an - # `_exit_exc_info` has been set. 
- result = await self._async_cm.__aexit__(*self._exit_exc_info) - return result - - def __enter__(self) -> T_co: - self._enter_future = Future() - self._exit_future = self._portal.start_task_soon(self.run_async_cm) - cm = self._enter_future.result() - return cast(T_co, cm) - - def __exit__( - self, - __exc_type: type[BaseException] | None, - __exc_value: BaseException | None, - __traceback: TracebackType | None, - ) -> bool | None: - self._exit_exc_info = __exc_type, __exc_value, __traceback - self._portal.call(self._exit_event.set) - return self._exit_future.result() - - -class _BlockingPortalTaskStatus(TaskStatus): - def __init__(self, future: Future): - self._future = future - - def started(self, value: object = None) -> None: - self._future.set_result(value) - - -class BlockingPortal: - """An object that lets external threads run code in an asynchronous event loop.""" - - def __new__(cls) -> BlockingPortal: - return get_asynclib().BlockingPortal() - - def __init__(self) -> None: - self._event_loop_thread_id: int | None = threading.get_ident() - self._stop_event = Event() - self._task_group = create_task_group() - self._cancelled_exc_class = get_cancelled_exc_class() - - async def __aenter__(self) -> BlockingPortal: - await self._task_group.__aenter__() - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - await self.stop() - return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) - - def _check_running(self) -> None: - if self._event_loop_thread_id is None: - raise RuntimeError("This portal is not running") - if self._event_loop_thread_id == threading.get_ident(): - raise RuntimeError( - "This method cannot be called from the event loop thread" - ) - - async def sleep_until_stopped(self) -> None: - """Sleep until :meth:`stop` is called.""" - await self._stop_event.wait() - - async def stop(self, cancel_remaining: bool = False) -> None: - """ - Signal the portal to shut down. - - This marks the portal as no longer accepting new calls and exits from - :meth:`sleep_until_stopped`. - - :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` to let them - finish before returning - - """ - self._event_loop_thread_id = None - self._stop_event.set() - if cancel_remaining: - self._task_group.cancel_scope.cancel() - - async def _call_func( - self, func: Callable, args: tuple, kwargs: dict[str, Any], future: Future - ) -> None: - def callback(f: Future) -> None: - if f.cancelled() and self._event_loop_thread_id not in ( - None, - threading.get_ident(), - ): - self.call(scope.cancel) - - try: - retval = func(*args, **kwargs) - if iscoroutine(retval): - with CancelScope() as scope: - if future.cancelled(): - scope.cancel() - else: - future.add_done_callback(callback) - - retval = await retval - except self._cancelled_exc_class: - future.cancel() - except BaseException as exc: - if not future.cancelled(): - future.set_exception(exc) - - # Let base exceptions fall through - if not isinstance(exc, Exception): - raise - else: - if not future.cancelled(): - future.set_result(retval) - finally: - scope = None # type: ignore[assignment] - - def _spawn_task_from_thread( - self, - func: Callable, - args: tuple, - kwargs: dict[str, Any], - name: object, - future: Future, - ) -> None: - """ - Spawn a new task using the given callable. - - Implementors must ensure that the future is resolved when the task finishes. 
- - :param func: a callable - :param args: positional arguments to be passed to the callable - :param kwargs: keyword arguments to be passed to the callable - :param name: name of the task (will be coerced to a string if not ``None``) - :param future: a future that will resolve to the return value of the callable, or the - exception raised during its execution - - """ - raise NotImplementedError - - @overload - def call(self, func: Callable[..., Awaitable[T_Retval]], *args: object) -> T_Retval: - ... - - @overload - def call(self, func: Callable[..., T_Retval], *args: object) -> T_Retval: - ... - - def call( - self, func: Callable[..., Awaitable[T_Retval] | T_Retval], *args: object - ) -> T_Retval: - """ - Call the given function in the event loop thread. - - If the callable returns a coroutine object, it is awaited on. - - :param func: any callable - :raises RuntimeError: if the portal is not running or if this method is called from within - the event loop thread - - """ - return cast(T_Retval, self.start_task_soon(func, *args).result()) - - @overload - def spawn_task( - self, - func: Callable[..., Awaitable[T_Retval]], - *args: object, - name: object = None, - ) -> Future[T_Retval]: - ... - - @overload - def spawn_task( - self, func: Callable[..., T_Retval], *args: object, name: object = None - ) -> Future[T_Retval]: - ... - - def spawn_task( - self, - func: Callable[..., Awaitable[T_Retval] | T_Retval], - *args: object, - name: object = None, - ) -> Future[T_Retval]: - """ - Start a task in the portal's task group. - - :param func: the target coroutine function - :param args: positional arguments passed to ``func`` - :param name: name of the task (will be coerced to a string if not ``None``) - :return: a future that resolves with the return value of the callable if the task completes - successfully, or with the exception raised in the task - :raises RuntimeError: if the portal is not running or if this method is called from within - the event loop thread - - .. versionadded:: 2.1 - .. deprecated:: 3.0 - Use :meth:`start_task_soon` instead. If your code needs AnyIO 2 compatibility, you - can keep using this until AnyIO 4. - - """ - warn( - "spawn_task() is deprecated -- use start_task_soon() instead", - DeprecationWarning, - ) - return self.start_task_soon(func, *args, name=name) # type: ignore[arg-type] - - @overload - def start_task_soon( - self, - func: Callable[..., Awaitable[T_Retval]], - *args: object, - name: object = None, - ) -> Future[T_Retval]: - ... - - @overload - def start_task_soon( - self, func: Callable[..., T_Retval], *args: object, name: object = None - ) -> Future[T_Retval]: - ... - - def start_task_soon( - self, - func: Callable[..., Awaitable[T_Retval] | T_Retval], - *args: object, - name: object = None, - ) -> Future[T_Retval]: - """ - Start a task in the portal's task group. - - The task will be run inside a cancel scope which can be cancelled by cancelling the - returned future. - - :param func: the target function - :param args: positional arguments passed to ``func`` - :param name: name of the task (will be coerced to a string if not ``None``) - :return: a future that resolves with the return value of the callable if the - task completes successfully, or with the exception raised in the task - :raises RuntimeError: if the portal is not running or if this method is called - from within the event loop thread - :rtype: concurrent.futures.Future[T_Retval] - - .. 
versionadded:: 3.0 - - """ - self._check_running() - f: Future = Future() - self._spawn_task_from_thread(func, args, {}, name, f) - return f - - def start_task( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> tuple[Future[Any], Any]: - """ - Start a task in the portal's task group and wait until it signals for readiness. - - This method works the same way as :meth:`.abc.TaskGroup.start`. - - :param func: the target function - :param args: positional arguments passed to ``func`` - :param name: name of the task (will be coerced to a string if not ``None``) - :return: a tuple of (future, task_status_value) where the ``task_status_value`` - is the value passed to ``task_status.started()`` from within the target - function - :rtype: tuple[concurrent.futures.Future[Any], Any] - - .. versionadded:: 3.0 - - """ - - def task_done(future: Future) -> None: - if not task_status_future.done(): - if future.cancelled(): - task_status_future.cancel() - elif future.exception(): - task_status_future.set_exception(future.exception()) - else: - exc = RuntimeError( - "Task exited without calling task_status.started()" - ) - task_status_future.set_exception(exc) - - self._check_running() - task_status_future: Future = Future() - task_status = _BlockingPortalTaskStatus(task_status_future) - f: Future = Future() - f.add_done_callback(task_done) - self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f) - return f, task_status_future.result() - - def wrap_async_context_manager( - self, cm: AsyncContextManager[T_co] - ) -> ContextManager[T_co]: - """ - Wrap an async context manager as a synchronous context manager via this portal. - - Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping in the - middle until the synchronous context manager exits. - - :param cm: an asynchronous context manager - :return: a synchronous context manager - - .. versionadded:: 2.1 - - """ - return _BlockingAsyncContextManager(cm, self) - - -def create_blocking_portal() -> BlockingPortal: - """ - Create a portal for running functions in the event loop thread from external threads. - - Use this function in asynchronous code when you need to allow external threads access to the - event loop where your asynchronous code is currently running. - - .. deprecated:: 3.0 - Use :class:`.BlockingPortal` directly. - - """ - warn( - "create_blocking_portal() has been deprecated -- use anyio.from_thread.BlockingPortal() " - "directly", - DeprecationWarning, - ) - return BlockingPortal() - - -@contextmanager -def start_blocking_portal( - backend: str = "asyncio", backend_options: dict[str, Any] | None = None -) -> Generator[BlockingPortal, Any, None]: - """ - Start a new event loop in a new thread and run a blocking portal in its main task. - - The parameters are the same as for :func:`~anyio.run`. - - :param backend: name of the backend - :param backend_options: backend options - :return: a context manager that yields a blocking portal - - .. versionchanged:: 3.0 - Usage as a context manager is now required. 
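A minimal sketch of driving a portal from synchronous code (illustrative only)::

    from anyio.from_thread import start_blocking_portal

    async def async_double(x: int) -> int:
        return x * 2

    with start_blocking_portal() as portal:
        print(portal.call(async_double, 21))              # 42
        future = portal.start_task_soon(async_double, 5)
        print(future.result())                            # 10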
- - """ - - async def run_portal() -> None: - async with BlockingPortal() as portal_: - if future.set_running_or_notify_cancel(): - future.set_result(portal_) - await portal_.sleep_until_stopped() - - future: Future[BlockingPortal] = Future() - with ThreadPoolExecutor(1) as executor: - run_future = executor.submit( - _eventloop.run, - run_portal, # type: ignore[arg-type] - backend=backend, - backend_options=backend_options, - ) - try: - wait( - cast(Iterable[Future], [run_future, future]), - return_when=FIRST_COMPLETED, - ) - except BaseException: - future.cancel() - run_future.cancel() - raise - - if future.done(): - portal = future.result() - cancel_remaining_tasks = False - try: - yield portal - except BaseException: - cancel_remaining_tasks = True - raise - finally: - try: - portal.call(portal.stop, cancel_remaining_tasks) - except RuntimeError: - pass - - run_future.result() diff --git a/server/venv/Lib/site-packages/anyio/lowlevel.py b/server/venv/Lib/site-packages/anyio/lowlevel.py deleted file mode 100644 index 0e908c6..0000000 --- a/server/venv/Lib/site-packages/anyio/lowlevel.py +++ /dev/null @@ -1,174 +0,0 @@ -from __future__ import annotations - -import enum -import sys -from dataclasses import dataclass -from typing import Any, Generic, TypeVar, overload -from weakref import WeakKeyDictionary - -from ._core._eventloop import get_asynclib - -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal - -T = TypeVar("T") -D = TypeVar("D") - - -async def checkpoint() -> None: - """ - Check for cancellation and allow the scheduler to switch to another task. - - Equivalent to (but more efficient than):: - - await checkpoint_if_cancelled() - await cancel_shielded_checkpoint() - - - .. versionadded:: 3.0 - - """ - await get_asynclib().checkpoint() - - -async def checkpoint_if_cancelled() -> None: - """ - Enter a checkpoint if the enclosing cancel scope has been cancelled. - - This does not allow the scheduler to switch to a different task. - - .. versionadded:: 3.0 - - """ - await get_asynclib().checkpoint_if_cancelled() - - -async def cancel_shielded_checkpoint() -> None: - """ - Allow the scheduler to switch to another task but without checking for cancellation. - - Equivalent to (but potentially more efficient than):: - - with CancelScope(shield=True): - await checkpoint() - - - .. versionadded:: 3.0 - - """ - await get_asynclib().cancel_shielded_checkpoint() - - -def current_token() -> object: - """Return a backend specific token object that can be used to get back to the event loop.""" - return get_asynclib().current_token() - - -_run_vars: WeakKeyDictionary[Any, dict[str, Any]] = WeakKeyDictionary() -_token_wrappers: dict[Any, _TokenWrapper] = {} - - -@dataclass(frozen=True) -class _TokenWrapper: - __slots__ = "_token", "__weakref__" - _token: object - - -class _NoValueSet(enum.Enum): - NO_VALUE_SET = enum.auto() - - -class RunvarToken(Generic[T]): - __slots__ = "_var", "_value", "_redeemed" - - def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]): - self._var = var - self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value - self._redeemed = False - - -class RunVar(Generic[T]): - """ - Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop. 
- """ - - __slots__ = "_name", "_default" - - NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET - - _token_wrappers: set[_TokenWrapper] = set() - - def __init__( - self, - name: str, - default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET, - ): - self._name = name - self._default = default - - @property - def _current_vars(self) -> dict[str, T]: - token = current_token() - while True: - try: - return _run_vars[token] - except TypeError: - # Happens when token isn't weak referable (TrioToken). - # This workaround does mean that some memory will leak on Trio until the problem - # is fixed on their end. - token = _TokenWrapper(token) - self._token_wrappers.add(token) - except KeyError: - run_vars = _run_vars[token] = {} - return run_vars - - @overload - def get(self, default: D) -> T | D: - ... - - @overload - def get(self) -> T: - ... - - def get( - self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET - ) -> T | D: - try: - return self._current_vars[self._name] - except KeyError: - if default is not RunVar.NO_VALUE_SET: - return default - elif self._default is not RunVar.NO_VALUE_SET: - return self._default - - raise LookupError( - f'Run variable "{self._name}" has no value and no default set' - ) - - def set(self, value: T) -> RunvarToken[T]: - current_vars = self._current_vars - token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET)) - current_vars[self._name] = value - return token - - def reset(self, token: RunvarToken[T]) -> None: - if token._var is not self: - raise ValueError("This token does not belong to this RunVar") - - if token._redeemed: - raise ValueError("This token has already been used") - - if token._value is _NoValueSet.NO_VALUE_SET: - try: - del self._current_vars[self._name] - except KeyError: - pass - else: - self._current_vars[self._name] = token._value - - token._redeemed = True - - def __repr__(self) -> str: - return f"" diff --git a/server/venv/Lib/site-packages/anyio/py.typed b/server/venv/Lib/site-packages/anyio/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/anyio/pytest_plugin.py b/server/venv/Lib/site-packages/anyio/pytest_plugin.py deleted file mode 100644 index 044ce69..0000000 --- a/server/venv/Lib/site-packages/anyio/pytest_plugin.py +++ /dev/null @@ -1,142 +0,0 @@ -from __future__ import annotations - -from contextlib import contextmanager -from inspect import isasyncgenfunction, iscoroutinefunction -from typing import Any, Dict, Generator, Tuple, cast - -import pytest -import sniffio - -from ._core._eventloop import get_all_backends, get_asynclib -from .abc import TestRunner - -_current_runner: TestRunner | None = None - - -def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]: - if isinstance(backend, str): - return backend, {} - elif isinstance(backend, tuple) and len(backend) == 2: - if isinstance(backend[0], str) and isinstance(backend[1], dict): - return cast(Tuple[str, Dict[str, Any]], backend) - - raise TypeError("anyio_backend must be either a string or tuple of (string, dict)") - - -@contextmanager -def get_runner( - backend_name: str, backend_options: dict[str, Any] -) -> Generator[TestRunner, object, None]: - global _current_runner - if _current_runner: - yield _current_runner - return - - asynclib = get_asynclib(backend_name) - token = None - if sniffio.current_async_library_cvar.get(None) is None: - # Since we're in control of the event loop, we can cache the name of the async library - token = 
sniffio.current_async_library_cvar.set(backend_name) - - try: - backend_options = backend_options or {} - with asynclib.TestRunner(**backend_options) as runner: - _current_runner = runner - yield runner - finally: - _current_runner = None - if token: - sniffio.current_async_library_cvar.reset(token) - - -def pytest_configure(config: Any) -> None: - config.addinivalue_line( - "markers", - "anyio: mark the (coroutine function) test to be run " - "asynchronously via anyio.", - ) - - -def pytest_fixture_setup(fixturedef: Any, request: Any) -> None: - def wrapper(*args, anyio_backend, **kwargs): # type: ignore[no-untyped-def] - backend_name, backend_options = extract_backend_and_options(anyio_backend) - if has_backend_arg: - kwargs["anyio_backend"] = anyio_backend - - with get_runner(backend_name, backend_options) as runner: - if isasyncgenfunction(func): - yield from runner.run_asyncgen_fixture(func, kwargs) - else: - yield runner.run_fixture(func, kwargs) - - # Only apply this to coroutine functions and async generator functions in requests that involve - # the anyio_backend fixture - func = fixturedef.func - if isasyncgenfunction(func) or iscoroutinefunction(func): - if "anyio_backend" in request.fixturenames: - has_backend_arg = "anyio_backend" in fixturedef.argnames - fixturedef.func = wrapper - if not has_backend_arg: - fixturedef.argnames += ("anyio_backend",) - - -@pytest.hookimpl(tryfirst=True) -def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: - if collector.istestfunction(obj, name): - inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj - if iscoroutinefunction(inner_func): - marker = collector.get_closest_marker("anyio") - own_markers = getattr(obj, "pytestmark", ()) - if marker or any(marker.name == "anyio" for marker in own_markers): - pytest.mark.usefixtures("anyio_backend")(obj) - - -@pytest.hookimpl(tryfirst=True) -def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None: - def run_with_hypothesis(**kwargs: Any) -> None: - with get_runner(backend_name, backend_options) as runner: - runner.run_test(original_func, kwargs) - - backend = pyfuncitem.funcargs.get("anyio_backend") - if backend: - backend_name, backend_options = extract_backend_and_options(backend) - - if hasattr(pyfuncitem.obj, "hypothesis"): - # Wrap the inner test function unless it's already wrapped - original_func = pyfuncitem.obj.hypothesis.inner_test - if original_func.__qualname__ != run_with_hypothesis.__qualname__: - if iscoroutinefunction(original_func): - pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis - - return None - - if iscoroutinefunction(pyfuncitem.obj): - funcargs = pyfuncitem.funcargs - testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} - with get_runner(backend_name, backend_options) as runner: - runner.run_test(pyfuncitem.obj, testargs) - - return True - - return None - - -@pytest.fixture(params=get_all_backends()) -def anyio_backend(request: Any) -> Any: - return request.param - - -@pytest.fixture -def anyio_backend_name(anyio_backend: Any) -> str: - if isinstance(anyio_backend, str): - return anyio_backend - else: - return anyio_backend[0] - - -@pytest.fixture -def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]: - if isinstance(anyio_backend, str): - return {} - else: - return anyio_backend[1] diff --git a/server/venv/Lib/site-packages/anyio/streams/__init__.py b/server/venv/Lib/site-packages/anyio/streams/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git 
a/server/venv/Lib/site-packages/anyio/streams/buffered.py b/server/venv/Lib/site-packages/anyio/streams/buffered.py deleted file mode 100644 index 11474c1..0000000 --- a/server/venv/Lib/site-packages/anyio/streams/buffered.py +++ /dev/null @@ -1,118 +0,0 @@ -from __future__ import annotations - -from dataclasses import dataclass, field -from typing import Any, Callable, Mapping - -from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead -from ..abc import AnyByteReceiveStream, ByteReceiveStream - - -@dataclass(eq=False) -class BufferedByteReceiveStream(ByteReceiveStream): - """ - Wraps any bytes-based receive stream and uses a buffer to provide sophisticated receiving - capabilities in the form of a byte stream. - """ - - receive_stream: AnyByteReceiveStream - _buffer: bytearray = field(init=False, default_factory=bytearray) - _closed: bool = field(init=False, default=False) - - async def aclose(self) -> None: - await self.receive_stream.aclose() - self._closed = True - - @property - def buffer(self) -> bytes: - """The bytes currently in the buffer.""" - return bytes(self._buffer) - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return self.receive_stream.extra_attributes - - async def receive(self, max_bytes: int = 65536) -> bytes: - if self._closed: - raise ClosedResourceError - - if self._buffer: - chunk = bytes(self._buffer[:max_bytes]) - del self._buffer[:max_bytes] - return chunk - elif isinstance(self.receive_stream, ByteReceiveStream): - return await self.receive_stream.receive(max_bytes) - else: - # With a bytes-oriented object stream, we need to handle any surplus bytes we get from - # the receive() call - chunk = await self.receive_stream.receive() - if len(chunk) > max_bytes: - # Save the surplus bytes in the buffer - self._buffer.extend(chunk[max_bytes:]) - return chunk[:max_bytes] - else: - return chunk - - async def receive_exactly(self, nbytes: int) -> bytes: - """ - Read exactly the given amount of bytes from the stream. - - :param nbytes: the number of bytes to read - :return: the bytes read - :raises ~anyio.IncompleteRead: if the stream was closed before the requested - amount of bytes could be read from the stream - - """ - while True: - remaining = nbytes - len(self._buffer) - if remaining <= 0: - retval = self._buffer[:nbytes] - del self._buffer[:nbytes] - return bytes(retval) - - try: - if isinstance(self.receive_stream, ByteReceiveStream): - chunk = await self.receive_stream.receive(remaining) - else: - chunk = await self.receive_stream.receive() - except EndOfStream as exc: - raise IncompleteRead from exc - - self._buffer.extend(chunk) - - async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: - """ - Read from the stream until the delimiter is found or max_bytes have been read. 
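A sketch of ``receive_until()`` and ``receive_exactly()`` in action (the memory object stream below is only a stand-in transport):

.. code-block:: python

    import anyio
    from anyio.streams.buffered import BufferedByteReceiveStream

    async def main() -> None:
        send, receive = anyio.create_memory_object_stream(10)
        buffered = BufferedByteReceiveStream(receive)
        await send.send(b"HELLO\nwor")
        await send.send(b"ld")
        print(await buffered.receive_until(b"\n", 64))  # b'HELLO' (delimiter stripped)
        print(await buffered.receive_exactly(5))        # b'world'
        await send.aclose()
        await buffered.aclose()

    anyio.run(main)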
- - :param delimiter: the marker to look for in the stream - :param max_bytes: maximum number of bytes that will be read before raising - :exc:`~anyio.DelimiterNotFound` - :return: the bytes read (not including the delimiter) - :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter - was found - :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the - bytes read up to the maximum allowed - - """ - delimiter_size = len(delimiter) - offset = 0 - while True: - # Check if the delimiter can be found in the current buffer - index = self._buffer.find(delimiter, offset) - if index >= 0: - found = self._buffer[:index] - del self._buffer[: index + len(delimiter) :] - return bytes(found) - - # Check if the buffer is already at or over the limit - if len(self._buffer) >= max_bytes: - raise DelimiterNotFound(max_bytes) - - # Read more data into the buffer from the socket - try: - data = await self.receive_stream.receive() - except EndOfStream as exc: - raise IncompleteRead from exc - - # Move the offset forward and add the new data to the buffer - offset = max(len(self._buffer) - delimiter_size + 1, 0) - self._buffer.extend(data) diff --git a/server/venv/Lib/site-packages/anyio/streams/file.py b/server/venv/Lib/site-packages/anyio/streams/file.py deleted file mode 100644 index 2840d40..0000000 --- a/server/venv/Lib/site-packages/anyio/streams/file.py +++ /dev/null @@ -1,147 +0,0 @@ -from __future__ import annotations - -from io import SEEK_SET, UnsupportedOperation -from os import PathLike -from pathlib import Path -from typing import Any, BinaryIO, Callable, Mapping, cast - -from .. import ( - BrokenResourceError, - ClosedResourceError, - EndOfStream, - TypedAttributeSet, - to_thread, - typed_attribute, -) -from ..abc import ByteReceiveStream, ByteSendStream - - -class FileStreamAttribute(TypedAttributeSet): - #: the open file descriptor - file: BinaryIO = typed_attribute() - #: the path of the file on the file system, if available (file must be a real file) - path: Path = typed_attribute() - #: the file number, if available (file must be a real file or a TTY) - fileno: int = typed_attribute() - - -class _BaseFileStream: - def __init__(self, file: BinaryIO): - self._file = file - - async def aclose(self) -> None: - await to_thread.run_sync(self._file.close) - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - attributes: dict[Any, Callable[[], Any]] = { - FileStreamAttribute.file: lambda: self._file, - } - - if hasattr(self._file, "name"): - attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) - - try: - self._file.fileno() - except UnsupportedOperation: - pass - else: - attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() - - return attributes - - -class FileReadStream(_BaseFileStream, ByteReceiveStream): - """ - A byte stream that reads from a file in the file system. - - :param file: a file that has been opened for reading in binary mode - - .. versionadded:: 3.0 - """ - - @classmethod - async def from_path(cls, path: str | PathLike[str]) -> FileReadStream: - """ - Create a file read stream by opening the given file. 
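Both file stream classes are typically constructed through ``from_path()``; a round-trip sketch (``example.bin`` is an arbitrary path):

.. code-block:: python

    import anyio
    from anyio.streams.file import FileReadStream, FileWriteStream

    async def main() -> None:
        async with await FileWriteStream.from_path("example.bin") as out:
            await out.send(b"hello file streams")

        async with await FileReadStream.from_path("example.bin") as stream:
            async for chunk in stream:  # byte streams are async-iterable
                print(chunk)

    anyio.run(main)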
- - :param path: path of the file to read from - - """ - file = await to_thread.run_sync(Path(path).open, "rb") - return cls(cast(BinaryIO, file)) - - async def receive(self, max_bytes: int = 65536) -> bytes: - try: - data = await to_thread.run_sync(self._file.read, max_bytes) - except ValueError: - raise ClosedResourceError from None - except OSError as exc: - raise BrokenResourceError from exc - - if data: - return data - else: - raise EndOfStream - - async def seek(self, position: int, whence: int = SEEK_SET) -> int: - """ - Seek the file to the given position. - - .. seealso:: :meth:`io.IOBase.seek` - - .. note:: Not all file descriptors are seekable. - - :param position: position to seek the file to - :param whence: controls how ``position`` is interpreted - :return: the new absolute position - :raises OSError: if the file is not seekable - - """ - return await to_thread.run_sync(self._file.seek, position, whence) - - async def tell(self) -> int: - """ - Return the current stream position. - - .. note:: Not all file descriptors are seekable. - - :return: the current absolute position - :raises OSError: if the file is not seekable - - """ - return await to_thread.run_sync(self._file.tell) - - -class FileWriteStream(_BaseFileStream, ByteSendStream): - """ - A byte stream that writes to a file in the file system. - - :param file: a file that has been opened for writing in binary mode - - .. versionadded:: 3.0 - """ - - @classmethod - async def from_path( - cls, path: str | PathLike[str], append: bool = False - ) -> FileWriteStream: - """ - Create a file write stream by opening the given file for writing. - - :param path: path of the file to write to - :param append: if ``True``, open the file for appending; if ``False``, any existing file - at the given path will be truncated - - """ - mode = "ab" if append else "wb" - file = await to_thread.run_sync(Path(path).open, mode) - return cls(cast(BinaryIO, file)) - - async def send(self, item: bytes) -> None: - try: - await to_thread.run_sync(self._file.write, item) - except ValueError: - raise ClosedResourceError from None - except OSError as exc: - raise BrokenResourceError from exc diff --git a/server/venv/Lib/site-packages/anyio/streams/memory.py b/server/venv/Lib/site-packages/anyio/streams/memory.py deleted file mode 100644 index a6499c1..0000000 --- a/server/venv/Lib/site-packages/anyio/streams/memory.py +++ /dev/null @@ -1,279 +0,0 @@ -from __future__ import annotations - -from collections import OrderedDict, deque -from dataclasses import dataclass, field -from types import TracebackType -from typing import Generic, NamedTuple, TypeVar - -from .. 
import ( - BrokenResourceError, - ClosedResourceError, - EndOfStream, - WouldBlock, - get_cancelled_exc_class, -) -from .._core._compat import DeprecatedAwaitable -from ..abc import Event, ObjectReceiveStream, ObjectSendStream -from ..lowlevel import checkpoint - -T_Item = TypeVar("T_Item") -T_co = TypeVar("T_co", covariant=True) -T_contra = TypeVar("T_contra", contravariant=True) - - -class MemoryObjectStreamStatistics(NamedTuple): - current_buffer_used: int #: number of items stored in the buffer - #: maximum number of items that can be stored on this stream (or :data:`math.inf`) - max_buffer_size: float - open_send_streams: int #: number of unclosed clones of the send stream - open_receive_streams: int #: number of unclosed clones of the receive stream - tasks_waiting_send: int #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` - #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` - tasks_waiting_receive: int - - -@dataclass(eq=False) -class MemoryObjectStreamState(Generic[T_Item]): - max_buffer_size: float = field() - buffer: deque[T_Item] = field(init=False, default_factory=deque) - open_send_channels: int = field(init=False, default=0) - open_receive_channels: int = field(init=False, default=0) - waiting_receivers: OrderedDict[Event, list[T_Item]] = field( - init=False, default_factory=OrderedDict - ) - waiting_senders: OrderedDict[Event, T_Item] = field( - init=False, default_factory=OrderedDict - ) - - def statistics(self) -> MemoryObjectStreamStatistics: - return MemoryObjectStreamStatistics( - len(self.buffer), - self.max_buffer_size, - self.open_send_channels, - self.open_receive_channels, - len(self.waiting_senders), - len(self.waiting_receivers), - ) - - -@dataclass(eq=False) -class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): - _state: MemoryObjectStreamState[T_co] - _closed: bool = field(init=False, default=False) - - def __post_init__(self) -> None: - self._state.open_receive_channels += 1 - - def receive_nowait(self) -> T_co: - """ - Receive the next item if it can be done without waiting. - - :return: the received item - :raises ~anyio.ClosedResourceError: if this send stream has been closed - :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been - closed from the sending end - :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks - waiting to send - - """ - if self._closed: - raise ClosedResourceError - - if self._state.waiting_senders: - # Get the item from the next sender - send_event, item = self._state.waiting_senders.popitem(last=False) - self._state.buffer.append(item) - send_event.set() - - if self._state.buffer: - return self._state.buffer.popleft() - elif not self._state.open_send_channels: - raise EndOfStream - - raise WouldBlock - - async def receive(self) -> T_co: - await checkpoint() - try: - return self.receive_nowait() - except WouldBlock: - # Add ourselves in the queue - receive_event = Event() - container: list[T_co] = [] - self._state.waiting_receivers[receive_event] = container - - try: - await receive_event.wait() - except get_cancelled_exc_class(): - # Ignore the immediate cancellation if we already received an item, so as not to - # lose it - if not container: - raise - finally: - self._state.waiting_receivers.pop(receive_event, None) - - if container: - return container[0] - else: - raise EndOfStream - - def clone(self) -> MemoryObjectReceiveStream[T_co]: - """ - Create a clone of this receive stream. - - Each clone can be closed separately. 
Only when all clones have been closed will the - receiving end of the memory stream be considered closed by the sending ends. - - :return: the cloned stream - - """ - if self._closed: - raise ClosedResourceError - - return MemoryObjectReceiveStream(_state=self._state) - - def close(self) -> None: - """ - Close the stream. - - This works the exact same way as :meth:`aclose`, but is provided as a special case for the - benefit of synchronous callbacks. - - """ - if not self._closed: - self._closed = True - self._state.open_receive_channels -= 1 - if self._state.open_receive_channels == 0: - send_events = list(self._state.waiting_senders.keys()) - for event in send_events: - event.set() - - async def aclose(self) -> None: - self.close() - - def statistics(self) -> MemoryObjectStreamStatistics: - """ - Return statistics about the current state of this stream. - - .. versionadded:: 3.0 - """ - return self._state.statistics() - - def __enter__(self) -> MemoryObjectReceiveStream[T_co]: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.close() - - -@dataclass(eq=False) -class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): - _state: MemoryObjectStreamState[T_contra] - _closed: bool = field(init=False, default=False) - - def __post_init__(self) -> None: - self._state.open_send_channels += 1 - - def send_nowait(self, item: T_contra) -> DeprecatedAwaitable: - """ - Send an item immediately if it can be done without waiting. - - :param item: the item to send - :raises ~anyio.ClosedResourceError: if this send stream has been closed - :raises ~anyio.BrokenResourceError: if the stream has been closed from the - receiving end - :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting - to receive - - """ - if self._closed: - raise ClosedResourceError - if not self._state.open_receive_channels: - raise BrokenResourceError - - if self._state.waiting_receivers: - receive_event, container = self._state.waiting_receivers.popitem(last=False) - container.append(item) - receive_event.set() - elif len(self._state.buffer) < self._state.max_buffer_size: - self._state.buffer.append(item) - else: - raise WouldBlock - - return DeprecatedAwaitable(self.send_nowait) - - async def send(self, item: T_contra) -> None: - await checkpoint() - try: - self.send_nowait(item) - except WouldBlock: - # Wait until there's someone on the receiving end - send_event = Event() - self._state.waiting_senders[send_event] = item - try: - await send_event.wait() - except BaseException: - self._state.waiting_senders.pop(send_event, None) # type: ignore[arg-type] - raise - - if self._state.waiting_senders.pop(send_event, None): # type: ignore[arg-type] - raise BrokenResourceError - - def clone(self) -> MemoryObjectSendStream[T_contra]: - """ - Create a clone of this send stream. - - Each clone can be closed separately. Only when all clones have been closed will the - sending end of the memory stream be considered closed by the receiving ends. - - :return: the cloned stream - - """ - if self._closed: - raise ClosedResourceError - - return MemoryObjectSendStream(_state=self._state) - - def close(self) -> None: - """ - Close the stream. - - This works the exact same way as :meth:`aclose`, but is provided as a special case for the - benefit of synchronous callbacks. 
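The close-to-signal-EOF behaviour above enables the usual producer/consumer pattern (a minimal sketch):

.. code-block:: python

    import anyio

    async def main() -> None:
        send, receive = anyio.create_memory_object_stream(max_buffer_size=5)

        async def producer() -> None:
            async with send:  # closing the last send clone ends the stream
                for i in range(3):
                    await send.send(i)

        async with anyio.create_task_group() as tg:
            tg.start_soon(producer)
            async with receive:
                async for item in receive:  # stops at EndOfStream
                    print(item)

    anyio.run(main)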
- - """ - if not self._closed: - self._closed = True - self._state.open_send_channels -= 1 - if self._state.open_send_channels == 0: - receive_events = list(self._state.waiting_receivers.keys()) - self._state.waiting_receivers.clear() - for event in receive_events: - event.set() - - async def aclose(self) -> None: - self.close() - - def statistics(self) -> MemoryObjectStreamStatistics: - """ - Return statistics about the current state of this stream. - - .. versionadded:: 3.0 - """ - return self._state.statistics() - - def __enter__(self) -> MemoryObjectSendStream[T_contra]: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.close() diff --git a/server/venv/Lib/site-packages/anyio/streams/stapled.py b/server/venv/Lib/site-packages/anyio/streams/stapled.py deleted file mode 100644 index 1b2862e..0000000 --- a/server/venv/Lib/site-packages/anyio/streams/stapled.py +++ /dev/null @@ -1,140 +0,0 @@ -from __future__ import annotations - -from dataclasses import dataclass -from typing import Any, Callable, Generic, Mapping, Sequence, TypeVar - -from ..abc import ( - ByteReceiveStream, - ByteSendStream, - ByteStream, - Listener, - ObjectReceiveStream, - ObjectSendStream, - ObjectStream, - TaskGroup, -) - -T_Item = TypeVar("T_Item") -T_Stream = TypeVar("T_Stream") - - -@dataclass(eq=False) -class StapledByteStream(ByteStream): - """ - Combines two byte streams into a single, bidirectional byte stream. - - Extra attributes will be provided from both streams, with the receive stream providing the - values in case of a conflict. - - :param ByteSendStream send_stream: the sending byte stream - :param ByteReceiveStream receive_stream: the receiving byte stream - """ - - send_stream: ByteSendStream - receive_stream: ByteReceiveStream - - async def receive(self, max_bytes: int = 65536) -> bytes: - return await self.receive_stream.receive(max_bytes) - - async def send(self, item: bytes) -> None: - await self.send_stream.send(item) - - async def send_eof(self) -> None: - await self.send_stream.aclose() - - async def aclose(self) -> None: - await self.send_stream.aclose() - await self.receive_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self.send_stream.extra_attributes, - **self.receive_stream.extra_attributes, - } - - -@dataclass(eq=False) -class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): - """ - Combines two object streams into a single, bidirectional object stream. - - Extra attributes will be provided from both streams, with the receive stream providing the - values in case of a conflict. 
- - :param ObjectSendStream send_stream: the sending object stream - :param ObjectReceiveStream receive_stream: the receiving object stream - """ - - send_stream: ObjectSendStream[T_Item] - receive_stream: ObjectReceiveStream[T_Item] - - async def receive(self) -> T_Item: - return await self.receive_stream.receive() - - async def send(self, item: T_Item) -> None: - await self.send_stream.send(item) - - async def send_eof(self) -> None: - await self.send_stream.aclose() - - async def aclose(self) -> None: - await self.send_stream.aclose() - await self.receive_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self.send_stream.extra_attributes, - **self.receive_stream.extra_attributes, - } - - -@dataclass(eq=False) -class MultiListener(Generic[T_Stream], Listener[T_Stream]): - """ - Combines multiple listeners into one, serving connections from all of them at once. - - Any MultiListeners in the given collection of listeners will have their listeners moved into - this one. - - Extra attributes are provided from each listener, with each successive listener overriding any - conflicting attributes from the previous one. - - :param listeners: listeners to serve - :type listeners: Sequence[Listener[T_Stream]] - """ - - listeners: Sequence[Listener[T_Stream]] - - def __post_init__(self) -> None: - listeners: list[Listener[T_Stream]] = [] - for listener in self.listeners: - if isinstance(listener, MultiListener): - listeners.extend(listener.listeners) - del listener.listeners[:] # type: ignore[attr-defined] - else: - listeners.append(listener) - - self.listeners = listeners - - async def serve( - self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None - ) -> None: - from .. import create_task_group - - async with create_task_group() as tg: - for listener in self.listeners: - tg.start_soon(listener.serve, handler, task_group) - - async def aclose(self) -> None: - for listener in self.listeners: - await listener.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - attributes: dict = {} - for listener in self.listeners: - attributes.update(listener.extra_attributes) - - return attributes diff --git a/server/venv/Lib/site-packages/anyio/streams/text.py b/server/venv/Lib/site-packages/anyio/streams/text.py deleted file mode 100644 index bba2d3f..0000000 --- a/server/venv/Lib/site-packages/anyio/streams/text.py +++ /dev/null @@ -1,143 +0,0 @@ -from __future__ import annotations - -import codecs -from dataclasses import InitVar, dataclass, field -from typing import Any, Callable, Mapping - -from ..abc import ( - AnyByteReceiveStream, - AnyByteSendStream, - AnyByteStream, - ObjectReceiveStream, - ObjectSendStream, - ObjectStream, -) - - -@dataclass(eq=False) -class TextReceiveStream(ObjectReceiveStream[str]): - """ - Stream wrapper that decodes bytes to strings using the given encoding. - - Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any completely - received unicode characters as soon as they come in. - - :param transport_stream: any bytes-based receive stream - :param encoding: character encoding to use for decoding bytes to strings (defaults to - ``utf-8``) - :param errors: handling scheme for decoding errors (defaults to ``strict``; see the - `codecs module documentation`_ for a comprehensive list of options) - - .. 
_codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects - """ - - transport_stream: AnyByteReceiveStream - encoding: InitVar[str] = "utf-8" - errors: InitVar[str] = "strict" - _decoder: codecs.IncrementalDecoder = field(init=False) - - def __post_init__(self, encoding: str, errors: str) -> None: - decoder_class = codecs.getincrementaldecoder(encoding) - self._decoder = decoder_class(errors=errors) - - async def receive(self) -> str: - while True: - chunk = await self.transport_stream.receive() - decoded = self._decoder.decode(chunk) - if decoded: - return decoded - - async def aclose(self) -> None: - await self.transport_stream.aclose() - self._decoder.reset() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return self.transport_stream.extra_attributes - - -@dataclass(eq=False) -class TextSendStream(ObjectSendStream[str]): - """ - Sends strings to the wrapped stream as bytes using the given encoding. - - :param AnyByteSendStream transport_stream: any bytes-based send stream - :param str encoding: character encoding to use for encoding strings to bytes (defaults to - ``utf-8``) - :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the - `codecs module documentation`_ for a comprehensive list of options) - - .. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects - """ - - transport_stream: AnyByteSendStream - encoding: InitVar[str] = "utf-8" - errors: str = "strict" - _encoder: Callable[..., tuple[bytes, int]] = field(init=False) - - def __post_init__(self, encoding: str) -> None: - self._encoder = codecs.getencoder(encoding) - - async def send(self, item: str) -> None: - encoded = self._encoder(item, self.errors)[0] - await self.transport_stream.send(encoded) - - async def aclose(self) -> None: - await self.transport_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return self.transport_stream.extra_attributes - - -@dataclass(eq=False) -class TextStream(ObjectStream[str]): - """ - A bidirectional stream that decodes bytes to strings on receive and encodes strings to bytes on - send. - - Extra attributes will be provided from both streams, with the receive stream providing the - values in case of a conflict. - - :param AnyByteStream transport_stream: any bytes-based stream - :param str encoding: character encoding to use for encoding/decoding strings to/from bytes - (defaults to ``utf-8``) - :param str errors: handling scheme for encoding errors (defaults to ``strict``; see the - `codecs module documentation`_ for a comprehensive list of options) - - .. 
_codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects - """ - - transport_stream: AnyByteStream - encoding: InitVar[str] = "utf-8" - errors: InitVar[str] = "strict" - _receive_stream: TextReceiveStream = field(init=False) - _send_stream: TextSendStream = field(init=False) - - def __post_init__(self, encoding: str, errors: str) -> None: - self._receive_stream = TextReceiveStream( - self.transport_stream, encoding=encoding, errors=errors - ) - self._send_stream = TextSendStream( - self.transport_stream, encoding=encoding, errors=errors - ) - - async def receive(self) -> str: - return await self._receive_stream.receive() - - async def send(self, item: str) -> None: - await self._send_stream.send(item) - - async def send_eof(self) -> None: - await self.transport_stream.send_eof() - - async def aclose(self) -> None: - await self._send_stream.aclose() - await self._receive_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self._send_stream.extra_attributes, - **self._receive_stream.extra_attributes, - } diff --git a/server/venv/Lib/site-packages/anyio/streams/tls.py b/server/venv/Lib/site-packages/anyio/streams/tls.py deleted file mode 100644 index 9f9e9fd..0000000 --- a/server/venv/Lib/site-packages/anyio/streams/tls.py +++ /dev/null @@ -1,320 +0,0 @@ -from __future__ import annotations - -import logging -import re -import ssl -from dataclasses import dataclass -from functools import wraps -from typing import Any, Callable, Mapping, Tuple, TypeVar - -from .. import ( - BrokenResourceError, - EndOfStream, - aclose_forcefully, - get_cancelled_exc_class, -) -from .._core._typedattr import TypedAttributeSet, typed_attribute -from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup - -T_Retval = TypeVar("T_Retval") -_PCTRTT = Tuple[Tuple[str, str], ...] -_PCTRTTT = Tuple[_PCTRTT, ...] - - -class TLSAttribute(TypedAttributeSet): - """Contains Transport Layer Security related attributes.""" - - #: the selected ALPN protocol - alpn_protocol: str | None = typed_attribute() - #: the channel binding for type ``tls-unique`` - channel_binding_tls_unique: bytes = typed_attribute() - #: the selected cipher - cipher: tuple[str, str, int] = typed_attribute() - #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` - #: for more information) - peer_certificate: dict[str, str | _PCTRTTT | _PCTRTT] | None = typed_attribute() - #: the peer certificate in binary form - peer_certificate_binary: bytes | None = typed_attribute() - #: ``True`` if this is the server side of the connection - server_side: bool = typed_attribute() - #: ciphers shared by the client during the TLS handshake (``None`` if this is the - #: client side) - shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute() - #: the :class:`~ssl.SSLObject` used for encryption - ssl_object: ssl.SSLObject = typed_attribute() - #: ``True`` if this stream does (and expects) a closing TLS handshake when the - #: stream is being closed - standard_compatible: bool = typed_attribute() - #: the TLS protocol version (e.g. ``TLSv1.2``) - tls_version: str = typed_attribute() - - -@dataclass(eq=False) -class TLSStream(ByteStream): - """ - A stream wrapper that encrypts all sent data and decrypts received data. - - This class has no public initializer; use :meth:`wrap` instead. - All extra attributes from :class:`~TLSAttribute` are supported. 
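Before the TLS machinery below, a quick sketch of the text wrappers just shown, using a stapled loopback as the byte transport:

.. code-block:: python

    import anyio
    from anyio.streams.stapled import StapledObjectStream
    from anyio.streams.text import TextStream

    async def main() -> None:
        send, receive = anyio.create_memory_object_stream(10)
        transport = StapledObjectStream(send, receive)  # loopback bytes transport
        text = TextStream(transport, encoding="utf-8")
        await text.send("héllo")     # encoded to UTF-8 bytes on the way out
        print(await text.receive())  # decoded back to 'héllo'

    anyio.run(main)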
- - :var AnyByteStream transport_stream: the wrapped stream - - """ - - transport_stream: AnyByteStream - standard_compatible: bool - _ssl_object: ssl.SSLObject - _read_bio: ssl.MemoryBIO - _write_bio: ssl.MemoryBIO - - @classmethod - async def wrap( - cls, - transport_stream: AnyByteStream, - *, - server_side: bool | None = None, - hostname: str | None = None, - ssl_context: ssl.SSLContext | None = None, - standard_compatible: bool = True, - ) -> TLSStream: - """ - Wrap an existing stream with Transport Layer Security. - - This performs a TLS handshake with the peer. - - :param transport_stream: a bytes-transporting stream to wrap - :param server_side: ``True`` if this is the server side of the connection, - ``False`` if this is the client side (if omitted, will be set to ``False`` - if ``hostname`` has been provided, ``False`` otherwise). Used only to create - a default context when an explicit context has not been provided. - :param hostname: host name of the peer (if host name checking is desired) - :param ssl_context: the SSLContext object to use (if not provided, a secure - default will be created) - :param standard_compatible: if ``False``, skip the closing handshake when closing the - connection, and don't raise an exception if the peer does the same - :raises ~ssl.SSLError: if the TLS handshake fails - - """ - if server_side is None: - server_side = not hostname - - if not ssl_context: - purpose = ( - ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH - ) - ssl_context = ssl.create_default_context(purpose) - - # Re-enable detection of unexpected EOFs if it was disabled by Python - if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"): - ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF - - bio_in = ssl.MemoryBIO() - bio_out = ssl.MemoryBIO() - ssl_object = ssl_context.wrap_bio( - bio_in, bio_out, server_side=server_side, server_hostname=hostname - ) - wrapper = cls( - transport_stream=transport_stream, - standard_compatible=standard_compatible, - _ssl_object=ssl_object, - _read_bio=bio_in, - _write_bio=bio_out, - ) - await wrapper._call_sslobject_method(ssl_object.do_handshake) - return wrapper - - async def _call_sslobject_method( - self, func: Callable[..., T_Retval], *args: object - ) -> T_Retval: - while True: - try: - result = func(*args) - except ssl.SSLWantReadError: - try: - # Flush any pending writes first - if self._write_bio.pending: - await self.transport_stream.send(self._write_bio.read()) - - data = await self.transport_stream.receive() - except EndOfStream: - self._read_bio.write_eof() - except OSError as exc: - self._read_bio.write_eof() - self._write_bio.write_eof() - raise BrokenResourceError from exc - else: - self._read_bio.write(data) - except ssl.SSLWantWriteError: - await self.transport_stream.send(self._write_bio.read()) - except ssl.SSLSyscallError as exc: - self._read_bio.write_eof() - self._write_bio.write_eof() - raise BrokenResourceError from exc - except ssl.SSLError as exc: - self._read_bio.write_eof() - self._write_bio.write_eof() - if ( - isinstance(exc, ssl.SSLEOFError) - or "UNEXPECTED_EOF_WHILE_READING" in exc.strerror - ): - if self.standard_compatible: - raise BrokenResourceError from exc - else: - raise EndOfStream from None - - raise - else: - # Flush any pending writes first - if self._write_bio.pending: - await self.transport_stream.send(self._write_bio.read()) - - return result - - async def unwrap(self) -> tuple[AnyByteStream, bytes]: - """ - Does the TLS closing handshake. 
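Client-side usage of ``wrap()`` (a sketch; ``example.com`` stands in for a real TLS host):

.. code-block:: python

    import anyio
    from anyio.streams.tls import TLSStream

    async def main() -> None:
        tcp = await anyio.connect_tcp("example.com", 443)
        tls = await TLSStream.wrap(tcp, hostname="example.com")  # client side
        await tls.send(b"HEAD / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        print(await tls.receive())
        await tls.aclose()

    anyio.run(main)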
- - :return: a tuple of (wrapped byte stream, bytes left in the read buffer) - - """ - await self._call_sslobject_method(self._ssl_object.unwrap) - self._read_bio.write_eof() - self._write_bio.write_eof() - return self.transport_stream, self._read_bio.read() - - async def aclose(self) -> None: - if self.standard_compatible: - try: - await self.unwrap() - except BaseException: - await aclose_forcefully(self.transport_stream) - raise - - await self.transport_stream.aclose() - - async def receive(self, max_bytes: int = 65536) -> bytes: - data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) - if not data: - raise EndOfStream - - return data - - async def send(self, item: bytes) -> None: - await self._call_sslobject_method(self._ssl_object.write, item) - - async def send_eof(self) -> None: - tls_version = self.extra(TLSAttribute.tls_version) - match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version) - if match: - major, minor = int(match.group(1)), int(match.group(2) or 0) - if (major, minor) < (1, 3): - raise NotImplementedError( - f"send_eof() requires at least TLSv1.3; current " - f"session uses {tls_version}" - ) - - raise NotImplementedError( - "send_eof() has not yet been implemented for TLS streams" - ) - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self.transport_stream.extra_attributes, - TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, - TLSAttribute.channel_binding_tls_unique: self._ssl_object.get_channel_binding, - TLSAttribute.cipher: self._ssl_object.cipher, - TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), - TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( - True - ), - TLSAttribute.server_side: lambda: self._ssl_object.server_side, - TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers() - if self._ssl_object.server_side - else None, - TLSAttribute.standard_compatible: lambda: self.standard_compatible, - TLSAttribute.ssl_object: lambda: self._ssl_object, - TLSAttribute.tls_version: self._ssl_object.version, - } - - -@dataclass(eq=False) -class TLSListener(Listener[TLSStream]): - """ - A convenience listener that wraps another listener and auto-negotiates a TLS session on every - accepted connection. - - If the TLS handshake times out or raises an exception, :meth:`handle_handshake_error` is - called to do whatever post-mortem processing is deemed necessary. - - Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. - - :param Listener listener: the listener to wrap - :param ssl_context: the SSL context object - :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` - :param handshake_timeout: time limit for the TLS handshake - (passed to :func:`~anyio.fail_after`) - """ - - listener: Listener[Any] - ssl_context: ssl.SSLContext - standard_compatible: bool = True - handshake_timeout: float = 30 - - @staticmethod - async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: - """ - Handle an exception raised during the TLS handshake. - - This method does 3 things: - - #. Forcefully closes the original stream - #. Logs the exception (unless it was a cancellation exception) using the - ``anyio.streams.tls`` logger - #. 
Reraises the exception if it was a base exception or a cancellation exception - - :param exc: the exception - :param stream: the original stream - - """ - await aclose_forcefully(stream) - - # Log all except cancellation exceptions - if not isinstance(exc, get_cancelled_exc_class()): - logging.getLogger(__name__).exception("Error during TLS handshake") - - # Only reraise base exceptions and cancellation exceptions - if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): - raise - - async def serve( - self, - handler: Callable[[TLSStream], Any], - task_group: TaskGroup | None = None, - ) -> None: - @wraps(handler) - async def handler_wrapper(stream: AnyByteStream) -> None: - from .. import fail_after - - try: - with fail_after(self.handshake_timeout): - wrapped_stream = await TLSStream.wrap( - stream, - ssl_context=self.ssl_context, - standard_compatible=self.standard_compatible, - ) - except BaseException as exc: - await self.handle_handshake_error(exc, stream) - else: - await handler(wrapped_stream) - - await self.listener.serve(handler_wrapper, task_group) - - async def aclose(self) -> None: - await self.listener.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - TLSAttribute.standard_compatible: lambda: self.standard_compatible, - } diff --git a/server/venv/Lib/site-packages/anyio/to_process.py b/server/venv/Lib/site-packages/anyio/to_process.py deleted file mode 100644 index 7ba9d44..0000000 --- a/server/venv/Lib/site-packages/anyio/to_process.py +++ /dev/null @@ -1,249 +0,0 @@ -from __future__ import annotations - -import os -import pickle -import subprocess -import sys -from collections import deque -from importlib.util import module_from_spec, spec_from_file_location -from typing import Callable, TypeVar, cast - -from ._core._eventloop import current_time, get_asynclib, get_cancelled_exc_class -from ._core._exceptions import BrokenWorkerProcess -from ._core._subprocesses import open_process -from ._core._synchronization import CapacityLimiter -from ._core._tasks import CancelScope, fail_after -from .abc import ByteReceiveStream, ByteSendStream, Process -from .lowlevel import RunVar, checkpoint_if_cancelled -from .streams.buffered import BufferedByteReceiveStream - -WORKER_MAX_IDLE_TIME = 300 # 5 minutes - -T_Retval = TypeVar("T_Retval") -_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers") -_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar( - "_process_pool_idle_workers" -) -_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter") - - -async def run_sync( - func: Callable[..., T_Retval], - *args: object, - cancellable: bool = False, - limiter: CapacityLimiter | None = None, -) -> T_Retval: - """ - Call the given function with the given arguments in a worker process. - - If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, - the worker process running it will be abruptly terminated using SIGKILL (or - ``terminateProcess()`` on Windows). - - :param func: a callable - :param args: positional arguments for the callable - :param cancellable: ``True`` to allow cancellation of the operation while it's running - :param limiter: capacity limiter to use to limit the total amount of processes running - (if omitted, the default limiter is used) - :return: an awaitable that yields the return value of the function. 
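Typical usage (a sketch; note that the callable and its arguments must be picklable, so lambdas and closures will not work):

.. code-block:: python

    import math

    import anyio
    import anyio.to_process

    async def main() -> None:
        print(await anyio.to_process.run_sync(math.factorial, 10))  # 3628800

    anyio.run(main)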
- - """ - - async def send_raw_command(pickled_cmd: bytes) -> object: - try: - await stdin.send(pickled_cmd) - response = await buffered.receive_until(b"\n", 50) - status, length = response.split(b" ") - if status not in (b"RETURN", b"EXCEPTION"): - raise RuntimeError( - f"Worker process returned unexpected response: {response!r}" - ) - - pickled_response = await buffered.receive_exactly(int(length)) - except BaseException as exc: - workers.discard(process) - try: - process.kill() - with CancelScope(shield=True): - await process.aclose() - except ProcessLookupError: - pass - - if isinstance(exc, get_cancelled_exc_class()): - raise - else: - raise BrokenWorkerProcess from exc - - retval = pickle.loads(pickled_response) - if status == b"EXCEPTION": - assert isinstance(retval, BaseException) - raise retval - else: - return retval - - # First pickle the request before trying to reserve a worker process - await checkpoint_if_cancelled() - request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL) - - # If this is the first run in this event loop thread, set up the necessary variables - try: - workers = _process_pool_workers.get() - idle_workers = _process_pool_idle_workers.get() - except LookupError: - workers = set() - idle_workers = deque() - _process_pool_workers.set(workers) - _process_pool_idle_workers.set(idle_workers) - get_asynclib().setup_process_pool_exit_at_shutdown(workers) - - async with (limiter or current_default_process_limiter()): - # Pop processes from the pool (starting from the most recently used) until we find one that - # hasn't exited yet - process: Process - while idle_workers: - process, idle_since = idle_workers.pop() - if process.returncode is None: - stdin = cast(ByteSendStream, process.stdin) - buffered = BufferedByteReceiveStream( - cast(ByteReceiveStream, process.stdout) - ) - - # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME seconds or - # longer - now = current_time() - killed_processes: list[Process] = [] - while idle_workers: - if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: - break - - process, idle_since = idle_workers.popleft() - process.kill() - workers.remove(process) - killed_processes.append(process) - - with CancelScope(shield=True): - for process in killed_processes: - await process.aclose() - - break - - workers.remove(process) - else: - command = [sys.executable, "-u", "-m", __name__] - process = await open_process( - command, stdin=subprocess.PIPE, stdout=subprocess.PIPE - ) - try: - stdin = cast(ByteSendStream, process.stdin) - buffered = BufferedByteReceiveStream( - cast(ByteReceiveStream, process.stdout) - ) - with fail_after(20): - message = await buffered.receive(6) - - if message != b"READY\n": - raise BrokenWorkerProcess( - f"Worker process returned unexpected response: {message!r}" - ) - - main_module_path = getattr(sys.modules["__main__"], "__file__", None) - pickled = pickle.dumps( - ("init", sys.path, main_module_path), - protocol=pickle.HIGHEST_PROTOCOL, - ) - await send_raw_command(pickled) - except (BrokenWorkerProcess, get_cancelled_exc_class()): - raise - except BaseException as exc: - process.kill() - raise BrokenWorkerProcess( - "Error during worker process initialization" - ) from exc - - workers.add(process) - - with CancelScope(shield=not cancellable): - try: - return cast(T_Retval, await send_raw_command(request)) - finally: - if process in workers: - idle_workers.append((process, current_time())) - - -def current_default_process_limiter() -> CapacityLimiter: - """ - Return the 
capacity limiter that is used by default to limit the number of worker processes. - - :return: a capacity limiter object - - """ - try: - return _default_process_limiter.get() - except LookupError: - limiter = CapacityLimiter(os.cpu_count() or 2) - _default_process_limiter.set(limiter) - return limiter - - -def process_worker() -> None: - # Redirect standard streams to os.devnull so that user code won't interfere with the - # parent-worker communication - stdin = sys.stdin - stdout = sys.stdout - sys.stdin = open(os.devnull) - sys.stdout = open(os.devnull, "w") - - stdout.buffer.write(b"READY\n") - while True: - retval = exception = None - try: - command, *args = pickle.load(stdin.buffer) - except EOFError: - return - except BaseException as exc: - exception = exc - else: - if command == "run": - func, args = args - try: - retval = func(*args) - except BaseException as exc: - exception = exc - elif command == "init": - main_module_path: str | None - sys.path, main_module_path = args - del sys.modules["__main__"] - if main_module_path: - # Load the parent's main module but as __mp_main__ instead of __main__ - # (like multiprocessing does) to avoid infinite recursion - try: - spec = spec_from_file_location("__mp_main__", main_module_path) - if spec and spec.loader: - main = module_from_spec(spec) - spec.loader.exec_module(main) - sys.modules["__main__"] = main - except BaseException as exc: - exception = exc - - try: - if exception is not None: - status = b"EXCEPTION" - pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) - else: - status = b"RETURN" - pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) - except BaseException as exc: - exception = exc - status = b"EXCEPTION" - pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) - - stdout.buffer.write(b"%s %d\n" % (status, len(pickled))) - stdout.buffer.write(pickled) - - # Respect SIGTERM - if isinstance(exception, SystemExit): - raise exception - - -if __name__ == "__main__": - process_worker() diff --git a/server/venv/Lib/site-packages/anyio/to_thread.py b/server/venv/Lib/site-packages/anyio/to_thread.py deleted file mode 100644 index 9315d1e..0000000 --- a/server/venv/Lib/site-packages/anyio/to_thread.py +++ /dev/null @@ -1,67 +0,0 @@ -from __future__ import annotations - -from typing import Callable, TypeVar -from warnings import warn - -from ._core._eventloop import get_asynclib -from .abc import CapacityLimiter - -T_Retval = TypeVar("T_Retval") - - -async def run_sync( - func: Callable[..., T_Retval], - *args: object, - cancellable: bool = False, - limiter: CapacityLimiter | None = None, -) -> T_Retval: - """ - Call the given function with the given arguments in a worker thread. - - If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled, - the thread will still run its course but its return value (or any raised exception) will be - ignored. - - :param func: a callable - :param args: positional arguments for the callable - :param cancellable: ``True`` to allow cancellation of the operation - :param limiter: capacity limiter to use to limit the total amount of threads running - (if omitted, the default limiter is used) - :return: an awaitable that yields the return value of the function. 
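A minimal sketch: offloading a blocking call so the event loop stays responsive:

.. code-block:: python

    import time

    import anyio
    import anyio.to_thread

    async def main() -> None:
        # time.sleep() would block the event loop; a worker thread runs it instead
        await anyio.to_thread.run_sync(time.sleep, 0.2)
        print("event loop stayed responsive")

    anyio.run(main)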
- - """ - return await get_asynclib().run_sync_in_worker_thread( - func, *args, cancellable=cancellable, limiter=limiter - ) - - -async def run_sync_in_worker_thread( - func: Callable[..., T_Retval], - *args: object, - cancellable: bool = False, - limiter: CapacityLimiter | None = None, -) -> T_Retval: - warn( - "run_sync_in_worker_thread() has been deprecated, use anyio.to_thread.run_sync() instead", - DeprecationWarning, - ) - return await run_sync(func, *args, cancellable=cancellable, limiter=limiter) - - -def current_default_thread_limiter() -> CapacityLimiter: - """ - Return the capacity limiter that is used by default to limit the number of concurrent threads. - - :return: a capacity limiter object - - """ - return get_asynclib().current_default_thread_limiter() - - -def current_default_worker_thread_limiter() -> CapacityLimiter: - warn( - "current_default_worker_thread_limiter() has been deprecated, " - "use anyio.to_thread.current_default_thread_limiter() instead", - DeprecationWarning, - ) - return current_default_thread_limiter() diff --git a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/INSTALLER b/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/LICENSE b/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/LICENSE deleted file mode 100644 index 0a64774..0000000 --- a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -This package contains a modified version of ca-bundle.crt: - -ca-bundle.crt -- Bundle of CA Root Certificates - -Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# -This is a bundle of X.509 certificates of public Certificate Authorities -(CA). These were automatically extracted from Mozilla's root certificates -file (certdata.txt). This file can be found in the mozilla source tree: -https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt -It contains the certificates in PEM format and therefore -can be directly used with curl / libcurl / php_curl, or with -an Apache+mod_ssl webserver for SSL client authentication. -Just configure this file as the SSLCACertificateFile.# - -***** BEGIN LICENSE BLOCK ***** -This Source Code Form is subject to the terms of the Mozilla Public License, -v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain -one at http://mozilla.org/MPL/2.0/. - -***** END LICENSE BLOCK ***** -@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/METADATA b/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/METADATA deleted file mode 100644 index 07f4991..0000000 --- a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/METADATA +++ /dev/null @@ -1,69 +0,0 @@ -Metadata-Version: 2.1 -Name: certifi -Version: 2023.7.22 -Summary: Python package for providing Mozilla's CA Bundle. 
-Home-page: https://github.com/certifi/python-certifi -Author: Kenneth Reitz -Author-email: me@kennethreitz.com -License: MPL-2.0 -Project-URL: Source, https://github.com/certifi/python-certifi -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Natural Language :: English -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Requires-Python: >=3.6 -License-File: LICENSE - -Certifi: Python SSL Certificates -================================ - -Certifi provides Mozilla's carefully curated collection of Root Certificates for -validating the trustworthiness of SSL certificates while verifying the identity -of TLS hosts. It has been extracted from the `Requests`_ project. - -Installation ------------- - -``certifi`` is available on PyPI. Simply install it with ``pip``:: - - $ pip install certifi - -Usage ------ - -To reference the installed certificate authority (CA) bundle, you can use the -built-in function:: - - >>> import certifi - - >>> certifi.where() - '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' - -Or from the command line:: - - $ python -m certifi - /usr/local/lib/python3.7/site-packages/certifi/cacert.pem - -Enjoy! - -.. _`Requests`: https://requests.readthedocs.io/en/master/ - -Addition/Removal of Certificates --------------------------------- - -Certifi does not support any addition/removal or other modification of the -CA trust store content. This project is intended to provide a reliable and -highly portable root of trust to python deployments. Look to upstream projects -for methods to use alternate trust. 
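As a minimal illustration (not part of any deleted file), the usage documented in the METADATA above boils down to pointing an SSL context at the bundle path returned by certifi's documented API, ``certifi.where()``; the URL below is a placeholder::

    # Sketch only: uses certifi's documented API (where()) together with the
    # standard library. ssl.create_default_context() accepts the bundle path
    # via its cafile parameter.
    import ssl
    import urllib.request

    import certifi

    # Trust exactly the Mozilla-derived roots shipped in certifi/cacert.pem
    # (the bundle whose deletion is recorded later in this diff).
    context = ssl.create_default_context(cafile=certifi.where())

    with urllib.request.urlopen("https://example.com", context=context) as resp:
        print(resp.status)
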
- - diff --git a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/RECORD b/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/RECORD deleted file mode 100644 index 36cf845..0000000 --- a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/RECORD +++ /dev/null @@ -1,15 +0,0 @@ -certifi-2023.7.22.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -certifi-2023.7.22.dist-info/LICENSE,sha256=oC9sY4-fuE0G93ZMOrCF2K9-2luTwWbaVDEkeQd8b7A,1052 -certifi-2023.7.22.dist-info/METADATA,sha256=oyc8gd32SOVo0IGolt8-bR7FnZ9Z99GoHoGE6ACcvFA,2191 -certifi-2023.7.22.dist-info/RECORD,, -certifi-2023.7.22.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -certifi-2023.7.22.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -certifi-2023.7.22.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 -certifi/__init__.py,sha256=L_j-d0kYuA_MzA2_2hraF1ovf6KT6DTquRdV3paQwOk,94 -certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 -certifi/__pycache__/__init__.cpython-311.pyc,, -certifi/__pycache__/__main__.cpython-311.pyc,, -certifi/__pycache__/core.cpython-311.pyc,, -certifi/cacert.pem,sha256=eU0Dn_3yd8BH4m8sfVj4Glhl2KDrcCSg-sEWT-pNJ88,281617 -certifi/core.py,sha256=lhewz0zFb2b4ULsQurElmloYwQoecjWzPqY67P8T7iM,4219 -certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/REQUESTED b/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/WHEEL b/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/WHEEL deleted file mode 100644 index 5bad85f..0000000 --- a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/top_level.txt b/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/top_level.txt deleted file mode 100644 index 963eac5..0000000 --- a/server/venv/Lib/site-packages/certifi-2023.7.22.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -certifi diff --git a/server/venv/Lib/site-packages/certifi/__init__.py b/server/venv/Lib/site-packages/certifi/__init__.py deleted file mode 100644 index 8ce89ce..0000000 --- a/server/venv/Lib/site-packages/certifi/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .core import contents, where - -__all__ = ["contents", "where"] -__version__ = "2023.07.22" diff --git a/server/venv/Lib/site-packages/certifi/__main__.py b/server/venv/Lib/site-packages/certifi/__main__.py deleted file mode 100644 index 8945b5d..0000000 --- a/server/venv/Lib/site-packages/certifi/__main__.py +++ /dev/null @@ -1,12 +0,0 @@ -import argparse - -from certifi import contents, where - -parser = argparse.ArgumentParser() -parser.add_argument("-c", "--contents", action="store_true") -args = parser.parse_args() - -if args.contents: - print(contents()) -else: - print(where()) diff --git a/server/venv/Lib/site-packages/certifi/cacert.pem b/server/venv/Lib/site-packages/certifi/cacert.pem deleted file mode 100644 index 0212369..0000000 --- a/server/venv/Lib/site-packages/certifi/cacert.pem +++ /dev/null @@ -1,4635 +0,0 @@ - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: 
"GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 
-YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud -EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 
31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s -zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust.net OU=Security Communication RootCA1 -# Subject: O=SECOM Trust.net OU=Security Communication RootCA1 -# Label: "Security Communication Root CA" -# Serial: 0 -# MD5 Fingerprint: f1:bc:63:6a:54:e0:b5:27:f5:cd:e7:1a:e3:4d:6e:4a -# SHA1 Fingerprint: 36:b1:2b:49:f9:81:9e:d7:4c:9e:bc:38:0f:c6:56:8f:5d:ac:b2:f7 -# SHA256 Fingerprint: e7:5e:72:ed:9f:56:0e:ec:6e:b4:80:00:73:a4:3f:c3:ad:19:19:5a:39:22:82:01:78:95:97:4a:99:02:6b:6c ------BEGIN CERTIFICATE----- -MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY -MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t -dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5 -WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD -VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8 
-9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ -DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9 -Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N -QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ -xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G -A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG -kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr -Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5 -Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU -JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot -RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== ------END CERTIFICATE----- - -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs -jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
-NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
-MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF -MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd -MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 -lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- 
-MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH -HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c -wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg -JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
-6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 -Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
-DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT -IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 -QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG 
-9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z -Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT 
-AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ -OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv -JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
-# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 6047274297262753887 -# MD5 Fingerprint: 73:3a:74:7a:ec:bb:a3:96:a6:c2:e4:e2:c8:9b:c0:c3 -# SHA1 Fingerprint: ae:c5:fb:3f:c8:e1:bf:c4:e5:4f:03:07:5a:9a:e8:00:b7:f7:b6:fa -# SHA256 Fingerprint: 04:04:80:28:bf:1f:28:64:d4:8f:9a:d4:d8:32:94:36:6a:82:88:56:55:3f:3b:14:30:3f:90:14:7f:5d:40:ef ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy -MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD -VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp 
-cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv -ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl -AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF -661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9 -am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1 -ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481 -PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS -3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k -SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF -3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM -ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g -StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz -Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB -jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V ------END CERTIFICATE----- - -# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. -# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 
O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 -jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
-jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr -6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr -Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- 
-MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv -033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
-1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R -ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV -BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
-ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. -# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os -zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 -4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx -EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
-VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm -4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 -pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl -dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
-0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG -EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ -SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
-4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L -cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi -eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG -A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 -DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j -vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP -DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc -maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D -lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv -KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 1 G3" -# Serial: 687049649626669250736271037606554624078720034195 -# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab -# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 -# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 -MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV -wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe -rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 -68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh -4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp -UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o -abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc -3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G -KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt -hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO -Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt -zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD -ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC -MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 -cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN -qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 -YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv -b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 -8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k -NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj -ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp -q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt -nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2 G3" -# Serial: 390156079458959257446133169266079962026824725800 -# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 -# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 -# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 -MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf -qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW -n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym -c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ -O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 -o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j -IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq -IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz -8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh -vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l -7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG -cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD -ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 -AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC -roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga -W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n -lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE -+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV -csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd -dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg -KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM -HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 -WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3 G3" -# Serial: 268090761170461462463995952157327242137089239581 -# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 -# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d -# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 -MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR -/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu -FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR -U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c -ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR -FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k -A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw -eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl -sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp -VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q 
-A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD -ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- -MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo -IhNzbM8m9Yop5w== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
-RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
-Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ -oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO -DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- -MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT -EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ -CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV -BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY -tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ -Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 -VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT -79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 -c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
-Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l -c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee -UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE -Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd -BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF -Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO -VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 -ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs -8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR -iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze -Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ -XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ -qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB -VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB -L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG -jjxDah2nGN59PRbxYvnKkKj9 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Label: "USERTrust ECC Certification Authority" -# Serial: 123013823720199481456569720443997572134 -# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 -# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 -# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a ------BEGIN CERTIFICATE----- -MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl -eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT -JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT -Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg -VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G -A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB -zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW -RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Label: "GlobalSign ECC Root CA - R5" -# Serial: 32785792099990507226680698011560947931244 -# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 -# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa -# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 ------BEGIN CERTIFICATE----- -MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
-8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke -hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI -KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg -515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO -xwy8p2Fp8fc74SrL+SvzZpA3 ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Label: "IdenTrust Commercial Root CA 1" -# Serial: 13298821034946342390520003877796839426 -# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 -# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 -# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu -VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw -MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw -JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT -3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU -+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp -S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 -bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi -T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL -vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK -Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK -dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT -c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv -l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N -iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD -ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH -6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt -LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 -nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 -+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK -W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT -AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq -l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG -4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ -mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A -7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Label: "IdenTrust Public Sector Root CA 1" -# Serial: 13298821034946342390521976156843933698 -# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba -# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd -# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu -VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
-MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 -MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 -ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy -RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS -bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF -/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R -3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw -EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy -9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V -GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ -2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV -WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD -W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN -AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj -t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV -DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 -TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G -lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW -mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df -WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 -+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ -tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA -GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv -8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - G2" -# Serial: 1246989352 -# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 -# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 -# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 ------BEGIN CERTIFICATE----- -MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 -cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs -IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz -dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy -NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu -dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt -dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 -aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK -AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T -RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN -cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW -wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 -U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 -jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN -BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ -jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ -Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v -1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R -nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH -VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - EC1" -# Serial: 51543124481930649114116133369 -# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc -# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 -# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 ------BEGIN CERTIFICATE----- -MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG -A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 -d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu -dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq -RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy -MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD -VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 -L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g -Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi -A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt -ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH -Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O -BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC -R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX -hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G ------END CERTIFICATE----- - -# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority -# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority -# Label: "CFCA EV ROOT" -# Serial: 407555286 -# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 -# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 -# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD -TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y -aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx -MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j -aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP -T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 -sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL -TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 -/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp -7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz -EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt -hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP -a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot -aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg -TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV -PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv -cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL -tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd -BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB -ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT -ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL -jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS -ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 -xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d -Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN -5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe -/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z -AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ -5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GB CA" -# Serial: 157768595616588414422159278966750757568 -# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d -# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed -# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 ------BEGIN CERTIFICATE----- -MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt -MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg -Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i -YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x -CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG -b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh -bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 -HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx -WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX -1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk -u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P -99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r -M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB -BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh -cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 -gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO -ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf -aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic -Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= ------END CERTIFICATE----- - -# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. -# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
-# Label: "SZAFIR ROOT CA2" -# Serial: 357043034767186914217277344587386743377558296292 -# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 -# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de -# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 -ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw -NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L -cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg -Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN -QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT -3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw -3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 -3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 -BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN -XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF -AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw -8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG -nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP -oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy -d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg -LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority -# Label: "Certum Trusted Network CA 2" -# Serial: 44979900017204383099463764357512596969 -# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 -# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 -# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 ------BEGIN CERTIFICATE----- -MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB -gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu -QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG -A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz -OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ -VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 -b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA -DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn -0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB -OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE -fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E -Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m -o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i -sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW -OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez -Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS -adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n -3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ -F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf -CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 -XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm -djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ -WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb -AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq -P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko -b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj -XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P -5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi -DrW5viSP ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce -# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 -# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 ------BEGIN CERTIFICATE----- -MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix -DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k -IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT -N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v -dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG -A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh -ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx -QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA -4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 -AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 -4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C -ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV -9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD -gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 -Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq -NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko -LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc -Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd -ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I -XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI -M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot -9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V -Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea -j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh -X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ -l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf -bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 -pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK -e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 -vm9qp/UsQu0yrbYhnr68 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef -# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 -# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 ------BEGIN CERTIFICATE----- -MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN -BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl -bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv -b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ -BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj -YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 -MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 -dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg -QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa -jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi -C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep -lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof -TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR ------END CERTIFICATE----- - -# Issuer: CN=ISRG Root X1 O=Internet Security Research Group -# Subject: CN=ISRG Root X1 O=Internet Security Research Group -# Label: "ISRG Root X1" -# Serial: 172886928669790476064670243504169061120 -# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e -# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 -# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK -NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
-oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Label: "AC RAIZ FNMT-RCM" -# Serial: 485876308206448804701554682760554759 -# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d -# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 -# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx -CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ -WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ -BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG -Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ -yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf -BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz -WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF -tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z -374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC -IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL -mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 -wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS -MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 -ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet -UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H -YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 -LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD -nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 -RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM -LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf -77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N -JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm -fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp -6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp -1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B -9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok -RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv -uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 1 O=Amazon -# Subject: CN=Amazon Root CA 1 O=Amazon -# Label: "Amazon Root CA 1" -# Serial: 143266978916655856878034712317230054538369994 -# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 -# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 -# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e ------BEGIN CERTIFICATE----- -MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
-ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM -9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw -IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 -VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L -93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm -jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA -A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI -U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs -N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv -o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU -5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy -rqXRfboQnoZsG4q5WTP468SQvvG5 ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 2 O=Amazon -# Subject: CN=Amazon Root CA 2 O=Amazon -# Label: "Amazon Root CA 2" -# Serial: 143266982885963551818349160658925006970653239 -# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 -# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a -# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK -gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ -W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg -1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K -8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r -2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me -z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR -8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj -mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz -7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 -+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI -0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm -UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 -LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY -+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS -k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl -7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm -btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl -urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ -fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 -n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE -76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H -9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT -4PsJYGw= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 3 O=Amazon -# Subject: CN=Amazon Root CA 3 O=Amazon -# Label: "Amazon Root CA 3" -# Serial: 143266986699090766294700635381230934788665930 -# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 -# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e -# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 ------BEGIN CERTIFICATE----- -MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl -ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr -ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr -BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM -YyRIHN8wfdVoOw== ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 4 O=Amazon -# Subject: CN=Amazon Root CA 4 O=Amazon -# Label: "Amazon Root CA 4" -# Serial: 143266989758080763974105200630763877849284878 -# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd -# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be -# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 ------BEGIN CERTIFICATE----- -MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi -9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk -M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB -MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw -CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW -1KyLa2tJElMzrdfkviT8tQp21KW8EA== ------END CERTIFICATE----- - -# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" -# Serial: 1 -# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 -# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca -# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 ------BEGIN CERTIFICATE----- -MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx -GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp -bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w -KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 -BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy -dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG -EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll -IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU -QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT -TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg -LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 -a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr -LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
-N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X -YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ -iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f -AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH -V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh -AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf -IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 -lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c -8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf -lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= ------END CERTIFICATE----- - -# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Label: "GDCA TrustAUTH R5 ROOT" -# Serial: 9009899650740120186 -# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 -# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 -# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 ------BEGIN CERTIFICATE----- -MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE -BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ -IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 -MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV -BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w -HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj -Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj -TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u -KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj -qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm -MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 -ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP -zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk -L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC -jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA -HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC -AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg -p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm -DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 -COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry -L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf -JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg -IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io -2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV -09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ -XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq -T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe -MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Label: "SSL.com Root Certification Authority RSA" -# Serial: 8875640296558310041 -# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 -# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb -# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 ------BEGIN CERTIFICATE----- -MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE -BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK -DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz -OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv -bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R -xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX -qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC -C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 -6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh -/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF -YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E -JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc -US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 -ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm -+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi -M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G -A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV -cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc -Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs -PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ -q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 -cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr -a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I -H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y -K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu -nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf -oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY -Ic2wBlX7Jz9TkHCpBB5XJ7k= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com Root Certification Authority ECC" -# Serial: 8495723813297216424 -# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e -# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a -# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 ------BEGIN CERTIFICATE----- -MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz -WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 -b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS -b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI -7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg -CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
-EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD -VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T -kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ -gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority RSA R2" -# Serial: 6248227494352943350 -# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 -# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a -# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c ------BEGIN CERTIFICATE----- -MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE -CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy -MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G -A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD -DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq -M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf -OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa -4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 -HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR -aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA -b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ -Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV -PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO -pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu -UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY -MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV -HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 -9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW -s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 -Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg -cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM -79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz -/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt -ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm -Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK -QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ -w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi -S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 -mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority ECC" -# Serial: 3182246526754555285 -# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 -# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d -# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 ------BEGIN CERTIFICATE----- -MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
-VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx -NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv -bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA -VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku -WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX -5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ -ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg -h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Label: "GlobalSign Root CA - R6" -# Serial: 1417766617973444989252670301619537 -# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae -# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 -# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg -MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx -MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET -MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI -xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k -ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD -aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw -LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw -1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX -k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 -SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h -bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n -WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY -rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce -MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu -bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN -nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt -Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 -55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj -vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf -cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz -oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp -nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs -pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v -JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R -8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 -5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE 
WISeKey Global Root GC CA" -# Serial: 44084345621038548146064804565436152554 -# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 -# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 -# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d ------BEGIN CERTIFICATE----- -MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw -CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 -bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg -Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ -BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu -ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS -b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni -eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W -p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T -rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV -57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg -Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 ------END CERTIFICATE----- - -# Issuer: CN=UCA Global G2 Root O=UniTrust -# Subject: CN=UCA Global G2 Root O=UniTrust -# Label: "UCA Global G2 Root" -# Serial: 124779693093741543919145257850076631279 -# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 -# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a -# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH -bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x -CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds -b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr -b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 -kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm -VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R -VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc -C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj -tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY -D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv -j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl -NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 -iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP -O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV -ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj -L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 -1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl -1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU -b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV -PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj -y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb -EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg -DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI -+Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
-YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX -UB+K+wb1whnw0A== ------END CERTIFICATE----- - -# Issuer: CN=UCA Extended Validation Root O=UniTrust -# Subject: CN=UCA Extended Validation Root O=UniTrust -# Label: "UCA Extended Validation Root" -# Serial: 106100277556486529736699587978573607008 -# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 -# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a -# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF -eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx -MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV -BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog -D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS -sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop -O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk -sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi -c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj -VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz -KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ -TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G -sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs -1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD -fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T -AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN -l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR -ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ -VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 -c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp -4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s -t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj -2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO -vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C -xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx -cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM -fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax ------END CERTIFICATE----- - -# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Label: "Certigna Root CA" -# Serial: 269714418870597844693661054334862075617 -# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 -# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 -# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 ------BEGIN CERTIFICATE----- -MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw -WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw -MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x -MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD -VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX -BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw -ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
-ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M -CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu -I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm -TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh -C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf -ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz -IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT -Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k -JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 -hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB -GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of -1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov -L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo -dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr -aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq -hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L -6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG -HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 -0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB -lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi -o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 -gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v -faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 -Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh -jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw -3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign Root CA - G1" -# Serial: 235931866688319308814040 -# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac -# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c -# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 ------BEGIN CERTIFICATE----- -MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD -VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU -ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH -MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO -MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv -Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz -f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO -8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq -d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM -tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt -Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB -o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD -AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x -PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM -wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d -GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH -6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
-RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx -iN66zB+Afko= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign ECC Root CA - G3" -# Serial: 287880440101571086945156 -# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 -# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 -# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b ------BEGIN CERTIFICATE----- -MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG -EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo -bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g -RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ -TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s -b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 -WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS -fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB -zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq -hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB -CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD -+JbNR6iC8hZVdyR+EhCVBCyj ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Label: "emSign Root CA - C1" -# Serial: 825510296613316004955058 -# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 -# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 -# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f ------BEGIN CERTIFICATE----- -MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG -A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg -SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v -dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ -BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ -HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH -3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH -GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c -xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 -aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq -TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 -/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 -kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG -YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT -+xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo -WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Label: "emSign ECC Root CA - C3" -# Serial: 582948710642506000014504 -# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 -# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 -# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 ------BEGIN CERTIFICATE----- -MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG -EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx -IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND -IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci -MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti -sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O -BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB -Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c -3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J -0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Label: "Hongkong Post Root CA 3" -# Serial: 46170865288971385588281144162979347873371282084 -# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 -# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 -# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 ------BEGIN CERTIFICATE----- -MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL -BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ -SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n -a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 -NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT -CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u -Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO -dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI -VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV -9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY -2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY -vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt -bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb -x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ -l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK -TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj -Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e -i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw -DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG -7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk -MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr -gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk -GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS -3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm -Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ -l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c -JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP -L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
-LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG -mpv0 ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Label: "Entrust Root Certification Authority - G4" -# Serial: 289383649854506086828220374796556676440 -# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 -# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 -# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw -gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL -Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg -MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw -BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 -MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 -c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ -bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg -Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B -AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ -2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E -T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j -5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM -C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T -DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX -wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A -2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm -nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 -dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl -N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj -c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS -5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS -Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr -hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ -B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI -AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw -H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ -b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk -2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol -IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk -5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY -n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== ------END CERTIFICATE----- - -# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft ECC Root Certificate Authority 2017" -# Serial: 136839042543790627607696632466672567020 -# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 -# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 -# SHA256 Fingerprint: 
35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 ------BEGIN CERTIFICATE----- -MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD -VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw -MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV -UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy -b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR -ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb -hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 -FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV -L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB -iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= ------END CERTIFICATE----- - -# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft RSA Root Certificate Authority 2017" -# Serial: 40975477897264996090493496164228220339 -# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 -# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 -# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 ------BEGIN CERTIFICATE----- -MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl -MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw -NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 -IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG -EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N -aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ -Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 -ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 -HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm -gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ -jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc -aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG -YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 -W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K -UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH -+FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q -W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC -LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC -gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 -tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh -SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 -TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 -pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR -xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp -GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 -dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN -AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB 
-RA+GsCyRxj3qrg+E ------END CERTIFICATE----- - -# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. -# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. -# Label: "e-Szigno Root CA 2017" -# Serial: 411379200276854331539784714 -# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 -# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 -# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 ------BEGIN CERTIFICATE----- -MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV -BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk -LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv -b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ -BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg -THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v -IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv -xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H -Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB -eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo -jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ -+efcMQ== ------END CERTIFICATE----- - -# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Label: "certSIGN Root CA G2" -# Serial: 313609486401300475190 -# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 -# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 -# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV -BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g -Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ -BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ -R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF -dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw -vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ -uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp -n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs -cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW -xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P -rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF -DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx -DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy -LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C -eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ -d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq -kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC -b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl -qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 -OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c -NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk -ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO -pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj -03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk 
-PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE -1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX -QRBdJ3NghVdJIgc= ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global Certification Authority" -# Serial: 1846098327275375458322922162 -# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e -# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 -# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 ------BEGIN CERTIFICATE----- -MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw -CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x -ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 -c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx -OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI -SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI -b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn -swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu -7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 -1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW -80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP -JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l -RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw -hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 -coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc -BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n -twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud -EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud -DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W -0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe -uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q -lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB -aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE -sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT -MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe -qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh -VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 -h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 -EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK -yeC2nOnOcXHebD8WpHk= ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
-# Label: "Trustwave Global ECC P256 Certification Authority" -# Serial: 4151900041497450638097112925 -# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 -# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf -# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 ------BEGIN CERTIFICATE----- -MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG -SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN -FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w -DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw -CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh -DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global ECC P384 Certification Authority" -# Serial: 2704997926503831671788816187 -# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 -# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 -# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 ------BEGIN CERTIFICATE----- -MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB -BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ -j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF -1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G -A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 -AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC -MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu -Sw== ------END CERTIFICATE----- - -# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. -# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
-# Label: "NAVER Global Root Certification Authority" -# Serial: 9013692873798656336226253319739695165984492813 -# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b -# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 -# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 ------BEGIN CERTIFICATE----- -MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM -BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG -T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx -CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD -b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA -iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH -38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE -HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz -kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP -szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq -vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf -nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG -YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo -0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a -CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K -AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I -36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB -Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN -qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj -cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm -+LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL -hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe -lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 -p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 -piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR -LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX -5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO -dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul -9XXeifdy ------END CERTIFICATE----- - -# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" -# Serial: 131542671362353147877283741781055151509 -# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb -# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a -# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb ------BEGIN CERTIFICATE----- -MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw -CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw -FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S -Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 -MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL -DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS -QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
-sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK -Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu -SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC -MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy -v+c= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Label: "GlobalSign Root R46" -# Serial: 1552617688466950547958867513931858518042577 -# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef -# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 -# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA -MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD -VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy -MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt -c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ -OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG -vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud -316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo -0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE -y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF -zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE -+cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN -I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs -x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa -ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC -4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 -7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg -JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti -2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk -pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF -FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt -rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk -ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 -u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP -4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 -N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 -vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Label: "GlobalSign Root E46" -# Serial: 1552617690338932563915843282459653771421763 -# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f -# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 -# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 ------BEGIN CERTIFICATE----- -MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx -CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD -ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw -MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
-HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq -R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd -yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ -7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 -+RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= ------END CERTIFICATE----- - -# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH -# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH -# Label: "GLOBALTRUST 2020" -# Serial: 109160994242082918454945253 -# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 -# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 -# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a ------BEGIN CERTIFICATE----- -MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG -A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw -FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx -MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u -aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq -hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b -RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z -YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 -QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw -yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ -BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ -SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH -r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 -4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me -dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw -q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 -nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu -H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA -VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC -XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd -6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf -+I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi -kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 -wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB -TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C -MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn -4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I -aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy -qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== ------END CERTIFICATE----- - -# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Label: "ANF Secure Server Root CA" -# Serial: 996390341000653745 -# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 -# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 -# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 ------BEGIN CERTIFICATE----- 
-MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV -BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk -YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV -BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN -MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF -UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD -VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v -dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj -cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q -yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH -2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX -H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL -zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR -p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz -W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ -SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn -LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 -n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B -u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj -o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC -AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L -9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej -rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK -pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 -vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq -OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ -/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 -2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI -+PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 -MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo -tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= ------END CERTIFICATE----- - -# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority -# Label: "Certum EC-384 CA" -# Serial: 160250656287871593594747141429395092468 -# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 -# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed -# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 ------BEGIN CERTIFICATE----- -MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw -CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw -JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT -EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 -WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT -LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX -BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE -KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm -Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 -EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J -UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn -nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Root CA" -# Serial: 40870380103424195783807378461123655149 -# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 -# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 -# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd ------BEGIN CERTIFICATE----- -MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 -MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu -MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV -BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw -MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg -U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo -b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ -n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q -p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq -NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF -8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 -HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa -mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi -7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF -ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P -qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ -v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 -Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 -vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD -ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 -WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo -zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR -5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ -GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
-5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq -0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D -P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM -qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP -0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf -E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb ------END CERTIFICATE----- - -# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Label: "TunTrust Root CA" -# Serial: 108534058042236574382096126452369648152337120275 -# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 -# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb -# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 ------BEGIN CERTIFICATE----- -MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL -BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg -Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv -b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG -EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u -IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ -n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd -2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF -VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ -GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF -li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU -r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 -eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb -MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg -jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB -7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW -5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE -ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 -90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z -xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu -QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 -FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH -22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP -xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn -dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 -Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b -nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ -CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH -u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj -d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS RSA Root CA 2021" -# Serial: 76817823531813593706434026085292783742 -# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 -# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d -# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d ------BEGIN 
CERTIFICATE----- -MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs -MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg -Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL -MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl -YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv -b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l -mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE -4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv -a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M -pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw -Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b -LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY -AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB -AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq -E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr -W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ -CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE -AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU -X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 -f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja -H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP -JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P -zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt -jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 -/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT -BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 -aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW -xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU -63ZTGI0RmLo= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS ECC Root CA 2021" -# Serial: 137515985548005187474074462014555733966 -# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 -# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 -# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 ------BEGIN CERTIFICATE----- -MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw -CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh -cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v -dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG -A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj -aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg -Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 -KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y -STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD -AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw -SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN -nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 1977337328857672817 -# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 -# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe -# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 -MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc -tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd -IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j -b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC -AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw -ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m -iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF -Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ -hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P -Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE -EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV -1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t -CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR -5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw -f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 -ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK -GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV ------END CERTIFICATE----- - -# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. -# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
-# Label: "vTrus ECC Root CA" -# Serial: 630369271402956006249506845124680065938238527194 -# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 -# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 -# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 ------BEGIN CERTIFICATE----- -MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw -RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY -BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz -MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u -LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 -v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd -e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw -V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA -AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG -GJTO ------END CERTIFICATE----- - -# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. -# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. -# Label: "vTrus Root CA" -# Serial: 387574501246983434957692974888460947164905180485 -# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc -# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 -# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 ------BEGIN CERTIFICATE----- -MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL -BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x -FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx -MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s -THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD -ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc -IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU -AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ -GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 -8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH -flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt -J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim -0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN -pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ -UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW -OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB -AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet -8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd -nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j -bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM -Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv -TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS -S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr -I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 -b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB -UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P -Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven -sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= ------END CERTIFICATE----- - -# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group -# Subject: CN=ISRG Root X2 O=Internet Security Research Group -# Label: "ISRG Root X2" -# Serial: 87493402998870891108772069816698636114 -# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 -# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af -# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 ------BEGIN CERTIFICATE----- -MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw -CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg -R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 -MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT -ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw -EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW -+1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 -ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI -zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW -tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 -/q4AaOeMSQ+2b1tbFfLn ------END CERTIFICATE----- - -# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. -# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. -# Label: "HiPKI Root CA - G1" -# Serial: 60966262342023497858655262305426234976 -# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 -# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 -# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc ------BEGIN CERTIFICATE----- -MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa -Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 -YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw -qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv -Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 -lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz -Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ -KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK -FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj -HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr -y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ -/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM -a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 -fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG -SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi -7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc -SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza -ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc -XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg -iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho -L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF -Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
-kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ -vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU -YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign ECC Root CA - R4" -# Serial: 159662223612894884239637590694 -# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc -# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 -# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 ------BEGIN CERTIFICATE----- -MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD -VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw -MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g -UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT -BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx -uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV -HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ -+wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 -bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R1 O=Google Trust Services LLC -# Subject: CN=GTS Root R1 O=Google Trust Services LLC -# Label: "GTS Root R1" -# Serial: 159662320309726417404178440727 -# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 -# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a -# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf ------BEGIN CERTIFICATE----- -MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo -27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w -Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw -TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl -qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH -szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 -Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk -MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 -wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p -aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN -VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID -AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E -FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb -C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe -QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy -h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 -7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J -ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef -MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
-Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT -6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ -0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm -2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb -bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R2 O=Google Trust Services LLC -# Subject: CN=GTS Root R2 O=Google Trust Services LLC -# Label: "GTS Root R2" -# Serial: 159662449406622349769042896298 -# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc -# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 -# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 ------BEGIN CERTIFICATE----- -MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt -nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY -6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu -MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k -RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg -f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV -+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo -dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW -Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa -G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq -gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID -AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E -FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H -vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 -0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC -B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u -NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg -yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev -HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 -xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR -TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg -JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV -7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl -6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R3 O=Google Trust Services LLC -# Subject: CN=GTS Root R3 O=Google Trust Services LLC -# Label: "GTS Root R3" -# Serial: 159662495401136852707857743206 -# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 -# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 -# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 ------BEGIN CERTIFICATE----- -MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD -VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG -A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw -WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz -IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
-AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G -jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 -4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 -VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm -ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R4 O=Google Trust Services LLC -# Subject: CN=GTS Root R4 O=Google Trust Services LLC -# Label: "GTS Root R4" -# Serial: 159662532700760215368942768210 -# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 -# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 -# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d ------BEGIN CERTIFICATE----- -MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD -VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG -A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw -WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz -IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi -AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi -QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR -HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D -9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 -p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD ------END CERTIFICATE----- - -# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj -# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj -# Label: "Telia Root CA v2" -# Serial: 7288924052977061235122729490515358 -# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 -# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd -# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx -CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE -AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 -NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ -MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq -AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 -vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 -lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD -n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT -7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o -6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC -TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 -WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R -DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI -pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj -YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy -rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw -AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ -8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi -0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
-A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS -SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K -TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF -6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er -3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt -Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT -VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW -ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA -rBPuUBQemMc= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST BR Root CA 1 2020" -# Serial: 165870826978392376648679885835942448534 -# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed -# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 -# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 -NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS -zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 -QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ -VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW -wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV -dWNbFJWcHwHP2NVypw87 ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST EV Root CA 1 2020" -# Serial: 126288379621884218666039612629459926992 -# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e -# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 -# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 -NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC -/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD -wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 -OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
-y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb -gfM0agPnIjhQW+0ZT0MW ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS ECC P384 Root G5" -# Serial: 13129116028163249804115411775095713523 -# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed -# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee -# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 ------BEGIN CERTIFICATE----- -MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp -Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 -MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ -bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS -7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp -0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS -B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 -BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ -LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 -DXZDjC5Ty3zfDBeWUA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS RSA4096 Root G5" -# Serial: 11930366277458970227240571539258396554 -# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 -# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 -# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN -MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT -HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN -NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs -IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ -ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 -2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp -wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM -pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD -nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po -sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx -Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd -Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX -KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe -XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL -tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv -TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN -AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw -GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H -PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF -O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ -REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik -AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
-/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ -p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw -MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF -qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK -ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root R1 O=Certainly -# Subject: CN=Certainly Root R1 O=Certainly -# Label: "Certainly Root R1" -# Serial: 188833316161142517227353805653483829216 -# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 -# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af -# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw -PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy -dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 -YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 -1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT -vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed -aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 -1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 -r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 -cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ -wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ -6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA -2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH -Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR -eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB -/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u -d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr -PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d -8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi -1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd -rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di -taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 -lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj -yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn -Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy -yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n -wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 -OV+KmalBWQewLK8= ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root E1 O=Certainly -# Subject: CN=Certainly Root E1 O=Certainly -# Label: "Certainly Root E1" -# Serial: 8168531406727139161245376702891150584 -# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 -# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b -# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 ------BEGIN CERTIFICATE----- -MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw -CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu -bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ -BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s -eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
-+IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 -QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 -hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm -ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG -BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR ------END CERTIFICATE----- - -# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. -# Label: "Security Communication RootCA3" -# Serial: 16247922307909811815 -# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 -# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a -# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 ------BEGIN CERTIFICATE----- -MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV -BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw -JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 -MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc -U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg -Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r -CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA -lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG -TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 -9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 -8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 -g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we -GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst -+3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M -0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ -T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw -HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS -YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA -FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd -9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI -UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ -OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke -gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf -iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV -nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD -2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// -1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad -TdJ0MN1kURXbg4NR16/9M51NZg== ------END CERTIFICATE----- - -# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. 
-# Label: "Security Communication ECC RootCA1" -# Serial: 15446673492073852651 -# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 -# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 -# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 ------BEGIN CERTIFICATE----- -MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT -AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD -VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx -NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT -HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 -IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi -AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl -dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK -ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu -9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O -be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA1" -# Serial: 113562791157148395269083148143378328608 -# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 -# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a -# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU -MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI -T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz -MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF -SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh -bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z -xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ -spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 -58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR -at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll -5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq -nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK -V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ -pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO -z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn -jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ -WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF -7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 -YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli -awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u -+2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 -X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN -SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo -P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI -+pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz -znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 
-eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 -YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy -r/6zcCwupvI= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA2" -# Serial: 58605626836079930195615843123109055211 -# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c -# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 -# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 ------BEGIN CERTIFICATE----- -MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw -CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ -VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy -MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ -TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS -b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B -IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ -+kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK -sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA -94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B -43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Label: "Sectigo Public Server Authentication Root E46" -# Serial: 88989738453351742415770396670917916916 -# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 -# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a -# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 ------BEGIN CERTIFICATE----- -MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw -CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T -ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN -MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG -A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT -ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC -WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ -6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B -Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa -qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q -4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# Label: "Sectigo Public Server Authentication Root R46" -# Serial: 156256931880233212765902055439220583700 -# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 -# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 -# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 ------BEGIN CERTIFICATE----- -MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf 
-MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD -Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw -HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY -MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp -YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa -ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz -SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf -iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X -ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 -IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS -VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE -SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu -+Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt -8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L -HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt -zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P -AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c -mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ -YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 -gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA -Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB -JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX -DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui -TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 -dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 -LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp -0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY -QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS RSA Root CA 2022" -# Serial: 148535279242832292258835760425842727825 -# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da -# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca -# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed ------BEGIN CERTIFICATE----- -MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO -MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD -DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX -DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw -b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC -AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP -L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY -t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins -S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 -PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO -L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 -R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w -dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS -+YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS -d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG -AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f -gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j 
-BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z -NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt -hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM -QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf -R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ -DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW -P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy -lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq -bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w -AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q -r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji -Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU -98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS ECC Root CA 2022" -# Serial: 26605119622390491762507526719404364228 -# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 -# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 -# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 ------BEGIN CERTIFICATE----- -MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT -U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 -MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh -dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm -acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN -SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME -GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW -uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp -15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN -b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA ECC TLS 2021" -# Serial: 81873346711060652204712539181482831616 -# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 -# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd -# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 ------BEGIN CERTIFICATE----- -MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w -LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w -CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 -MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF -Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI -zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X -tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 -AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 -KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD -aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu -CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo -9H1/IISpQuQo ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Subject: 
CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA RSA TLS 2021" -# Serial: 111436099570196163832749341232207667876 -# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 -# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 -# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f ------BEGIN CERTIFICATE----- -MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM -MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx -MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 -MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD -QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN -BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z -4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv -Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ -kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs -GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln -nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh -3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD -0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy -geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 -ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB -c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI -pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB -DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS -4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs -o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ -qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw -xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM -rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 -AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR -0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY -o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 -dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE -oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== ------END CERTIFICATE----- diff --git a/server/venv/Lib/site-packages/certifi/core.py b/server/venv/Lib/site-packages/certifi/core.py deleted file mode 100644 index de02898..0000000 --- a/server/venv/Lib/site-packages/certifi/core.py +++ /dev/null @@ -1,108 +0,0 @@ -""" -certifi.py -~~~~~~~~~~ - -This module returns the installation location of cacert.pem or its contents. -""" -import sys - - -if sys.version_info >= (3, 11): - - from importlib.resources import as_file, files - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the file - # in cases where we're inside of a zipimport situation until someone - # actually calls where(), but we don't want to re-extract the file - # on every call of where(), so we'll do it once then store it in a - # global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you to - # manage the cleanup of this file, so it doesn't actually return a - # path, it returns a context manager that will give you the path - # when you enter it and will do any cleanup when you leave it. 
In - # the common case of not needing a temporary file, it will just - # return the file system location and the __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. - _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - - return _CACERT_PATH - - def contents() -> str: - return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") - -elif sys.version_info >= (3, 7): - - from importlib.resources import path as get_path, read_text - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the - # file in cases where we're inside of a zipimport situation until - # someone actually calls where(), but we don't want to re-extract - # the file on every call of where(), so we'll do it once then store - # it in a global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you - # to manage the cleanup of this file, so it doesn't actually - # return a path, it returns a context manager that will give - # you the path when you enter it and will do any cleanup when - # you leave it. In the common case of not needing a temporary - # file, it will just return the file system location and the - # __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. - _CACERT_CTX = get_path("certifi", "cacert.pem") - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - - return _CACERT_PATH - - def contents() -> str: - return read_text("certifi", "cacert.pem", encoding="ascii") - -else: - import os - import types - from typing import Union - - Package = Union[types.ModuleType, str] - Resource = Union[str, "os.PathLike"] - - # This fallback will work for Python versions prior to 3.7 that lack the - # importlib.resources module but relies on the existing `where` function - # so won't address issues with environments like PyOxidizer that don't set - # __file__ on modules. - def read_text( - package: Package, - resource: Resource, - encoding: str = 'utf-8', - errors: str = 'strict' - ) -> str: - with open(where(), encoding=encoding) as data: - return data.read() - - # If we don't have importlib.resources, then we will just do the old logic - # of assuming we're on the filesystem and munge the path directly. 
- def where() -> str: - f = os.path.dirname(__file__) - - return os.path.join(f, "cacert.pem") - - def contents() -> str: - return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/server/venv/Lib/site-packages/certifi/py.typed b/server/venv/Lib/site-packages/certifi/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/INSTALLER b/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/LICENSE b/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/LICENSE deleted file mode 100644 index 6da2297..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 TAHRI Ahmed R. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file diff --git a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/METADATA b/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/METADATA deleted file mode 100644 index 0ba0b77..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/METADATA +++ /dev/null @@ -1,674 +0,0 @@ -Metadata-Version: 2.1 -Name: charset-normalizer -Version: 3.3.1 -Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. 
-Home-page: https://github.com/Ousret/charset_normalizer -Author: Ahmed TAHRI -Author-email: ahmed.tahri@cloudnursery.dev -License: MIT -Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues -Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest -Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: MIT License -Classifier: Intended Audience :: Developers -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Text Processing :: Linguistic -Classifier: Topic :: Utilities -Classifier: Typing :: Typed -Requires-Python: >=3.7.0 -Description-Content-Type: text/markdown -License-File: LICENSE -Provides-Extra: unicode_backport - -

Charset Detection, for Everyone 👋
-
-The Real First Universal Charset Detector
-[badges: download count, coverage, and related shields]
-
-Featured Packages [badges]
-
-In other language (unofficial ports by the community) [badges]

- -> A library that helps you read text from an unknown charset encoding.
-> Motivated by `chardet`, I'm trying to resolve the issue by taking a new approach.
-> All IANA character set names for which the Python core library provides codecs are supported.
-

- >>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<< -

-
-This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
-
-| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
-|--------------------------------------------------|:---------------------------------------------:|:------------------:|:-----------------------------------------------:|
-| `Fast` | ❌ | ✅ | ✅ |
-| `Universal**` | ❌ | ✅ | ❌ |
-| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
-| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
-| `License` | LGPL-2.1 _restrictive_ | MIT | MPL-1.1 _restrictive_ |
-| `Native Python` | ✅ | ✅ | ❌ |
-| `Detect spoken language` | ❌ | ✅ | N/A |
-| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
-| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
-| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
-

-[images: "Reading Normalized Text", "Cat Reading Text"]
-

-
-*\*\* : They clearly use encoding-specific code for each supported encoding, even if it covers most of the encodings in use*
-Did you get here because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
-
-## ⚡ Performance
-
-This package offers better performance than its counterpart Chardet. Here are some numbers.
-
-| Package | Accuracy | Mean per file (ms) | File per sec (est) |
-|-----------------------------------------------|:--------:|:------------------:|:------------------:|
-| [chardet](https://github.com/chardet/chardet) | 86 % | 200 ms | 5 file/sec |
-| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
-
-| Package | 99th percentile | 95th percentile | 50th percentile |
-|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
-| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms |
-| charset-normalizer | 100 ms | 50 ms | 5 ms |
-
-Chardet's performance on larger files (1MB+) is very poor. Expect a huge difference on large payloads.
-
-> Stats are generated from 400+ files using default parameters. For more details on the files used, see the GHA workflows.
-> And yes, these results might change at any time. The dataset can be updated to include more files.
-> The actual delays depend heavily on your CPU capabilities, but the relative factors should remain the same.
-> Keep in mind that the stats are generous and that Chardet's accuracy versus ours is measured using Chardet's
-> initial capabilities (e.g. supported encodings). Challenge them if you want.
-
-## ✨ Installation
-
-Using pip:
-
-```sh
-pip install charset-normalizer -U
-```
-
-## 🚀 Basic Usage
-
-### CLI
-This package comes with a CLI.
-
-```
-usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
-                  file [file ...]
-
-The Real First Universal Charset Detector. Discover originating encoding used
-on text file. Normalize text to unicode.
-
-positional arguments:
-  files                 File(s) to be analysed
-
-optional arguments:
-  -h, --help            show this help message and exit
-  -v, --verbose         Display complementary information about file if any.
-                        Stdout will contain logs about the detection process.
-  -a, --with-alternative
-                        Output complementary possibilities if any. Top-level
-                        JSON WILL be a list.
-  -n, --normalize       Permit to normalize input file. If not set, program
-                        does not write anything.
-  -m, --minimal         Only output the charset detected to STDOUT. Disabling
-                        JSON output.
-  -r, --replace         Replace file when trying to normalize it instead of
-                        creating a new one.
-  -f, --force           Replace file without asking if you are sure, use this
-                        flag with caution.
-  -t THRESHOLD, --threshold THRESHOLD
-                        Define a custom maximum amount of chaos allowed in
-                        decoded content. 0. <= chaos <= 1.
-  --version             Show version information and exit.
-```
-
-```bash
-normalizer ./data/sample.1.fr.srt
-```
-
-or
-
-```bash
-python -m charset_normalizer ./data/sample.1.fr.srt
-```
-
-🎉 Since version 1.4.0 the CLI produces easily consumable stdout results in JSON format.
-
-```json
-{
-    "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
-    "encoding": "cp1252",
-    "encoding_aliases": [
-        "1252",
-        "windows_1252"
-    ],
-    "alternative_encodings": [
-        "cp1254",
-        "cp1256",
-        "cp1258",
-        "iso8859_14",
-        "iso8859_15",
-        "iso8859_16",
-        "iso8859_3",
-        "iso8859_9",
-        "latin_1",
-        "mbcs"
-    ],
-    "language": "French",
-    "alphabets": [
-        "Basic Latin",
-        "Latin-1 Supplement"
-    ],
-    "has_sig_or_bom": false,
-    "chaos": 0.149,
-    "coherence": 97.152,
-    "unicode_path": null,
-    "is_preferred": true
-}
-```
-
-### Python
-*Just print out normalized text*
-```python
-from charset_normalizer import from_path
-
-results = from_path('./my_subtitle.srt')
-
-print(str(results.best()))
-```
-
-*Upgrade your code without effort*
-```python
-from charset_normalizer import detect
-```
-
-The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) backward-compatible result possible.
-
-See the docs for advanced usage: [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
-
-## 😇 Why
-
-When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a
-reliable alternative using a completely different method. Also, I never back down from a good challenge!
-
-I **don't care** about the **originating charset** encoding, because **two different tables** can
-produce **two identical rendered strings.**
-What I want is to get readable text, the best I can.
-
-In a way, **I'm brute forcing text decoding.** How cool is that? 😎
-
-Don't confuse the package **ftfy** with charset-normalizer or chardet. ftfy's goal is to repair broken Unicode strings, whereas charset-normalizer's is to convert a raw file in an unknown encoding to Unicode.
-
-## 🍰 How
-
- - Discard all charset encoding tables that could not fit the binary content.
- - Measure the noise, or the mess, once opened (by chunks) with a corresponding charset encoding.
- - Extract the matches with the lowest mess detected.
- - Additionally, we measure coherence / probe for a language.
-
-**Wait a minute**, what is noise/mess and coherence according to **YOU?**
-
-*Noise:* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
-**I established** some ground rules about **what is obvious** when **it seems like** a mess.
- I know that my interpretation of what is noise is probably incomplete; feel free to contribute in order to
- improve or rewrite it.
-
-*Coherence:* For each language on Earth, we have computed ranked letter-appearance frequencies (the best we can). So I thought
-that intel is worth something here. So I use those records against the decoded text to check if I can detect intelligent design.
-
-## ⚡ Known limitations
-
- - Language detection is unreliable when the text contains two or more languages sharing identical letters. (e.g. HTML (English tags) + Turkish content (sharing Latin characters))
- - Every charset detector heavily depends on sufficient content. In common cases, do not bother running detection on very tiny content.
-
-## ⚠️ About Python EOLs
-
-**If you are running:**
-
-- Python >=2.7,<3.5: Unsupported
-- Python 3.5: charset-normalizer < 2.1
-- Python 3.6: charset-normalizer < 3.1
-- Python 3.7: charset-normalizer < 4.0
-
-Upgrade your Python interpreter as soon as possible.
-
-## 👤 Contributing
-
-Contributions, issues and feature requests are very much welcome.
-Feel free to check the [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
-
-## 📝 License
-
-Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
-This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. - -Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) - -## 💼 For Enterprise - -Professional support for charset-normalizer is available as part of the [Tidelift -Subscription][1]. Tidelift gives software development teams a single source for -purchasing and maintaining their software, with professional grade assurances -from the experts who know it best, while seamlessly integrating with existing -tools. - -[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme - -# Changelog -All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - -## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22) - -### Changed -- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8 -- Improved the general detection reliability based on reports from the community - -## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30) - -### Added -- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer` -- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323) - -### Removed -- (internal) Redundant utils.is_ascii function and unused function is_private_use_only -- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant - -### Changed -- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection -- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8 - -### Fixed -- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350) - -## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07) - -### Changed -- Typehint for function `from_path` no longer enforce `PathLike` as its first argument -- Minor improvement over the global detection reliability - -### Added -- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries -- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True) -- Explicit support for Python 3.12 - -### Fixed -- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289) - -## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06) - -### Added -- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262) - -### Removed -- Support for Python 3.6 (PR #260) - -### Changed -- Optional speedup provided by mypy/c 1.0.1 - -## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18) - -### Fixed -- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233) - -### Changed -- Speedup provided by mypy/c 0.990 on Python >= 3.7 - -## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20) - -### Added -- Extend the capability of explain=True when cp_isolation contains at most 
two entries (min one), will log in details of the Mess-detector results -- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES -- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio -- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) - -### Changed -- Build with static metadata using 'build' frontend -- Make the language detection stricter -- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 - -### Fixed -- CLI with opt --normalize fail when using full path for files -- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it -- Sphinx warnings when generating the documentation - -### Removed -- Coherence detector no longer return 'Simple English' instead return 'English' -- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' -- Breaking: Method `first()` and `best()` from CharsetMatch -- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) -- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches -- Breaking: Top-level function `normalize` -- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch -- Support for the backport `unicodedata2` - -## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18) - -### Added -- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results -- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES -- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio - -### Changed -- Build with static metadata using 'build' frontend -- Make the language detection stricter - -### Fixed -- CLI with opt --normalize fail when using full path for files -- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it - -### Removed -- Coherence detector no longer return 'Simple English' instead return 'English' -- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' - -## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21) - -### Added -- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) - -### Removed -- Breaking: Method `first()` and `best()` from CharsetMatch -- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) - -### Fixed -- Sphinx warnings when generating the documentation - -## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15) - -### Changed -- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 - -### Removed -- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches -- Breaking: Top-level function `normalize` -- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch -- Support for the backport `unicodedata2` - -## 
[2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19) - -### Deprecated -- Function `normalize` scheduled for removal in 3.0 - -### Changed -- Removed useless call to decode in fn is_unprintable (#206) - -### Fixed -- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204) - -## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19) - -### Added -- Output the Unicode table version when running the CLI with `--version` (PR #194) - -### Changed -- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175) -- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183) - -### Fixed -- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175) -- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181) - -### Removed -- Support for Python 3.5 (PR #192) - -### Deprecated -- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194) - -## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12) - -### Fixed -- ASCII miss-detection on rare cases (PR #170) - -## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30) - -### Added -- Explicit support for Python 3.11 (PR #164) - -### Changed -- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165) - -## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04) - -### Fixed -- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154) - -### Changed -- Skipping the language-detection (CD) on ASCII (PR #155) - -## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03) - -### Changed -- Moderating the logging impact (since 2.0.8) for specific environments (PR #147) - -### Fixed -- Wrong logging level applied when setting kwarg `explain` to True (PR #146) - -## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24) -### Changed -- Improvement over Vietnamese detection (PR #126) -- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124) -- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122) -- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129) -- Code style as refactored by Sourcery-AI (PR #131) -- Minor adjustment on the MD around european words (PR #133) -- Remove and replace SRTs from assets / tests (PR #139) -- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135) -- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135) - -### Fixed -- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137) -- Avoid using too insignificant chunk (PR #137) - -### Added -- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135) -- Add `CHANGELOG.md` entries, format is based on [Keep a 
Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141) - -## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11) -### Added -- Add support for Kazakh (Cyrillic) language detection (PR #109) - -### Changed -- Further, improve inferring the language from a given single-byte code page (PR #112) -- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116) -- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113) -- Various detection improvement (MD+CD) (PR #117) - -### Removed -- Remove redundant logging entry about detected language(s) (PR #115) - -### Fixed -- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102) - -## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18) -### Fixed -- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100) -- Fix CLI crash when using --minimal output in certain cases (PR #103) - -### Changed -- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101) - -## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14) -### Changed -- The project now comply with: flake8, mypy, isort and black to ensure a better overall quality (PR #81) -- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82) -- The Unicode detection is slightly improved (PR #93) -- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91) - -### Removed -- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92) - -### Fixed -- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95) -- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96) -- The MANIFEST.in was not exhaustive (PR #78) - -## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30) -### Fixed -- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70) -- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68) -- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72) -- Submatch factoring could be wrong in rare edge cases (PR #72) -- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72) -- Fix line endings from CRLF to LF for certain project files (PR #67) - -### Changed -- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76) -- Allow fallback on specified encoding if any (PR #71) - -## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16) -### Changed -- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63) -- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64) - -## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15) -### Fixed -- Empty/Too small JSON payload miss-detection fixed. 
Report from [@tseaver](https://github.com/tseaver) (PR #59) - -### Changed -- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57) - -## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13) -### Fixed -- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55) -- Using explain=False permanently disable the verbose output in the current runtime (PR #47) -- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47) -- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52) - -### Changed -- Public function normalize default args values were not aligned with from_bytes (PR #53) - -### Added -- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47) - -## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02) -### Changed -- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet. -- Accent has been made on UTF-8 detection, should perform rather instantaneous. -- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible. -- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time) -- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+ -- utf_7 detection has been reinstated. - -### Removed -- This package no longer require anything when used with Python 3.5 (Dropped cached_property) -- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian. -- The exception hook on UnicodeDecodeError has been removed. - -### Deprecated -- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0 - -### Fixed -- The CLI output used the relative path of the file(s). Should be absolute. - -## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28) -### Fixed -- Logger configuration/usage no longer conflict with others (PR #44) - -## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21) -### Removed -- Using standard logging instead of using the package loguru. -- Dropping nose test framework in favor of the maintained pytest. -- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text. -- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version. -- Stop support for UTF-7 that does not contain a SIG. -- Dropping PrettyTable, replaced with pure JSON output in CLI. - -### Fixed -- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process. -- Not searching properly for the BOM when trying utf32/16 parent codec. - -### Changed -- Improving the package final size by compressing frequencies.json. -- Huge improvement over the larges payload. - -### Added -- CLI now produces JSON consumable output. -- Return ASCII if given sequences fit. Given reasonable confidence. 
- -## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13) - -### Fixed -- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40) - -## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12) - -### Fixed -- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39) - -## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12) - -### Fixed -- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38) - -## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09) - -### Changed -- Amend the previous release to allow prettytable 2.0 (PR #35) - -## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08) - -### Fixed -- Fix error while using the package with a python pre-release interpreter (PR #33) - -### Changed -- Dependencies refactoring, constraints revised. - -### Added -- Add python 3.9 and 3.10 to the supported interpreters - -MIT License - -Copyright (c) 2019 TAHRI Ahmed R. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
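For reference, the METADATA above documents `from_path`, `best()` and the chardet-compatible `detect`, and the deleted `api.py` further below defines `from_bytes` and `is_binary`. A minimal sketch of that documented API in use follows; the sample payloads are hypothetical and nothing here is taken from this repository's own server code. The Cyrillic sample sentence is the one used in the package's own `__init__.py` docstring.

```python
# Minimal sketch of the charset_normalizer API documented in the deleted
# files of this diff. Sample payloads are hypothetical illustrations.
from charset_normalizer import from_bytes, is_binary

payload = "Bсеки човек има право на образование.".encode("utf_8")

# from_bytes() returns a CharsetMatches container; best() yields the most
# plausible CharsetMatch, or None when nothing fits (a strong hint that
# the input is binary, per the from_bytes docstring below).
best = from_bytes(payload).best()
if best is not None:
    print(best.encoding)  # e.g. 'utf_8'
    print(str(best))      # the decoded, readable text

# is_binary() runs the same heuristics with fallback matches disabled,
# so a payload that fits no known charset is reported as binary.
print(is_binary(b"\x00\xff\x00\xfe\x00"))  # likely True
```

Note that `str(best)` is the documented way to obtain the normalized text, and a `None` result from `best()` is the binary/not-text signal described in the `from_bytes` docstring.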
diff --git a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/RECORD b/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/RECORD deleted file mode 100644 index 5ba16ad..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/RECORD +++ /dev/null @@ -1,36 +0,0 @@ -../../Scripts/normalizer.exe,sha256=FGbAjFbxfNRHgqipIJNM4bp_ySebnxRF6Sz0-CPStpk,108474 -charset_normalizer-3.3.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -charset_normalizer-3.3.1.dist-info/LICENSE,sha256=znnj1Var_lZ-hzOvD5W50wcQDp9qls3SD2xIau88ufc,1090 -charset_normalizer-3.3.1.dist-info/METADATA,sha256=36RHagcJsy9CFkHN56ndhmYJEZ5hah_4qTIXFkrTe7g,33802 -charset_normalizer-3.3.1.dist-info/RECORD,, -charset_normalizer-3.3.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -charset_normalizer-3.3.1.dist-info/WHEEL,sha256=badvNS-y9fEq0X-qzdZYvql_JFjI7Xfw-wR8FsjoK0I,102 -charset_normalizer-3.3.1.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65 -charset_normalizer-3.3.1.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 -charset_normalizer/__init__.py,sha256=m1cUEsb9K5v831m9P_lv2JlUEKD7MhxL7fxw3hn75o4,1623 -charset_normalizer/__main__.py,sha256=nVnMo31hTPN2Yy045GJIvHj3dKDJz4dAQR3cUSdvYyc,77 -charset_normalizer/__pycache__/__init__.cpython-311.pyc,, -charset_normalizer/__pycache__/__main__.cpython-311.pyc,, -charset_normalizer/__pycache__/api.cpython-311.pyc,, -charset_normalizer/__pycache__/cd.cpython-311.pyc,, -charset_normalizer/__pycache__/constant.cpython-311.pyc,, -charset_normalizer/__pycache__/legacy.cpython-311.pyc,, -charset_normalizer/__pycache__/md.cpython-311.pyc,, -charset_normalizer/__pycache__/models.cpython-311.pyc,, -charset_normalizer/__pycache__/utils.cpython-311.pyc,, -charset_normalizer/__pycache__/version.cpython-311.pyc,, -charset_normalizer/api.py,sha256=qFL0frUrcfcYEJmGpqoJ4Af68ToVue3f5SK1gp8UC5Q,21723 -charset_normalizer/cd.py,sha256=Yfk3sbee0Xqo1-vmQYbOqM51-SajXPLzFVG89nTsZzc,12955 -charset_normalizer/cli/__init__.py,sha256=COwP8fK2qbuldMem2lL81JieY-PIA2G2GZ5IdAPMPFA,106 -charset_normalizer/cli/__main__.py,sha256=rs-cBipBzr7d0TAaUa0nG4qrjXhdddeCVB-f6Xt_wS0,10040 -charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc,, -charset_normalizer/cli/__pycache__/__main__.cpython-311.pyc,, -charset_normalizer/constant.py,sha256=2tVrXQ9cvC8jt0b8gZzRXvXte1pVbRra0A5dOWDQSao,42476 -charset_normalizer/legacy.py,sha256=KbJxEpu7g6zE2uXSB3T-3178cgiSQdVJlJmY-gv3EAM,2125 -charset_normalizer/md.cp311-win_amd64.pyd,sha256=UDBnVSFaRQU254hkZwWPK4e18urbul6MwekkhKccpZs,10752 -charset_normalizer/md.py,sha256=HAl9wANkpchzq7UYGZ0kHpkRLRyfQQYLsZOTv8xNHa0,19417 -charset_normalizer/md__mypyc.cp311-win_amd64.pyd,sha256=k_wCBRZrGNGywTzPm8M_PtefmXiSAN4b1IwyTPAm-gM,113664 -charset_normalizer/models.py,sha256=GUMoL9BqGd2o844SVZSkrdWnf0nSyyJGdhMJMkMNZ68,11824 -charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -charset_normalizer/utils.py,sha256=ewRcfYnB9jFol0UhmBBaVY0Q63q1Wf91sMZnT-KvbR4,11697 -charset_normalizer/version.py,sha256=n3AAdOnVw6mALeTbSozGt2jXwL3cnfNDWMBhP2ueB-c,85 diff --git a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/REQUESTED b/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/WHEEL b/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/WHEEL 
deleted file mode 100644 index 6d16045..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.2) -Root-Is-Purelib: false -Tag: cp311-cp311-win_amd64 - diff --git a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/entry_points.txt b/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/entry_points.txt deleted file mode 100644 index 65619e7..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -normalizer = charset_normalizer.cli:cli_detect diff --git a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/top_level.txt b/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/top_level.txt deleted file mode 100644 index 66958f0..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer-3.3.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -charset_normalizer diff --git a/server/venv/Lib/site-packages/charset_normalizer/__init__.py b/server/venv/Lib/site-packages/charset_normalizer/__init__.py deleted file mode 100644 index 4077a44..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Charset-Normalizer -~~~~~~~~~~~~~~ -The Real First Universal Charset Detector. -A library that helps you read text from an unknown charset encoding. -Motivated by chardet, This package is trying to resolve the issue by taking a new approach. -All IANA character set names for which the Python core library provides codecs are supported. - -Basic usage: - >>> from charset_normalizer import from_bytes - >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) - >>> best_guess = results.best() - >>> str(best_guess) - 'Bсеки човек има право на образование. Oбразованието!' - -Others methods and usages are available - see the full documentation -at . -:copyright: (c) 2021 by Ahmed TAHRI -:license: MIT, see LICENSE for more details. 
-""" -import logging - -from .api import from_bytes, from_fp, from_path, is_binary -from .legacy import detect -from .models import CharsetMatch, CharsetMatches -from .utils import set_logging_handler -from .version import VERSION, __version__ - -__all__ = ( - "from_fp", - "from_path", - "from_bytes", - "is_binary", - "detect", - "CharsetMatch", - "CharsetMatches", - "__version__", - "VERSION", - "set_logging_handler", -) - -# Attach a NullHandler to the top level logger by default -# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library - -logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/server/venv/Lib/site-packages/charset_normalizer/__main__.py b/server/venv/Lib/site-packages/charset_normalizer/__main__.py deleted file mode 100644 index acf7004..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/__main__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .cli import cli_detect - -if __name__ == "__main__": - cli_detect() diff --git a/server/venv/Lib/site-packages/charset_normalizer/api.py b/server/venv/Lib/site-packages/charset_normalizer/api.py deleted file mode 100644 index 4472bf6..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/api.py +++ /dev/null @@ -1,626 +0,0 @@ -import logging -from os import PathLike -from typing import BinaryIO, List, Optional, Set, Union - -from .cd import ( - coherence_ratio, - encoding_languages, - mb_encoding_languages, - merge_coherence_ratios, -) -from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE -from .md import mess_ratio -from .models import CharsetMatch, CharsetMatches -from .utils import ( - any_specified_encoding, - cut_sequence_chunks, - iana_name, - identify_sig_or_bom, - is_cp_similar, - is_multi_byte_encoding, - should_strip_sig_or_bom, -) - -# Will most likely be controversial -# logging.addLevelName(TRACE, "TRACE") -logger = logging.getLogger("charset_normalizer") -explain_handler = logging.StreamHandler() -explain_handler.setFormatter( - logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") -) - - -def from_bytes( - sequences: Union[bytes, bytearray], - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.2, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Given a raw bytes sequence, return the best possibles charset usable to render str objects. - If there is no results, it is a strong indicator that the source is binary/not text. - By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence. - And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. - - The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page - but never take it for granted. Can improve the performance. - - You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that - purpose. - - This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. - By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' - toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. 
- Custom logging format and handler can be set manually. - """ - - if not isinstance(sequences, (bytearray, bytes)): - raise TypeError( - "Expected object of type bytes or bytearray, got: {0}".format( - type(sequences) - ) - ) - - if explain: - previous_logger_level: int = logger.level - logger.addHandler(explain_handler) - logger.setLevel(TRACE) - - length: int = len(sequences) - - if length == 0: - logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level or logging.WARNING) - return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) - - if cp_isolation is not None: - logger.log( - TRACE, - "cp_isolation is set. use this flag for debugging purpose. " - "limited list of encoding allowed : %s.", - ", ".join(cp_isolation), - ) - cp_isolation = [iana_name(cp, False) for cp in cp_isolation] - else: - cp_isolation = [] - - if cp_exclusion is not None: - logger.log( - TRACE, - "cp_exclusion is set. use this flag for debugging purpose. " - "limited list of encoding excluded : %s.", - ", ".join(cp_exclusion), - ) - cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] - else: - cp_exclusion = [] - - if length <= (chunk_size * steps): - logger.log( - TRACE, - "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", - steps, - chunk_size, - length, - ) - steps = 1 - chunk_size = length - - if steps > 1 and length / steps < chunk_size: - chunk_size = int(length / steps) - - is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE - is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE - - if is_too_small_sequence: - logger.log( - TRACE, - "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( - length - ), - ) - elif is_too_large_sequence: - logger.log( - TRACE, - "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( - length - ), - ) - - prioritized_encodings: List[str] = [] - - specified_encoding: Optional[str] = ( - any_specified_encoding(sequences) if preemptive_behaviour else None - ) - - if specified_encoding is not None: - prioritized_encodings.append(specified_encoding) - logger.log( - TRACE, - "Detected declarative mark in sequence. Priority +1 given for %s.", - specified_encoding, - ) - - tested: Set[str] = set() - tested_but_hard_failure: List[str] = [] - tested_but_soft_failure: List[str] = [] - - fallback_ascii: Optional[CharsetMatch] = None - fallback_u8: Optional[CharsetMatch] = None - fallback_specified: Optional[CharsetMatch] = None - - results: CharsetMatches = CharsetMatches() - - sig_encoding, sig_payload = identify_sig_or_bom(sequences) - - if sig_encoding is not None: - prioritized_encodings.append(sig_encoding) - logger.log( - TRACE, - "Detected a SIG or BOM mark on first %i byte(s). 
Priority +1 given for %s.", - len(sig_payload), - sig_encoding, - ) - - prioritized_encodings.append("ascii") - - if "utf_8" not in prioritized_encodings: - prioritized_encodings.append("utf_8") - - for encoding_iana in prioritized_encodings + IANA_SUPPORTED: - if cp_isolation and encoding_iana not in cp_isolation: - continue - - if cp_exclusion and encoding_iana in cp_exclusion: - continue - - if encoding_iana in tested: - continue - - tested.add(encoding_iana) - - decoded_payload: Optional[str] = None - bom_or_sig_available: bool = sig_encoding == encoding_iana - strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( - encoding_iana - ) - - if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: - logger.log( - TRACE, - "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", - encoding_iana, - ) - continue - if encoding_iana in {"utf_7"} and not bom_or_sig_available: - logger.log( - TRACE, - "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.", - encoding_iana, - ) - continue - - try: - is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) - except (ModuleNotFoundError, ImportError): - logger.log( - TRACE, - "Encoding %s does not provide an IncrementalDecoder", - encoding_iana, - ) - continue - - try: - if is_too_large_sequence and is_multi_byte_decoder is False: - str( - sequences[: int(50e4)] - if strip_sig_or_bom is False - else sequences[len(sig_payload) : int(50e4)], - encoding=encoding_iana, - ) - else: - decoded_payload = str( - sequences - if strip_sig_or_bom is False - else sequences[len(sig_payload) :], - encoding=encoding_iana, - ) - except (UnicodeDecodeError, LookupError) as e: - if not isinstance(e, LookupError): - logger.log( - TRACE, - "Code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - tested_but_hard_failure.append(encoding_iana) - continue - - similar_soft_failure_test: bool = False - - for encoding_soft_failed in tested_but_soft_failure: - if is_cp_similar(encoding_iana, encoding_soft_failed): - similar_soft_failure_test = True - break - - if similar_soft_failure_test: - logger.log( - TRACE, - "%s is deemed too similar to code page %s and was consider unsuited already. 
Continuing!", - encoding_iana, - encoding_soft_failed, - ) - continue - - r_ = range( - 0 if not bom_or_sig_available else len(sig_payload), - length, - int(length / steps), - ) - - multi_byte_bonus: bool = ( - is_multi_byte_decoder - and decoded_payload is not None - and len(decoded_payload) < length - ) - - if multi_byte_bonus: - logger.log( - TRACE, - "Code page %s is a multi byte encoding table and it appear that at least one character " - "was encoded using n-bytes.", - encoding_iana, - ) - - max_chunk_gave_up: int = int(len(r_) / 4) - - max_chunk_gave_up = max(max_chunk_gave_up, 2) - early_stop_count: int = 0 - lazy_str_hard_failure = False - - md_chunks: List[str] = [] - md_ratios = [] - - try: - for chunk in cut_sequence_chunks( - sequences, - encoding_iana, - r_, - chunk_size, - bom_or_sig_available, - strip_sig_or_bom, - sig_payload, - is_multi_byte_decoder, - decoded_payload, - ): - md_chunks.append(chunk) - - md_ratios.append( - mess_ratio( - chunk, - threshold, - explain is True and 1 <= len(cp_isolation) <= 2, - ) - ) - - if md_ratios[-1] >= threshold: - early_stop_count += 1 - - if (early_stop_count >= max_chunk_gave_up) or ( - bom_or_sig_available and strip_sig_or_bom is False - ): - break - except ( - UnicodeDecodeError - ) as e: # Lazy str loading may have missed something there - logger.log( - TRACE, - "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - early_stop_count = max_chunk_gave_up - lazy_str_hard_failure = True - - # We might want to check the sequence again with the whole content - # Only if initial MD tests passes - if ( - not lazy_str_hard_failure - and is_too_large_sequence - and not is_multi_byte_decoder - ): - try: - sequences[int(50e3) :].decode(encoding_iana, errors="strict") - except UnicodeDecodeError as e: - logger.log( - TRACE, - "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - tested_but_hard_failure.append(encoding_iana) - continue - - mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 - if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: - tested_but_soft_failure.append(encoding_iana) - logger.log( - TRACE, - "%s was excluded because of initial chaos probing. Gave up %i time(s). " - "Computed mean chaos is %f %%.", - encoding_iana, - early_stop_count, - round(mean_mess_ratio * 100, ndigits=3), - ) - # Preparing those fallbacks in case we got nothing. - if ( - enable_fallback - and encoding_iana in ["ascii", "utf_8", specified_encoding] - and not lazy_str_hard_failure - ): - fallback_entry = CharsetMatch( - sequences, encoding_iana, threshold, False, [], decoded_payload - ) - if encoding_iana == specified_encoding: - fallback_specified = fallback_entry - elif encoding_iana == "ascii": - fallback_ascii = fallback_entry - else: - fallback_u8 = fallback_entry - continue - - logger.log( - TRACE, - "%s passed initial chaos probing. 
Mean measured chaos is %f %%", - encoding_iana, - round(mean_mess_ratio * 100, ndigits=3), - ) - - if not is_multi_byte_decoder: - target_languages: List[str] = encoding_languages(encoding_iana) - else: - target_languages = mb_encoding_languages(encoding_iana) - - if target_languages: - logger.log( - TRACE, - "{} should target any language(s) of {}".format( - encoding_iana, str(target_languages) - ), - ) - - cd_ratios = [] - - # We shall skip the CD when its about ASCII - # Most of the time its not relevant to run "language-detection" on it. - if encoding_iana != "ascii": - for chunk in md_chunks: - chunk_languages = coherence_ratio( - chunk, - language_threshold, - ",".join(target_languages) if target_languages else None, - ) - - cd_ratios.append(chunk_languages) - - cd_ratios_merged = merge_coherence_ratios(cd_ratios) - - if cd_ratios_merged: - logger.log( - TRACE, - "We detected language {} using {}".format( - cd_ratios_merged, encoding_iana - ), - ) - - results.append( - CharsetMatch( - sequences, - encoding_iana, - mean_mess_ratio, - bom_or_sig_available, - cd_ratios_merged, - decoded_payload, - ) - ) - - if ( - encoding_iana in [specified_encoding, "ascii", "utf_8"] - and mean_mess_ratio < 0.1 - ): - logger.debug( - "Encoding detection: %s is most likely the one.", encoding_iana - ) - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - return CharsetMatches([results[encoding_iana]]) - - if encoding_iana == sig_encoding: - logger.debug( - "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " - "the beginning of the sequence.", - encoding_iana, - ) - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - return CharsetMatches([results[encoding_iana]]) - - if len(results) == 0: - if fallback_u8 or fallback_ascii or fallback_specified: - logger.log( - TRACE, - "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.", - ) - - if fallback_specified: - logger.debug( - "Encoding detection: %s will be used as a fallback match", - fallback_specified.encoding, - ) - results.append(fallback_specified) - elif ( - (fallback_u8 and fallback_ascii is None) - or ( - fallback_u8 - and fallback_ascii - and fallback_u8.fingerprint != fallback_ascii.fingerprint - ) - or (fallback_u8 is not None) - ): - logger.debug("Encoding detection: utf_8 will be used as a fallback match") - results.append(fallback_u8) - elif fallback_ascii: - logger.debug("Encoding detection: ascii will be used as a fallback match") - results.append(fallback_ascii) - - if results: - logger.debug( - "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.", - results.best().encoding, # type: ignore - len(results) - 1, - ) - else: - logger.debug("Encoding detection: Unable to determine any suitable charset.") - - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - - return results - - -def from_fp( - fp: BinaryIO, - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Same thing than the function from_bytes but using a file pointer that is already ready. - Will not close the file pointer. 
- """ - return from_bytes( - fp.read(), - steps, - chunk_size, - threshold, - cp_isolation, - cp_exclusion, - preemptive_behaviour, - explain, - language_threshold, - enable_fallback, - ) - - -def from_path( - path: Union[str, bytes, PathLike], # type: ignore[type-arg] - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. - Can raise IOError. - """ - with open(path, "rb") as fp: - return from_fp( - fp, - steps, - chunk_size, - threshold, - cp_isolation, - cp_exclusion, - preemptive_behaviour, - explain, - language_threshold, - enable_fallback, - ) - - -def is_binary( - fp_or_path_or_payload: Union[PathLike, str, BinaryIO, bytes], # type: ignore[type-arg] - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = False, -) -> bool: - """ - Detect if the given input (file, bytes, or path) points to a binary file. aka. not a string. - Based on the same main heuristic algorithms and default kwargs at the sole exception that fallbacks match - are disabled to be stricter around ASCII-compatible but unlikely to be a string. - """ - if isinstance(fp_or_path_or_payload, (str, PathLike)): - guesses = from_path( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - elif isinstance( - fp_or_path_or_payload, - ( - bytes, - bytearray, - ), - ): - guesses = from_bytes( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - else: - guesses = from_fp( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - - return not guesses diff --git a/server/venv/Lib/site-packages/charset_normalizer/cd.py b/server/venv/Lib/site-packages/charset_normalizer/cd.py deleted file mode 100644 index 4e79a92..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/cd.py +++ /dev/null @@ -1,395 +0,0 @@ -import importlib -from codecs import IncrementalDecoder -from collections import Counter -from functools import lru_cache -from typing import Counter as TypeCounter, Dict, List, Optional, Tuple - -from .constant import ( - FREQUENCIES, - KO_NAMES, - LANGUAGE_SUPPORTED_COUNT, - TOO_SMALL_SEQUENCE, - ZH_NAMES, -) -from .md import is_suspiciously_successive_range -from .models import CoherenceMatches -from .utils import ( - is_accentuated, - is_latin, - is_multi_byte_encoding, - is_unicode_range_secondary, - unicode_range, -) - - -def 
encoding_unicode_range(iana_name: str) -> List[str]: - """ - Return associated unicode ranges in a single byte code page. - """ - if is_multi_byte_encoding(iana_name): - raise IOError("Function not supported on multi-byte code page") - - decoder = importlib.import_module( - "encodings.{}".format(iana_name) - ).IncrementalDecoder - - p: IncrementalDecoder = decoder(errors="ignore") - seen_ranges: Dict[str, int] = {} - character_count: int = 0 - - for i in range(0x40, 0xFF): - chunk: str = p.decode(bytes([i])) - - if chunk: - character_range: Optional[str] = unicode_range(chunk) - - if character_range is None: - continue - - if is_unicode_range_secondary(character_range) is False: - if character_range not in seen_ranges: - seen_ranges[character_range] = 0 - seen_ranges[character_range] += 1 - character_count += 1 - - return sorted( - [ - character_range - for character_range in seen_ranges - if seen_ranges[character_range] / character_count >= 0.15 - ] - ) - - -def unicode_range_languages(primary_range: str) -> List[str]: - """ - Return inferred languages used with a unicode range. - """ - languages: List[str] = [] - - for language, characters in FREQUENCIES.items(): - for character in characters: - if unicode_range(character) == primary_range: - languages.append(language) - break - - return languages - - -@lru_cache() -def encoding_languages(iana_name: str) -> List[str]: - """ - Single-byte encoding language association. Some code page are heavily linked to particular language(s). - This function does the correspondence. - """ - unicode_ranges: List[str] = encoding_unicode_range(iana_name) - primary_range: Optional[str] = None - - for specified_range in unicode_ranges: - if "Latin" not in specified_range: - primary_range = specified_range - break - - if primary_range is None: - return ["Latin Based"] - - return unicode_range_languages(primary_range) - - -@lru_cache() -def mb_encoding_languages(iana_name: str) -> List[str]: - """ - Multi-byte encoding language association. Some code page are heavily linked to particular language(s). - This function does the correspondence. - """ - if ( - iana_name.startswith("shift_") - or iana_name.startswith("iso2022_jp") - or iana_name.startswith("euc_j") - or iana_name == "cp932" - ): - return ["Japanese"] - if iana_name.startswith("gb") or iana_name in ZH_NAMES: - return ["Chinese"] - if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: - return ["Korean"] - - return [] - - -@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) -def get_target_features(language: str) -> Tuple[bool, bool]: - """ - Determine main aspects from a supported language if it contains accents and if is pure Latin. - """ - target_have_accents: bool = False - target_pure_latin: bool = True - - for character in FREQUENCIES[language]: - if not target_have_accents and is_accentuated(character): - target_have_accents = True - if target_pure_latin and is_latin(character) is False: - target_pure_latin = False - - return target_have_accents, target_pure_latin - - -def alphabet_languages( - characters: List[str], ignore_non_latin: bool = False -) -> List[str]: - """ - Return associated languages associated to given characters. 
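The association helpers above reduce a code page to candidate languages: single-byte pages go through the decoded-range analysis of `encoding_unicode_range`, while multi-byte pages are matched purely by IANA name. A sketch of what they return, assuming the internal `charset_normalizer.cd` module is imported directly (it is not public API, and the exact lists depend on the shipped FREQUENCIES table):

```python
# Hedged sketch of the language-association helpers defined above.
from charset_normalizer.cd import encoding_languages, mb_encoding_languages

print(encoding_languages("cp1251"))    # Cyrillic page, e.g. ["Russian", "Ukrainian", ...]
print(encoding_languages("latin_1"))   # only Latin ranges seen, so ["Latin Based"]
print(mb_encoding_languages("cp932"))  # matched by name, ["Japanese"]
```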
- """ - languages: List[Tuple[str, float]] = [] - - source_have_accents = any(is_accentuated(character) for character in characters) - - for language, language_characters in FREQUENCIES.items(): - target_have_accents, target_pure_latin = get_target_features(language) - - if ignore_non_latin and target_pure_latin is False: - continue - - if target_have_accents is False and source_have_accents: - continue - - character_count: int = len(language_characters) - - character_match_count: int = len( - [c for c in language_characters if c in characters] - ) - - ratio: float = character_match_count / character_count - - if ratio >= 0.2: - languages.append((language, ratio)) - - languages = sorted(languages, key=lambda x: x[1], reverse=True) - - return [compatible_language[0] for compatible_language in languages] - - -def characters_popularity_compare( - language: str, ordered_characters: List[str] -) -> float: - """ - Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. - The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). - Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) - """ - if language not in FREQUENCIES: - raise ValueError("{} not available".format(language)) - - character_approved_count: int = 0 - FREQUENCIES_language_set = set(FREQUENCIES[language]) - - ordered_characters_count: int = len(ordered_characters) - target_language_characters_count: int = len(FREQUENCIES[language]) - - large_alphabet: bool = target_language_characters_count > 26 - - for character, character_rank in zip( - ordered_characters, range(0, ordered_characters_count) - ): - if character not in FREQUENCIES_language_set: - continue - - character_rank_in_language: int = FREQUENCIES[language].index(character) - expected_projection_ratio: float = ( - target_language_characters_count / ordered_characters_count - ) - character_rank_projection: int = int(character_rank * expected_projection_ratio) - - if ( - large_alphabet is False - and abs(character_rank_projection - character_rank_in_language) > 4 - ): - continue - - if ( - large_alphabet is True - and abs(character_rank_projection - character_rank_in_language) - < target_language_characters_count / 3 - ): - character_approved_count += 1 - continue - - characters_before_source: List[str] = FREQUENCIES[language][ - 0:character_rank_in_language - ] - characters_after_source: List[str] = FREQUENCIES[language][ - character_rank_in_language: - ] - characters_before: List[str] = ordered_characters[0:character_rank] - characters_after: List[str] = ordered_characters[character_rank:] - - before_match_count: int = len( - set(characters_before) & set(characters_before_source) - ) - - after_match_count: int = len( - set(characters_after) & set(characters_after_source) - ) - - if len(characters_before_source) == 0 and before_match_count <= 4: - character_approved_count += 1 - continue - - if len(characters_after_source) == 0 and after_match_count <= 4: - character_approved_count += 1 - continue - - if ( - before_match_count / len(characters_before_source) >= 0.4 - or after_match_count / len(characters_after_source) >= 0.4 - ): - character_approved_count += 1 - continue - - return character_approved_count / len(ordered_characters) - - -def alpha_unicode_split(decoded_sequence: str) -> List[str]: - """ - Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. - Ex. 
a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; - One containing the latin letters and the other hebrew. - """ - layers: Dict[str, str] = {} - - for character in decoded_sequence: - if character.isalpha() is False: - continue - - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - continue - - layer_target_range: Optional[str] = None - - for discovered_range in layers: - if ( - is_suspiciously_successive_range(discovered_range, character_range) - is False - ): - layer_target_range = discovered_range - break - - if layer_target_range is None: - layer_target_range = character_range - - if layer_target_range not in layers: - layers[layer_target_range] = character.lower() - continue - - layers[layer_target_range] += character.lower() - - return list(layers.values()) - - -def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: - """ - This function merge results previously given by the function coherence_ratio. - The return type is the same as coherence_ratio. - """ - per_language_ratios: Dict[str, List[float]] = {} - for result in results: - for sub_result in result: - language, ratio = sub_result - if language not in per_language_ratios: - per_language_ratios[language] = [ratio] - continue - per_language_ratios[language].append(ratio) - - merge = [ - ( - language, - round( - sum(per_language_ratios[language]) / len(per_language_ratios[language]), - 4, - ), - ) - for language in per_language_ratios - ] - - return sorted(merge, key=lambda x: x[1], reverse=True) - - -def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: - """ - We shall NOT return "English—" in CoherenceMatches because it is an alternative - of "English". This function only keeps the best match and remove the em-dash in it. - """ - index_results: Dict[str, List[float]] = dict() - - for result in results: - language, ratio = result - no_em_name: str = language.replace("—", "") - - if no_em_name not in index_results: - index_results[no_em_name] = [] - - index_results[no_em_name].append(ratio) - - if any(len(index_results[e]) > 1 for e in index_results): - filtered_results: CoherenceMatches = [] - - for language in index_results: - filtered_results.append((language, max(index_results[language]))) - - return filtered_results - - return results - - -@lru_cache(maxsize=2048) -def coherence_ratio( - decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None -) -> CoherenceMatches: - """ - Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. - A layer = Character extraction by alphabets/ranges. 
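Before the docstring closes below, the "layer" it refers to is worth seeing concretely: `alpha_unicode_split` (defined above) emits one lowercase string per Unicode range, and each layer is then scored against candidate languages on its own. A sketch using the internal module:

```python
# Sketch of the layering step: mixed-script input is split per Unicode
# range before any per-language scoring happens. Internal API, may change.
from charset_normalizer.cd import alpha_unicode_split

layers = alpha_unicode_split("Hello שלום world")
print(layers)  # e.g. ['helloworld', 'שלום'], one entry per detected range
```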
- """ - - results: List[Tuple[str, float]] = [] - ignore_non_latin: bool = False - - sufficient_match_count: int = 0 - - lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] - if "Latin Based" in lg_inclusion_list: - ignore_non_latin = True - lg_inclusion_list.remove("Latin Based") - - for layer in alpha_unicode_split(decoded_sequence): - sequence_frequencies: TypeCounter[str] = Counter(layer) - most_common = sequence_frequencies.most_common() - - character_count: int = sum(o for c, o in most_common) - - if character_count <= TOO_SMALL_SEQUENCE: - continue - - popular_character_ordered: List[str] = [c for c, o in most_common] - - for language in lg_inclusion_list or alphabet_languages( - popular_character_ordered, ignore_non_latin - ): - ratio: float = characters_popularity_compare( - language, popular_character_ordered - ) - - if ratio < threshold: - continue - elif ratio >= 0.8: - sufficient_match_count += 1 - - results.append((language, round(ratio, 4))) - - if sufficient_match_count >= 3: - break - - return sorted( - filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True - ) diff --git a/server/venv/Lib/site-packages/charset_normalizer/cli/__init__.py b/server/venv/Lib/site-packages/charset_normalizer/cli/__init__.py deleted file mode 100644 index 57460cc..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/cli/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .__main__ import cli_detect, query_yes_no - -__all__ = ( - "cli_detect", - "query_yes_no", -) diff --git a/server/venv/Lib/site-packages/charset_normalizer/cli/__main__.py b/server/venv/Lib/site-packages/charset_normalizer/cli/__main__.py deleted file mode 100644 index 2802122..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/cli/__main__.py +++ /dev/null @@ -1,296 +0,0 @@ -import argparse -import sys -from json import dumps -from os.path import abspath, basename, dirname, join, realpath -from platform import python_version -from typing import List, Optional -from unicodedata import unidata_version - -import charset_normalizer.md as md_module -from charset_normalizer import from_fp -from charset_normalizer.models import CliDetectionResult -from charset_normalizer.version import __version__ - - -def query_yes_no(question: str, default: str = "yes") -> bool: - """Ask a yes/no question via input() and return their answer. - - "question" is a string that is presented to the user. - "default" is the presumed answer if the user just hits . - It must be "yes" (the default), "no" or None (meaning - an answer is required of the user). - - The "answer" return value is True for "yes" or False for "no". 
- - Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input - """ - valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} - if default is None: - prompt = " [y/n] " - elif default == "yes": - prompt = " [Y/n] " - elif default == "no": - prompt = " [y/N] " - else: - raise ValueError("invalid default answer: '%s'" % default) - - while True: - sys.stdout.write(question + prompt) - choice = input().lower() - if default is not None and choice == "": - return valid[default] - elif choice in valid: - return valid[choice] - else: - sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") - - -def cli_detect(argv: Optional[List[str]] = None) -> int: - """ - CLI assistant using ARGV and ArgumentParser - :param argv: - :return: 0 if everything is fine, anything else equal trouble - """ - parser = argparse.ArgumentParser( - description="The Real First Universal Charset Detector. " - "Discover originating encoding used on text file. " - "Normalize text to unicode." - ) - - parser.add_argument( - "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" - ) - parser.add_argument( - "-v", - "--verbose", - action="store_true", - default=False, - dest="verbose", - help="Display complementary information about file if any. " - "Stdout will contain logs about the detection process.", - ) - parser.add_argument( - "-a", - "--with-alternative", - action="store_true", - default=False, - dest="alternatives", - help="Output complementary possibilities if any. Top-level JSON WILL be a list.", - ) - parser.add_argument( - "-n", - "--normalize", - action="store_true", - default=False, - dest="normalize", - help="Permit to normalize input file. If not set, program does not write anything.", - ) - parser.add_argument( - "-m", - "--minimal", - action="store_true", - default=False, - dest="minimal", - help="Only output the charset detected to STDOUT. Disabling JSON output.", - ) - parser.add_argument( - "-r", - "--replace", - action="store_true", - default=False, - dest="replace", - help="Replace file when trying to normalize it instead of creating a new one.", - ) - parser.add_argument( - "-f", - "--force", - action="store_true", - default=False, - dest="force", - help="Replace file without asking if you are sure, use this flag with caution.", - ) - parser.add_argument( - "-t", - "--threshold", - action="store", - default=0.2, - type=float, - dest="threshold", - help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.", - ) - parser.add_argument( - "--version", - action="version", - version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format( - __version__, - python_version(), - unidata_version, - "OFF" if md_module.__file__.lower().endswith(".py") else "ON", - ), - help="Show version information and exit.", - ) - - args = parser.parse_args(argv) - - if args.replace is True and args.normalize is False: - print("Use --replace in addition of --normalize only.", file=sys.stderr) - return 1 - - if args.force is True and args.replace is False: - print("Use --force in addition of --replace only.", file=sys.stderr) - return 1 - - if args.threshold < 0.0 or args.threshold > 1.0: - print("--threshold VALUE should be between 0. 
AND 1.", file=sys.stderr) - return 1 - - x_ = [] - - for my_file in args.files: - matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose) - - best_guess = matches.best() - - if best_guess is None: - print( - 'Unable to identify originating encoding for "{}". {}'.format( - my_file.name, - "Maybe try increasing maximum amount of chaos." - if args.threshold < 1.0 - else "", - ), - file=sys.stderr, - ) - x_.append( - CliDetectionResult( - abspath(my_file.name), - None, - [], - [], - "Unknown", - [], - False, - 1.0, - 0.0, - None, - True, - ) - ) - else: - x_.append( - CliDetectionResult( - abspath(my_file.name), - best_guess.encoding, - best_guess.encoding_aliases, - [ - cp - for cp in best_guess.could_be_from_charset - if cp != best_guess.encoding - ], - best_guess.language, - best_guess.alphabets, - best_guess.bom, - best_guess.percent_chaos, - best_guess.percent_coherence, - None, - True, - ) - ) - - if len(matches) > 1 and args.alternatives: - for el in matches: - if el != best_guess: - x_.append( - CliDetectionResult( - abspath(my_file.name), - el.encoding, - el.encoding_aliases, - [ - cp - for cp in el.could_be_from_charset - if cp != el.encoding - ], - el.language, - el.alphabets, - el.bom, - el.percent_chaos, - el.percent_coherence, - None, - False, - ) - ) - - if args.normalize is True: - if best_guess.encoding.startswith("utf") is True: - print( - '"{}" file does not need to be normalized, as it already came from unicode.'.format( - my_file.name - ), - file=sys.stderr, - ) - if my_file.closed is False: - my_file.close() - continue - - dir_path = dirname(realpath(my_file.name)) - file_name = basename(realpath(my_file.name)) - - o_: List[str] = file_name.split(".") - - if args.replace is False: - o_.insert(-1, best_guess.encoding) - if my_file.closed is False: - my_file.close() - elif ( - args.force is False - and query_yes_no( - 'Are you sure to normalize "{}" by replacing it ?'.format( - my_file.name - ), - "no", - ) - is False - ): - if my_file.closed is False: - my_file.close() - continue - - try: - x_[0].unicode_path = join(dir_path, ".".join(o_)) - - with open(x_[0].unicode_path, "w", encoding="utf-8") as fp: - fp.write(str(best_guess)) - except IOError as e: - print(str(e), file=sys.stderr) - if my_file.closed is False: - my_file.close() - return 2 - - if my_file.closed is False: - my_file.close() - - if args.minimal is False: - print( - dumps( - [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, - ensure_ascii=True, - indent=4, - ) - ) - else: - for my_file in args.files: - print( - ", ".join( - [ - el.encoding or "undefined" - for el in x_ - if el.path == abspath(my_file.name) - ] - ) - ) - - return 0 - - -if __name__ == "__main__": - cli_detect() diff --git a/server/venv/Lib/site-packages/charset_normalizer/constant.py b/server/venv/Lib/site-packages/charset_normalizer/constant.py deleted file mode 100644 index ac74ff1..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/constant.py +++ /dev/null @@ -1,1995 +0,0 @@ -# -*- coding: utf-8 -*- -from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE -from encodings.aliases import aliases -from re import IGNORECASE, compile as re_compile -from typing import Dict, List, Set, Union - -# Contain for each eligible encoding a list of/item bytes SIG/BOM -ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = { - "utf_8": BOM_UTF8, - "utf_7": [ - b"\x2b\x2f\x76\x38", - b"\x2b\x2f\x76\x39", - b"\x2b\x2f\x76\x2b", - b"\x2b\x2f\x76\x2f", - b"\x2b\x2f\x76\x38\x2d", - ], - 
"gb18030": b"\x84\x31\x95\x33", - "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], - "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], -} - -TOO_SMALL_SEQUENCE: int = 32 -TOO_BIG_SEQUENCE: int = int(10e6) - -UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 - -# Up-to-date Unicode ucd/15.0.0 -UNICODE_RANGES_COMBINED: Dict[str, range] = { - "Control character": range(32), - "Basic Latin": range(32, 128), - "Latin-1 Supplement": range(128, 256), - "Latin Extended-A": range(256, 384), - "Latin Extended-B": range(384, 592), - "IPA Extensions": range(592, 688), - "Spacing Modifier Letters": range(688, 768), - "Combining Diacritical Marks": range(768, 880), - "Greek and Coptic": range(880, 1024), - "Cyrillic": range(1024, 1280), - "Cyrillic Supplement": range(1280, 1328), - "Armenian": range(1328, 1424), - "Hebrew": range(1424, 1536), - "Arabic": range(1536, 1792), - "Syriac": range(1792, 1872), - "Arabic Supplement": range(1872, 1920), - "Thaana": range(1920, 1984), - "NKo": range(1984, 2048), - "Samaritan": range(2048, 2112), - "Mandaic": range(2112, 2144), - "Syriac Supplement": range(2144, 2160), - "Arabic Extended-B": range(2160, 2208), - "Arabic Extended-A": range(2208, 2304), - "Devanagari": range(2304, 2432), - "Bengali": range(2432, 2560), - "Gurmukhi": range(2560, 2688), - "Gujarati": range(2688, 2816), - "Oriya": range(2816, 2944), - "Tamil": range(2944, 3072), - "Telugu": range(3072, 3200), - "Kannada": range(3200, 3328), - "Malayalam": range(3328, 3456), - "Sinhala": range(3456, 3584), - "Thai": range(3584, 3712), - "Lao": range(3712, 3840), - "Tibetan": range(3840, 4096), - "Myanmar": range(4096, 4256), - "Georgian": range(4256, 4352), - "Hangul Jamo": range(4352, 4608), - "Ethiopic": range(4608, 4992), - "Ethiopic Supplement": range(4992, 5024), - "Cherokee": range(5024, 5120), - "Unified Canadian Aboriginal Syllabics": range(5120, 5760), - "Ogham": range(5760, 5792), - "Runic": range(5792, 5888), - "Tagalog": range(5888, 5920), - "Hanunoo": range(5920, 5952), - "Buhid": range(5952, 5984), - "Tagbanwa": range(5984, 6016), - "Khmer": range(6016, 6144), - "Mongolian": range(6144, 6320), - "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400), - "Limbu": range(6400, 6480), - "Tai Le": range(6480, 6528), - "New Tai Lue": range(6528, 6624), - "Khmer Symbols": range(6624, 6656), - "Buginese": range(6656, 6688), - "Tai Tham": range(6688, 6832), - "Combining Diacritical Marks Extended": range(6832, 6912), - "Balinese": range(6912, 7040), - "Sundanese": range(7040, 7104), - "Batak": range(7104, 7168), - "Lepcha": range(7168, 7248), - "Ol Chiki": range(7248, 7296), - "Cyrillic Extended-C": range(7296, 7312), - "Georgian Extended": range(7312, 7360), - "Sundanese Supplement": range(7360, 7376), - "Vedic Extensions": range(7376, 7424), - "Phonetic Extensions": range(7424, 7552), - "Phonetic Extensions Supplement": range(7552, 7616), - "Combining Diacritical Marks Supplement": range(7616, 7680), - "Latin Extended Additional": range(7680, 7936), - "Greek Extended": range(7936, 8192), - "General Punctuation": range(8192, 8304), - "Superscripts and Subscripts": range(8304, 8352), - "Currency Symbols": range(8352, 8400), - "Combining Diacritical Marks for Symbols": range(8400, 8448), - "Letterlike Symbols": range(8448, 8528), - "Number Forms": range(8528, 8592), - "Arrows": range(8592, 8704), - "Mathematical Operators": range(8704, 8960), - "Miscellaneous Technical": range(8960, 9216), - "Control Pictures": range(9216, 9280), - "Optical Character Recognition": range(9280, 9312), - "Enclosed Alphanumerics": 
range(9312, 9472), - "Box Drawing": range(9472, 9600), - "Block Elements": range(9600, 9632), - "Geometric Shapes": range(9632, 9728), - "Miscellaneous Symbols": range(9728, 9984), - "Dingbats": range(9984, 10176), - "Miscellaneous Mathematical Symbols-A": range(10176, 10224), - "Supplemental Arrows-A": range(10224, 10240), - "Braille Patterns": range(10240, 10496), - "Supplemental Arrows-B": range(10496, 10624), - "Miscellaneous Mathematical Symbols-B": range(10624, 10752), - "Supplemental Mathematical Operators": range(10752, 11008), - "Miscellaneous Symbols and Arrows": range(11008, 11264), - "Glagolitic": range(11264, 11360), - "Latin Extended-C": range(11360, 11392), - "Coptic": range(11392, 11520), - "Georgian Supplement": range(11520, 11568), - "Tifinagh": range(11568, 11648), - "Ethiopic Extended": range(11648, 11744), - "Cyrillic Extended-A": range(11744, 11776), - "Supplemental Punctuation": range(11776, 11904), - "CJK Radicals Supplement": range(11904, 12032), - "Kangxi Radicals": range(12032, 12256), - "Ideographic Description Characters": range(12272, 12288), - "CJK Symbols and Punctuation": range(12288, 12352), - "Hiragana": range(12352, 12448), - "Katakana": range(12448, 12544), - "Bopomofo": range(12544, 12592), - "Hangul Compatibility Jamo": range(12592, 12688), - "Kanbun": range(12688, 12704), - "Bopomofo Extended": range(12704, 12736), - "CJK Strokes": range(12736, 12784), - "Katakana Phonetic Extensions": range(12784, 12800), - "Enclosed CJK Letters and Months": range(12800, 13056), - "CJK Compatibility": range(13056, 13312), - "CJK Unified Ideographs Extension A": range(13312, 19904), - "Yijing Hexagram Symbols": range(19904, 19968), - "CJK Unified Ideographs": range(19968, 40960), - "Yi Syllables": range(40960, 42128), - "Yi Radicals": range(42128, 42192), - "Lisu": range(42192, 42240), - "Vai": range(42240, 42560), - "Cyrillic Extended-B": range(42560, 42656), - "Bamum": range(42656, 42752), - "Modifier Tone Letters": range(42752, 42784), - "Latin Extended-D": range(42784, 43008), - "Syloti Nagri": range(43008, 43056), - "Common Indic Number Forms": range(43056, 43072), - "Phags-pa": range(43072, 43136), - "Saurashtra": range(43136, 43232), - "Devanagari Extended": range(43232, 43264), - "Kayah Li": range(43264, 43312), - "Rejang": range(43312, 43360), - "Hangul Jamo Extended-A": range(43360, 43392), - "Javanese": range(43392, 43488), - "Myanmar Extended-B": range(43488, 43520), - "Cham": range(43520, 43616), - "Myanmar Extended-A": range(43616, 43648), - "Tai Viet": range(43648, 43744), - "Meetei Mayek Extensions": range(43744, 43776), - "Ethiopic Extended-A": range(43776, 43824), - "Latin Extended-E": range(43824, 43888), - "Cherokee Supplement": range(43888, 43968), - "Meetei Mayek": range(43968, 44032), - "Hangul Syllables": range(44032, 55216), - "Hangul Jamo Extended-B": range(55216, 55296), - "High Surrogates": range(55296, 56192), - "High Private Use Surrogates": range(56192, 56320), - "Low Surrogates": range(56320, 57344), - "Private Use Area": range(57344, 63744), - "CJK Compatibility Ideographs": range(63744, 64256), - "Alphabetic Presentation Forms": range(64256, 64336), - "Arabic Presentation Forms-A": range(64336, 65024), - "Variation Selectors": range(65024, 65040), - "Vertical Forms": range(65040, 65056), - "Combining Half Marks": range(65056, 65072), - "CJK Compatibility Forms": range(65072, 65104), - "Small Form Variants": range(65104, 65136), - "Arabic Presentation Forms-B": range(65136, 65280), - "Halfwidth and Fullwidth Forms": range(65280, 
65520), - "Specials": range(65520, 65536), - "Linear B Syllabary": range(65536, 65664), - "Linear B Ideograms": range(65664, 65792), - "Aegean Numbers": range(65792, 65856), - "Ancient Greek Numbers": range(65856, 65936), - "Ancient Symbols": range(65936, 66000), - "Phaistos Disc": range(66000, 66048), - "Lycian": range(66176, 66208), - "Carian": range(66208, 66272), - "Coptic Epact Numbers": range(66272, 66304), - "Old Italic": range(66304, 66352), - "Gothic": range(66352, 66384), - "Old Permic": range(66384, 66432), - "Ugaritic": range(66432, 66464), - "Old Persian": range(66464, 66528), - "Deseret": range(66560, 66640), - "Shavian": range(66640, 66688), - "Osmanya": range(66688, 66736), - "Osage": range(66736, 66816), - "Elbasan": range(66816, 66864), - "Caucasian Albanian": range(66864, 66928), - "Vithkuqi": range(66928, 67008), - "Linear A": range(67072, 67456), - "Latin Extended-F": range(67456, 67520), - "Cypriot Syllabary": range(67584, 67648), - "Imperial Aramaic": range(67648, 67680), - "Palmyrene": range(67680, 67712), - "Nabataean": range(67712, 67760), - "Hatran": range(67808, 67840), - "Phoenician": range(67840, 67872), - "Lydian": range(67872, 67904), - "Meroitic Hieroglyphs": range(67968, 68000), - "Meroitic Cursive": range(68000, 68096), - "Kharoshthi": range(68096, 68192), - "Old South Arabian": range(68192, 68224), - "Old North Arabian": range(68224, 68256), - "Manichaean": range(68288, 68352), - "Avestan": range(68352, 68416), - "Inscriptional Parthian": range(68416, 68448), - "Inscriptional Pahlavi": range(68448, 68480), - "Psalter Pahlavi": range(68480, 68528), - "Old Turkic": range(68608, 68688), - "Old Hungarian": range(68736, 68864), - "Hanifi Rohingya": range(68864, 68928), - "Rumi Numeral Symbols": range(69216, 69248), - "Yezidi": range(69248, 69312), - "Arabic Extended-C": range(69312, 69376), - "Old Sogdian": range(69376, 69424), - "Sogdian": range(69424, 69488), - "Old Uyghur": range(69488, 69552), - "Chorasmian": range(69552, 69600), - "Elymaic": range(69600, 69632), - "Brahmi": range(69632, 69760), - "Kaithi": range(69760, 69840), - "Sora Sompeng": range(69840, 69888), - "Chakma": range(69888, 69968), - "Mahajani": range(69968, 70016), - "Sharada": range(70016, 70112), - "Sinhala Archaic Numbers": range(70112, 70144), - "Khojki": range(70144, 70224), - "Multani": range(70272, 70320), - "Khudawadi": range(70320, 70400), - "Grantha": range(70400, 70528), - "Newa": range(70656, 70784), - "Tirhuta": range(70784, 70880), - "Siddham": range(71040, 71168), - "Modi": range(71168, 71264), - "Mongolian Supplement": range(71264, 71296), - "Takri": range(71296, 71376), - "Ahom": range(71424, 71504), - "Dogra": range(71680, 71760), - "Warang Citi": range(71840, 71936), - "Dives Akuru": range(71936, 72032), - "Nandinagari": range(72096, 72192), - "Zanabazar Square": range(72192, 72272), - "Soyombo": range(72272, 72368), - "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384), - "Pau Cin Hau": range(72384, 72448), - "Devanagari Extended-A": range(72448, 72544), - "Bhaiksuki": range(72704, 72816), - "Marchen": range(72816, 72896), - "Masaram Gondi": range(72960, 73056), - "Gunjala Gondi": range(73056, 73136), - "Makasar": range(73440, 73472), - "Kawi": range(73472, 73568), - "Lisu Supplement": range(73648, 73664), - "Tamil Supplement": range(73664, 73728), - "Cuneiform": range(73728, 74752), - "Cuneiform Numbers and Punctuation": range(74752, 74880), - "Early Dynastic Cuneiform": range(74880, 75088), - "Cypro-Minoan": range(77712, 77824), - "Egyptian 
Hieroglyphs": range(77824, 78896), - "Egyptian Hieroglyph Format Controls": range(78896, 78944), - "Anatolian Hieroglyphs": range(82944, 83584), - "Bamum Supplement": range(92160, 92736), - "Mro": range(92736, 92784), - "Tangsa": range(92784, 92880), - "Bassa Vah": range(92880, 92928), - "Pahawh Hmong": range(92928, 93072), - "Medefaidrin": range(93760, 93856), - "Miao": range(93952, 94112), - "Ideographic Symbols and Punctuation": range(94176, 94208), - "Tangut": range(94208, 100352), - "Tangut Components": range(100352, 101120), - "Khitan Small Script": range(101120, 101632), - "Tangut Supplement": range(101632, 101760), - "Kana Extended-B": range(110576, 110592), - "Kana Supplement": range(110592, 110848), - "Kana Extended-A": range(110848, 110896), - "Small Kana Extension": range(110896, 110960), - "Nushu": range(110960, 111360), - "Duployan": range(113664, 113824), - "Shorthand Format Controls": range(113824, 113840), - "Znamenny Musical Notation": range(118528, 118736), - "Byzantine Musical Symbols": range(118784, 119040), - "Musical Symbols": range(119040, 119296), - "Ancient Greek Musical Notation": range(119296, 119376), - "Kaktovik Numerals": range(119488, 119520), - "Mayan Numerals": range(119520, 119552), - "Tai Xuan Jing Symbols": range(119552, 119648), - "Counting Rod Numerals": range(119648, 119680), - "Mathematical Alphanumeric Symbols": range(119808, 120832), - "Sutton SignWriting": range(120832, 121520), - "Latin Extended-G": range(122624, 122880), - "Glagolitic Supplement": range(122880, 122928), - "Cyrillic Extended-D": range(122928, 123024), - "Nyiakeng Puachue Hmong": range(123136, 123216), - "Toto": range(123536, 123584), - "Wancho": range(123584, 123648), - "Nag Mundari": range(124112, 124160), - "Ethiopic Extended-B": range(124896, 124928), - "Mende Kikakui": range(124928, 125152), - "Adlam": range(125184, 125280), - "Indic Siyaq Numbers": range(126064, 126144), - "Ottoman Siyaq Numbers": range(126208, 126288), - "Arabic Mathematical Alphabetic Symbols": range(126464, 126720), - "Mahjong Tiles": range(126976, 127024), - "Domino Tiles": range(127024, 127136), - "Playing Cards": range(127136, 127232), - "Enclosed Alphanumeric Supplement": range(127232, 127488), - "Enclosed Ideographic Supplement": range(127488, 127744), - "Miscellaneous Symbols and Pictographs": range(127744, 128512), - "Emoticons range(Emoji)": range(128512, 128592), - "Ornamental Dingbats": range(128592, 128640), - "Transport and Map Symbols": range(128640, 128768), - "Alchemical Symbols": range(128768, 128896), - "Geometric Shapes Extended": range(128896, 129024), - "Supplemental Arrows-C": range(129024, 129280), - "Supplemental Symbols and Pictographs": range(129280, 129536), - "Chess Symbols": range(129536, 129648), - "Symbols and Pictographs Extended-A": range(129648, 129792), - "Symbols for Legacy Computing": range(129792, 130048), - "CJK Unified Ideographs Extension B": range(131072, 173792), - "CJK Unified Ideographs Extension C": range(173824, 177984), - "CJK Unified Ideographs Extension D": range(177984, 178208), - "CJK Unified Ideographs Extension E": range(178208, 183984), - "CJK Unified Ideographs Extension F": range(183984, 191472), - "CJK Compatibility Ideographs Supplement": range(194560, 195104), - "CJK Unified Ideographs Extension G": range(196608, 201552), - "CJK Unified Ideographs Extension H": range(201552, 205744), - "Tags": range(917504, 917632), - "Variation Selectors Supplement": range(917760, 918000), - "Supplementary Private Use Area-A": range(983040, 1048576), - 
"Supplementary Private Use Area-B": range(1048576, 1114112), -} - - -UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [ - "Supplement", - "Extended", - "Extensions", - "Modifier", - "Marks", - "Punctuation", - "Symbols", - "Forms", - "Operators", - "Miscellaneous", - "Drawing", - "Block", - "Shapes", - "Supplemental", - "Tags", -] - -RE_POSSIBLE_ENCODING_INDICATION = re_compile( - r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", - IGNORECASE, -) - -IANA_NO_ALIASES = [ - "cp720", - "cp737", - "cp856", - "cp874", - "cp875", - "cp1006", - "koi8_r", - "koi8_t", - "koi8_u", -] - -IANA_SUPPORTED: List[str] = sorted( - filter( - lambda x: x.endswith("_codec") is False - and x not in {"rot_13", "tactis", "mbcs"}, - list(set(aliases.values())) + IANA_NO_ALIASES, - ) -) - -IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) - -# pre-computed code page that are similar using the function cp_similarity. -IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = { - "cp037": ["cp1026", "cp1140", "cp273", "cp500"], - "cp1026": ["cp037", "cp1140", "cp273", "cp500"], - "cp1125": ["cp866"], - "cp1140": ["cp037", "cp1026", "cp273", "cp500"], - "cp1250": ["iso8859_2"], - "cp1251": ["kz1048", "ptcp154"], - "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], - "cp1253": ["iso8859_7"], - "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], - "cp1257": ["iso8859_13"], - "cp273": ["cp037", "cp1026", "cp1140", "cp500"], - "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], - "cp500": ["cp037", "cp1026", "cp1140", "cp273"], - "cp850": ["cp437", "cp857", "cp858", "cp865"], - "cp857": ["cp850", "cp858", "cp865"], - "cp858": ["cp437", "cp850", "cp857", "cp865"], - "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], - "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], - "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], - "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], - "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], - "cp866": ["cp1125"], - "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], - "iso8859_11": ["tis_620"], - "iso8859_13": ["cp1257"], - "iso8859_14": [ - "iso8859_10", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_15": [ - "cp1252", - "cp1254", - "iso8859_10", - "iso8859_14", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_16": [ - "iso8859_14", - "iso8859_15", - "iso8859_2", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], - "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], - "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], - "iso8859_7": ["cp1253"], - "iso8859_9": [ - "cp1252", - "cp1254", - "cp1258", - "iso8859_10", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_4", - "latin_1", - ], - "kz1048": ["cp1251", "ptcp154"], - "latin_1": [ - "cp1252", - "cp1254", - "cp1258", - "iso8859_10", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_4", - "iso8859_9", - ], - "mac_iceland": ["mac_roman", "mac_turkish"], - "mac_roman": ["mac_iceland", "mac_turkish"], - "mac_turkish": ["mac_iceland", "mac_roman"], - "ptcp154": ["cp1251", "kz1048"], - "tis_620": ["iso8859_11"], -} - - -CHARDET_CORRESPONDENCE: Dict[str, str] = { - "iso2022_kr": "ISO-2022-KR", - "iso2022_jp": "ISO-2022-JP", - "euc_kr": "EUC-KR", - "tis_620": "TIS-620", - "utf_32": "UTF-32", - "euc_jp": 
"EUC-JP", - "koi8_r": "KOI8-R", - "iso8859_1": "ISO-8859-1", - "iso8859_2": "ISO-8859-2", - "iso8859_5": "ISO-8859-5", - "iso8859_6": "ISO-8859-6", - "iso8859_7": "ISO-8859-7", - "iso8859_8": "ISO-8859-8", - "utf_16": "UTF-16", - "cp855": "IBM855", - "mac_cyrillic": "MacCyrillic", - "gb2312": "GB2312", - "gb18030": "GB18030", - "cp932": "CP932", - "cp866": "IBM866", - "utf_8": "utf-8", - "utf_8_sig": "UTF-8-SIG", - "shift_jis": "SHIFT_JIS", - "big5": "Big5", - "cp1250": "windows-1250", - "cp1251": "windows-1251", - "cp1252": "Windows-1252", - "cp1253": "windows-1253", - "cp1255": "windows-1255", - "cp1256": "windows-1256", - "cp1254": "Windows-1254", - "cp949": "CP949", -} - - -COMMON_SAFE_ASCII_CHARACTERS: Set[str] = { - "<", - ">", - "=", - ":", - "/", - "&", - ";", - "{", - "}", - "[", - "]", - ",", - "|", - '"', - "-", -} - - -KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"} -ZH_NAMES: Set[str] = {"big5", "cp950", "big5hkscs", "hz"} - -# Logging LEVEL below DEBUG -TRACE: int = 5 - - -# Language label that contain the em dash "—" -# character are to be considered alternative seq to origin -FREQUENCIES: Dict[str, List[str]] = { - "English": [ - "e", - "a", - "t", - "i", - "o", - "n", - "s", - "r", - "h", - "l", - "d", - "c", - "u", - "m", - "f", - "p", - "g", - "w", - "y", - "b", - "v", - "k", - "x", - "j", - "z", - "q", - ], - "English—": [ - "e", - "a", - "t", - "i", - "o", - "n", - "s", - "r", - "h", - "l", - "d", - "c", - "m", - "u", - "f", - "p", - "g", - "w", - "b", - "y", - "v", - "k", - "j", - "x", - "z", - "q", - ], - "German": [ - "e", - "n", - "i", - "r", - "s", - "t", - "a", - "d", - "h", - "u", - "l", - "g", - "o", - "c", - "m", - "b", - "f", - "k", - "w", - "z", - "p", - "v", - "ü", - "ä", - "ö", - "j", - ], - "French": [ - "e", - "a", - "s", - "n", - "i", - "t", - "r", - "l", - "u", - "o", - "d", - "c", - "p", - "m", - "é", - "v", - "g", - "f", - "b", - "h", - "q", - "à", - "x", - "è", - "y", - "j", - ], - "Dutch": [ - "e", - "n", - "a", - "i", - "r", - "t", - "o", - "d", - "s", - "l", - "g", - "h", - "v", - "m", - "u", - "k", - "c", - "p", - "b", - "w", - "j", - "z", - "f", - "y", - "x", - "ë", - ], - "Italian": [ - "e", - "i", - "a", - "o", - "n", - "l", - "t", - "r", - "s", - "c", - "d", - "u", - "p", - "m", - "g", - "v", - "f", - "b", - "z", - "h", - "q", - "è", - "à", - "k", - "y", - "ò", - ], - "Polish": [ - "a", - "i", - "o", - "e", - "n", - "r", - "z", - "w", - "s", - "c", - "t", - "k", - "y", - "d", - "p", - "m", - "u", - "l", - "j", - "ł", - "g", - "b", - "h", - "ą", - "ę", - "ó", - ], - "Spanish": [ - "e", - "a", - "o", - "n", - "s", - "r", - "i", - "l", - "d", - "t", - "c", - "u", - "m", - "p", - "b", - "g", - "v", - "f", - "y", - "ó", - "h", - "q", - "í", - "j", - "z", - "á", - ], - "Russian": [ - "о", - "а", - "е", - "и", - "н", - "с", - "т", - "р", - "в", - "л", - "к", - "м", - "д", - "п", - "у", - "г", - "я", - "ы", - "з", - "б", - "й", - "ь", - "ч", - "х", - "ж", - "ц", - ], - # Jap-Kanji - "Japanese": [ - "人", - "一", - "大", - "亅", - "丁", - "丨", - "竹", - "笑", - "口", - "日", - "今", - "二", - "彳", - "行", - "十", - "土", - "丶", - "寸", - "寺", - "時", - "乙", - "丿", - "乂", - "气", - "気", - "冂", - "巾", - "亠", - "市", - "目", - "儿", - "見", - "八", - "小", - "凵", - "県", - "月", - "彐", - "門", - "間", - "木", - "東", - "山", - "出", - "本", - "中", - "刀", - "分", - "耳", - "又", - "取", - "最", - "言", - "田", - "心", - "思", - "刂", - "前", - "京", - "尹", - "事", - "生", - "厶", - "云", - "会", - "未", - "来", - "白", - "冫", - "楽", - "灬", - "馬", - "尸", - "尺", - "駅", - "明", - "耂", - "者", - "了", - "阝", - 
"都", - "高", - "卜", - "占", - "厂", - "广", - "店", - "子", - "申", - "奄", - "亻", - "俺", - "上", - "方", - "冖", - "学", - "衣", - "艮", - "食", - "自", - ], - # Jap-Katakana - "Japanese—": [ - "ー", - "ン", - "ス", - "・", - "ル", - "ト", - "リ", - "イ", - "ア", - "ラ", - "ッ", - "ク", - "ド", - "シ", - "レ", - "ジ", - "タ", - "フ", - "ロ", - "カ", - "テ", - "マ", - "ィ", - "グ", - "バ", - "ム", - "プ", - "オ", - "コ", - "デ", - "ニ", - "ウ", - "メ", - "サ", - "ビ", - "ナ", - "ブ", - "ャ", - "エ", - "ュ", - "チ", - "キ", - "ズ", - "ダ", - "パ", - "ミ", - "ェ", - "ョ", - "ハ", - "セ", - "ベ", - "ガ", - "モ", - "ツ", - "ネ", - "ボ", - "ソ", - "ノ", - "ァ", - "ヴ", - "ワ", - "ポ", - "ペ", - "ピ", - "ケ", - "ゴ", - "ギ", - "ザ", - "ホ", - "ゲ", - "ォ", - "ヤ", - "ヒ", - "ユ", - "ヨ", - "ヘ", - "ゼ", - "ヌ", - "ゥ", - "ゾ", - "ヶ", - "ヂ", - "ヲ", - "ヅ", - "ヵ", - "ヱ", - "ヰ", - "ヮ", - "ヽ", - "゠", - "ヾ", - "ヷ", - "ヿ", - "ヸ", - "ヹ", - "ヺ", - ], - # Jap-Hiragana - "Japanese——": [ - "の", - "に", - "る", - "た", - "と", - "は", - "し", - "い", - "を", - "で", - "て", - "が", - "な", - "れ", - "か", - "ら", - "さ", - "っ", - "り", - "す", - "あ", - "も", - "こ", - "ま", - "う", - "く", - "よ", - "き", - "ん", - "め", - "お", - "け", - "そ", - "つ", - "だ", - "や", - "え", - "ど", - "わ", - "ち", - "み", - "せ", - "じ", - "ば", - "へ", - "び", - "ず", - "ろ", - "ほ", - "げ", - "む", - "べ", - "ひ", - "ょ", - "ゆ", - "ぶ", - "ご", - "ゃ", - "ね", - "ふ", - "ぐ", - "ぎ", - "ぼ", - "ゅ", - "づ", - "ざ", - "ぞ", - "ぬ", - "ぜ", - "ぱ", - "ぽ", - "ぷ", - "ぴ", - "ぃ", - "ぁ", - "ぇ", - "ぺ", - "ゞ", - "ぢ", - "ぉ", - "ぅ", - "ゐ", - "ゝ", - "ゑ", - "゛", - "゜", - "ゎ", - "ゔ", - "゚", - "ゟ", - "゙", - "ゕ", - "ゖ", - ], - "Portuguese": [ - "a", - "e", - "o", - "s", - "i", - "r", - "d", - "n", - "t", - "m", - "u", - "c", - "l", - "p", - "g", - "v", - "b", - "f", - "h", - "ã", - "q", - "é", - "ç", - "á", - "z", - "í", - ], - "Swedish": [ - "e", - "a", - "n", - "r", - "t", - "s", - "i", - "l", - "d", - "o", - "m", - "k", - "g", - "v", - "h", - "f", - "u", - "p", - "ä", - "c", - "b", - "ö", - "å", - "y", - "j", - "x", - ], - "Chinese": [ - "的", - "一", - "是", - "不", - "了", - "在", - "人", - "有", - "我", - "他", - "这", - "个", - "们", - "中", - "来", - "上", - "大", - "为", - "和", - "国", - "地", - "到", - "以", - "说", - "时", - "要", - "就", - "出", - "会", - "可", - "也", - "你", - "对", - "生", - "能", - "而", - "子", - "那", - "得", - "于", - "着", - "下", - "自", - "之", - "年", - "过", - "发", - "后", - "作", - "里", - "用", - "道", - "行", - "所", - "然", - "家", - "种", - "事", - "成", - "方", - "多", - "经", - "么", - "去", - "法", - "学", - "如", - "都", - "同", - "现", - "当", - "没", - "动", - "面", - "起", - "看", - "定", - "天", - "分", - "还", - "进", - "好", - "小", - "部", - "其", - "些", - "主", - "样", - "理", - "心", - "她", - "本", - "前", - "开", - "但", - "因", - "只", - "从", - "想", - "实", - ], - "Ukrainian": [ - "о", - "а", - "н", - "і", - "и", - "р", - "в", - "т", - "е", - "с", - "к", - "л", - "у", - "д", - "м", - "п", - "з", - "я", - "ь", - "б", - "г", - "й", - "ч", - "х", - "ц", - "ї", - ], - "Norwegian": [ - "e", - "r", - "n", - "t", - "a", - "s", - "i", - "o", - "l", - "d", - "g", - "k", - "m", - "v", - "f", - "p", - "u", - "b", - "h", - "å", - "y", - "j", - "ø", - "c", - "æ", - "w", - ], - "Finnish": [ - "a", - "i", - "n", - "t", - "e", - "s", - "l", - "o", - "u", - "k", - "ä", - "m", - "r", - "v", - "j", - "h", - "p", - "y", - "d", - "ö", - "g", - "c", - "b", - "f", - "w", - "z", - ], - "Vietnamese": [ - "n", - "h", - "t", - "i", - "c", - "g", - "a", - "o", - "u", - "m", - "l", - "r", - "à", - "đ", - "s", - "e", - "v", - "p", - "b", - "y", - "ư", - "d", - "á", - "k", - "ộ", - "ế", - ], - "Czech": [ - "o", - "e", - "a", - "n", - "t", - "s", - "i", - 
"l", - "v", - "r", - "k", - "d", - "u", - "m", - "p", - "í", - "c", - "h", - "z", - "á", - "y", - "j", - "b", - "ě", - "é", - "ř", - ], - "Hungarian": [ - "e", - "a", - "t", - "l", - "s", - "n", - "k", - "r", - "i", - "o", - "z", - "á", - "é", - "g", - "m", - "b", - "y", - "v", - "d", - "h", - "u", - "p", - "j", - "ö", - "f", - "c", - ], - "Korean": [ - "이", - "다", - "에", - "의", - "는", - "로", - "하", - "을", - "가", - "고", - "지", - "서", - "한", - "은", - "기", - "으", - "년", - "대", - "사", - "시", - "를", - "리", - "도", - "인", - "스", - "일", - ], - "Indonesian": [ - "a", - "n", - "e", - "i", - "r", - "t", - "u", - "s", - "d", - "k", - "m", - "l", - "g", - "p", - "b", - "o", - "h", - "y", - "j", - "c", - "w", - "f", - "v", - "z", - "x", - "q", - ], - "Turkish": [ - "a", - "e", - "i", - "n", - "r", - "l", - "ı", - "k", - "d", - "t", - "s", - "m", - "y", - "u", - "o", - "b", - "ü", - "ş", - "v", - "g", - "z", - "h", - "c", - "p", - "ç", - "ğ", - ], - "Romanian": [ - "e", - "i", - "a", - "r", - "n", - "t", - "u", - "l", - "o", - "c", - "s", - "d", - "p", - "m", - "ă", - "f", - "v", - "î", - "g", - "b", - "ș", - "ț", - "z", - "h", - "â", - "j", - ], - "Farsi": [ - "ا", - "ی", - "ر", - "د", - "ن", - "ه", - "و", - "م", - "ت", - "ب", - "س", - "ل", - "ک", - "ش", - "ز", - "ف", - "گ", - "ع", - "خ", - "ق", - "ج", - "آ", - "پ", - "ح", - "ط", - "ص", - ], - "Arabic": [ - "ا", - "ل", - "ي", - "م", - "و", - "ن", - "ر", - "ت", - "ب", - "ة", - "ع", - "د", - "س", - "ف", - "ه", - "ك", - "ق", - "أ", - "ح", - "ج", - "ش", - "ط", - "ص", - "ى", - "خ", - "إ", - ], - "Danish": [ - "e", - "r", - "n", - "t", - "a", - "i", - "s", - "d", - "l", - "o", - "g", - "m", - "k", - "f", - "v", - "u", - "b", - "h", - "p", - "å", - "y", - "ø", - "æ", - "c", - "j", - "w", - ], - "Serbian": [ - "а", - "и", - "о", - "е", - "н", - "р", - "с", - "у", - "т", - "к", - "ј", - "в", - "д", - "м", - "п", - "л", - "г", - "з", - "б", - "a", - "i", - "e", - "o", - "n", - "ц", - "ш", - ], - "Lithuanian": [ - "i", - "a", - "s", - "o", - "r", - "e", - "t", - "n", - "u", - "k", - "m", - "l", - "p", - "v", - "d", - "j", - "g", - "ė", - "b", - "y", - "ų", - "š", - "ž", - "c", - "ą", - "į", - ], - "Slovene": [ - "e", - "a", - "i", - "o", - "n", - "r", - "s", - "l", - "t", - "j", - "v", - "k", - "d", - "p", - "m", - "u", - "z", - "b", - "g", - "h", - "č", - "c", - "š", - "ž", - "f", - "y", - ], - "Slovak": [ - "o", - "a", - "e", - "n", - "i", - "r", - "v", - "t", - "s", - "l", - "k", - "d", - "m", - "p", - "u", - "c", - "h", - "j", - "b", - "z", - "á", - "y", - "ý", - "í", - "č", - "é", - ], - "Hebrew": [ - "י", - "ו", - "ה", - "ל", - "ר", - "ב", - "ת", - "מ", - "א", - "ש", - "נ", - "ע", - "ם", - "ד", - "ק", - "ח", - "פ", - "ס", - "כ", - "ג", - "ט", - "צ", - "ן", - "ז", - "ך", - ], - "Bulgarian": [ - "а", - "и", - "о", - "е", - "н", - "т", - "р", - "с", - "в", - "л", - "к", - "д", - "п", - "м", - "з", - "г", - "я", - "ъ", - "у", - "б", - "ч", - "ц", - "й", - "ж", - "щ", - "х", - ], - "Croatian": [ - "a", - "i", - "o", - "e", - "n", - "r", - "j", - "s", - "t", - "u", - "k", - "l", - "v", - "d", - "m", - "p", - "g", - "z", - "b", - "c", - "č", - "h", - "š", - "ž", - "ć", - "f", - ], - "Hindi": [ - "क", - "र", - "स", - "न", - "त", - "म", - "ह", - "प", - "य", - "ल", - "व", - "ज", - "द", - "ग", - "ब", - "श", - "ट", - "अ", - "ए", - "थ", - "भ", - "ड", - "च", - "ध", - "ष", - "इ", - ], - "Estonian": [ - "a", - "i", - "e", - "s", - "t", - "l", - "u", - "n", - "o", - "k", - "r", - "d", - "m", - "v", - "g", - "p", - "j", - "h", - "ä", - "b", - "õ", - "ü", - "f", - "c", - "ö", 
- "y", - ], - "Thai": [ - "า", - "น", - "ร", - "อ", - "ก", - "เ", - "ง", - "ม", - "ย", - "ล", - "ว", - "ด", - "ท", - "ส", - "ต", - "ะ", - "ป", - "บ", - "ค", - "ห", - "แ", - "จ", - "พ", - "ช", - "ข", - "ใ", - ], - "Greek": [ - "α", - "τ", - "ο", - "ι", - "ε", - "ν", - "ρ", - "σ", - "κ", - "η", - "π", - "ς", - "υ", - "μ", - "λ", - "ί", - "ό", - "ά", - "γ", - "έ", - "δ", - "ή", - "ω", - "χ", - "θ", - "ύ", - ], - "Tamil": [ - "க", - "த", - "ப", - "ட", - "ர", - "ம", - "ல", - "ன", - "வ", - "ற", - "ய", - "ள", - "ச", - "ந", - "இ", - "ண", - "அ", - "ஆ", - "ழ", - "ங", - "எ", - "உ", - "ஒ", - "ஸ", - ], - "Kazakh": [ - "а", - "ы", - "е", - "н", - "т", - "р", - "л", - "і", - "д", - "с", - "м", - "қ", - "к", - "о", - "б", - "и", - "у", - "ғ", - "ж", - "ң", - "з", - "ш", - "й", - "п", - "г", - "ө", - ], -} - -LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) diff --git a/server/venv/Lib/site-packages/charset_normalizer/legacy.py b/server/venv/Lib/site-packages/charset_normalizer/legacy.py deleted file mode 100644 index a458e49..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/legacy.py +++ /dev/null @@ -1,54 +0,0 @@ -from typing import Any, Dict, Optional, Union -from warnings import warn - -from .api import from_bytes -from .constant import CHARDET_CORRESPONDENCE - - -def detect( - byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any -) -> Dict[str, Optional[Union[str, float]]]: - """ - chardet legacy method - Detect the encoding of the given byte string. It should be mostly backward-compatible. - Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) - This function is deprecated and should be used to migrate your project easily, consult the documentation for - further information. Not planned for removal. - - :param byte_str: The byte sequence to examine. - :param should_rename_legacy: Should we rename legacy encodings - to their more modern equivalents? - """ - if len(kwargs): - warn( - f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" - ) - - if not isinstance(byte_str, (bytearray, bytes)): - raise TypeError( # pragma: nocover - "Expected object of type bytes or bytearray, got: " - "{0}".format(type(byte_str)) - ) - - if isinstance(byte_str, bytearray): - byte_str = bytes(byte_str) - - r = from_bytes(byte_str).best() - - encoding = r.encoding if r is not None else None - language = r.language if r is not None and r.language != "Unknown" else "" - confidence = 1.0 - r.chaos if r is not None else None - - # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process - # but chardet does return 'utf-8-sig' and it is a valid codec name. 
- if r is not None and encoding == "utf_8" and r.bom: - encoding += "_sig" - - if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: - encoding = CHARDET_CORRESPONDENCE[encoding] - - return { - "encoding": encoding, - "language": language, - "confidence": confidence, - } diff --git a/server/venv/Lib/site-packages/charset_normalizer/md.cp311-win_amd64.pyd b/server/venv/Lib/site-packages/charset_normalizer/md.cp311-win_amd64.pyd deleted file mode 100644 index 2ff4df0..0000000 Binary files a/server/venv/Lib/site-packages/charset_normalizer/md.cp311-win_amd64.pyd and /dev/null differ diff --git a/server/venv/Lib/site-packages/charset_normalizer/md.py b/server/venv/Lib/site-packages/charset_normalizer/md.py deleted file mode 100644 index 05bb3ec..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/md.py +++ /dev/null @@ -1,584 +0,0 @@ -from functools import lru_cache -from logging import getLogger -from typing import List, Optional - -from .constant import ( - COMMON_SAFE_ASCII_CHARACTERS, - TRACE, - UNICODE_SECONDARY_RANGE_KEYWORD, -) -from .utils import ( - is_accentuated, - is_case_variable, - is_cjk, - is_emoticon, - is_hangul, - is_hiragana, - is_katakana, - is_latin, - is_punctuation, - is_separator, - is_symbol, - is_thai, - is_unprintable, - remove_accent, - unicode_range, -) - - -class MessDetectorPlugin: - """ - Base abstract class used for mess detection plugins. - All detectors MUST extend and implement given methods. - """ - - def eligible(self, character: str) -> bool: - """ - Determine if given character should be fed in. - """ - raise NotImplementedError # pragma: nocover - - def feed(self, character: str) -> None: - """ - The main routine to be executed upon character. - Insert the logic in witch the text would be considered chaotic. - """ - raise NotImplementedError # pragma: nocover - - def reset(self) -> None: # pragma: no cover - """ - Permit to reset the plugin to the initial state. - """ - raise NotImplementedError - - @property - def ratio(self) -> float: - """ - Compute the chaos ratio based on what your feed() has seen. - Must NOT be lower than 0.; No restriction gt 0. 
- """ - raise NotImplementedError # pragma: nocover - - -class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._punctuation_count: int = 0 - self._symbol_count: int = 0 - self._character_count: int = 0 - - self._last_printable_char: Optional[str] = None - self._frenzy_symbol_in_word: bool = False - - def eligible(self, character: str) -> bool: - return character.isprintable() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if ( - character != self._last_printable_char - and character not in COMMON_SAFE_ASCII_CHARACTERS - ): - if is_punctuation(character): - self._punctuation_count += 1 - elif ( - character.isdigit() is False - and is_symbol(character) - and is_emoticon(character) is False - ): - self._symbol_count += 2 - - self._last_printable_char = character - - def reset(self) -> None: # pragma: no cover - self._punctuation_count = 0 - self._character_count = 0 - self._symbol_count = 0 - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - ratio_of_punctuation: float = ( - self._punctuation_count + self._symbol_count - ) / self._character_count - - return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 - - -class TooManyAccentuatedPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._character_count: int = 0 - self._accentuated_count: int = 0 - - def eligible(self, character: str) -> bool: - return character.isalpha() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if is_accentuated(character): - self._accentuated_count += 1 - - def reset(self) -> None: # pragma: no cover - self._character_count = 0 - self._accentuated_count = 0 - - @property - def ratio(self) -> float: - if self._character_count == 0 or self._character_count < 8: - return 0.0 - ratio_of_accentuation: float = self._accentuated_count / self._character_count - return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 - - -class UnprintablePlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._unprintable_count: int = 0 - self._character_count: int = 0 - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - if is_unprintable(character): - self._unprintable_count += 1 - self._character_count += 1 - - def reset(self) -> None: # pragma: no cover - self._unprintable_count = 0 - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return (self._unprintable_count * 8) / self._character_count - - -class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._successive_count: int = 0 - self._character_count: int = 0 - - self._last_latin_character: Optional[str] = None - - def eligible(self, character: str) -> bool: - return character.isalpha() and is_latin(character) - - def feed(self, character: str) -> None: - self._character_count += 1 - if ( - self._last_latin_character is not None - and is_accentuated(character) - and is_accentuated(self._last_latin_character) - ): - if character.isupper() and self._last_latin_character.isupper(): - self._successive_count += 1 - # Worse if its the same char duplicated with different accent. 
- if remove_accent(character) == remove_accent(self._last_latin_character): - self._successive_count += 1 - self._last_latin_character = character - - def reset(self) -> None: # pragma: no cover - self._successive_count = 0 - self._character_count = 0 - self._last_latin_character = None - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return (self._successive_count * 2) / self._character_count - - -class SuspiciousRange(MessDetectorPlugin): - def __init__(self) -> None: - self._suspicious_successive_range_count: int = 0 - self._character_count: int = 0 - self._last_printable_seen: Optional[str] = None - - def eligible(self, character: str) -> bool: - return character.isprintable() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if ( - character.isspace() - or is_punctuation(character) - or character in COMMON_SAFE_ASCII_CHARACTERS - ): - self._last_printable_seen = None - return - - if self._last_printable_seen is None: - self._last_printable_seen = character - return - - unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen) - unicode_range_b: Optional[str] = unicode_range(character) - - if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): - self._suspicious_successive_range_count += 1 - - self._last_printable_seen = character - - def reset(self) -> None: # pragma: no cover - self._character_count = 0 - self._suspicious_successive_range_count = 0 - self._last_printable_seen = None - - @property - def ratio(self) -> float: - if self._character_count <= 24: - return 0.0 - - ratio_of_suspicious_range_usage: float = ( - self._suspicious_successive_range_count * 2 - ) / self._character_count - - return ratio_of_suspicious_range_usage - - -class SuperWeirdWordPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._word_count: int = 0 - self._bad_word_count: int = 0 - self._foreign_long_count: int = 0 - - self._is_current_word_bad: bool = False - self._foreign_long_watch: bool = False - - self._character_count: int = 0 - self._bad_character_count: int = 0 - - self._buffer: str = "" - self._buffer_accent_count: int = 0 - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - if character.isalpha(): - self._buffer += character - if is_accentuated(character): - self._buffer_accent_count += 1 - if ( - self._foreign_long_watch is False - and (is_latin(character) is False or is_accentuated(character)) - and is_cjk(character) is False - and is_hangul(character) is False - and is_katakana(character) is False - and is_hiragana(character) is False - and is_thai(character) is False - ): - self._foreign_long_watch = True - return - if not self._buffer: - return - if ( - character.isspace() or is_punctuation(character) or is_separator(character) - ) and self._buffer: - self._word_count += 1 - buffer_length: int = len(self._buffer) - - self._character_count += buffer_length - - if buffer_length >= 4: - if self._buffer_accent_count / buffer_length > 0.34: - self._is_current_word_bad = True - # Word/Buffer ending with an upper case accentuated letter are so rare, - # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
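The `SuspiciousRange` plugin above leans on `is_suspiciously_successive_range` (from this package's `md` module): adjacent printable characters drawn from ranges that rarely co-occur raise the chaos score. A sketch of the predicate's behavior:

```python
# Sketch: Latin followed by Cyrillic mid-text is flagged as suspicious,
# while two Latin ranges are not.
from charset_normalizer.md import is_suspiciously_successive_range

print(is_suspiciously_successive_range("Basic Latin", "Cyrillic"))            # True
print(is_suspiciously_successive_range("Basic Latin", "Latin-1 Supplement"))  # False
```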
-            if (
-                is_accentuated(self._buffer[-1])
-                and self._buffer[-1].isupper()
-                and all(_.isupper() for _ in self._buffer) is False
-            ):
-                self._foreign_long_count += 1
-                self._is_current_word_bad = True
-            if buffer_length >= 24 and self._foreign_long_watch:
-                camel_case_dst = [
-                    i
-                    for c, i in zip(self._buffer, range(0, buffer_length))
-                    if c.isupper()
-                ]
-                probable_camel_cased: bool = False
-
-                if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3):
-                    probable_camel_cased = True
-
-                if not probable_camel_cased:
-                    self._foreign_long_count += 1
-                    self._is_current_word_bad = True
-
-            if self._is_current_word_bad:
-                self._bad_word_count += 1
-                self._bad_character_count += len(self._buffer)
-                self._is_current_word_bad = False
-
-            self._foreign_long_watch = False
-            self._buffer = ""
-            self._buffer_accent_count = 0
-        elif (
-            character not in {"<", ">", "-", "=", "~", "|", "_"}
-            and character.isdigit() is False
-            and is_symbol(character)
-        ):
-            self._is_current_word_bad = True
-            self._buffer += character
-
-    def reset(self) -> None:  # pragma: no cover
-        self._buffer = ""
-        self._is_current_word_bad = False
-        self._foreign_long_watch = False
-        self._bad_word_count = 0
-        self._word_count = 0
-        self._character_count = 0
-        self._bad_character_count = 0
-        self._foreign_long_count = 0
-
-    @property
-    def ratio(self) -> float:
-        if self._word_count <= 10 and self._foreign_long_count == 0:
-            return 0.0
-
-        return self._bad_character_count / self._character_count
-
-
-class CjkInvalidStopPlugin(MessDetectorPlugin):
-    """
-    GB (Chinese) based encodings often render the full stop incorrectly when the
-    content does not fit; this is easily detected by searching for an overuse of
-    '丅' and '丄'.
-    """
-
-    def __init__(self) -> None:
-        self._wrong_stop_count: int = 0
-        self._cjk_character_count: int = 0
-
-    def eligible(self, character: str) -> bool:
-        return True
-
-    def feed(self, character: str) -> None:
-        if character in {"丅", "丄"}:
-            self._wrong_stop_count += 1
-            return
-        if is_cjk(character):
-            self._cjk_character_count += 1
-
-    def reset(self) -> None:  # pragma: no cover
-        self._wrong_stop_count = 0
-        self._cjk_character_count = 0
-
-    @property
-    def ratio(self) -> float:
-        if self._cjk_character_count < 16:
-            return 0.0
-        return self._wrong_stop_count / self._cjk_character_count
-
-
-class ArchaicUpperLowerPlugin(MessDetectorPlugin):
-    def __init__(self) -> None:
-        self._buf: bool = False
-
-        self._character_count_since_last_sep: int = 0
-
-        self._successive_upper_lower_count: int = 0
-        self._successive_upper_lower_count_final: int = 0
-
-        self._character_count: int = 0
-
-        self._last_alpha_seen: Optional[str] = None
-        self._current_ascii_only: bool = True
-
-    def eligible(self, character: str) -> bool:
-        return True
-
-    def feed(self, character: str) -> None:
-        is_concerned = character.isalpha() and is_case_variable(character)
-        chunk_sep = is_concerned is False
-
-        if chunk_sep and self._character_count_since_last_sep > 0:
-            if (
-                self._character_count_since_last_sep <= 64
-                and character.isdigit() is False
-                and self._current_ascii_only is False
-            ):
-                self._successive_upper_lower_count_final += (
-                    self._successive_upper_lower_count
-                )
-
-            self._successive_upper_lower_count = 0
-            self._character_count_since_last_sep = 0
-            self._last_alpha_seen = None
-            self._buf = False
-            self._character_count += 1
-            self._current_ascii_only = True
-
-            return
-
-        if self._current_ascii_only is True and character.isascii() is False:
-            self._current_ascii_only = False
-
-        if self._last_alpha_seen is not None:
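The branch below only scores a case flip once two flips occur back to back, so ordinary capitalization stays cheap while aLtErNaTiNg text accumulates weight. A simplified flip counter (not the plugin's exact buffered bookkeeping) shows the signal it is after:

.. code-block:: python

   def case_flips(word: str) -> int:
       # Count upper<->lower transitions between consecutive letters.
       return sum(
           1
           for a, b in zip(word, word[1:])
           if a.isalpha() and b.isalpha() and a.isupper() != b.isupper()
       )

   print(case_flips("Hello"))  # 1 -> ordinary capitalization, harmless
   print(case_flips("hElLo"))  # 4 -> archaic/garbled casing, suspicious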
-            if (character.isupper() and self._last_alpha_seen.islower()) or (
-                character.islower() and self._last_alpha_seen.isupper()
-            ):
-                if self._buf is True:
-                    self._successive_upper_lower_count += 2
-                    self._buf = False
-                else:
-                    self._buf = True
-            else:
-                self._buf = False
-
-        self._character_count += 1
-        self._character_count_since_last_sep += 1
-        self._last_alpha_seen = character
-
-    def reset(self) -> None:  # pragma: no cover
-        self._character_count = 0
-        self._character_count_since_last_sep = 0
-        self._successive_upper_lower_count = 0
-        self._successive_upper_lower_count_final = 0
-        self._last_alpha_seen = None
-        self._buf = False
-        self._current_ascii_only = True
-
-    @property
-    def ratio(self) -> float:
-        if self._character_count == 0:
-            return 0.0
-
-        return self._successive_upper_lower_count_final / self._character_count
-
-
-@lru_cache(maxsize=1024)
-def is_suspiciously_successive_range(
-    unicode_range_a: Optional[str], unicode_range_b: Optional[str]
-) -> bool:
-    """
-    Determine if two Unicode ranges seen next to each other can be considered suspicious.
-    """
-    if unicode_range_a is None or unicode_range_b is None:
-        return True
-
-    if unicode_range_a == unicode_range_b:
-        return False
-
-    if "Latin" in unicode_range_a and "Latin" in unicode_range_b:
-        return False
-
-    if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b:
-        return False
-
-    # Latin characters can be accompanied by a combining diacritical mark,
-    # e.g. in Vietnamese.
-    if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and (
-        "Combining" in unicode_range_a or "Combining" in unicode_range_b
-    ):
-        return False
-
-    keywords_range_a, keywords_range_b = unicode_range_a.split(
-        " "
-    ), unicode_range_b.split(" ")
-
-    for el in keywords_range_a:
-        if el in UNICODE_SECONDARY_RANGE_KEYWORD:
-            continue
-        if el in keywords_range_b:
-            return False
-
-    # Japanese Exception
-    range_a_jp_chars, range_b_jp_chars = (
-        unicode_range_a
-        in (
-            "Hiragana",
-            "Katakana",
-        ),
-        unicode_range_b in ("Hiragana", "Katakana"),
-    )
-    if (range_a_jp_chars or range_b_jp_chars) and (
-        "CJK" in unicode_range_a or "CJK" in unicode_range_b
-    ):
-        return False
-    if range_a_jp_chars and range_b_jp_chars:
-        return False
-
-    if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b:
-        if "CJK" in unicode_range_a or "CJK" in unicode_range_b:
-            return False
-        if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
-            return False
-
-    # Chinese/Japanese use dedicated ranges for punctuation and/or separators.
-    if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or (
-        unicode_range_a in ["Katakana", "Hiragana"]
-        and unicode_range_b in ["Katakana", "Hiragana"]
-    ):
-        if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b:
-            return False
-        if "Forms" in unicode_range_a or "Forms" in unicode_range_b:
-            return False
-        if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
-            return False
-
-    return True
-
-
-@lru_cache(maxsize=2048)
-def mess_ratio(
-    decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False
-) -> float:
-    """
-    Compute a mess ratio given a decoded bytes sequence. The maximum threshold stops
-    the computation early once it is exceeded.
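A quick end-to-end check of the function defined here (illustrative values; the exact score depends on the plugin set and library version):

.. code-block:: python

   from charset_normalizer.md import mess_ratio

   print(mess_ratio("A clean English sentence, nothing odd here."))  # ~0.0
   # Unprintable bytes plus dense accents push the sum of plugin ratios past
   # the 0.2 default threshold, at which point the loop below stops early:
   print(mess_ratio("ÃªtÃ©\x01\x02" * 24) > 0.2)  # True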
- """ - - detectors: List[MessDetectorPlugin] = [ - md_class() for md_class in MessDetectorPlugin.__subclasses__() - ] - - length: int = len(decoded_sequence) + 1 - - mean_mess_ratio: float = 0.0 - - if length < 512: - intermediary_mean_mess_ratio_calc: int = 32 - elif length <= 1024: - intermediary_mean_mess_ratio_calc = 64 - else: - intermediary_mean_mess_ratio_calc = 128 - - for character, index in zip(decoded_sequence + "\n", range(length)): - for detector in detectors: - if detector.eligible(character): - detector.feed(character) - - if ( - index > 0 and index % intermediary_mean_mess_ratio_calc == 0 - ) or index == length - 1: - mean_mess_ratio = sum(dt.ratio for dt in detectors) - - if mean_mess_ratio >= maximum_threshold: - break - - if debug: - logger = getLogger("charset_normalizer") - - logger.log( - TRACE, - "Mess-detector extended-analysis start. " - f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} " - f"maximum_threshold={maximum_threshold}", - ) - - if len(decoded_sequence) > 16: - logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") - logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") - - for dt in detectors: # pragma: nocover - logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") - - return round(mean_mess_ratio, 3) diff --git a/server/venv/Lib/site-packages/charset_normalizer/md__mypyc.cp311-win_amd64.pyd b/server/venv/Lib/site-packages/charset_normalizer/md__mypyc.cp311-win_amd64.pyd deleted file mode 100644 index 3300710..0000000 Binary files a/server/venv/Lib/site-packages/charset_normalizer/md__mypyc.cp311-win_amd64.pyd and /dev/null differ diff --git a/server/venv/Lib/site-packages/charset_normalizer/models.py b/server/venv/Lib/site-packages/charset_normalizer/models.py deleted file mode 100644 index 1e50f7b..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/models.py +++ /dev/null @@ -1,337 +0,0 @@ -from encodings.aliases import aliases -from hashlib import sha256 -from json import dumps -from typing import Any, Dict, Iterator, List, Optional, Tuple, Union - -from .constant import TOO_BIG_SEQUENCE -from .utils import iana_name, is_multi_byte_encoding, unicode_range - - -class CharsetMatch: - def __init__( - self, - payload: bytes, - guessed_encoding: str, - mean_mess_ratio: float, - has_sig_or_bom: bool, - languages: "CoherenceMatches", - decoded_payload: Optional[str] = None, - ): - self._payload: bytes = payload - - self._encoding: str = guessed_encoding - self._mean_mess_ratio: float = mean_mess_ratio - self._languages: CoherenceMatches = languages - self._has_sig_or_bom: bool = has_sig_or_bom - self._unicode_ranges: Optional[List[str]] = None - - self._leaves: List[CharsetMatch] = [] - self._mean_coherence_ratio: float = 0.0 - - self._output_payload: Optional[bytes] = None - self._output_encoding: Optional[str] = None - - self._string: Optional[str] = decoded_payload - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CharsetMatch): - raise TypeError( - "__eq__ cannot be invoked on {} and {}.".format( - str(other.__class__), str(self.__class__) - ) - ) - return self.encoding == other.encoding and self.fingerprint == other.fingerprint - - def __lt__(self, other: object) -> bool: - """ - Implemented to make sorted available upon CharsetMatches items. 
- """ - if not isinstance(other, CharsetMatch): - raise ValueError - - chaos_difference: float = abs(self.chaos - other.chaos) - coherence_difference: float = abs(self.coherence - other.coherence) - - # Below 1% difference --> Use Coherence - if chaos_difference < 0.01 and coherence_difference > 0.02: - return self.coherence > other.coherence - elif chaos_difference < 0.01 and coherence_difference <= 0.02: - # When having a difficult decision, use the result that decoded as many multi-byte as possible. - return self.multi_byte_usage > other.multi_byte_usage - - return self.chaos < other.chaos - - @property - def multi_byte_usage(self) -> float: - return 1.0 - (len(str(self)) / len(self.raw)) - - def __str__(self) -> str: - # Lazy Str Loading - if self._string is None: - self._string = str(self._payload, self._encoding, "strict") - return self._string - - def __repr__(self) -> str: - return "".format(self.encoding, self.fingerprint) - - def add_submatch(self, other: "CharsetMatch") -> None: - if not isinstance(other, CharsetMatch) or other == self: - raise ValueError( - "Unable to add instance <{}> as a submatch of a CharsetMatch".format( - other.__class__ - ) - ) - - other._string = None # Unload RAM usage; dirty trick. - self._leaves.append(other) - - @property - def encoding(self) -> str: - return self._encoding - - @property - def encoding_aliases(self) -> List[str]: - """ - Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. - """ - also_known_as: List[str] = [] - for u, p in aliases.items(): - if self.encoding == u: - also_known_as.append(p) - elif self.encoding == p: - also_known_as.append(u) - return also_known_as - - @property - def bom(self) -> bool: - return self._has_sig_or_bom - - @property - def byte_order_mark(self) -> bool: - return self._has_sig_or_bom - - @property - def languages(self) -> List[str]: - """ - Return the complete list of possible languages found in decoded sequence. - Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. - """ - return [e[0] for e in self._languages] - - @property - def language(self) -> str: - """ - Most probable language found in decoded sequence. If none were detected or inferred, the property will return - "Unknown". - """ - if not self._languages: - # Trying to infer the language based on the given encoding - # Its either English or we should not pronounce ourselves in certain cases. - if "ascii" in self.could_be_from_charset: - return "English" - - # doing it there to avoid circular import - from charset_normalizer.cd import encoding_languages, mb_encoding_languages - - languages = ( - mb_encoding_languages(self.encoding) - if is_multi_byte_encoding(self.encoding) - else encoding_languages(self.encoding) - ) - - if len(languages) == 0 or "Latin Based" in languages: - return "Unknown" - - return languages[0] - - return self._languages[0][0] - - @property - def chaos(self) -> float: - return self._mean_mess_ratio - - @property - def coherence(self) -> float: - if not self._languages: - return 0.0 - return self._languages[0][1] - - @property - def percent_chaos(self) -> float: - return round(self.chaos * 100, ndigits=3) - - @property - def percent_coherence(self) -> float: - return round(self.coherence * 100, ndigits=3) - - @property - def raw(self) -> bytes: - """ - Original untouched bytes. 
- """ - return self._payload - - @property - def submatch(self) -> List["CharsetMatch"]: - return self._leaves - - @property - def has_submatch(self) -> bool: - return len(self._leaves) > 0 - - @property - def alphabets(self) -> List[str]: - if self._unicode_ranges is not None: - return self._unicode_ranges - # list detected ranges - detected_ranges: List[Optional[str]] = [ - unicode_range(char) for char in str(self) - ] - # filter and sort - self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) - return self._unicode_ranges - - @property - def could_be_from_charset(self) -> List[str]: - """ - The complete list of encoding that output the exact SAME str result and therefore could be the originating - encoding. - This list does include the encoding available in property 'encoding'. - """ - return [self._encoding] + [m.encoding for m in self._leaves] - - def output(self, encoding: str = "utf_8") -> bytes: - """ - Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. - Any errors will be simply ignored by the encoder NOT replaced. - """ - if self._output_encoding is None or self._output_encoding != encoding: - self._output_encoding = encoding - self._output_payload = str(self).encode(encoding, "replace") - - return self._output_payload # type: ignore - - @property - def fingerprint(self) -> str: - """ - Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. - """ - return sha256(self.output()).hexdigest() - - -class CharsetMatches: - """ - Container with every CharsetMatch items ordered by default from most probable to the less one. - Act like a list(iterable) but does not implements all related methods. - """ - - def __init__(self, results: Optional[List[CharsetMatch]] = None): - self._results: List[CharsetMatch] = sorted(results) if results else [] - - def __iter__(self) -> Iterator[CharsetMatch]: - yield from self._results - - def __getitem__(self, item: Union[int, str]) -> CharsetMatch: - """ - Retrieve a single item either by its position or encoding name (alias may be used here). - Raise KeyError upon invalid index or encoding not present in results. - """ - if isinstance(item, int): - return self._results[item] - if isinstance(item, str): - item = iana_name(item, False) - for result in self._results: - if item in result.could_be_from_charset: - return result - raise KeyError - - def __len__(self) -> int: - return len(self._results) - - def __bool__(self) -> bool: - return len(self._results) > 0 - - def append(self, item: CharsetMatch) -> None: - """ - Insert a single match. Will be inserted accordingly to preserve sort. - Can be inserted as a submatch. - """ - if not isinstance(item, CharsetMatch): - raise ValueError( - "Cannot append instance '{}' to CharsetMatches".format( - str(item.__class__) - ) - ) - # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) - if len(item.raw) <= TOO_BIG_SEQUENCE: - for match in self._results: - if match.fingerprint == item.fingerprint and match.chaos == item.chaos: - match.add_submatch(item) - return - self._results.append(item) - self._results = sorted(self._results) - - def best(self) -> Optional["CharsetMatch"]: - """ - Simply return the first match. Strict equivalent to matches[0]. - """ - if not self._results: - return None - return self._results[0] - - def first(self) -> Optional["CharsetMatch"]: - """ - Redundant method, call the method best(). Kept for BC reasons. 
- """ - return self.best() - - -CoherenceMatch = Tuple[str, float] -CoherenceMatches = List[CoherenceMatch] - - -class CliDetectionResult: - def __init__( - self, - path: str, - encoding: Optional[str], - encoding_aliases: List[str], - alternative_encodings: List[str], - language: str, - alphabets: List[str], - has_sig_or_bom: bool, - chaos: float, - coherence: float, - unicode_path: Optional[str], - is_preferred: bool, - ): - self.path: str = path - self.unicode_path: Optional[str] = unicode_path - self.encoding: Optional[str] = encoding - self.encoding_aliases: List[str] = encoding_aliases - self.alternative_encodings: List[str] = alternative_encodings - self.language: str = language - self.alphabets: List[str] = alphabets - self.has_sig_or_bom: bool = has_sig_or_bom - self.chaos: float = chaos - self.coherence: float = coherence - self.is_preferred: bool = is_preferred - - @property - def __dict__(self) -> Dict[str, Any]: # type: ignore - return { - "path": self.path, - "encoding": self.encoding, - "encoding_aliases": self.encoding_aliases, - "alternative_encodings": self.alternative_encodings, - "language": self.language, - "alphabets": self.alphabets, - "has_sig_or_bom": self.has_sig_or_bom, - "chaos": self.chaos, - "coherence": self.coherence, - "unicode_path": self.unicode_path, - "is_preferred": self.is_preferred, - } - - def to_json(self) -> str: - return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/server/venv/Lib/site-packages/charset_normalizer/py.typed b/server/venv/Lib/site-packages/charset_normalizer/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/charset_normalizer/utils.py b/server/venv/Lib/site-packages/charset_normalizer/utils.py deleted file mode 100644 index 6c9443e..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/utils.py +++ /dev/null @@ -1,399 +0,0 @@ -import importlib -import logging -import unicodedata -from codecs import IncrementalDecoder -from encodings.aliases import aliases -from functools import lru_cache -from re import findall -from typing import Generator, List, Optional, Set, Tuple, Union - -from _multibytecodec import MultibyteIncrementalDecoder - -from .constant import ( - ENCODING_MARKS, - IANA_SUPPORTED_SIMILAR, - RE_POSSIBLE_ENCODING_INDICATION, - UNICODE_RANGES_COMBINED, - UNICODE_SECONDARY_RANGE_KEYWORD, - UTF8_MAXIMAL_ALLOCATION, -) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_accentuated(character: str) -> bool: - try: - description: str = unicodedata.name(character) - except ValueError: - return False - return ( - "WITH GRAVE" in description - or "WITH ACUTE" in description - or "WITH CEDILLA" in description - or "WITH DIAERESIS" in description - or "WITH CIRCUMFLEX" in description - or "WITH TILDE" in description - ) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def remove_accent(character: str) -> str: - decomposed: str = unicodedata.decomposition(character) - if not decomposed: - return character - - codes: List[str] = decomposed.split(" ") - - return chr(int(codes[0], 16)) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def unicode_range(character: str) -> Optional[str]: - """ - Retrieve the Unicode range official name from a single character. 
- """ - character_ord: int = ord(character) - - for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): - if character_ord in ord_range: - return range_name - - return None - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_latin(character: str) -> bool: - try: - description: str = unicodedata.name(character) - except ValueError: - return False - return "LATIN" in description - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_punctuation(character: str) -> bool: - character_category: str = unicodedata.category(character) - - if "P" in character_category: - return True - - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - return False - - return "Punctuation" in character_range - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_symbol(character: str) -> bool: - character_category: str = unicodedata.category(character) - - if "S" in character_category or "N" in character_category: - return True - - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - return False - - return "Forms" in character_range and character_category != "Lo" - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_emoticon(character: str) -> bool: - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - return False - - return "Emoticons" in character_range or "Pictographs" in character_range - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_separator(character: str) -> bool: - if character.isspace() or character in {"|", "+", "<", ">"}: - return True - - character_category: str = unicodedata.category(character) - - return "Z" in character_category or character_category in {"Po", "Pd", "Pc"} - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_case_variable(character: str) -> bool: - return character.islower() != character.isupper() - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_cjk(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "CJK" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_hiragana(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "HIRAGANA" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_katakana(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "KATAKANA" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_hangul(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "HANGUL" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_thai(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "THAI" in character_name - - -@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) -def is_unicode_range_secondary(range_name: str) -> bool: - return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_unprintable(character: str) -> bool: - return ( - character.isspace() is False # includes \n \t \r \v - and character.isprintable() is False - and character != "\x1A" # Why? Its the ASCII substitute character. 
- and character != "\ufeff" # bug discovered in Python, - # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. - ) - - -def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> Optional[str]: - """ - Extract using ASCII-only decoder any specified encoding in the first n-bytes. - """ - if not isinstance(sequence, bytes): - raise TypeError - - seq_len: int = len(sequence) - - results: List[str] = findall( - RE_POSSIBLE_ENCODING_INDICATION, - sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), - ) - - if len(results) == 0: - return None - - for specified_encoding in results: - specified_encoding = specified_encoding.lower().replace("-", "_") - - encoding_alias: str - encoding_iana: str - - for encoding_alias, encoding_iana in aliases.items(): - if encoding_alias == specified_encoding: - return encoding_iana - if encoding_iana == specified_encoding: - return encoding_iana - - return None - - -@lru_cache(maxsize=128) -def is_multi_byte_encoding(name: str) -> bool: - """ - Verify is a specific encoding is a multi byte one based on it IANA name - """ - return name in { - "utf_8", - "utf_8_sig", - "utf_16", - "utf_16_be", - "utf_16_le", - "utf_32", - "utf_32_le", - "utf_32_be", - "utf_7", - } or issubclass( - importlib.import_module("encodings.{}".format(name)).IncrementalDecoder, - MultibyteIncrementalDecoder, - ) - - -def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]: - """ - Identify and extract SIG/BOM in given sequence. - """ - - for iana_encoding in ENCODING_MARKS: - marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding] - - if isinstance(marks, bytes): - marks = [marks] - - for mark in marks: - if sequence.startswith(mark): - return iana_encoding, mark - - return None, b"" - - -def should_strip_sig_or_bom(iana_encoding: str) -> bool: - return iana_encoding not in {"utf_16", "utf_32"} - - -def iana_name(cp_name: str, strict: bool = True) -> str: - cp_name = cp_name.lower().replace("-", "_") - - encoding_alias: str - encoding_iana: str - - for encoding_alias, encoding_iana in aliases.items(): - if cp_name in [encoding_alias, encoding_iana]: - return encoding_iana - - if strict: - raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) - - return cp_name - - -def range_scan(decoded_sequence: str) -> List[str]: - ranges: Set[str] = set() - - for character in decoded_sequence: - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - continue - - ranges.add(character_range) - - return list(ranges) - - -def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: - if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): - return 0.0 - - decoder_a = importlib.import_module( - "encodings.{}".format(iana_name_a) - ).IncrementalDecoder - decoder_b = importlib.import_module( - "encodings.{}".format(iana_name_b) - ).IncrementalDecoder - - id_a: IncrementalDecoder = decoder_a(errors="ignore") - id_b: IncrementalDecoder = decoder_b(errors="ignore") - - character_match_count: int = 0 - - for i in range(255): - to_be_decoded: bytes = bytes([i]) - if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): - character_match_count += 1 - - return character_match_count / 254 - - -def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: - """ - Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using - the function cp_similarity. 
- """ - return ( - iana_name_a in IANA_SUPPORTED_SIMILAR - and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] - ) - - -def set_logging_handler( - name: str = "charset_normalizer", - level: int = logging.INFO, - format_string: str = "%(asctime)s | %(levelname)s | %(message)s", -) -> None: - logger = logging.getLogger(name) - logger.setLevel(level) - - handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter(format_string)) - logger.addHandler(handler) - - -def cut_sequence_chunks( - sequences: bytes, - encoding_iana: str, - offsets: range, - chunk_size: int, - bom_or_sig_available: bool, - strip_sig_or_bom: bool, - sig_payload: bytes, - is_multi_byte_decoder: bool, - decoded_payload: Optional[str] = None, -) -> Generator[str, None, None]: - if decoded_payload and is_multi_byte_decoder is False: - for i in offsets: - chunk = decoded_payload[i : i + chunk_size] - if not chunk: - break - yield chunk - else: - for i in offsets: - chunk_end = i + chunk_size - if chunk_end > len(sequences) + 8: - continue - - cut_sequence = sequences[i : i + chunk_size] - - if bom_or_sig_available and strip_sig_or_bom is False: - cut_sequence = sig_payload + cut_sequence - - chunk = cut_sequence.decode( - encoding_iana, - errors="ignore" if is_multi_byte_decoder else "strict", - ) - - # multi-byte bad cutting detector and adjustment - # not the cleanest way to perform that fix but clever enough for now. - if is_multi_byte_decoder and i > 0: - chunk_partial_size_chk: int = min(chunk_size, 16) - - if ( - decoded_payload - and chunk[:chunk_partial_size_chk] not in decoded_payload - ): - for j in range(i, i - 4, -1): - cut_sequence = sequences[j:chunk_end] - - if bom_or_sig_available and strip_sig_or_bom is False: - cut_sequence = sig_payload + cut_sequence - - chunk = cut_sequence.decode(encoding_iana, errors="ignore") - - if chunk[:chunk_partial_size_chk] in decoded_payload: - break - - yield chunk diff --git a/server/venv/Lib/site-packages/charset_normalizer/version.py b/server/venv/Lib/site-packages/charset_normalizer/version.py deleted file mode 100644 index a6533f4..0000000 --- a/server/venv/Lib/site-packages/charset_normalizer/version.py +++ /dev/null @@ -1,6 +0,0 @@ -""" -Expose version -""" - -__version__ = "3.3.1" -VERSION = __version__.split(".") diff --git a/server/venv/Lib/site-packages/click-8.1.7.dist-info/INSTALLER b/server/venv/Lib/site-packages/click-8.1.7.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/click-8.1.7.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/click-8.1.7.dist-info/LICENSE.rst b/server/venv/Lib/site-packages/click-8.1.7.dist-info/LICENSE.rst deleted file mode 100644 index d12a849..0000000 --- a/server/venv/Lib/site-packages/click-8.1.7.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2014 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. 
Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/server/venv/Lib/site-packages/click-8.1.7.dist-info/METADATA b/server/venv/Lib/site-packages/click-8.1.7.dist-info/METADATA deleted file mode 100644 index 7a6bbb2..0000000 --- a/server/venv/Lib/site-packages/click-8.1.7.dist-info/METADATA +++ /dev/null @@ -1,103 +0,0 @@ -Metadata-Version: 2.1 -Name: click -Version: 8.1.7 -Summary: Composable command line interface toolkit -Home-page: https://palletsprojects.com/p/click/ -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://click.palletsprojects.com/ -Project-URL: Changes, https://click.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/click/ -Project-URL: Issue Tracker, https://github.com/pallets/click/issues/ -Project-URL: Chat, https://discord.gg/pallets -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst -Requires-Dist: colorama ; platform_system == "Windows" -Requires-Dist: importlib-metadata ; python_version < "3.8" - -\$ click\_ -========== - -Click is a Python package for creating beautiful command line interfaces -in a composable way with as little code as necessary. It's the "Command -Line Interface Creation Kit". It's highly configurable but comes with -sensible defaults out of the box. - -It aims to make the process of writing command line tools quick and fun -while also preventing any frustration caused by the inability to -implement an intended CLI API. - -Click in three points: - -- Arbitrary nesting of commands -- Automatic help page generation -- Supports lazy loading of subcommands at runtime - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install -U click - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -.. code-block:: python - - import click - - @click.command() - @click.option("--count", default=1, help="Number of greetings.") - @click.option("--name", prompt="Your name", help="The person to greet.") - def hello(count, name): - """Simple program that greets NAME for a total of COUNT times.""" - for _ in range(count): - click.echo(f"Hello, {name}!") - - if __name__ == '__main__': - hello() - -.. 
code-block:: text - - $ python hello.py --count=3 - Your name: Click - Hello, Click! - Hello, Click! - Hello, Click! - - -Donate ------- - -The Pallets organization develops and supports Click and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, `please -donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://click.palletsprojects.com/ -- Changes: https://click.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/click/ -- Source Code: https://github.com/pallets/click -- Issue Tracker: https://github.com/pallets/click/issues -- Chat: https://discord.gg/pallets diff --git a/server/venv/Lib/site-packages/click-8.1.7.dist-info/RECORD b/server/venv/Lib/site-packages/click-8.1.7.dist-info/RECORD deleted file mode 100644 index dcffbf0..0000000 --- a/server/venv/Lib/site-packages/click-8.1.7.dist-info/RECORD +++ /dev/null @@ -1,40 +0,0 @@ -click-8.1.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -click-8.1.7.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 -click-8.1.7.dist-info/METADATA,sha256=qIMevCxGA9yEmJOM_4WHuUJCwWpsIEVbCPOhs45YPN4,3014 -click-8.1.7.dist-info/RECORD,, -click-8.1.7.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -click-8.1.7.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92 -click-8.1.7.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 -click/__init__.py,sha256=YDDbjm406dTOA0V8bTtdGnhN7zj5j-_dFRewZF_pLvw,3138 -click/__pycache__/__init__.cpython-311.pyc,, -click/__pycache__/_compat.cpython-311.pyc,, -click/__pycache__/_termui_impl.cpython-311.pyc,, -click/__pycache__/_textwrap.cpython-311.pyc,, -click/__pycache__/_winconsole.cpython-311.pyc,, -click/__pycache__/core.cpython-311.pyc,, -click/__pycache__/decorators.cpython-311.pyc,, -click/__pycache__/exceptions.cpython-311.pyc,, -click/__pycache__/formatting.cpython-311.pyc,, -click/__pycache__/globals.cpython-311.pyc,, -click/__pycache__/parser.cpython-311.pyc,, -click/__pycache__/shell_completion.cpython-311.pyc,, -click/__pycache__/termui.cpython-311.pyc,, -click/__pycache__/testing.cpython-311.pyc,, -click/__pycache__/types.cpython-311.pyc,, -click/__pycache__/utils.cpython-311.pyc,, -click/_compat.py,sha256=5318agQpbt4kroKsbqDOYpTSWzL_YCZVUQiTT04yXmc,18744 -click/_termui_impl.py,sha256=3dFYv4445Nw-rFvZOTBMBPYwB1bxnmNk9Du6Dm_oBSU,24069 -click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 -click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 -click/core.py,sha256=j6oEWtGgGna8JarD6WxhXmNnxLnfRjwXglbBc-8jr7U,114086 -click/decorators.py,sha256=-ZlbGYgV-oI8jr_oH4RpuL1PFS-5QmeuEAsLDAYgxtw,18719 -click/exceptions.py,sha256=fyROO-47HWFDjt2qupo7A3J32VlpM-ovJnfowu92K3s,9273 -click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 -click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961 -click/parser.py,sha256=LKyYQE9ZLj5KgIDXkrcTHQRXIggfoivX14_UVIn56YA,19067 -click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -click/shell_completion.py,sha256=Ty3VM_ts0sQhj6u7eFTiLwHPoTgcXTGEAUg2OpLqYKw,18460 -click/termui.py,sha256=H7Q8FpmPelhJ2ovOhfCRhjMtCpNyjFXryAMLZODqsdc,28324 -click/testing.py,sha256=1Qd4kS5bucn1hsNIRryd0WtTMuCpkA93grkWxT8POsU,16084 -click/types.py,sha256=TZvz3hKvBztf-Hpa2enOmP4eznSPLzijjig5b_0XMxE,36391 
-click/utils.py,sha256=1476UduUNY6UePGU4m18uzVHLt1sKM2PP3yWsQhbItM,20298 diff --git a/server/venv/Lib/site-packages/click-8.1.7.dist-info/REQUESTED b/server/venv/Lib/site-packages/click-8.1.7.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/click-8.1.7.dist-info/WHEEL b/server/venv/Lib/site-packages/click-8.1.7.dist-info/WHEEL deleted file mode 100644 index 2c08da0..0000000 --- a/server/venv/Lib/site-packages/click-8.1.7.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/server/venv/Lib/site-packages/click-8.1.7.dist-info/top_level.txt b/server/venv/Lib/site-packages/click-8.1.7.dist-info/top_level.txt deleted file mode 100644 index dca9a90..0000000 --- a/server/venv/Lib/site-packages/click-8.1.7.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -click diff --git a/server/venv/Lib/site-packages/click/__init__.py b/server/venv/Lib/site-packages/click/__init__.py deleted file mode 100644 index 9a1dab0..0000000 --- a/server/venv/Lib/site-packages/click/__init__.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -Click is a simple Python module inspired by the stdlib optparse to make -writing command line scripts fun. Unlike other modules, it's based -around a simple API that does not come with too much magic and is -composable. -""" -from .core import Argument as Argument -from .core import BaseCommand as BaseCommand -from .core import Command as Command -from .core import CommandCollection as CommandCollection -from .core import Context as Context -from .core import Group as Group -from .core import MultiCommand as MultiCommand -from .core import Option as Option -from .core import Parameter as Parameter -from .decorators import argument as argument -from .decorators import command as command -from .decorators import confirmation_option as confirmation_option -from .decorators import group as group -from .decorators import help_option as help_option -from .decorators import make_pass_decorator as make_pass_decorator -from .decorators import option as option -from .decorators import pass_context as pass_context -from .decorators import pass_obj as pass_obj -from .decorators import password_option as password_option -from .decorators import version_option as version_option -from .exceptions import Abort as Abort -from .exceptions import BadArgumentUsage as BadArgumentUsage -from .exceptions import BadOptionUsage as BadOptionUsage -from .exceptions import BadParameter as BadParameter -from .exceptions import ClickException as ClickException -from .exceptions import FileError as FileError -from .exceptions import MissingParameter as MissingParameter -from .exceptions import NoSuchOption as NoSuchOption -from .exceptions import UsageError as UsageError -from .formatting import HelpFormatter as HelpFormatter -from .formatting import wrap_text as wrap_text -from .globals import get_current_context as get_current_context -from .parser import OptionParser as OptionParser -from .termui import clear as clear -from .termui import confirm as confirm -from .termui import echo_via_pager as echo_via_pager -from .termui import edit as edit -from .termui import getchar as getchar -from .termui import launch as launch -from .termui import pause as pause -from .termui import progressbar as progressbar -from .termui import prompt as prompt -from .termui import secho as secho -from .termui import style as style -from .termui import unstyle as unstyle -from 
.types import BOOL as BOOL
-from .types import Choice as Choice
-from .types import DateTime as DateTime
-from .types import File as File
-from .types import FLOAT as FLOAT
-from .types import FloatRange as FloatRange
-from .types import INT as INT
-from .types import IntRange as IntRange
-from .types import ParamType as ParamType
-from .types import Path as Path
-from .types import STRING as STRING
-from .types import Tuple as Tuple
-from .types import UNPROCESSED as UNPROCESSED
-from .types import UUID as UUID
-from .utils import echo as echo
-from .utils import format_filename as format_filename
-from .utils import get_app_dir as get_app_dir
-from .utils import get_binary_stream as get_binary_stream
-from .utils import get_text_stream as get_text_stream
-from .utils import open_file as open_file
-
-__version__ = "8.1.7"
diff --git a/server/venv/Lib/site-packages/click/_compat.py b/server/venv/Lib/site-packages/click/_compat.py
deleted file mode 100644
index 23f8866..0000000
--- a/server/venv/Lib/site-packages/click/_compat.py
+++ /dev/null
@@ -1,623 +0,0 @@
-import codecs
-import io
-import os
-import re
-import sys
-import typing as t
-from weakref import WeakKeyDictionary
-
-CYGWIN = sys.platform.startswith("cygwin")
-WIN = sys.platform.startswith("win")
-auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None
-_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]")
-
-
-def _make_text_stream(
-    stream: t.BinaryIO,
-    encoding: t.Optional[str],
-    errors: t.Optional[str],
-    force_readable: bool = False,
-    force_writable: bool = False,
-) -> t.TextIO:
-    if encoding is None:
-        encoding = get_best_encoding(stream)
-    if errors is None:
-        errors = "replace"
-    return _NonClosingTextIOWrapper(
-        stream,
-        encoding,
-        errors,
-        line_buffering=True,
-        force_readable=force_readable,
-        force_writable=force_writable,
-    )
-
-
-def is_ascii_encoding(encoding: str) -> bool:
-    """Checks if a given encoding is ASCII."""
-    try:
-        return codecs.lookup(encoding).name == "ascii"
-    except LookupError:
-        return False
-
-
-def get_best_encoding(stream: t.IO[t.Any]) -> str:
-    """Returns the stream's encoding, falling back to a sensible default."""
-    rv = getattr(stream, "encoding", None) or sys.getdefaultencoding()
-    if is_ascii_encoding(rv):
-        return "utf-8"
-    return rv
-
-
-class _NonClosingTextIOWrapper(io.TextIOWrapper):
-    def __init__(
-        self,
-        stream: t.BinaryIO,
-        encoding: t.Optional[str],
-        errors: t.Optional[str],
-        force_readable: bool = False,
-        force_writable: bool = False,
-        **extra: t.Any,
-    ) -> None:
-        self._stream = stream = t.cast(
-            t.BinaryIO, _FixupStream(stream, force_readable, force_writable)
-        )
-        super().__init__(stream, encoding, errors, **extra)
-
-    def __del__(self) -> None:
-        try:
-            self.detach()
-        except Exception:
-            pass
-
-    def isatty(self) -> bool:
-        # https://bitbucket.org/pypy/pypy/issue/1803
-        return self._stream.isatty()
-
-
-class _FixupStream:
-    """The new io interface needs more from streams than streams
-    traditionally implement. As such, this fix-up code is necessary in
-    some circumstances.
-
-    The forcing of the readable and writable flags is there because some
-    tools put badly patched objects on sys (one such offender is certain
-    versions of jupyter notebook).
- """ - - def __init__( - self, - stream: t.BinaryIO, - force_readable: bool = False, - force_writable: bool = False, - ): - self._stream = stream - self._force_readable = force_readable - self._force_writable = force_writable - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._stream, name) - - def read1(self, size: int) -> bytes: - f = getattr(self._stream, "read1", None) - - if f is not None: - return t.cast(bytes, f(size)) - - return self._stream.read(size) - - def readable(self) -> bool: - if self._force_readable: - return True - x = getattr(self._stream, "readable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.read(0) - except Exception: - return False - return True - - def writable(self) -> bool: - if self._force_writable: - return True - x = getattr(self._stream, "writable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.write("") # type: ignore - except Exception: - try: - self._stream.write(b"") - except Exception: - return False - return True - - def seekable(self) -> bool: - x = getattr(self._stream, "seekable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.seek(self._stream.tell()) - except Exception: - return False - return True - - -def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool: - try: - return isinstance(stream.read(0), bytes) - except Exception: - return default - # This happens in some cases where the stream was already - # closed. In this case, we assume the default. - - -def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool: - try: - stream.write(b"") - except Exception: - try: - stream.write("") - return False - except Exception: - pass - return default - return True - - -def _find_binary_reader(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_reader(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_reader(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _find_binary_writer(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_writer(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_writer(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _stream_is_misconfigured(stream: t.TextIO) -> bool: - """A stream is misconfigured if its encoding is ASCII.""" - # If the stream does not have an encoding set, we assume it's set - # to ASCII. This appears to happen in certain unittest - # environments. It's not quite clear what the correct behavior is - # but this at least will force Click to recover somehow. 
- return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") - - -def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: - """A stream attribute is compatible if it is equal to the - desired value or the desired value is unset and the attribute - has a value. - """ - stream_value = getattr(stream, attr, None) - return stream_value == value or (value is None and stream_value is not None) - - -def _is_compatible_text_stream( - stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] -) -> bool: - """Check if a stream's encoding and errors attributes are - compatible with the desired values. - """ - return _is_compat_stream_attr( - stream, "encoding", encoding - ) and _is_compat_stream_attr(stream, "errors", errors) - - -def _force_correct_text_stream( - text_stream: t.IO[t.Any], - encoding: t.Optional[str], - errors: t.Optional[str], - is_binary: t.Callable[[t.IO[t.Any], bool], bool], - find_binary: t.Callable[[t.IO[t.Any]], t.Optional[t.BinaryIO]], - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if is_binary(text_stream, False): - binary_reader = t.cast(t.BinaryIO, text_stream) - else: - text_stream = t.cast(t.TextIO, text_stream) - # If the stream looks compatible, and won't default to a - # misconfigured ascii encoding, return it as-is. - if _is_compatible_text_stream(text_stream, encoding, errors) and not ( - encoding is None and _stream_is_misconfigured(text_stream) - ): - return text_stream - - # Otherwise, get the underlying binary reader. - possible_binary_reader = find_binary(text_stream) - - # If that's not possible, silently use the original reader - # and get mojibake instead of exceptions. - if possible_binary_reader is None: - return text_stream - - binary_reader = possible_binary_reader - - # Default errors to replace instead of strict in order to get - # something that works. - if errors is None: - errors = "replace" - - # Wrap the binary stream in a text stream with the correct - # encoding parameters. 
- return _make_text_stream( - binary_reader, - encoding, - errors, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def _force_correct_text_reader( - text_reader: t.IO[t.Any], - encoding: t.Optional[str], - errors: t.Optional[str], - force_readable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_reader, - encoding, - errors, - _is_binary_reader, - _find_binary_reader, - force_readable=force_readable, - ) - - -def _force_correct_text_writer( - text_writer: t.IO[t.Any], - encoding: t.Optional[str], - errors: t.Optional[str], - force_writable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_writer, - encoding, - errors, - _is_binary_writer, - _find_binary_writer, - force_writable=force_writable, - ) - - -def get_binary_stdin() -> t.BinaryIO: - reader = _find_binary_reader(sys.stdin) - if reader is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdin.") - return reader - - -def get_binary_stdout() -> t.BinaryIO: - writer = _find_binary_writer(sys.stdout) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdout.") - return writer - - -def get_binary_stderr() -> t.BinaryIO: - writer = _find_binary_writer(sys.stderr) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stderr.") - return writer - - -def get_text_stdin( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdin, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) - - -def get_text_stdout( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdout, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) - - -def get_text_stderr( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stderr, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) - - -def _wrap_io_open( - file: t.Union[str, "os.PathLike[str]", int], - mode: str, - encoding: t.Optional[str], - errors: t.Optional[str], -) -> t.IO[t.Any]: - """Handles not passing ``encoding`` and ``errors`` in binary mode.""" - if "b" in mode: - return open(file, mode) - - return open(file, mode, encoding=encoding, errors=errors) - - -def open_stream( - filename: "t.Union[str, os.PathLike[str]]", - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - atomic: bool = False, -) -> t.Tuple[t.IO[t.Any], bool]: - binary = "b" in mode - filename = os.fspath(filename) - - # Standard streams first. These are simple because they ignore the - # atomic flag. Use fsdecode to handle Path("-"). - if os.fsdecode(filename) == "-": - if any(m in mode for m in ["w", "a", "x"]): - if binary: - return get_binary_stdout(), False - return get_text_stdout(encoding=encoding, errors=errors), False - if binary: - return get_binary_stdin(), False - return get_text_stdin(encoding=encoding, errors=errors), False - - # Non-atomic writes directly go out through the regular open functions. 
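Caller-side sketch of the atomic branch handled next (the filename is hypothetical):

.. code-block:: python

   from click._compat import open_stream

   f, should_close = open_stream("report.txt", "w", atomic=True)
   try:
       f.write("all or nothing\n")
   finally:
       if should_close:
           # _AtomicFile.close() os.replace()s the temp file over the target,
           # so readers never observe a half-written report.txt.
           f.close()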
- if not atomic: - return _wrap_io_open(filename, mode, encoding, errors), True - - # Some usability stuff for atomic writes - if "a" in mode: - raise ValueError( - "Appending to an existing file is not supported, because that" - " would involve an expensive `copy`-operation to a temporary" - " file. Open the file in normal `w`-mode and copy explicitly" - " if that's what you're after." - ) - if "x" in mode: - raise ValueError("Use the `overwrite`-parameter instead.") - if "w" not in mode: - raise ValueError("Atomic writes only make sense with `w`-mode.") - - # Atomic writes are more complicated. They work by opening a file - # as a proxy in the same folder and then using the fdopen - # functionality to wrap it in a Python file. Then we wrap it in an - # atomic file that moves the file over on close. - import errno - import random - - try: - perm: t.Optional[int] = os.stat(filename).st_mode - except OSError: - perm = None - - flags = os.O_RDWR | os.O_CREAT | os.O_EXCL - - if binary: - flags |= getattr(os, "O_BINARY", 0) - - while True: - tmp_filename = os.path.join( - os.path.dirname(filename), - f".__atomic-write{random.randrange(1 << 32):08x}", - ) - try: - fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) - break - except OSError as e: - if e.errno == errno.EEXIST or ( - os.name == "nt" - and e.errno == errno.EACCES - and os.path.isdir(e.filename) - and os.access(e.filename, os.W_OK) - ): - continue - raise - - if perm is not None: - os.chmod(tmp_filename, perm) # in case perm includes bits in umask - - f = _wrap_io_open(fd, mode, encoding, errors) - af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) - return t.cast(t.IO[t.Any], af), True - - -class _AtomicFile: - def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: - self._f = f - self._tmp_filename = tmp_filename - self._real_filename = real_filename - self.closed = False - - @property - def name(self) -> str: - return self._real_filename - - def close(self, delete: bool = False) -> None: - if self.closed: - return - self._f.close() - os.replace(self._tmp_filename, self._real_filename) - self.closed = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._f, name) - - def __enter__(self) -> "_AtomicFile": - return self - - def __exit__(self, exc_type: t.Optional[t.Type[BaseException]], *_: t.Any) -> None: - self.close(delete=exc_type is not None) - - def __repr__(self) -> str: - return repr(self._f) - - -def strip_ansi(value: str) -> str: - return _ansi_re.sub("", value) - - -def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: - while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): - stream = stream._stream - - return stream.__class__.__module__.startswith("ipykernel.") - - -def should_strip_ansi( - stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None -) -> bool: - if color is None: - if stream is None: - stream = sys.stdin - return not isatty(stream) and not _is_jupyter_kernel_output(stream) - return not color - - -# On Windows, wrap the output streams with colorama to support ANSI -# color codes. 
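The user-visible effect of the wrapping configured below (colorama only engages on Windows; elsewhere the escape codes pass through, or are stripped for non-TTY output):

.. code-block:: python

   import click

   # style() emits ANSI escapes; echo() routes them through the wrapped
   # stream, so the line renders green on a console and plain when piped.
   click.echo(click.style("build succeeded", fg="green"))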
-# NOTE: double check is needed so mypy does not analyze this on Linux -if sys.platform.startswith("win") and WIN: - from ._winconsole import _get_windows_console_stream - - def _get_argv_encoding() -> str: - import locale - - return locale.getpreferredencoding() - - _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def auto_wrap_for_ansi( # noqa: F811 - stream: t.TextIO, color: t.Optional[bool] = None - ) -> t.TextIO: - """Support ANSI color and style codes on Windows by wrapping a - stream with colorama. - """ - try: - cached = _ansi_stream_wrappers.get(stream) - except Exception: - cached = None - - if cached is not None: - return cached - - import colorama - - strip = should_strip_ansi(stream, color) - ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) - rv = t.cast(t.TextIO, ansi_wrapper.stream) - _write = rv.write - - def _safe_write(s): - try: - return _write(s) - except BaseException: - ansi_wrapper.reset_all() - raise - - rv.write = _safe_write - - try: - _ansi_stream_wrappers[stream] = rv - except Exception: - pass - - return rv - -else: - - def _get_argv_encoding() -> str: - return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() - - def _get_windows_console_stream( - f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] - ) -> t.Optional[t.TextIO]: - return None - - -def term_len(x: str) -> int: - return len(strip_ansi(x)) - - -def isatty(stream: t.IO[t.Any]) -> bool: - try: - return stream.isatty() - except Exception: - return False - - -def _make_cached_stream_func( - src_func: t.Callable[[], t.Optional[t.TextIO]], - wrapper_func: t.Callable[[], t.TextIO], -) -> t.Callable[[], t.Optional[t.TextIO]]: - cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def func() -> t.Optional[t.TextIO]: - stream = src_func() - - if stream is None: - return None - - try: - rv = cache.get(stream) - except Exception: - rv = None - if rv is not None: - return rv - rv = wrapper_func() - try: - cache[stream] = rv - except Exception: - pass - return rv - - return func - - -_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) -_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) -_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) - - -binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { - "stdin": get_binary_stdin, - "stdout": get_binary_stdout, - "stderr": get_binary_stderr, -} - -text_streams: t.Mapping[ - str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] -] = { - "stdin": get_text_stdin, - "stdout": get_text_stdout, - "stderr": get_text_stderr, -} diff --git a/server/venv/Lib/site-packages/click/_termui_impl.py b/server/venv/Lib/site-packages/click/_termui_impl.py deleted file mode 100644 index f744657..0000000 --- a/server/venv/Lib/site-packages/click/_termui_impl.py +++ /dev/null @@ -1,739 +0,0 @@ -""" -This module contains implementations for the termui module. To keep the -import time of Click down, some infrequently used functionality is -placed in this module and only imported as needed. 
-""" -import contextlib -import math -import os -import sys -import time -import typing as t -from gettext import gettext as _ -from io import StringIO -from types import TracebackType - -from ._compat import _default_text_stdout -from ._compat import CYGWIN -from ._compat import get_best_encoding -from ._compat import isatty -from ._compat import open_stream -from ._compat import strip_ansi -from ._compat import term_len -from ._compat import WIN -from .exceptions import ClickException -from .utils import echo - -V = t.TypeVar("V") - -if os.name == "nt": - BEFORE_BAR = "\r" - AFTER_BAR = "\n" -else: - BEFORE_BAR = "\r\033[?25l" - AFTER_BAR = "\033[?25h\n" - - -class ProgressBar(t.Generic[V]): - def __init__( - self, - iterable: t.Optional[t.Iterable[V]], - length: t.Optional[int] = None, - fill_char: str = "#", - empty_char: str = " ", - bar_template: str = "%(bar)s", - info_sep: str = " ", - show_eta: bool = True, - show_percent: t.Optional[bool] = None, - show_pos: bool = False, - item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, - label: t.Optional[str] = None, - file: t.Optional[t.TextIO] = None, - color: t.Optional[bool] = None, - update_min_steps: int = 1, - width: int = 30, - ) -> None: - self.fill_char = fill_char - self.empty_char = empty_char - self.bar_template = bar_template - self.info_sep = info_sep - self.show_eta = show_eta - self.show_percent = show_percent - self.show_pos = show_pos - self.item_show_func = item_show_func - self.label: str = label or "" - - if file is None: - file = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if file is None: - file = StringIO() - - self.file = file - self.color = color - self.update_min_steps = update_min_steps - self._completed_intervals = 0 - self.width: int = width - self.autowidth: bool = width == 0 - - if length is None: - from operator import length_hint - - length = length_hint(iterable, -1) - - if length == -1: - length = None - if iterable is None: - if length is None: - raise TypeError("iterable or length is required") - iterable = t.cast(t.Iterable[V], range(length)) - self.iter: t.Iterable[V] = iter(iterable) - self.length = length - self.pos = 0 - self.avg: t.List[float] = [] - self.last_eta: float - self.start: float - self.start = self.last_eta = time.time() - self.eta_known: bool = False - self.finished: bool = False - self.max_width: t.Optional[int] = None - self.entered: bool = False - self.current_item: t.Optional[V] = None - self.is_hidden: bool = not isatty(self.file) - self._last_line: t.Optional[str] = None - - def __enter__(self) -> "ProgressBar[V]": - self.entered = True - self.render_progress() - return self - - def __exit__( - self, - exc_type: t.Optional[t.Type[BaseException]], - exc_value: t.Optional[BaseException], - tb: t.Optional[TracebackType], - ) -> None: - self.render_finish() - - def __iter__(self) -> t.Iterator[V]: - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - self.render_progress() - return self.generator() - - def __next__(self) -> V: - # Iteration is defined in terms of a generator function, - # returned by iter(self); use that to define next(). This works - # because `self.iter` is an iterable consumed by that generator, - # so it is re-entry safe. Calling `next(self.generator())` - # twice works and does "what you want". 
- return next(iter(self)) - - def render_finish(self) -> None: - if self.is_hidden: - return - self.file.write(AFTER_BAR) - self.file.flush() - - @property - def pct(self) -> float: - if self.finished: - return 1.0 - return min(self.pos / (float(self.length or 1) or 1), 1.0) - - @property - def time_per_iteration(self) -> float: - if not self.avg: - return 0.0 - return sum(self.avg) / float(len(self.avg)) - - @property - def eta(self) -> float: - if self.length is not None and not self.finished: - return self.time_per_iteration * (self.length - self.pos) - return 0.0 - - def format_eta(self) -> str: - if self.eta_known: - t = int(self.eta) - seconds = t % 60 - t //= 60 - minutes = t % 60 - t //= 60 - hours = t % 24 - t //= 24 - if t > 0: - return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" - else: - return f"{hours:02}:{minutes:02}:{seconds:02}" - return "" - - def format_pos(self) -> str: - pos = str(self.pos) - if self.length is not None: - pos += f"/{self.length}" - return pos - - def format_pct(self) -> str: - return f"{int(self.pct * 100): 4}%"[1:] - - def format_bar(self) -> str: - if self.length is not None: - bar_length = int(self.pct * self.width) - bar = self.fill_char * bar_length - bar += self.empty_char * (self.width - bar_length) - elif self.finished: - bar = self.fill_char * self.width - else: - chars = list(self.empty_char * (self.width or 1)) - if self.time_per_iteration != 0: - chars[ - int( - (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) - * self.width - ) - ] = self.fill_char - bar = "".join(chars) - return bar - - def format_progress_line(self) -> str: - show_percent = self.show_percent - - info_bits = [] - if self.length is not None and show_percent is None: - show_percent = not self.show_pos - - if self.show_pos: - info_bits.append(self.format_pos()) - if show_percent: - info_bits.append(self.format_pct()) - if self.show_eta and self.eta_known and not self.finished: - info_bits.append(self.format_eta()) - if self.item_show_func is not None: - item_info = self.item_show_func(self.current_item) - if item_info is not None: - info_bits.append(item_info) - - return ( - self.bar_template - % { - "label": self.label, - "bar": self.format_bar(), - "info": self.info_sep.join(info_bits), - } - ).rstrip() - - def render_progress(self) -> None: - import shutil - - if self.is_hidden: - # Only output the label as it changes if the output is not a - # TTY. Use file=stderr if you expect to be piping stdout. - if self._last_line != self.label: - self._last_line = self.label - echo(self.label, file=self.file, color=self.color) - - return - - buf = [] - # Update width in case the terminal has been resized - if self.autowidth: - old_width = self.width - self.width = 0 - clutter_length = term_len(self.format_progress_line()) - new_width = max(0, shutil.get_terminal_size().columns - clutter_length) - if new_width < old_width: - buf.append(BEFORE_BAR) - buf.append(" " * self.max_width) # type: ignore - self.max_width = new_width - self.width = new_width - - clear_width = self.width - if self.max_width is not None: - clear_width = self.max_width - - buf.append(BEFORE_BAR) - line = self.format_progress_line() - line_len = term_len(line) - if self.max_width is None or self.max_width < line_len: - self.max_width = line_len - - buf.append(line) - buf.append(" " * (clear_width - line_len)) - line = "".join(buf) - # Render the line only if it changed. 
- - if line != self._last_line: - self._last_line = line - echo(line, file=self.file, color=self.color, nl=False) - self.file.flush() - - def make_step(self, n_steps: int) -> None: - self.pos += n_steps - if self.length is not None and self.pos >= self.length: - self.finished = True - - if (time.time() - self.last_eta) < 1.0: - return - - self.last_eta = time.time() - - # self.avg is a rolling list of length <= 7 of steps where steps are - # defined as time elapsed divided by the total progress through - # self.length. - if self.pos: - step = (time.time() - self.start) / self.pos - else: - step = time.time() - self.start - - self.avg = self.avg[-6:] + [step] - - self.eta_known = self.length is not None - - def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None: - """Update the progress bar by advancing a specified number of - steps, and optionally set the ``current_item`` for this new - position. - - :param n_steps: Number of steps to advance. - :param current_item: Optional item to set as ``current_item`` - for the updated position. - - .. versionchanged:: 8.0 - Added the ``current_item`` optional parameter. - - .. versionchanged:: 8.0 - Only render when the number of steps meets the - ``update_min_steps`` threshold. - """ - if current_item is not None: - self.current_item = current_item - - self._completed_intervals += n_steps - - if self._completed_intervals >= self.update_min_steps: - self.make_step(self._completed_intervals) - self.render_progress() - self._completed_intervals = 0 - - def finish(self) -> None: - self.eta_known = False - self.current_item = None - self.finished = True - - def generator(self) -> t.Iterator[V]: - """Return a generator which yields the items added to the bar - during construction, and updates the progress bar *after* the - yielded block returns. - """ - # WARNING: the iterator interface for `ProgressBar` relies on - # this and only works because this is a simple generator which - # doesn't create or manage additional state. If this function - # changes, the impact should be evaluated both against - # `iter(bar)` and `next(bar)`. `next()` in particular may call - # `self.generator()` repeatedly, and this must remain safe in - # order for that interface to work. - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - - if self.is_hidden: - yield from self.iter - else: - for rv in self.iter: - self.current_item = rv - - # This allows show_item_func to be updated before the - # item is processed. Only trigger at the beginning of - # the update interval. - if self._completed_intervals == 0: - self.render_progress() - - yield rv - self.update(1) - - self.finish() - self.render_progress() - - -def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None: - """Decide what method to use for paging through text.""" - stdout = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. 
- if stdout is None: - stdout = StringIO() - - if not isatty(sys.stdin) or not isatty(stdout): - return _nullpager(stdout, generator, color) - pager_cmd = (os.environ.get("PAGER", None) or "").strip() - if pager_cmd: - if WIN: - return _tempfilepager(generator, pager_cmd, color) - return _pipepager(generator, pager_cmd, color) - if os.environ.get("TERM") in ("dumb", "emacs"): - return _nullpager(stdout, generator, color) - if WIN or sys.platform.startswith("os2"): - return _tempfilepager(generator, "more <", color) - if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: - return _pipepager(generator, "less", color) - - import tempfile - - fd, filename = tempfile.mkstemp() - os.close(fd) - try: - if hasattr(os, "system") and os.system(f'more "{filename}"') == 0: - return _pipepager(generator, "more", color) - return _nullpager(stdout, generator, color) - finally: - os.unlink(filename) - - -def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None: - """Page through text by feeding it to another program. Invoking a - pager through this might support colors. - """ - import subprocess - - env = dict(os.environ) - - # If we're piping to less we might support colors under the - # condition that - cmd_detail = cmd.rsplit("/", 1)[-1].split() - if color is None and cmd_detail[0] == "less": - less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}" - if not less_flags: - env["LESS"] = "-R" - color = True - elif "r" in less_flags or "R" in less_flags: - color = True - - c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) - stdin = t.cast(t.BinaryIO, c.stdin) - encoding = get_best_encoding(stdin) - try: - for text in generator: - if not color: - text = strip_ansi(text) - - stdin.write(text.encode(encoding, "replace")) - except (OSError, KeyboardInterrupt): - pass - else: - stdin.close() - - # Less doesn't respect ^C, but catches it for its own UI purposes (aborting - # search or other commands inside less). - # - # That means when the user hits ^C, the parent process (click) terminates, - # but less is still alive, paging the output and messing up the terminal. - # - # If the user wants to make the pager exit on ^C, they should set - # `LESS='-K'`. It's not our decision to make. - while True: - try: - c.wait() - except KeyboardInterrupt: - pass - else: - break - - -def _tempfilepager( - generator: t.Iterable[str], cmd: str, color: t.Optional[bool] -) -> None: - """Page through text by invoking a program on a temporary file.""" - import tempfile - - fd, filename = tempfile.mkstemp() - # TODO: This never terminates if the passed generator never terminates. - text = "".join(generator) - if not color: - text = strip_ansi(text) - encoding = get_best_encoding(sys.stdout) - with open_stream(filename, "wb")[0] as f: - f.write(text.encode(encoding)) - try: - os.system(f'{cmd} "{filename}"') - finally: - os.close(fd) - os.unlink(filename) - - -def _nullpager( - stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool] -) -> None: - """Simply print unformatted text. 
This is the ultimate fallback.""" - for text in generator: - if not color: - text = strip_ansi(text) - stream.write(text) - - -class Editor: - def __init__( - self, - editor: t.Optional[str] = None, - env: t.Optional[t.Mapping[str, str]] = None, - require_save: bool = True, - extension: str = ".txt", - ) -> None: - self.editor = editor - self.env = env - self.require_save = require_save - self.extension = extension - - def get_editor(self) -> str: - if self.editor is not None: - return self.editor - for key in "VISUAL", "EDITOR": - rv = os.environ.get(key) - if rv: - return rv - if WIN: - return "notepad" - for editor in "sensible-editor", "vim", "nano": - if os.system(f"which {editor} >/dev/null 2>&1") == 0: - return editor - return "vi" - - def edit_file(self, filename: str) -> None: - import subprocess - - editor = self.get_editor() - environ: t.Optional[t.Dict[str, str]] = None - - if self.env: - environ = os.environ.copy() - environ.update(self.env) - - try: - c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) - exit_code = c.wait() - if exit_code != 0: - raise ClickException( - _("{editor}: Editing failed").format(editor=editor) - ) - except OSError as e: - raise ClickException( - _("{editor}: Editing failed: {e}").format(editor=editor, e=e) - ) from e - - def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: - import tempfile - - if not text: - data = b"" - elif isinstance(text, (bytes, bytearray)): - data = text - else: - if text and not text.endswith("\n"): - text += "\n" - - if WIN: - data = text.replace("\n", "\r\n").encode("utf-8-sig") - else: - data = text.encode("utf-8") - - fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) - f: t.BinaryIO - - try: - with os.fdopen(fd, "wb") as f: - f.write(data) - - # If the filesystem resolution is 1 second, like Mac OS - # 10.12 Extended, or 2 seconds, like FAT32, and the editor - # closes very fast, require_save can fail. Set the modified - # time to be 2 seconds in the past to work around this. - os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) - # Depending on the resolution, the exact value might not be - # recorded, so get the new recorded value. 
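# (Editor's note: illustration only, not part of click.) The
# require_save check further down reduces to an mtime comparison,
# roughly:
#
#     before = os.path.getmtime(name)   # after the 2-second backdate
#     self.edit_file(name)
#     saved = os.path.getmtime(name) != before
#
# which is why the timestamp is pushed into the past above: on
# filesystems with 1-2 second timestamp resolution, a very fast save
# could otherwise leave the mtime unchanged and look like "no save".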
- timestamp = os.path.getmtime(name) - - self.edit_file(name) - - if self.require_save and os.path.getmtime(name) == timestamp: - return None - - with open(name, "rb") as f: - rv = f.read() - - if isinstance(text, (bytes, bytearray)): - return rv - - return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore - finally: - os.unlink(name) - - -def open_url(url: str, wait: bool = False, locate: bool = False) -> int: - import subprocess - - def _unquote_file(url: str) -> str: - from urllib.parse import unquote - - if url.startswith("file://"): - url = unquote(url[7:]) - - return url - - if sys.platform == "darwin": - args = ["open"] - if wait: - args.append("-W") - if locate: - args.append("-R") - args.append(_unquote_file(url)) - null = open("/dev/null", "w") - try: - return subprocess.Popen(args, stderr=null).wait() - finally: - null.close() - elif WIN: - if locate: - url = _unquote_file(url.replace('"', "")) - args = f'explorer /select,"{url}"' - else: - url = url.replace('"', "") - wait_str = "/WAIT" if wait else "" - args = f'start {wait_str} "" "{url}"' - return os.system(args) - elif CYGWIN: - if locate: - url = os.path.dirname(_unquote_file(url).replace('"', "")) - args = f'cygstart "{url}"' - else: - url = url.replace('"', "") - wait_str = "-w" if wait else "" - args = f'cygstart {wait_str} "{url}"' - return os.system(args) - - try: - if locate: - url = os.path.dirname(_unquote_file(url)) or "." - else: - url = _unquote_file(url) - c = subprocess.Popen(["xdg-open", url]) - if wait: - return c.wait() - return 0 - except OSError: - if url.startswith(("http://", "https://")) and not locate and not wait: - import webbrowser - - webbrowser.open(url) - return 0 - return 1 - - -def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: - if ch == "\x03": - raise KeyboardInterrupt() - - if ch == "\x04" and not WIN: # Unix-like, Ctrl+D - raise EOFError() - - if ch == "\x1a" and WIN: # Windows, Ctrl+Z - raise EOFError() - - return None - - -if WIN: - import msvcrt - - @contextlib.contextmanager - def raw_terminal() -> t.Iterator[int]: - yield -1 - - def getchar(echo: bool) -> str: - # The function `getch` will return a bytes object corresponding to - # the pressed character. Since Windows 10 build 1803, it will also - # return \x00 when called a second time after pressing a regular key. - # - # `getwch` does not share this probably-bugged behavior. Moreover, it - # returns a Unicode object by default, which is what we want. - # - # Either of these functions will return \x00 or \xe0 to indicate - # a special key, and you need to call the same function again to get - # the "rest" of the code. The fun part is that \u00e0 is - # "latin small letter a with grave", so if you type that on a French - # keyboard, you _also_ get a \xe0. - # E.g., consider the Up arrow. This returns \xe0 and then \x48. The - # resulting Unicode string reads as "a with grave" + "capital H". - # This is indistinguishable from when the user actually types - # "a with grave" and then "capital H". - # - # When \xe0 is returned, we assume it's part of a special-key sequence - # and call `getwch` again, but that means that when the user types - # the \u00e0 character, `getchar` doesn't return until a second - # character is typed. - # The alternative is returning immediately, but that would mess up - # cross-platform handling of arrow keys and others that start with - # \xe0. 
Another option is using `getch`, but then we can't reliably - # read non-ASCII characters, because return values of `getch` are - # limited to the current 8-bit codepage. - # - # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` - # is doing the right thing in more situations than with `getch`. - func: t.Callable[[], str] - - if echo: - func = msvcrt.getwche # type: ignore - else: - func = msvcrt.getwch # type: ignore - - rv = func() - - if rv in ("\x00", "\xe0"): - # \x00 and \xe0 are control characters that indicate special key, - # see above. - rv += func() - - _translate_ch_to_exc(rv) - return rv - -else: - import tty - import termios - - @contextlib.contextmanager - def raw_terminal() -> t.Iterator[int]: - f: t.Optional[t.TextIO] - fd: int - - if not isatty(sys.stdin): - f = open("/dev/tty") - fd = f.fileno() - else: - fd = sys.stdin.fileno() - f = None - - try: - old_settings = termios.tcgetattr(fd) - - try: - tty.setraw(fd) - yield fd - finally: - termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) - sys.stdout.flush() - - if f is not None: - f.close() - except termios.error: - pass - - def getchar(echo: bool) -> str: - with raw_terminal() as fd: - ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") - - if echo and isatty(sys.stdout): - sys.stdout.write(ch) - - _translate_ch_to_exc(ch) - return ch diff --git a/server/venv/Lib/site-packages/click/_textwrap.py b/server/venv/Lib/site-packages/click/_textwrap.py deleted file mode 100644 index b47dcbd..0000000 --- a/server/venv/Lib/site-packages/click/_textwrap.py +++ /dev/null @@ -1,49 +0,0 @@ -import textwrap -import typing as t -from contextlib import contextmanager - - -class TextWrapper(textwrap.TextWrapper): - def _handle_long_word( - self, - reversed_chunks: t.List[str], - cur_line: t.List[str], - cur_len: int, - width: int, - ) -> None: - space_left = max(width - cur_len, 1) - - if self.break_long_words: - last = reversed_chunks[-1] - cut = last[:space_left] - res = last[space_left:] - cur_line.append(cut) - reversed_chunks[-1] = res - elif not cur_line: - cur_line.append(reversed_chunks.pop()) - - @contextmanager - def extra_indent(self, indent: str) -> t.Iterator[None]: - old_initial_indent = self.initial_indent - old_subsequent_indent = self.subsequent_indent - self.initial_indent += indent - self.subsequent_indent += indent - - try: - yield - finally: - self.initial_indent = old_initial_indent - self.subsequent_indent = old_subsequent_indent - - def indent_only(self, text: str) -> str: - rv = [] - - for idx, line in enumerate(text.splitlines()): - indent = self.initial_indent - - if idx > 0: - indent = self.subsequent_indent - - rv.append(f"{indent}{line}") - - return "\n".join(rv) diff --git a/server/venv/Lib/site-packages/click/_winconsole.py b/server/venv/Lib/site-packages/click/_winconsole.py deleted file mode 100644 index 6b20df3..0000000 --- a/server/venv/Lib/site-packages/click/_winconsole.py +++ /dev/null @@ -1,279 +0,0 @@ -# This module is based on the excellent work by Adam Bartoš who -# provided a lot of what went into the implementation here in -# the discussion to issue1602 in the Python bug tracker. -# -# There are some general differences in regards to how this works -# compared to the original patches as we do not need to patch -# the entire interpreter but just work in our little world of -# echo and prompt. 
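# (Editor's note: a minimal standalone sketch, not part of this module,
# of the detection idea used by _is_console() further down: a file
# descriptor refers to a real Win32 console exactly when
# GetConsoleMode() succeeds on its OS handle. Assumes a win32
# interpreter; the name `is_real_console` is hypothetical.)
import msvcrt
from ctypes import byref, windll
from ctypes.wintypes import DWORD

def is_real_console(fileno: int) -> bool:
    handle = msvcrt.get_osfhandle(fileno)
    return bool(windll.kernel32.GetConsoleMode(handle, byref(DWORD())))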
-import io -import sys -import time -import typing as t -from ctypes import byref -from ctypes import c_char -from ctypes import c_char_p -from ctypes import c_int -from ctypes import c_ssize_t -from ctypes import c_ulong -from ctypes import c_void_p -from ctypes import POINTER -from ctypes import py_object -from ctypes import Structure -from ctypes.wintypes import DWORD -from ctypes.wintypes import HANDLE -from ctypes.wintypes import LPCWSTR -from ctypes.wintypes import LPWSTR - -from ._compat import _NonClosingTextIOWrapper - -assert sys.platform == "win32" -import msvcrt # noqa: E402 -from ctypes import windll # noqa: E402 -from ctypes import WINFUNCTYPE # noqa: E402 - -c_ssize_p = POINTER(c_ssize_t) - -kernel32 = windll.kernel32 -GetStdHandle = kernel32.GetStdHandle -ReadConsoleW = kernel32.ReadConsoleW -WriteConsoleW = kernel32.WriteConsoleW -GetConsoleMode = kernel32.GetConsoleMode -GetLastError = kernel32.GetLastError -GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) -CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( - ("CommandLineToArgvW", windll.shell32) -) -LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) - -STDIN_HANDLE = GetStdHandle(-10) -STDOUT_HANDLE = GetStdHandle(-11) -STDERR_HANDLE = GetStdHandle(-12) - -PyBUF_SIMPLE = 0 -PyBUF_WRITABLE = 1 - -ERROR_SUCCESS = 0 -ERROR_NOT_ENOUGH_MEMORY = 8 -ERROR_OPERATION_ABORTED = 995 - -STDIN_FILENO = 0 -STDOUT_FILENO = 1 -STDERR_FILENO = 2 - -EOF = b"\x1a" -MAX_BYTES_WRITTEN = 32767 - -try: - from ctypes import pythonapi -except ImportError: - # On PyPy we cannot get buffers so our ability to operate here is - # severely limited. - get_buffer = None -else: - - class Py_buffer(Structure): - _fields_ = [ - ("buf", c_void_p), - ("obj", py_object), - ("len", c_ssize_t), - ("itemsize", c_ssize_t), - ("readonly", c_int), - ("ndim", c_int), - ("format", c_char_p), - ("shape", c_ssize_p), - ("strides", c_ssize_p), - ("suboffsets", c_ssize_p), - ("internal", c_void_p), - ] - - PyObject_GetBuffer = pythonapi.PyObject_GetBuffer - PyBuffer_Release = pythonapi.PyBuffer_Release - - def get_buffer(obj, writable=False): - buf = Py_buffer() - flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE - PyObject_GetBuffer(py_object(obj), byref(buf), flags) - - try: - buffer_type = c_char * buf.len - return buffer_type.from_address(buf.buf) - finally: - PyBuffer_Release(byref(buf)) - - -class _WindowsConsoleRawIOBase(io.RawIOBase): - def __init__(self, handle): - self.handle = handle - - def isatty(self): - super().isatty() - return True - - -class _WindowsConsoleReader(_WindowsConsoleRawIOBase): - def readable(self): - return True - - def readinto(self, b): - bytes_to_be_read = len(b) - if not bytes_to_be_read: - return 0 - elif bytes_to_be_read % 2: - raise ValueError( - "cannot read odd number of bytes from UTF-16-LE encoded console" - ) - - buffer = get_buffer(b, writable=True) - code_units_to_be_read = bytes_to_be_read // 2 - code_units_read = c_ulong() - - rv = ReadConsoleW( - HANDLE(self.handle), - buffer, - code_units_to_be_read, - byref(code_units_read), - None, - ) - if GetLastError() == ERROR_OPERATION_ABORTED: - # wait for KeyboardInterrupt - time.sleep(0.1) - if not rv: - raise OSError(f"Windows error: {GetLastError()}") - - if buffer[0] == EOF: - return 0 - return 2 * code_units_read.value - - -class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): - def writable(self): - return True - - @staticmethod - def _get_error_message(errno): - if errno == ERROR_SUCCESS: - 
return "ERROR_SUCCESS" - elif errno == ERROR_NOT_ENOUGH_MEMORY: - return "ERROR_NOT_ENOUGH_MEMORY" - return f"Windows error {errno}" - - def write(self, b): - bytes_to_be_written = len(b) - buf = get_buffer(b) - code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 - code_units_written = c_ulong() - - WriteConsoleW( - HANDLE(self.handle), - buf, - code_units_to_be_written, - byref(code_units_written), - None, - ) - bytes_written = 2 * code_units_written.value - - if bytes_written == 0 and bytes_to_be_written > 0: - raise OSError(self._get_error_message(GetLastError())) - return bytes_written - - -class ConsoleStream: - def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: - self._text_stream = text_stream - self.buffer = byte_stream - - @property - def name(self) -> str: - return self.buffer.name - - def write(self, x: t.AnyStr) -> int: - if isinstance(x, str): - return self._text_stream.write(x) - try: - self.flush() - except Exception: - pass - return self.buffer.write(x) - - def writelines(self, lines: t.Iterable[t.AnyStr]) -> None: - for line in lines: - self.write(line) - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._text_stream, name) - - def isatty(self) -> bool: - return self.buffer.isatty() - - def __repr__(self): - return f"" - - -def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { - 0: _get_text_stdin, - 1: _get_text_stdout, - 2: _get_text_stderr, -} - - -def _is_console(f: t.TextIO) -> bool: - if not hasattr(f, "fileno"): - return False - - try: - fileno = f.fileno() - except (OSError, io.UnsupportedOperation): - return False - - handle = msvcrt.get_osfhandle(fileno) - return bool(GetConsoleMode(handle, byref(DWORD()))) - - -def _get_windows_console_stream( - f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] -) -> t.Optional[t.TextIO]: - if ( - get_buffer is not None - and encoding in {"utf-16-le", None} - and errors in {"strict", None} - and _is_console(f) - ): - func = _stream_factories.get(f.fileno()) - if func is not None: - b = getattr(f, "buffer", None) - - if b is None: - return None - - return func(b) diff --git a/server/venv/Lib/site-packages/click/core.py b/server/venv/Lib/site-packages/click/core.py deleted file mode 100644 index cc65e89..0000000 --- a/server/venv/Lib/site-packages/click/core.py +++ /dev/null @@ -1,3042 +0,0 @@ -import enum -import errno -import inspect -import os -import sys -import typing as t -from collections import abc -from contextlib import contextmanager -from contextlib import ExitStack -from functools import update_wrapper -from gettext import gettext as _ -from gettext import ngettext 
-from itertools import repeat -from types import TracebackType - -from . import types -from .exceptions import Abort -from .exceptions import BadParameter -from .exceptions import ClickException -from .exceptions import Exit -from .exceptions import MissingParameter -from .exceptions import UsageError -from .formatting import HelpFormatter -from .formatting import join_options -from .globals import pop_context -from .globals import push_context -from .parser import _flag_needs_value -from .parser import OptionParser -from .parser import split_opt -from .termui import confirm -from .termui import prompt -from .termui import style -from .utils import _detect_program_name -from .utils import _expand_args -from .utils import echo -from .utils import make_default_short_help -from .utils import make_str -from .utils import PacifyFlushWrapper - -if t.TYPE_CHECKING: - import typing_extensions as te - from .shell_completion import CompletionItem - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -V = t.TypeVar("V") - - -def _complete_visible_commands( - ctx: "Context", incomplete: str -) -> t.Iterator[t.Tuple[str, "Command"]]: - """List all the subcommands of a group that start with the - incomplete value and aren't hidden. - - :param ctx: Invocation context for the group. - :param incomplete: Value being completed. May be empty. - """ - multi = t.cast(MultiCommand, ctx.command) - - for name in multi.list_commands(ctx): - if name.startswith(incomplete): - command = multi.get_command(ctx, name) - - if command is not None and not command.hidden: - yield name, command - - -def _check_multicommand( - base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False -) -> None: - if not base_command.chain or not isinstance(cmd, MultiCommand): - return - if register: - hint = ( - "It is not possible to add multi commands as children to" - " another multi command that is in chain mode." - ) - else: - hint = ( - "Found a multi command as subcommand to a multi command" - " that is in chain mode. This is not supported." - ) - raise RuntimeError( - f"{hint}. Command {base_command.name!r} is set to chain and" - f" {cmd_name!r} was added as a subcommand but it in itself is a" - f" multi command. ({cmd_name!r} is a {type(cmd).__name__}" - f" within a chained {type(base_command).__name__} named" - f" {base_command.name!r})." - ) - - -def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]: - return list(zip(*repeat(iter(iterable), batch_size))) - - -@contextmanager -def augment_usage_errors( - ctx: "Context", param: t.Optional["Parameter"] = None -) -> t.Iterator[None]: - """Context manager that attaches extra information to exceptions.""" - try: - yield - except BadParameter as e: - if e.ctx is None: - e.ctx = ctx - if param is not None and e.param is None: - e.param = param - raise - except UsageError as e: - if e.ctx is None: - e.ctx = ctx - raise - - -def iter_params_for_processing( - invocation_order: t.Sequence["Parameter"], - declaration_order: t.Sequence["Parameter"], -) -> t.List["Parameter"]: - """Given a sequence of parameters in the order as should be considered - for processing and an iterable of parameters that exist, this returns - a list in the correct order as they should be processed. 
- """ - - def sort_key(item: "Parameter") -> t.Tuple[bool, float]: - try: - idx: float = invocation_order.index(item) - except ValueError: - idx = float("inf") - - return not item.is_eager, idx - - return sorted(declaration_order, key=sort_key) - - -class ParameterSource(enum.Enum): - """This is an :class:`~enum.Enum` that indicates the source of a - parameter's value. - - Use :meth:`click.Context.get_parameter_source` to get the - source for a parameter by name. - - .. versionchanged:: 8.0 - Use :class:`~enum.Enum` and drop the ``validate`` method. - - .. versionchanged:: 8.0 - Added the ``PROMPT`` value. - """ - - COMMANDLINE = enum.auto() - """The value was provided by the command line args.""" - ENVIRONMENT = enum.auto() - """The value was provided with an environment variable.""" - DEFAULT = enum.auto() - """Used the default specified by the parameter.""" - DEFAULT_MAP = enum.auto() - """Used a default provided by :attr:`Context.default_map`.""" - PROMPT = enum.auto() - """Used a prompt to confirm a default or provide a value.""" - - -class Context: - """The context is a special internal object that holds state relevant - for the script execution at every single level. It's normally invisible - to commands unless they opt-in to getting access to it. - - The context is useful as it can pass internal objects around and can - control special execution features such as reading data from - environment variables. - - A context can be used as context manager in which case it will call - :meth:`close` on teardown. - - :param command: the command class for this context. - :param parent: the parent context. - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it is usually - the name of the script, for commands below it it's - the name of the script. - :param obj: an arbitrary object of user data. - :param auto_envvar_prefix: the prefix to use for automatic environment - variables. If this is `None` then reading - from environment variables is disabled. This - does not affect manually set environment - variables which are always read. - :param default_map: a dictionary (like object) with default values - for parameters. - :param terminal_width: the width of the terminal. The default is - inherit from parent context. If no context - defines the terminal width then auto - detection will be applied. - :param max_content_width: the maximum width for content rendered by - Click (this currently only affects help - pages). This defaults to 80 characters if - not overridden. In other words: even if the - terminal is larger than that, Click will not - format things wider than 80 characters by - default. In addition to that, formatters might - add some safety mapping on the right. - :param resilient_parsing: if this flag is enabled then Click will - parse without any interactivity or callback - invocation. Default values will also be - ignored. This is useful for implementing - things such as completion support. - :param allow_extra_args: if this is set to `True` then extra arguments - at the end will not raise an error and will be - kept on the context. The default is to inherit - from the command. - :param allow_interspersed_args: if this is set to `False` then options - and arguments cannot be mixed. The - default is to inherit from the command. - :param ignore_unknown_options: instructs click to ignore options it does - not know and keeps them for later - processing. 
- :param help_option_names: optionally a list of strings that define how - the default help parameter is named. The - default is ``['--help']``. - :param token_normalize_func: an optional function that is used to - normalize tokens (options, choices, - etc.). This for instance can be used to - implement case insensitive behavior. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are used in texts that Click prints which is by - default not the case. This for instance would affect - help output. - :param show_default: Show the default value for commands. If this - value is not set, it defaults to the value from the parent - context. ``Command.show_default`` overrides this default for the - specific command. - - .. versionchanged:: 8.1 - The ``show_default`` parameter is overridden by - ``Command.show_default``, instead of the other way around. - - .. versionchanged:: 8.0 - The ``show_default`` parameter defaults to the value from the - parent context. - - .. versionchanged:: 7.1 - Added the ``show_default`` parameter. - - .. versionchanged:: 4.0 - Added the ``color``, ``ignore_unknown_options``, and - ``max_content_width`` parameters. - - .. versionchanged:: 3.0 - Added the ``allow_extra_args`` and ``allow_interspersed_args`` - parameters. - - .. versionchanged:: 2.0 - Added the ``resilient_parsing``, ``help_option_names``, and - ``token_normalize_func`` parameters. - """ - - #: The formatter class to create with :meth:`make_formatter`. - #: - #: .. versionadded:: 8.0 - formatter_class: t.Type["HelpFormatter"] = HelpFormatter - - def __init__( - self, - command: "Command", - parent: t.Optional["Context"] = None, - info_name: t.Optional[str] = None, - obj: t.Optional[t.Any] = None, - auto_envvar_prefix: t.Optional[str] = None, - default_map: t.Optional[t.MutableMapping[str, t.Any]] = None, - terminal_width: t.Optional[int] = None, - max_content_width: t.Optional[int] = None, - resilient_parsing: bool = False, - allow_extra_args: t.Optional[bool] = None, - allow_interspersed_args: t.Optional[bool] = None, - ignore_unknown_options: t.Optional[bool] = None, - help_option_names: t.Optional[t.List[str]] = None, - token_normalize_func: t.Optional[t.Callable[[str], str]] = None, - color: t.Optional[bool] = None, - show_default: t.Optional[bool] = None, - ) -> None: - #: the parent context or `None` if none exists. - self.parent = parent - #: the :class:`Command` for this context. - self.command = command - #: the descriptive information name - self.info_name = info_name - #: Map of parameter names to their parsed values. Parameters - #: with ``expose_value=False`` are not stored. - self.params: t.Dict[str, t.Any] = {} - #: the leftover arguments. - self.args: t.List[str] = [] - #: protected arguments. These are arguments that are prepended - #: to `args` when certain parsing scenarios are encountered but - #: must be never propagated to another arguments. This is used - #: to implement nested parsing. - self.protected_args: t.List[str] = [] - #: the collected prefixes of the command's options. - self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set() - - if obj is None and parent is not None: - obj = parent.obj - - #: the user object stored. - self.obj: t.Any = obj - self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {}) - - #: A dictionary (-like object) with defaults for parameters. 
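# (Editor's note: illustration only, not part of click.) Extra keyword
# arguments to main() are forwarded to the Context constructor, so
# per-subcommand defaults can be supplied at invocation time, a
# documented click pattern:
#
#     cli(default_map={"runserver": {"port": 5000}})
#
# The lookup below narrows the parent's default_map by info_name so the
# "runserver" section applies only to that subcommand's parameters.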
- if ( - default_map is None - and info_name is not None - and parent is not None - and parent.default_map is not None - ): - default_map = parent.default_map.get(info_name) - - self.default_map: t.Optional[t.MutableMapping[str, t.Any]] = default_map - - #: This flag indicates if a subcommand is going to be executed. A - #: group callback can use this information to figure out if it's - #: being executed directly or because the execution flow passes - #: onwards to a subcommand. By default it's None, but it can be - #: the name of the subcommand to execute. - #: - #: If chaining is enabled this will be set to ``'*'`` in case - #: any commands are executed. It is however not possible to - #: figure out which ones. If you require this knowledge you - #: should use a :func:`result_callback`. - self.invoked_subcommand: t.Optional[str] = None - - if terminal_width is None and parent is not None: - terminal_width = parent.terminal_width - - #: The width of the terminal (None is autodetection). - self.terminal_width: t.Optional[int] = terminal_width - - if max_content_width is None and parent is not None: - max_content_width = parent.max_content_width - - #: The maximum width of formatted content (None implies a sensible - #: default which is 80 for most things). - self.max_content_width: t.Optional[int] = max_content_width - - if allow_extra_args is None: - allow_extra_args = command.allow_extra_args - - #: Indicates if the context allows extra args or if it should - #: fail on parsing. - #: - #: .. versionadded:: 3.0 - self.allow_extra_args = allow_extra_args - - if allow_interspersed_args is None: - allow_interspersed_args = command.allow_interspersed_args - - #: Indicates if the context allows mixing of arguments and - #: options or not. - #: - #: .. versionadded:: 3.0 - self.allow_interspersed_args: bool = allow_interspersed_args - - if ignore_unknown_options is None: - ignore_unknown_options = command.ignore_unknown_options - - #: Instructs click to ignore options that a command does not - #: understand and will store it on the context for later - #: processing. This is primarily useful for situations where you - #: want to call into external programs. Generally this pattern is - #: strongly discouraged because it's not possibly to losslessly - #: forward all arguments. - #: - #: .. versionadded:: 4.0 - self.ignore_unknown_options: bool = ignore_unknown_options - - if help_option_names is None: - if parent is not None: - help_option_names = parent.help_option_names - else: - help_option_names = ["--help"] - - #: The names for the help options. - self.help_option_names: t.List[str] = help_option_names - - if token_normalize_func is None and parent is not None: - token_normalize_func = parent.token_normalize_func - - #: An optional normalization function for tokens. This is - #: options, choices, commands etc. - self.token_normalize_func: t.Optional[ - t.Callable[[str], str] - ] = token_normalize_func - - #: Indicates if resilient parsing is enabled. In that case Click - #: will do its best to not cause any failures and default values - #: will be ignored. Useful for completion. - self.resilient_parsing: bool = resilient_parsing - - # If there is no envvar prefix yet, but the parent has one and - # the command on this level has a name, we can expand the envvar - # prefix automatically. 
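# (Editor's note: illustration only, not part of click.) The expansion
# below means that with auto_envvar_prefix="MYTOOL" on the root, a
# --port option on a subcommand named "serve" is read from
# MYTOOL_SERVE_PORT: one upper-cased info_name is appended per nesting
# level, and dashes are normalized to underscores.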
- if auto_envvar_prefix is None: - if ( - parent is not None - and parent.auto_envvar_prefix is not None - and self.info_name is not None - ): - auto_envvar_prefix = ( - f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" - ) - else: - auto_envvar_prefix = auto_envvar_prefix.upper() - - if auto_envvar_prefix is not None: - auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") - - self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix - - if color is None and parent is not None: - color = parent.color - - #: Controls if styling output is wanted or not. - self.color: t.Optional[bool] = color - - if show_default is None and parent is not None: - show_default = parent.show_default - - #: Show option default values when formatting help text. - self.show_default: t.Optional[bool] = show_default - - self._close_callbacks: t.List[t.Callable[[], t.Any]] = [] - self._depth = 0 - self._parameter_source: t.Dict[str, ParameterSource] = {} - self._exit_stack = ExitStack() - - def to_info_dict(self) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. This traverses the entire CLI - structure. - - .. code-block:: python - - with Context(cli) as ctx: - info = ctx.to_info_dict() - - .. versionadded:: 8.0 - """ - return { - "command": self.command.to_info_dict(self), - "info_name": self.info_name, - "allow_extra_args": self.allow_extra_args, - "allow_interspersed_args": self.allow_interspersed_args, - "ignore_unknown_options": self.ignore_unknown_options, - "auto_envvar_prefix": self.auto_envvar_prefix, - } - - def __enter__(self) -> "Context": - self._depth += 1 - push_context(self) - return self - - def __exit__( - self, - exc_type: t.Optional[t.Type[BaseException]], - exc_value: t.Optional[BaseException], - tb: t.Optional[TracebackType], - ) -> None: - self._depth -= 1 - if self._depth == 0: - self.close() - pop_context() - - @contextmanager - def scope(self, cleanup: bool = True) -> t.Iterator["Context"]: - """This helper method can be used with the context object to promote - it to the current thread local (see :func:`get_current_context`). - The default behavior of this is to invoke the cleanup functions which - can be disabled by setting `cleanup` to `False`. The cleanup - functions are typically used for things such as closing file handles. - - If the cleanup is intended the context object can also be directly - used as a context manager. - - Example usage:: - - with ctx.scope(): - assert get_current_context() is ctx - - This is equivalent:: - - with ctx: - assert get_current_context() is ctx - - .. versionadded:: 5.0 - - :param cleanup: controls if the cleanup functions should be run or - not. The default is to run these functions. In - some situations the context only wants to be - temporarily pushed in which case this can be disabled. - Nested pushes automatically defer the cleanup. - """ - if not cleanup: - self._depth += 1 - try: - with self as rv: - yield rv - finally: - if not cleanup: - self._depth -= 1 - - @property - def meta(self) -> t.Dict[str, t.Any]: - """This is a dictionary which is shared with all the contexts - that are nested. It exists so that click utilities can store some - state here if they need to. It is however the responsibility of - that code to manage this dictionary well. - - The keys are supposed to be unique dotted strings. For instance - module paths are a good choice for it. What is stored in there is - irrelevant for the operation of click. 
However what is important is - that code that places data here adheres to the general semantics of - the system. - - Example usage:: - - LANG_KEY = f'{__name__}.lang' - - def set_language(value): - ctx = get_current_context() - ctx.meta[LANG_KEY] = value - - def get_language(): - return get_current_context().meta.get(LANG_KEY, 'en_US') - - .. versionadded:: 5.0 - """ - return self._meta - - def make_formatter(self) -> HelpFormatter: - """Creates the :class:`~click.HelpFormatter` for the help and - usage output. - - To quickly customize the formatter class used without overriding - this method, set the :attr:`formatter_class` attribute. - - .. versionchanged:: 8.0 - Added the :attr:`formatter_class` attribute. - """ - return self.formatter_class( - width=self.terminal_width, max_width=self.max_content_width - ) - - def with_resource(self, context_manager: t.ContextManager[V]) -> V: - """Register a resource as if it were used in a ``with`` - statement. The resource will be cleaned up when the context is - popped. - - Uses :meth:`contextlib.ExitStack.enter_context`. It calls the - resource's ``__enter__()`` method and returns the result. When - the context is popped, it closes the stack, which calls the - resource's ``__exit__()`` method. - - To register a cleanup function for something that isn't a - context manager, use :meth:`call_on_close`. Or use something - from :mod:`contextlib` to turn it into a context manager first. - - .. code-block:: python - - @click.group() - @click.option("--name") - @click.pass_context - def cli(ctx): - ctx.obj = ctx.with_resource(connect_db(name)) - - :param context_manager: The context manager to enter. - :return: Whatever ``context_manager.__enter__()`` returns. - - .. versionadded:: 8.0 - """ - return self._exit_stack.enter_context(context_manager) - - def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: - """Register a function to be called when the context tears down. - - This can be used to close resources opened during the script - execution. Resources that support Python's context manager - protocol which would be used in a ``with`` statement should be - registered with :meth:`with_resource` instead. - - :param f: The function to execute on teardown. - """ - return self._exit_stack.callback(f) - - def close(self) -> None: - """Invoke all close callbacks registered with - :meth:`call_on_close`, and exit all context managers entered - with :meth:`with_resource`. - """ - self._exit_stack.close() - # In case the context is reused, create a new exit stack. - self._exit_stack = ExitStack() - - @property - def command_path(self) -> str: - """The computed command path. This is used for the ``usage`` - information on the help page. It's automatically created by - combining the info names of the chain of contexts to the root. 
- """ - rv = "" - if self.info_name is not None: - rv = self.info_name - if self.parent is not None: - parent_command_path = [self.parent.command_path] - - if isinstance(self.parent.command, Command): - for param in self.parent.command.get_params(self): - parent_command_path.extend(param.get_usage_pieces(self)) - - rv = f"{' '.join(parent_command_path)} {rv}" - return rv.lstrip() - - def find_root(self) -> "Context": - """Finds the outermost context.""" - node = self - while node.parent is not None: - node = node.parent - return node - - def find_object(self, object_type: t.Type[V]) -> t.Optional[V]: - """Finds the closest object of a given type.""" - node: t.Optional["Context"] = self - - while node is not None: - if isinstance(node.obj, object_type): - return node.obj - - node = node.parent - - return None - - def ensure_object(self, object_type: t.Type[V]) -> V: - """Like :meth:`find_object` but sets the innermost object to a - new instance of `object_type` if it does not exist. - """ - rv = self.find_object(object_type) - if rv is None: - self.obj = rv = object_type() - return rv - - @t.overload - def lookup_default( - self, name: str, call: "te.Literal[True]" = True - ) -> t.Optional[t.Any]: - ... - - @t.overload - def lookup_default( - self, name: str, call: "te.Literal[False]" = ... - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - ... - - def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]: - """Get the default for a parameter from :attr:`default_map`. - - :param name: Name of the parameter. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. - """ - if self.default_map is not None: - value = self.default_map.get(name) - - if call and callable(value): - return value() - - return value - - return None - - def fail(self, message: str) -> "te.NoReturn": - """Aborts the execution of the program with a specific error - message. - - :param message: the error message to fail with. - """ - raise UsageError(message, self) - - def abort(self) -> "te.NoReturn": - """Aborts the script.""" - raise Abort() - - def exit(self, code: int = 0) -> "te.NoReturn": - """Exits the application with a given exit code.""" - raise Exit(code) - - def get_usage(self) -> str: - """Helper method to get formatted usage string for the current - context and command. - """ - return self.command.get_usage(self) - - def get_help(self) -> str: - """Helper method to get formatted help page for the current - context and command. - """ - return self.command.get_help(self) - - def _make_sub_context(self, command: "Command") -> "Context": - """Create a new context of the same type as this context, but - for a new command. - - :meta private: - """ - return type(self)(command, info_name=command.name, parent=self) - - @t.overload - def invoke( - __self, # noqa: B902 - __callback: "t.Callable[..., V]", - *args: t.Any, - **kwargs: t.Any, - ) -> V: - ... - - @t.overload - def invoke( - __self, # noqa: B902 - __callback: "Command", - *args: t.Any, - **kwargs: t.Any, - ) -> t.Any: - ... - - def invoke( - __self, # noqa: B902 - __callback: t.Union["Command", "t.Callable[..., V]"], - *args: t.Any, - **kwargs: t.Any, - ) -> t.Union[t.Any, V]: - """Invokes a command callback in exactly the way it expects. There - are two ways to invoke this method: - - 1. the first argument can be a callback and all other arguments and - keyword arguments are forwarded directly to the function. - 2. 
the first argument is a click command object. In that case all - arguments are forwarded as well but proper click parameters - (options and click arguments) must be keyword arguments and Click - will fill in defaults. - - Note that before Click 3.2 keyword arguments were not properly filled - in against the intention of this code and no context was created. For - more information about this change and why it was done in a bugfix - release see :ref:`upgrade-to-3.2`. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if :meth:`forward` is called at multiple levels. - """ - if isinstance(__callback, Command): - other_cmd = __callback - - if other_cmd.callback is None: - raise TypeError( - "The given command does not have a callback that can be invoked." - ) - else: - __callback = t.cast("t.Callable[..., V]", other_cmd.callback) - - ctx = __self._make_sub_context(other_cmd) - - for param in other_cmd.params: - if param.name not in kwargs and param.expose_value: - kwargs[param.name] = param.type_cast_value( # type: ignore - ctx, param.get_default(ctx) - ) - - # Track all kwargs as params, so that forward() will pass - # them on in subsequent calls. - ctx.params.update(kwargs) - else: - ctx = __self - - with augment_usage_errors(__self): - with ctx: - return __callback(*args, **kwargs) - - def forward( - __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902 - ) -> t.Any: - """Similar to :meth:`invoke` but fills in default keyword - arguments from the current context if the other command expects - it. This cannot invoke callbacks directly, only other commands. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if ``forward`` is called at multiple levels. - """ - # Can only forward to other commands, not direct callbacks. - if not isinstance(__cmd, Command): - raise TypeError("Callback is not a command.") - - for param in __self.params: - if param not in kwargs: - kwargs[param] = __self.params[param] - - return __self.invoke(__cmd, *args, **kwargs) - - def set_parameter_source(self, name: str, source: ParameterSource) -> None: - """Set the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - :param name: The name of the parameter. - :param source: A member of :class:`~click.core.ParameterSource`. - """ - self._parameter_source[name] = source - - def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]: - """Get the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - This can be useful for determining when a user specified a value - on the command line that is the same as the default value. It - will be :attr:`~click.core.ParameterSource.DEFAULT` only if the - value was actually taken from the default. - - :param name: The name of the parameter. - :rtype: ParameterSource - - .. versionchanged:: 8.0 - Returns ``None`` if the parameter was not provided from any - source. - """ - return self._parameter_source.get(name) - - -class BaseCommand: - """The base command implements the minimal API contract of commands. - Most code will never use this as it does not implement a lot of useful - functionality but it can act as the direct subclass of alternative - parsing methods that do not depend on the Click parser. - - For instance, this can be used to bridge Click and other systems like - argparse or docopt. 
- - Because base commands do not implement a lot of the API that other - parts of Click take for granted, they are not supported for all - operations. For instance, they cannot be used with the decorators - usually and they have no built-in callback system. - - .. versionchanged:: 2.0 - Added the `context_settings` parameter. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - """ - - #: The context class to create with :meth:`make_context`. - #: - #: .. versionadded:: 8.0 - context_class: t.Type[Context] = Context - #: the default for the :attr:`Context.allow_extra_args` flag. - allow_extra_args = False - #: the default for the :attr:`Context.allow_interspersed_args` flag. - allow_interspersed_args = True - #: the default for the :attr:`Context.ignore_unknown_options` flag. - ignore_unknown_options = False - - def __init__( - self, - name: t.Optional[str], - context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, - ) -> None: - #: the name the command thinks it has. Upon registering a command - #: on a :class:`Group` the group will default the command name - #: with this information. You should instead use the - #: :class:`Context`\'s :attr:`~Context.info_name` attribute. - self.name = name - - if context_settings is None: - context_settings = {} - - #: an optional dictionary with defaults passed to the context. - self.context_settings: t.MutableMapping[str, t.Any] = context_settings - - def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. This traverses the entire structure - below this command. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - :param ctx: A :class:`Context` representing this command. - - .. versionadded:: 8.0 - """ - return {"name": self.name} - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def get_usage(self, ctx: Context) -> str: - raise NotImplementedError("Base commands cannot get usage") - - def get_help(self, ctx: Context) -> str: - raise NotImplementedError("Base commands cannot get help") - - def make_context( - self, - info_name: t.Optional[str], - args: t.List[str], - parent: t.Optional[Context] = None, - **extra: t.Any, - ) -> Context: - """This function when given an info name and arguments will kick - off the parsing and create a new :class:`Context`. It does not - invoke the actual command callback though. - - To quickly customize the context class used without overriding - this method, set the :attr:`context_class` attribute. - - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it's usually - the name of the script, for commands below it's - the name of the command. - :param args: the arguments to parse as list of strings. - :param parent: the parent context if available. - :param extra: extra keyword arguments forwarded to the context - constructor. - - .. versionchanged:: 8.0 - Added the :attr:`context_class` attribute. 
- """ - for key, value in self.context_settings.items(): - if key not in extra: - extra[key] = value - - ctx = self.context_class( - self, info_name=info_name, parent=parent, **extra # type: ignore - ) - - with ctx.scope(cleanup=False): - self.parse_args(ctx, args) - return ctx - - def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: - """Given a context and a list of arguments this creates the parser - and parses the arguments, then modifies the context as necessary. - This is automatically invoked by :meth:`make_context`. - """ - raise NotImplementedError("Base commands do not know how to parse arguments.") - - def invoke(self, ctx: Context) -> t.Any: - """Given a context, this invokes the command. The default - implementation is raising a not implemented error. - """ - raise NotImplementedError("Base commands are not invocable by default") - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. Looks - at the names of chained multi-commands. - - Any command could be part of a chained multi-command, so sibling - commands are valid at any point during command completion. Other - command classes will return more completions. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results: t.List["CompletionItem"] = [] - - while ctx.parent is not None: - ctx = ctx.parent - - if isinstance(ctx.command, MultiCommand) and ctx.command.chain: - results.extend( - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - if name not in ctx.protected_args - ) - - return results - - @t.overload - def main( - self, - args: t.Optional[t.Sequence[str]] = None, - prog_name: t.Optional[str] = None, - complete_var: t.Optional[str] = None, - standalone_mode: "te.Literal[True]" = True, - **extra: t.Any, - ) -> "te.NoReturn": - ... - - @t.overload - def main( - self, - args: t.Optional[t.Sequence[str]] = None, - prog_name: t.Optional[str] = None, - complete_var: t.Optional[str] = None, - standalone_mode: bool = ..., - **extra: t.Any, - ) -> t.Any: - ... - - def main( - self, - args: t.Optional[t.Sequence[str]] = None, - prog_name: t.Optional[str] = None, - complete_var: t.Optional[str] = None, - standalone_mode: bool = True, - windows_expand_args: bool = True, - **extra: t.Any, - ) -> t.Any: - """This is the way to invoke a script with all the bells and - whistles as a command line application. This will always terminate - the application after a call. If this is not wanted, ``SystemExit`` - needs to be caught. - - This method is also available by directly calling the instance of - a :class:`Command`. - - :param args: the arguments that should be used for parsing. If not - provided, ``sys.argv[1:]`` is used. - :param prog_name: the program name that should be used. By default - the program name is constructed by taking the file - name from ``sys.argv[0]``. - :param complete_var: the environment variable that controls the - bash completion support. The default is - ``"__COMPLETE"`` with prog_name in - uppercase. - :param standalone_mode: the default behavior is to invoke the script - in standalone mode. Click will then - handle exceptions and convert them into - error messages and the function will never - return but shut down the interpreter. 
If - this is set to `False` they will be - propagated to the caller and the return - value of this function is the return value - of :meth:`invoke`. - :param windows_expand_args: Expand glob patterns, user dir, and - env vars in command line args on Windows. - :param extra: extra keyword arguments are forwarded to the context - constructor. See :class:`Context` for more information. - - .. versionchanged:: 8.0.1 - Added the ``windows_expand_args`` parameter to allow - disabling command line arg expansion on Windows. - - .. versionchanged:: 8.0 - When taking arguments from ``sys.argv`` on Windows, glob - patterns, user dir, and env vars are expanded. - - .. versionchanged:: 3.0 - Added the ``standalone_mode`` parameter. - """ - if args is None: - args = sys.argv[1:] - - if os.name == "nt" and windows_expand_args: - args = _expand_args(args) - else: - args = list(args) - - if prog_name is None: - prog_name = _detect_program_name() - - # Process shell completion requests and exit early. - self._main_shell_completion(extra, prog_name, complete_var) - - try: - try: - with self.make_context(prog_name, args, **extra) as ctx: - rv = self.invoke(ctx) - if not standalone_mode: - return rv - # it's not safe to `ctx.exit(rv)` here! - # note that `rv` may actually contain data like "1" which - # has obvious effects - # more subtle case: `rv=[None, None]` can come out of - # chained commands which all returned `None` -- so it's not - # even always obvious that `rv` indicates success/failure - # by its truthiness/falsiness - ctx.exit() - except (EOFError, KeyboardInterrupt) as e: - echo(file=sys.stderr) - raise Abort() from e - except ClickException as e: - if not standalone_mode: - raise - e.show() - sys.exit(e.exit_code) - except OSError as e: - if e.errno == errno.EPIPE: - sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) - sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) - sys.exit(1) - else: - raise - except Exit as e: - if standalone_mode: - sys.exit(e.exit_code) - else: - # in non-standalone mode, return the exit code - # note that this is only reached if `self.invoke` above raises - # an Exit explicitly -- thus bypassing the check there which - # would return its result - # the results of non-standalone execution may therefore be - # somewhat ambiguous: if there are codepaths which lead to - # `ctx.exit(1)` and to `return 1`, the caller won't be able to - # tell the difference between the two - return e.exit_code - except Abort: - if not standalone_mode: - raise - echo(_("Aborted!"), file=sys.stderr) - sys.exit(1) - - def _main_shell_completion( - self, - ctx_args: t.MutableMapping[str, t.Any], - prog_name: str, - complete_var: t.Optional[str] = None, - ) -> None: - """Check if the shell is asking for tab completion, process - that, then exit early. Called from :meth:`main` before the - program is invoked. - - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. Defaults to - ``_{PROG_NAME}_COMPLETE``. - - .. versionchanged:: 8.2.0 - Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). 
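-
-        For example, a program invoked as ``my-tool.cli`` would be
-        completed through ``_MY_TOOL_CLI_COMPLETE``; a sketch of
-        enabling bash completion::
-
-            _MY_TOOL_CLI_COMPLETE=bash_source my-tool.cli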
- """ - if complete_var is None: - complete_name = prog_name.replace("-", "_").replace(".", "_") - complete_var = f"_{complete_name}_COMPLETE".upper() - - instruction = os.environ.get(complete_var) - - if not instruction: - return - - from .shell_completion import shell_complete - - rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) - sys.exit(rv) - - def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: - """Alias for :meth:`main`.""" - return self.main(*args, **kwargs) - - -class Command(BaseCommand): - """Commands are the basic building block of command line interfaces in - Click. A basic command handles command line parsing and might dispatch - more parsing to commands nested below it. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - :param callback: the callback to invoke. This is optional. - :param params: the parameters to register with this command. This can - be either :class:`Option` or :class:`Argument` objects. - :param help: the help string to use for this command. - :param epilog: like the help string but it's printed at the end of the - help page after everything else. - :param short_help: the short help to use for this command. This is - shown on the command listing of the parent command. - :param add_help_option: by default each command registers a ``--help`` - option. This can be disabled by this parameter. - :param no_args_is_help: this controls what happens if no arguments are - provided. This option is disabled by default. - If enabled this will add ``--help`` as argument - if no arguments are passed - :param hidden: hide this command from help outputs. - - :param deprecated: issues a message indicating that - the command is deprecated. - - .. versionchanged:: 8.1 - ``help``, ``epilog``, and ``short_help`` are stored unprocessed, - all formatting is done when outputting help text, not at init, - and is done even if not using the ``@command`` decorator. - - .. versionchanged:: 8.0 - Added a ``repr`` showing the command name. - - .. versionchanged:: 7.1 - Added the ``no_args_is_help`` parameter. - - .. versionchanged:: 2.0 - Added the ``context_settings`` parameter. - """ - - def __init__( - self, - name: t.Optional[str], - context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, - callback: t.Optional[t.Callable[..., t.Any]] = None, - params: t.Optional[t.List["Parameter"]] = None, - help: t.Optional[str] = None, - epilog: t.Optional[str] = None, - short_help: t.Optional[str] = None, - options_metavar: t.Optional[str] = "[OPTIONS]", - add_help_option: bool = True, - no_args_is_help: bool = False, - hidden: bool = False, - deprecated: bool = False, - ) -> None: - super().__init__(name, context_settings) - #: the callback to execute when the command fires. This might be - #: `None` in which case nothing happens. - self.callback = callback - #: the list of parameters for this command in the order they - #: should show up in the help page and execute. Eager parameters - #: will automatically be handled before non eager ones. 
- self.params: t.List["Parameter"] = params or [] - self.help = help - self.epilog = epilog - self.options_metavar = options_metavar - self.short_help = short_help - self.add_help_option = add_help_option - self.no_args_is_help = no_args_is_help - self.hidden = hidden - self.deprecated = deprecated - - def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict(ctx) - info_dict.update( - params=[param.to_info_dict() for param in self.get_params(ctx)], - help=self.help, - epilog=self.epilog, - short_help=self.short_help, - hidden=self.hidden, - deprecated=self.deprecated, - ) - return info_dict - - def get_usage(self, ctx: Context) -> str: - """Formats the usage line into a string and returns it. - - Calls :meth:`format_usage` internally. - """ - formatter = ctx.make_formatter() - self.format_usage(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_params(self, ctx: Context) -> t.List["Parameter"]: - rv = self.params - help_option = self.get_help_option(ctx) - - if help_option is not None: - rv = [*rv, help_option] - - return rv - - def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the usage line into the formatter. - - This is a low-level method called by :meth:`get_usage`. - """ - pieces = self.collect_usage_pieces(ctx) - formatter.write_usage(ctx.command_path, " ".join(pieces)) - - def collect_usage_pieces(self, ctx: Context) -> t.List[str]: - """Returns all the pieces that go into the usage line and returns - it as a list of strings. - """ - rv = [self.options_metavar] if self.options_metavar else [] - - for param in self.get_params(ctx): - rv.extend(param.get_usage_pieces(ctx)) - - return rv - - def get_help_option_names(self, ctx: Context) -> t.List[str]: - """Returns the names for the help option.""" - all_names = set(ctx.help_option_names) - for param in self.params: - all_names.difference_update(param.opts) - all_names.difference_update(param.secondary_opts) - return list(all_names) - - def get_help_option(self, ctx: Context) -> t.Optional["Option"]: - """Returns the help option object.""" - help_options = self.get_help_option_names(ctx) - - if not help_options or not self.add_help_option: - return None - - def show_help(ctx: Context, param: "Parameter", value: str) -> None: - if value and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - return Option( - help_options, - is_flag=True, - is_eager=True, - expose_value=False, - callback=show_help, - help=_("Show this message and exit."), - ) - - def make_parser(self, ctx: Context) -> OptionParser: - """Creates the underlying option parser for this command.""" - parser = OptionParser(ctx) - for param in self.get_params(ctx): - param.add_to_parser(parser, ctx) - return parser - - def get_help(self, ctx: Context) -> str: - """Formats the help into a string and returns it. - - Calls :meth:`format_help` internally. - """ - formatter = ctx.make_formatter() - self.format_help(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_short_help_str(self, limit: int = 45) -> str: - """Gets short help for the command or makes it by shortening the - long help string. 
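-
-        :param limit: the maximum length to shorten the long help
-            string to when no explicit ``short_help`` is set.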
- """ - if self.short_help: - text = inspect.cleandoc(self.short_help) - elif self.help: - text = make_default_short_help(self.help, limit) - else: - text = "" - - if self.deprecated: - text = _("(Deprecated) {text}").format(text=text) - - return text.strip() - - def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help into the formatter if it exists. - - This is a low-level method called by :meth:`get_help`. - - This calls the following methods: - - - :meth:`format_usage` - - :meth:`format_help_text` - - :meth:`format_options` - - :meth:`format_epilog` - """ - self.format_usage(ctx, formatter) - self.format_help_text(ctx, formatter) - self.format_options(ctx, formatter) - self.format_epilog(ctx, formatter) - - def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help text to the formatter if it exists.""" - if self.help is not None: - # truncate the help text to the first form feed - text = inspect.cleandoc(self.help).partition("\f")[0] - else: - text = "" - - if self.deprecated: - text = _("(Deprecated) {text}").format(text=text) - - if text: - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(text) - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes all the options into the formatter if they exist.""" - opts = [] - for param in self.get_params(ctx): - rv = param.get_help_record(ctx) - if rv is not None: - opts.append(rv) - - if opts: - with formatter.section(_("Options")): - formatter.write_dl(opts) - - def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the epilog into the formatter if it exists.""" - if self.epilog: - epilog = inspect.cleandoc(self.epilog) - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(epilog) - - def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - parser = self.make_parser(ctx) - opts, args, param_order = parser.parse_args(args=args) - - for param in iter_params_for_processing(param_order, self.get_params(ctx)): - value, args = param.handle_parse_result(ctx, opts, args) - - if args and not ctx.allow_extra_args and not ctx.resilient_parsing: - ctx.fail( - ngettext( - "Got unexpected extra argument ({args})", - "Got unexpected extra arguments ({args})", - len(args), - ).format(args=" ".join(map(str, args))) - ) - - ctx.args = args - ctx._opt_prefixes.update(parser._opt_prefixes) - return args - - def invoke(self, ctx: Context) -> t.Any: - """Given a context, this invokes the attached callback (if it exists) - in the right way. - """ - if self.deprecated: - message = _( - "DeprecationWarning: The command {name!r} is deprecated." - ).format(name=self.name) - echo(style(message, fg="red"), err=True) - - if self.callback is not None: - return ctx.invoke(self.callback, **ctx.params) - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. Looks - at the names of options and chained multi-commands. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results: t.List["CompletionItem"] = [] - - if incomplete and not incomplete[0].isalnum(): - for param in self.get_params(ctx): - if ( - not isinstance(param, Option) - or param.hidden - or ( - not param.multiple - and ctx.get_parameter_source(param.name) # type: ignore - is ParameterSource.COMMANDLINE - ) - ): - continue - - results.extend( - CompletionItem(name, help=param.help) - for name in [*param.opts, *param.secondary_opts] - if name.startswith(incomplete) - ) - - results.extend(super().shell_complete(ctx, incomplete)) - return results - - -class MultiCommand(Command): - """A multi command is the basic implementation of a command that - dispatches to subcommands. The most common version is the - :class:`Group`. - - :param invoke_without_command: this controls how the multi command itself - is invoked. By default it's only invoked - if a subcommand is provided. - :param no_args_is_help: this controls what happens if no arguments are - provided. This option is enabled by default if - `invoke_without_command` is disabled or disabled - if it's enabled. If enabled this will add - ``--help`` as argument if no arguments are - passed. - :param subcommand_metavar: the string that is used in the documentation - to indicate the subcommand place. - :param chain: if this is set to `True` chaining of multiple subcommands - is enabled. This restricts the form of commands in that - they cannot have optional arguments but it allows - multiple commands to be chained together. - :param result_callback: The result callback to attach to this multi - command. This can be set or changed later with the - :meth:`result_callback` decorator. - :param attrs: Other command arguments described in :class:`Command`. - """ - - allow_extra_args = True - allow_interspersed_args = False - - def __init__( - self, - name: t.Optional[str] = None, - invoke_without_command: bool = False, - no_args_is_help: t.Optional[bool] = None, - subcommand_metavar: t.Optional[str] = None, - chain: bool = False, - result_callback: t.Optional[t.Callable[..., t.Any]] = None, - **attrs: t.Any, - ) -> None: - super().__init__(name, **attrs) - - if no_args_is_help is None: - no_args_is_help = not invoke_without_command - - self.no_args_is_help = no_args_is_help - self.invoke_without_command = invoke_without_command - - if subcommand_metavar is None: - if chain: - subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." - else: - subcommand_metavar = "COMMAND [ARGS]..." - - self.subcommand_metavar = subcommand_metavar - self.chain = chain - # The result callback that is stored. This can be set or - # overridden with the :func:`result_callback` decorator. - self._result_callback = result_callback - - if self.chain: - for param in self.params: - if isinstance(param, Argument) and not param.required: - raise RuntimeError( - "Multi commands in chain mode cannot have" - " optional arguments." 
- ) - - def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict(ctx) - commands = {} - - for name in self.list_commands(ctx): - command = self.get_command(ctx, name) - - if command is None: - continue - - sub_ctx = ctx._make_sub_context(command) - - with sub_ctx.scope(cleanup=False): - commands[name] = command.to_info_dict(sub_ctx) - - info_dict.update(commands=commands, chain=self.chain) - return info_dict - - def collect_usage_pieces(self, ctx: Context) -> t.List[str]: - rv = super().collect_usage_pieces(ctx) - rv.append(self.subcommand_metavar) - return rv - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - super().format_options(ctx, formatter) - self.format_commands(ctx, formatter) - - def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: - """Adds a result callback to the command. By default if a - result callback is already registered this will chain them but - this can be disabled with the `replace` parameter. The result - callback is invoked with the return value of the subcommand - (or the list of return values from all subcommands if chaining - is enabled) as well as the parameters as they would be passed - to the main callback. - - Example:: - - @click.group() - @click.option('-i', '--input', default=23) - def cli(input): - return 42 - - @cli.result_callback() - def process_result(result, input): - return result + input - - :param replace: if set to `True` an already existing result - callback will be removed. - - .. versionchanged:: 8.0 - Renamed from ``resultcallback``. - - .. versionadded:: 3.0 - """ - - def decorator(f: F) -> F: - old_callback = self._result_callback - - if old_callback is None or replace: - self._result_callback = f - return f - - def function(__value, *args, **kwargs): # type: ignore - inner = old_callback(__value, *args, **kwargs) - return f(inner, *args, **kwargs) - - self._result_callback = rv = update_wrapper(t.cast(F, function), f) - return rv - - return decorator - - def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: - """Extra format methods for multi methods that adds all the commands - after the options. - """ - commands = [] - for subcommand in self.list_commands(ctx): - cmd = self.get_command(ctx, subcommand) - # What is this, the tool lied about a command. 
Ignore it - if cmd is None: - continue - if cmd.hidden: - continue - - commands.append((subcommand, cmd)) - - # allow for 3 times the default spacing - if len(commands): - limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) - - rows = [] - for subcommand, cmd in commands: - help = cmd.get_short_help_str(limit) - rows.append((subcommand, help)) - - if rows: - with formatter.section(_("Commands")): - formatter.write_dl(rows) - - def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - rest = super().parse_args(ctx, args) - - if self.chain: - ctx.protected_args = rest - ctx.args = [] - elif rest: - ctx.protected_args, ctx.args = rest[:1], rest[1:] - - return ctx.args - - def invoke(self, ctx: Context) -> t.Any: - def _process_result(value: t.Any) -> t.Any: - if self._result_callback is not None: - value = ctx.invoke(self._result_callback, value, **ctx.params) - return value - - if not ctx.protected_args: - if self.invoke_without_command: - # No subcommand was invoked, so the result callback is - # invoked with the group return value for regular - # groups, or an empty list for chained groups. - with ctx: - rv = super().invoke(ctx) - return _process_result([] if self.chain else rv) - ctx.fail(_("Missing command.")) - - # Fetch args back out - args = [*ctx.protected_args, *ctx.args] - ctx.args = [] - ctx.protected_args = [] - - # If we're not in chain mode, we only allow the invocation of a - # single command but we also inform the current context about the - # name of the command to invoke. - if not self.chain: - # Make sure the context is entered so we do not clean up - # resources until the result processor has worked. - with ctx: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - ctx.invoked_subcommand = cmd_name - super().invoke(ctx) - sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) - with sub_ctx: - return _process_result(sub_ctx.command.invoke(sub_ctx)) - - # In chain mode we create the contexts step by step, but after the - # base command has been invoked. Because at that point we do not - # know the subcommands yet, the invoked subcommand attribute is - # set to ``*`` to inform the command that subcommands are executed - # but nothing else. - with ctx: - ctx.invoked_subcommand = "*" if args else None - super().invoke(ctx) - - # Otherwise we make every single context and invoke them in a - # chain. In that case the return value to the result processor - # is the list of all invoked subcommand's results. - contexts = [] - while args: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - sub_ctx = cmd.make_context( - cmd_name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - ) - contexts.append(sub_ctx) - args, sub_ctx.args = sub_ctx.args, [] - - rv = [] - for sub_ctx in contexts: - with sub_ctx: - rv.append(sub_ctx.command.invoke(sub_ctx)) - return _process_result(rv) - - def resolve_command( - self, ctx: Context, args: t.List[str] - ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]: - cmd_name = make_str(args[0]) - original_cmd_name = cmd_name - - # Get the command - cmd = self.get_command(ctx, cmd_name) - - # If we can't find the command but there is a normalization - # function available, we try with that one. 
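-        # (For example, a group created with
-        # ``context_settings={"token_normalize_func": str.lower}`` lets a
-        # user type ``COMMIT`` and still resolve the ``commit`` command.)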
- if cmd is None and ctx.token_normalize_func is not None: - cmd_name = ctx.token_normalize_func(cmd_name) - cmd = self.get_command(ctx, cmd_name) - - # If we don't find the command we want to show an error message - # to the user that it was not provided. However, there is - # something else we should do: if the first argument looks like - # an option we want to kick off parsing again for arguments to - # resolve things like --help which now should go to the main - # place. - if cmd is None and not ctx.resilient_parsing: - if split_opt(cmd_name)[0]: - self.parse_args(ctx, ctx.args) - ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) - return cmd_name if cmd else None, cmd, args[1:] - - def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: - """Given a context and a command name, this returns a - :class:`Command` object if it exists or returns `None`. - """ - raise NotImplementedError - - def list_commands(self, ctx: Context) -> t.List[str]: - """Returns a list of subcommand names in the order they should - appear. - """ - return [] - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. Looks - at the names of options, subcommands, and chained - multi-commands. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results = [ - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - ] - results.extend(super().shell_complete(ctx, incomplete)) - return results - - -class Group(MultiCommand): - """A group allows a command to have subcommands attached. This is - the most common way to implement nesting in Click. - - :param name: The name of the group command. - :param commands: A dict mapping names to :class:`Command` objects. - Can also be a list of :class:`Command`, which will use - :attr:`Command.name` to create the dict. - :param attrs: Other command arguments described in - :class:`MultiCommand`, :class:`Command`, and - :class:`BaseCommand`. - - .. versionchanged:: 8.0 - The ``commands`` argument can be a list of command objects. - """ - - #: If set, this is used by the group's :meth:`command` decorator - #: as the default :class:`Command` class. This is useful to make all - #: subcommands use a custom command class. - #: - #: .. versionadded:: 8.0 - command_class: t.Optional[t.Type[Command]] = None - - #: If set, this is used by the group's :meth:`group` decorator - #: as the default :class:`Group` class. This is useful to make all - #: subgroups use a custom group class. - #: - #: If set to the special value :class:`type` (literally - #: ``group_class = type``), this group's class will be used as the - #: default class. This makes a custom group class continue to make - #: custom groups. - #: - #: .. 
versionadded:: 8.0 - group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None - # Literal[type] isn't valid, so use Type[type] - - def __init__( - self, - name: t.Optional[str] = None, - commands: t.Optional[ - t.Union[t.MutableMapping[str, Command], t.Sequence[Command]] - ] = None, - **attrs: t.Any, - ) -> None: - super().__init__(name, **attrs) - - if commands is None: - commands = {} - elif isinstance(commands, abc.Sequence): - commands = {c.name: c for c in commands if c.name is not None} - - #: The registered subcommands by their exported names. - self.commands: t.MutableMapping[str, Command] = commands - - def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None: - """Registers another :class:`Command` with this group. If the name - is not provided, the name of the command is used. - """ - name = name or cmd.name - if name is None: - raise TypeError("Command has no name.") - _check_multicommand(self, name, cmd, register=True) - self.commands[name] = cmd - - @t.overload - def command(self, __func: t.Callable[..., t.Any]) -> Command: - ... - - @t.overload - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Command]: - ... - - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]: - """A shortcut decorator for declaring and attaching a command to - the group. This takes the same arguments as :func:`command` and - immediately registers the created command with this group by - calling :meth:`add_command`. - - To customize the command class used, set the - :attr:`command_class` attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`command_class` attribute. - """ - from .decorators import command - - func: t.Optional[t.Callable[..., t.Any]] = None - - if args and callable(args[0]): - assert ( - len(args) == 1 and not kwargs - ), "Use 'command(**kwargs)(callable)' to provide arguments." - (func,) = args - args = () - - if self.command_class and kwargs.get("cls") is None: - kwargs["cls"] = self.command_class - - def decorator(f: t.Callable[..., t.Any]) -> Command: - cmd: Command = command(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - @t.overload - def group(self, __func: t.Callable[..., t.Any]) -> "Group": - ... - - @t.overload - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]: - ... - - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]: - """A shortcut decorator for declaring and attaching a group to - the group. This takes the same arguments as :func:`group` and - immediately registers the created group with this group by - calling :meth:`add_command`. - - To customize the group class used, set the :attr:`group_class` - attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`group_class` attribute. - """ - from .decorators import group - - func: t.Optional[t.Callable[..., t.Any]] = None - - if args and callable(args[0]): - assert ( - len(args) == 1 and not kwargs - ), "Use 'group(**kwargs)(callable)' to provide arguments." 
- (func,) = args - args = () - - if self.group_class is not None and kwargs.get("cls") is None: - if self.group_class is type: - kwargs["cls"] = type(self) - else: - kwargs["cls"] = self.group_class - - def decorator(f: t.Callable[..., t.Any]) -> "Group": - cmd: Group = group(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: - return self.commands.get(cmd_name) - - def list_commands(self, ctx: Context) -> t.List[str]: - return sorted(self.commands) - - -class CommandCollection(MultiCommand): - """A command collection is a multi command that merges multiple multi - commands together into one. This is a straightforward implementation - that accepts a list of different multi commands as sources and - provides all the commands for each of them. - - See :class:`MultiCommand` and :class:`Command` for the description of - ``name`` and ``attrs``. - """ - - def __init__( - self, - name: t.Optional[str] = None, - sources: t.Optional[t.List[MultiCommand]] = None, - **attrs: t.Any, - ) -> None: - super().__init__(name, **attrs) - #: The list of registered multi commands. - self.sources: t.List[MultiCommand] = sources or [] - - def add_source(self, multi_cmd: MultiCommand) -> None: - """Adds a new multi command to the chain dispatcher.""" - self.sources.append(multi_cmd) - - def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: - for source in self.sources: - rv = source.get_command(ctx, cmd_name) - - if rv is not None: - if self.chain: - _check_multicommand(self, cmd_name, rv) - - return rv - - return None - - def list_commands(self, ctx: Context) -> t.List[str]: - rv: t.Set[str] = set() - - for source in self.sources: - rv.update(source.list_commands(ctx)) - - return sorted(rv) - - -def _check_iter(value: t.Any) -> t.Iterator[t.Any]: - """Check if the value is iterable but not a string. Raises a type - error, or return an iterator over the value. - """ - if isinstance(value, str): - raise TypeError - - return iter(value) - - -class Parameter: - r"""A parameter to a command comes in two versions: they are either - :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently - not supported by design as some of the internals for parsing are - intentionally not finalized. - - Some settings are supported by both options and arguments. - - :param param_decls: the parameter declarations for this option or - argument. This is a list of flags or argument - names. - :param type: the type that should be used. Either a :class:`ParamType` - or a Python type. The latter is converted into the former - automatically if supported. - :param required: controls if this is optional or not. - :param default: the default value if omitted. This can also be a callable, - in which case it's invoked when the default is needed - without any arguments. - :param callback: A function to further process or validate the value - after type conversion. It is called as ``f(ctx, param, value)`` - and must return the value. It is called for all sources, - including prompts. - :param nargs: the number of arguments to match. If not ``1`` the return - value is a tuple instead of single value. The default for - nargs is ``1`` (except if the type is a tuple, then it's - the arity of the tuple). If ``nargs=-1``, all remaining - parameters are collected. - :param metavar: how the value is represented in the help page. 
- :param expose_value: if this is `True` then the value is passed onwards - to the command callback and stored on the context, - otherwise it's skipped. - :param is_eager: eager values are processed before non eager ones. This - should not be set for arguments or it will inverse the - order of processing. - :param envvar: a string or list of strings that are environment variables - that should be checked. - :param shell_complete: A function that returns custom shell - completions. Used instead of the param's type completion if - given. Takes ``ctx, param, incomplete`` and must return a list - of :class:`~click.shell_completion.CompletionItem` or a list of - strings. - - .. versionchanged:: 8.0 - ``process_value`` validates required parameters and bounded - ``nargs``, and invokes the parameter callback before returning - the value. This allows the callback to validate prompts. - ``full_process_value`` is removed. - - .. versionchanged:: 8.0 - ``autocompletion`` is renamed to ``shell_complete`` and has new - semantics described above. The old name is deprecated and will - be removed in 8.1, until then it will be wrapped to match the - new requirements. - - .. versionchanged:: 8.0 - For ``multiple=True, nargs>1``, the default must be a list of - tuples. - - .. versionchanged:: 8.0 - Setting a default is no longer required for ``nargs>1``, it will - default to ``None``. ``multiple=True`` or ``nargs=-1`` will - default to ``()``. - - .. versionchanged:: 7.1 - Empty environment variables are ignored rather than taking the - empty string value. This makes it possible for scripts to clear - variables if they can't unset them. - - .. versionchanged:: 2.0 - Changed signature for parameter callback to also be passed the - parameter. The old callback format will still work, but it will - raise a warning to give you a chance to migrate the code easier. - """ - - param_type_name = "parameter" - - def __init__( - self, - param_decls: t.Optional[t.Sequence[str]] = None, - type: t.Optional[t.Union[types.ParamType, t.Any]] = None, - required: bool = False, - default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None, - callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None, - nargs: t.Optional[int] = None, - multiple: bool = False, - metavar: t.Optional[str] = None, - expose_value: bool = True, - is_eager: bool = False, - envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None, - shell_complete: t.Optional[ - t.Callable[ - [Context, "Parameter", str], - t.Union[t.List["CompletionItem"], t.List[str]], - ] - ] = None, - ) -> None: - self.name: t.Optional[str] - self.opts: t.List[str] - self.secondary_opts: t.List[str] - self.name, self.opts, self.secondary_opts = self._parse_decls( - param_decls or (), expose_value - ) - self.type: types.ParamType = types.convert_type(type, default) - - # Default nargs to what the type tells us if we have that - # information available. - if nargs is None: - if self.type.is_composite: - nargs = self.type.arity - else: - nargs = 1 - - self.required = required - self.callback = callback - self.nargs = nargs - self.multiple = multiple - self.expose_value = expose_value - self.default = default - self.is_eager = is_eager - self.metavar = metavar - self.envvar = envvar - self._custom_shell_complete = shell_complete - - if __debug__: - if self.type.is_composite and nargs != self.type.arity: - raise ValueError( - f"'nargs' must be {self.type.arity} (or None) for" - f" type {self.type!r}, but it was {nargs}." 
- ) - - # Skip no default or callable default. - check_default = default if not callable(default) else None - - if check_default is not None: - if multiple: - try: - # Only check the first value against nargs. - check_default = next(_check_iter(check_default), None) - except TypeError: - raise ValueError( - "'default' must be a list when 'multiple' is true." - ) from None - - # Can be None for multiple with empty default. - if nargs != 1 and check_default is not None: - try: - _check_iter(check_default) - except TypeError: - if multiple: - message = ( - "'default' must be a list of lists when 'multiple' is" - " true and 'nargs' != 1." - ) - else: - message = "'default' must be a list when 'nargs' != 1." - - raise ValueError(message) from None - - if nargs > 1 and len(check_default) != nargs: - subject = "item length" if multiple else "length" - raise ValueError( - f"'default' {subject} must match nargs={nargs}." - ) - - def to_info_dict(self) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionadded:: 8.0 - """ - return { - "name": self.name, - "param_type_name": self.param_type_name, - "opts": self.opts, - "secondary_opts": self.secondary_opts, - "type": self.type.to_info_dict(), - "required": self.required, - "nargs": self.nargs, - "multiple": self.multiple, - "default": self.default, - "envvar": self.envvar, - } - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def _parse_decls( - self, decls: t.Sequence[str], expose_value: bool - ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: - raise NotImplementedError() - - @property - def human_readable_name(self) -> str: - """Returns the human readable name of this parameter. This is the - same as the name for options, but the metavar for arguments. - """ - return self.name # type: ignore - - def make_metavar(self) -> str: - if self.metavar is not None: - return self.metavar - - metavar = self.type.get_metavar(self) - - if metavar is None: - metavar = self.type.name.upper() - - if self.nargs != 1: - metavar += "..." - - return metavar - - @t.overload - def get_default( - self, ctx: Context, call: "te.Literal[True]" = True - ) -> t.Optional[t.Any]: - ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - """Get the default for the parameter. Tries - :meth:`Context.lookup_default` first, then the local default. - - :param ctx: Current context. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0.2 - Type casting is no longer performed when getting a default. - - .. versionchanged:: 8.0.1 - Type casting can fail in resilient parsing mode. Invalid - defaults will not prevent showing help text. - - .. versionchanged:: 8.0 - Looks at ``ctx.default_map`` first. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. 
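-
-        A sketch of reading a possibly callable default without
-        invoking it (``param`` being this parameter)::
-
-            default = param.get_default(ctx, call=False)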
- """ - value = ctx.lookup_default(self.name, call=False) # type: ignore - - if value is None: - value = self.default - - if call and callable(value): - value = value() - - return value - - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: - raise NotImplementedError() - - def consume_value( - self, ctx: Context, opts: t.Mapping[str, t.Any] - ) -> t.Tuple[t.Any, ParameterSource]: - value = opts.get(self.name) # type: ignore - source = ParameterSource.COMMANDLINE - - if value is None: - value = self.value_from_envvar(ctx) - source = ParameterSource.ENVIRONMENT - - if value is None: - value = ctx.lookup_default(self.name) # type: ignore - source = ParameterSource.DEFAULT_MAP - - if value is None: - value = self.get_default(ctx) - source = ParameterSource.DEFAULT - - return value, source - - def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: - """Convert and validate a value against the option's - :attr:`type`, :attr:`multiple`, and :attr:`nargs`. - """ - if value is None: - return () if self.multiple or self.nargs == -1 else None - - def check_iter(value: t.Any) -> t.Iterator[t.Any]: - try: - return _check_iter(value) - except TypeError: - # This should only happen when passing in args manually, - # the parser should construct an iterable when parsing - # the command line. - raise BadParameter( - _("Value must be an iterable."), ctx=ctx, param=self - ) from None - - if self.nargs == 1 or self.type.is_composite: - - def convert(value: t.Any) -> t.Any: - return self.type(value, param=self, ctx=ctx) - - elif self.nargs == -1: - - def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] - return tuple(self.type(x, self, ctx) for x in check_iter(value)) - - else: # nargs > 1 - - def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] 
- value = tuple(check_iter(value)) - - if len(value) != self.nargs: - raise BadParameter( - ngettext( - "Takes {nargs} values but 1 was given.", - "Takes {nargs} values but {len} were given.", - len(value), - ).format(nargs=self.nargs, len=len(value)), - ctx=ctx, - param=self, - ) - - return tuple(self.type(x, self, ctx) for x in value) - - if self.multiple: - return tuple(convert(x) for x in check_iter(value)) - - return convert(value) - - def value_is_missing(self, value: t.Any) -> bool: - if value is None: - return True - - if (self.nargs != 1 or self.multiple) and value == (): - return True - - return False - - def process_value(self, ctx: Context, value: t.Any) -> t.Any: - value = self.type_cast_value(ctx, value) - - if self.required and self.value_is_missing(value): - raise MissingParameter(ctx=ctx, param=self) - - if self.callback is not None: - value = self.callback(ctx, self, value) - - return value - - def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: - if self.envvar is None: - return None - - if isinstance(self.envvar, str): - rv = os.environ.get(self.envvar) - - if rv: - return rv - else: - for envvar in self.envvar: - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: - rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) - - if rv is not None and self.nargs != 1: - rv = self.type.split_envvar_value(rv) - - return rv - - def handle_parse_result( - self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] - ) -> t.Tuple[t.Any, t.List[str]]: - with augment_usage_errors(ctx, param=self): - value, source = self.consume_value(ctx, opts) - ctx.set_parameter_source(self.name, source) # type: ignore - - try: - value = self.process_value(ctx, value) - except Exception: - if not ctx.resilient_parsing: - raise - - value = None - - if self.expose_value: - ctx.params[self.name] = value # type: ignore - - return value, args - - def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: - pass - - def get_usage_pieces(self, ctx: Context) -> t.List[str]: - return [] - - def get_error_hint(self, ctx: Context) -> str: - """Get a stringified version of the param for use in error messages to - indicate which param caused the error. - """ - hint_list = self.opts or [self.human_readable_name] - return " / ".join(f"'{x}'" for x in hint_list) - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. If a - ``shell_complete`` function was given during init, it is used. - Otherwise, the :attr:`type` - :meth:`~click.types.ParamType.shell_complete` function is used. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - if self._custom_shell_complete is not None: - results = self._custom_shell_complete(ctx, self, incomplete) - - if results and isinstance(results[0], str): - from click.shell_completion import CompletionItem - - results = [CompletionItem(c) for c in results] - - return t.cast(t.List["CompletionItem"], results) - - return self.type.shell_complete(ctx, self, incomplete) - - -class Option(Parameter): - """Options are usually optional values on the command line and - have some extra features that arguments don't have. - - All other parameters are passed onwards to the parameter constructor. - - :param show_default: Show the default value for this option in its - help text. 
Values are not shown by default, unless - :attr:`Context.show_default` is ``True``. If this value is a - string, it shows that string in parentheses instead of the - actual value. This is particularly useful for dynamic options. - For single option boolean flags, the default remains hidden if - its value is ``False``. - :param show_envvar: Controls if an environment variable should be - shown on the help page. Normally, environment variables are not - shown. - :param prompt: If set to ``True`` or a non empty string then the - user will be prompted for input. If set to ``True`` the prompt - will be the option name capitalized. - :param confirmation_prompt: Prompt a second time to confirm the - value if it was prompted for. Can be set to a string instead of - ``True`` to customize the message. - :param prompt_required: If set to ``False``, the user will be - prompted for input only when the option was specified as a flag - without a value. - :param hide_input: If this is ``True`` then the input on the prompt - will be hidden from the user. This is useful for password input. - :param is_flag: forces this option to act as a flag. The default is - auto detection. - :param flag_value: which value should be used for this flag if it's - enabled. This is set to a boolean automatically if - the option string contains a slash to mark two options. - :param multiple: if this is set to `True` then the argument is accepted - multiple times and recorded. This is similar to ``nargs`` - in how it works but supports arbitrary number of - arguments. - :param count: this flag makes an option increment an integer. - :param allow_from_autoenv: if this is enabled then the value of this - parameter will be pulled from an environment - variable in case a prefix is defined on the - context. - :param help: the help string. - :param hidden: hide this option from help outputs. - :param attrs: Other command arguments described in :class:`Parameter`. - - .. versionchanged:: 8.1.0 - Help text indentation is cleaned here instead of only in the - ``@option`` decorator. - - .. versionchanged:: 8.1.0 - The ``show_default`` parameter overrides - ``Context.show_default``. - - .. versionchanged:: 8.1.0 - The default of a single option boolean flag is not shown if the - default value is ``False``. - - .. versionchanged:: 8.0.1 - ``type`` is detected from ``flag_value`` if given. 
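-
-    Example (an illustrative sketch of common option styles)::
-
-        @click.command()
-        @click.option("--count", default=1, show_default=True)
-        @click.option("--debug/--no-debug", default=False)
-        def cli(count, debug):
-            ...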
- """ - - param_type_name = "option" - - def __init__( - self, - param_decls: t.Optional[t.Sequence[str]] = None, - show_default: t.Union[bool, str, None] = None, - prompt: t.Union[bool, str] = False, - confirmation_prompt: t.Union[bool, str] = False, - prompt_required: bool = True, - hide_input: bool = False, - is_flag: t.Optional[bool] = None, - flag_value: t.Optional[t.Any] = None, - multiple: bool = False, - count: bool = False, - allow_from_autoenv: bool = True, - type: t.Optional[t.Union[types.ParamType, t.Any]] = None, - help: t.Optional[str] = None, - hidden: bool = False, - show_choices: bool = True, - show_envvar: bool = False, - **attrs: t.Any, - ) -> None: - if help: - help = inspect.cleandoc(help) - - default_is_missing = "default" not in attrs - super().__init__(param_decls, type=type, multiple=multiple, **attrs) - - if prompt is True: - if self.name is None: - raise TypeError("'name' is required with 'prompt=True'.") - - prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() - elif prompt is False: - prompt_text = None - else: - prompt_text = prompt - - self.prompt = prompt_text - self.confirmation_prompt = confirmation_prompt - self.prompt_required = prompt_required - self.hide_input = hide_input - self.hidden = hidden - - # If prompt is enabled but not required, then the option can be - # used as a flag to indicate using prompt or flag_value. - self._flag_needs_value = self.prompt is not None and not self.prompt_required - - if is_flag is None: - if flag_value is not None: - # Implicitly a flag because flag_value was set. - is_flag = True - elif self._flag_needs_value: - # Not a flag, but when used as a flag it shows a prompt. - is_flag = False - else: - # Implicitly a flag because flag options were given. - is_flag = bool(self.secondary_opts) - elif is_flag is False and not self._flag_needs_value: - # Not a flag, and prompt is not enabled, can be used as a - # flag if flag_value is set. - self._flag_needs_value = flag_value is not None - - self.default: t.Union[t.Any, t.Callable[[], t.Any]] - - if is_flag and default_is_missing and not self.required: - if multiple: - self.default = () - else: - self.default = False - - if flag_value is None: - flag_value = not self.default - - self.type: types.ParamType - if is_flag and type is None: - # Re-guess the type from the flag value instead of the - # default. - self.type = types.convert_type(None, flag_value) - - self.is_flag: bool = is_flag - self.is_bool_flag: bool = is_flag and isinstance(self.type, types.BoolParamType) - self.flag_value: t.Any = flag_value - - # Counting - self.count = count - if count: - if type is None: - self.type = types.IntRange(min=0) - if default_is_missing: - self.default = 0 - - self.allow_from_autoenv = allow_from_autoenv - self.help = help - self.show_default = show_default - self.show_choices = show_choices - self.show_envvar = show_envvar - - if __debug__: - if self.nargs == -1: - raise TypeError("nargs=-1 is not supported for options.") - - if self.prompt and self.is_flag and not self.is_bool_flag: - raise TypeError("'prompt' is not valid for non-boolean flag.") - - if not self.is_bool_flag and self.secondary_opts: - raise TypeError("Secondary flag is not valid for non-boolean flag.") - - if self.is_bool_flag and self.hide_input and self.prompt is not None: - raise TypeError( - "'prompt' with 'hide_input' is not valid for boolean flag." 
- ) - - if self.count: - if self.multiple: - raise TypeError("'count' is not valid with 'multiple'.") - - if self.is_flag: - raise TypeError("'count' is not valid with 'is_flag'.") - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - help=self.help, - prompt=self.prompt, - is_flag=self.is_flag, - flag_value=self.flag_value, - count=self.count, - hidden=self.hidden, - ) - return info_dict - - def _parse_decls( - self, decls: t.Sequence[str], expose_value: bool - ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: - opts = [] - secondary_opts = [] - name = None - possible_names = [] - - for decl in decls: - if decl.isidentifier(): - if name is not None: - raise TypeError(f"Name '{name}' defined twice") - name = decl - else: - split_char = ";" if decl[:1] == "/" else "/" - if split_char in decl: - first, second = decl.split(split_char, 1) - first = first.rstrip() - if first: - possible_names.append(split_opt(first)) - opts.append(first) - second = second.lstrip() - if second: - secondary_opts.append(second.lstrip()) - if first == second: - raise ValueError( - f"Boolean option {decl!r} cannot use the" - " same flag for true/false." - ) - else: - possible_names.append(split_opt(decl)) - opts.append(decl) - - if name is None and possible_names: - possible_names.sort(key=lambda x: -len(x[0])) # group long options first - name = possible_names[0][1].replace("-", "_").lower() - if not name.isidentifier(): - name = None - - if name is None: - if not expose_value: - return None, opts, secondary_opts - raise TypeError("Could not determine name for option") - - if not opts and not secondary_opts: - raise TypeError( - f"No options defined but a name was passed ({name})." - " Did you mean to declare an argument instead? Did" - f" you mean to pass '--{name}'?" 
- ) - - return name, opts, secondary_opts - - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: - if self.multiple: - action = "append" - elif self.count: - action = "count" - else: - action = "store" - - if self.is_flag: - action = f"{action}_const" - - if self.is_bool_flag and self.secondary_opts: - parser.add_option( - obj=self, opts=self.opts, dest=self.name, action=action, const=True - ) - parser.add_option( - obj=self, - opts=self.secondary_opts, - dest=self.name, - action=action, - const=False, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - const=self.flag_value, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - nargs=self.nargs, - ) - - def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: - if self.hidden: - return None - - any_prefix_is_slash = False - - def _write_opts(opts: t.Sequence[str]) -> str: - nonlocal any_prefix_is_slash - - rv, any_slashes = join_options(opts) - - if any_slashes: - any_prefix_is_slash = True - - if not self.is_flag and not self.count: - rv += f" {self.make_metavar()}" - - return rv - - rv = [_write_opts(self.opts)] - - if self.secondary_opts: - rv.append(_write_opts(self.secondary_opts)) - - help = self.help or "" - extra = [] - - if self.show_envvar: - envvar = self.envvar - - if envvar is None: - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - - if envvar is not None: - var_str = ( - envvar - if isinstance(envvar, str) - else ", ".join(str(d) for d in envvar) - ) - extra.append(_("env var: {var}").format(var=var_str)) - - # Temporarily enable resilient parsing to avoid type casting - # failing for the default. Might be possible to extend this to - # help formatting in general. - resilient = ctx.resilient_parsing - ctx.resilient_parsing = True - - try: - default_value = self.get_default(ctx, call=False) - finally: - ctx.resilient_parsing = resilient - - show_default = False - show_default_is_str = False - - if self.show_default is not None: - if isinstance(self.show_default, str): - show_default_is_str = show_default = True - else: - show_default = self.show_default - elif ctx.show_default is not None: - show_default = ctx.show_default - - if show_default_is_str or (show_default and (default_value is not None)): - if show_default_is_str: - default_string = f"({self.show_default})" - elif isinstance(default_value, (list, tuple)): - default_string = ", ".join(str(d) for d in default_value) - elif inspect.isfunction(default_value): - default_string = _("(dynamic)") - elif self.is_bool_flag and self.secondary_opts: - # For boolean flags that have distinct True/False opts, - # use the opt without prefix instead of the value. 
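-                # (For example, ``--debug/--no-debug`` with
-                # ``default=True`` renders as ``debug`` in the help.)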
- default_string = split_opt( - (self.opts if self.default else self.secondary_opts)[0] - )[1] - elif self.is_bool_flag and not self.secondary_opts and not default_value: - default_string = "" - else: - default_string = str(default_value) - - if default_string: - extra.append(_("default: {default}").format(default=default_string)) - - if ( - isinstance(self.type, types._NumberRangeBase) - # skip count with default range type - and not (self.count and self.type.min == 0 and self.type.max is None) - ): - range_str = self.type._describe_range() - - if range_str: - extra.append(range_str) - - if self.required: - extra.append(_("required")) - - if extra: - extra_str = "; ".join(extra) - help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" - - return ("; " if any_prefix_is_slash else " / ").join(rv), help - - @t.overload - def get_default( - self, ctx: Context, call: "te.Literal[True]" = True - ) -> t.Optional[t.Any]: - ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - # If we're a non boolean flag our default is more complex because - # we need to look at all flags in the same group to figure out - # if we're the default one in which case we return the flag - # value as default. - if self.is_flag and not self.is_bool_flag: - for param in ctx.command.params: - if param.name == self.name and param.default: - return t.cast(Option, param).flag_value - - return None - - return super().get_default(ctx, call=call) - - def prompt_for_value(self, ctx: Context) -> t.Any: - """This is an alternative flow that can be activated in the full - value processing if a value does not exist. It will prompt the - user until a valid value exists and then returns the processed - value as result. - """ - assert self.prompt is not None - - # Calculate the default before prompting anything to be stable. - default = self.get_default(ctx) - - # If this is a prompt for a flag we need to handle this - # differently. - if self.is_bool_flag: - return confirm(self.prompt, default) - - return prompt( - self.prompt, - default=default, - type=self.type, - hide_input=self.hide_input, - show_choices=self.show_choices, - confirmation_prompt=self.confirmation_prompt, - value_proc=lambda x: self.process_value(ctx, x), - ) - - def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: - rv = super().resolve_envvar_value(ctx) - - if rv is not None: - return rv - - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: - rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) - - if rv is None: - return None - - value_depth = (self.nargs != 1) + bool(self.multiple) - - if value_depth > 0: - rv = self.type.split_envvar_value(rv) - - if self.multiple and self.nargs != 1: - rv = batch(rv, self.nargs) - - return rv - - def consume_value( - self, ctx: Context, opts: t.Mapping[str, "Parameter"] - ) -> t.Tuple[t.Any, ParameterSource]: - value, source = super().consume_value(ctx, opts) - - # The parser will emit a sentinel value if the option can be - # given as a flag without a value. This is different from None - # to distinguish from the flag not being given at all. 
- if value is _flag_needs_value: - if self.prompt is not None and not ctx.resilient_parsing: - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - else: - value = self.flag_value - source = ParameterSource.COMMANDLINE - - elif ( - self.multiple - and value is not None - and any(v is _flag_needs_value for v in value) - ): - value = [self.flag_value if v is _flag_needs_value else v for v in value] - source = ParameterSource.COMMANDLINE - - # The value wasn't set, or used the param's default, prompt if - # prompting is enabled. - elif ( - source in {None, ParameterSource.DEFAULT} - and self.prompt is not None - and (self.required or self.prompt_required) - and not ctx.resilient_parsing - ): - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - - return value, source - - -class Argument(Parameter): - """Arguments are positional parameters to a command. They generally - provide fewer features than options but can have infinite ``nargs`` - and are required by default. - - All parameters are passed onwards to the constructor of :class:`Parameter`. - """ - - param_type_name = "argument" - - def __init__( - self, - param_decls: t.Sequence[str], - required: t.Optional[bool] = None, - **attrs: t.Any, - ) -> None: - if required is None: - if attrs.get("default") is not None: - required = False - else: - required = attrs.get("nargs", 1) > 0 - - if "multiple" in attrs: - raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") - - super().__init__(param_decls, required=required, **attrs) - - if __debug__: - if self.default is not None and self.nargs == -1: - raise TypeError("'default' is not supported for nargs=-1.") - - @property - def human_readable_name(self) -> str: - if self.metavar is not None: - return self.metavar - return self.name.upper() # type: ignore - - def make_metavar(self) -> str: - if self.metavar is not None: - return self.metavar - var = self.type.get_metavar(self) - if not var: - var = self.name.upper() # type: ignore - if not self.required: - var = f"[{var}]" - if self.nargs != 1: - var += "..." - return var - - def _parse_decls( - self, decls: t.Sequence[str], expose_value: bool - ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: - if not decls: - if not expose_value: - return None, [], [] - raise TypeError("Could not determine name for argument") - if len(decls) == 1: - name = arg = decls[0] - name = name.replace("-", "_").lower() - else: - raise TypeError( - "Arguments take exactly one parameter declaration, got" - f" {len(decls)}." 
- ) - return name, [arg], [] - - def get_usage_pieces(self, ctx: Context) -> t.List[str]: - return [self.make_metavar()] - - def get_error_hint(self, ctx: Context) -> str: - return f"'{self.make_metavar()}'" - - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: - parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/server/venv/Lib/site-packages/click/decorators.py b/server/venv/Lib/site-packages/click/decorators.py deleted file mode 100644 index d9bba95..0000000 --- a/server/venv/Lib/site-packages/click/decorators.py +++ /dev/null @@ -1,561 +0,0 @@ -import inspect -import types -import typing as t -from functools import update_wrapper -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .globals import get_current_context -from .utils import echo - -if t.TYPE_CHECKING: - import typing_extensions as te - - P = te.ParamSpec("P") - -R = t.TypeVar("R") -T = t.TypeVar("T") -_AnyCallable = t.Callable[..., t.Any] -FC = t.TypeVar("FC", bound=t.Union[_AnyCallable, Command]) - - -def pass_context(f: "t.Callable[te.Concatenate[Context, P], R]") -> "t.Callable[P, R]": - """Marks a callback as wanting to receive the current context - object as first argument. - """ - - def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": - return f(get_current_context(), *args, **kwargs) - - return update_wrapper(new_func, f) - - -def pass_obj(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": - """Similar to :func:`pass_context`, but only pass the object on the - context onwards (:attr:`Context.obj`). This is useful if that object - represents the state of a nested system. - """ - - def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": - return f(get_current_context().obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - -def make_pass_decorator( - object_type: t.Type[T], ensure: bool = False -) -> t.Callable[["t.Callable[te.Concatenate[T, P], R]"], "t.Callable[P, R]"]: - """Given an object type this creates a decorator that will work - similar to :func:`pass_obj` but instead of passing the object of the - current context, it will find the innermost context of type - :func:`object_type`. - - This generates a decorator that works roughly like this:: - - from functools import update_wrapper - - def decorator(f): - @pass_context - def new_func(ctx, *args, **kwargs): - obj = ctx.find_object(object_type) - return ctx.invoke(f, obj, *args, **kwargs) - return update_wrapper(new_func, f) - return decorator - - :param object_type: the type of the object to pass. - :param ensure: if set to `True`, a new object will be created and - remembered on the context if it's not there yet. - """ - - def decorator(f: "t.Callable[te.Concatenate[T, P], R]") -> "t.Callable[P, R]": - def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": - ctx = get_current_context() - - obj: t.Optional[T] - if ensure: - obj = ctx.ensure_object(object_type) - else: - obj = ctx.find_object(object_type) - - if obj is None: - raise RuntimeError( - "Managed to invoke callback without a context" - f" object of type {object_type.__name__!r}" - " existing." 
- ) - - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - return decorator # type: ignore[return-value] - - -def pass_meta_key( - key: str, *, doc_description: t.Optional[str] = None -) -> "t.Callable[[t.Callable[te.Concatenate[t.Any, P], R]], t.Callable[P, R]]": - """Create a decorator that passes a key from - :attr:`click.Context.meta` as the first argument to the decorated - function. - - :param key: Key in ``Context.meta`` to pass. - :param doc_description: Description of the object being passed, - inserted into the decorator's docstring. Defaults to "the 'key' - key from Context.meta". - - .. versionadded:: 8.0 - """ - - def decorator(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": - def new_func(*args: "P.args", **kwargs: "P.kwargs") -> R: - ctx = get_current_context() - obj = ctx.meta[key] - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - if doc_description is None: - doc_description = f"the {key!r} key from :attr:`click.Context.meta`" - - decorator.__doc__ = ( - f"Decorator that passes {doc_description} as the first argument" - " to the decorated function." - ) - return decorator # type: ignore[return-value] - - -CmdType = t.TypeVar("CmdType", bound=Command) - - -# variant: no call, directly as decorator for a function. -@t.overload -def command(name: _AnyCallable) -> Command: - ... - - -# variant: with positional name and with positional or keyword cls argument: -# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) -@t.overload -def command( - name: t.Optional[str], - cls: t.Type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: - ... - - -# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) -@t.overload -def command( - name: None = None, - *, - cls: t.Type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: - ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def command( - name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Command]: - ... - - -def command( - name: t.Union[t.Optional[str], _AnyCallable] = None, - cls: t.Optional[t.Type[CmdType]] = None, - **attrs: t.Any, -) -> t.Union[Command, t.Callable[[_AnyCallable], t.Union[Command, CmdType]]]: - r"""Creates a new :class:`Command` and uses the decorated function as - callback. This will also automatically attach all decorated - :func:`option`\s and :func:`argument`\s as parameters to the command. - - The name of the command defaults to the name of the function with - underscores replaced by dashes. If you want to change that, you can - pass the intended name as the first argument. - - All keyword arguments are forwarded to the underlying command class. - For the ``params`` argument, any decorated params are appended to - the end of the list. - - Once decorated the function turns into a :class:`Command` instance - that can be invoked as a command line utility or be attached to a - command :class:`Group`. - - :param name: the name of the command. This defaults to the function - name with underscores replaced by dashes. - :param cls: the command class to instantiate. This defaults to - :class:`Command`. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.1 - The ``params`` argument can be used. Decorated params are - appended to the end of the list. 
- """ - - func: t.Optional[t.Callable[[_AnyCallable], t.Any]] = None - - if callable(name): - func = name - name = None - assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." - assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." - - if cls is None: - cls = t.cast(t.Type[CmdType], Command) - - def decorator(f: _AnyCallable) -> CmdType: - if isinstance(f, Command): - raise TypeError("Attempted to convert a callback into a command twice.") - - attr_params = attrs.pop("params", None) - params = attr_params if attr_params is not None else [] - - try: - decorator_params = f.__click_params__ # type: ignore - except AttributeError: - pass - else: - del f.__click_params__ # type: ignore - params.extend(reversed(decorator_params)) - - if attrs.get("help") is None: - attrs["help"] = f.__doc__ - - if t.TYPE_CHECKING: - assert cls is not None - assert not callable(name) - - cmd = cls( - name=name or f.__name__.lower().replace("_", "-"), - callback=f, - params=params, - **attrs, - ) - cmd.__doc__ = f.__doc__ - return cmd - - if func is not None: - return decorator(func) - - return decorator - - -GrpType = t.TypeVar("GrpType", bound=Group) - - -# variant: no call, directly as decorator for a function. -@t.overload -def group(name: _AnyCallable) -> Group: - ... - - -# variant: with positional name and with positional or keyword cls argument: -# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...) -@t.overload -def group( - name: t.Optional[str], - cls: t.Type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: - ... - - -# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) -@t.overload -def group( - name: None = None, - *, - cls: t.Type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: - ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def group( - name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Group]: - ... - - -def group( - name: t.Union[str, _AnyCallable, None] = None, - cls: t.Optional[t.Type[GrpType]] = None, - **attrs: t.Any, -) -> t.Union[Group, t.Callable[[_AnyCallable], t.Union[Group, GrpType]]]: - """Creates a new :class:`Group` with a function as callback. This - works otherwise the same as :func:`command` just that the `cls` - parameter is set to :class:`Group`. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - """ - if cls is None: - cls = t.cast(t.Type[GrpType], Group) - - if callable(name): - return command(cls=cls, **attrs)(name) - - return command(name, cls, **attrs) - - -def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: - if isinstance(f, Command): - f.params.append(param) - else: - if not hasattr(f, "__click_params__"): - f.__click_params__ = [] # type: ignore - - f.__click_params__.append(param) # type: ignore - - -def argument( - *param_decls: str, cls: t.Optional[t.Type[Argument]] = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an argument to the command. All positional arguments are - passed as parameter declarations to :class:`Argument`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Argument` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default argument class, refer to :class:`Argument` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the argument class to instantiate. 
This defaults to - :class:`Argument`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Argument - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def option( - *param_decls: str, cls: t.Optional[t.Type[Option]] = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an option to the command. All positional arguments are - passed as parameter declarations to :class:`Option`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Option` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default option class, refer to :class:`Option` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the option class to instantiate. This defaults to - :class:`Option`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Option - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--yes`` option which shows a prompt before continuing if - not passed. If the prompt is declined, the program will exit. - - :param param_decls: One or more option names. Defaults to the single - value ``"--yes"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value: - ctx.abort() - - if not param_decls: - param_decls = ("--yes",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("callback", callback) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("prompt", "Do you want to continue?") - kwargs.setdefault("help", "Confirm the action without prompting.") - return option(*param_decls, **kwargs) - - -def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--password`` option which prompts for a password, hiding - input and asking to enter the value again for confirmation. - - :param param_decls: One or more option names. Defaults to the single - value ``"--password"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - if not param_decls: - param_decls = ("--password",) - - kwargs.setdefault("prompt", True) - kwargs.setdefault("confirmation_prompt", True) - kwargs.setdefault("hide_input", True) - return option(*param_decls, **kwargs) - - -def version_option( - version: t.Optional[str] = None, - *param_decls: str, - package_name: t.Optional[str] = None, - prog_name: t.Optional[str] = None, - message: t.Optional[str] = None, - **kwargs: t.Any, -) -> t.Callable[[FC], FC]: - """Add a ``--version`` option which immediately prints the version - number and exits the program. - - If ``version`` is not provided, Click will try to detect it using - :func:`importlib.metadata.version` to get the version for the - ``package_name``. On Python < 3.8, the ``importlib_metadata`` - backport must be installed. - - If ``package_name`` is not provided, Click will try to detect it by - inspecting the stack frames. This will be used to detect the - version, so it must match the name of the installed package. - - :param version: The version number to show. 
If not provided, Click - will try to detect it. - :param param_decls: One or more option names. Defaults to the single - value ``"--version"``. - :param package_name: The package name to detect the version from. If - not provided, Click will try to detect it. - :param prog_name: The name of the CLI to show in the message. If not - provided, it will be detected from the command. - :param message: The message to show. The values ``%(prog)s``, - ``%(package)s``, and ``%(version)s`` are available. Defaults to - ``"%(prog)s, version %(version)s"``. - :param kwargs: Extra arguments are passed to :func:`option`. - :raise RuntimeError: ``version`` could not be detected. - - .. versionchanged:: 8.0 - Add the ``package_name`` parameter, and the ``%(package)s`` - value for messages. - - .. versionchanged:: 8.0 - Use :mod:`importlib.metadata` instead of ``pkg_resources``. The - version is detected based on the package name, not the entry - point name. The Python package name must match the installed - package name, or be passed with ``package_name=``. - """ - if message is None: - message = _("%(prog)s, version %(version)s") - - if version is None and package_name is None: - frame = inspect.currentframe() - f_back = frame.f_back if frame is not None else None - f_globals = f_back.f_globals if f_back is not None else None - # break reference cycle - # https://docs.python.org/3/library/inspect.html#the-interpreter-stack - del frame - - if f_globals is not None: - package_name = f_globals.get("__name__") - - if package_name == "__main__": - package_name = f_globals.get("__package__") - - if package_name: - package_name = package_name.partition(".")[0] - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - nonlocal prog_name - nonlocal version - - if prog_name is None: - prog_name = ctx.find_root().info_name - - if version is None and package_name is not None: - metadata: t.Optional[types.ModuleType] - - try: - from importlib import metadata # type: ignore - except ImportError: - # Python < 3.8 - import importlib_metadata as metadata # type: ignore - - try: - version = metadata.version(package_name) # type: ignore - except metadata.PackageNotFoundError: # type: ignore - raise RuntimeError( - f"{package_name!r} is not installed. Try passing" - " 'package_name' instead." - ) from None - - if version is None: - raise RuntimeError( - f"Could not determine the version for {package_name!r} automatically." - ) - - echo( - message % {"prog": prog_name, "package": package_name, "version": version}, - color=ctx.color, - ) - ctx.exit() - - if not param_decls: - param_decls = ("--version",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show the version and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) - - -def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--help`` option which immediately prints the help page - and exits the program. - - This is usually unnecessary, as the ``--help`` option is added to - each command automatically unless ``add_help_option=False`` is - passed. - - :param param_decls: One or more option names. Defaults to the single - value ``"--help"``. - :param kwargs: Extra arguments are passed to :func:`option`. 
- """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - if not param_decls: - param_decls = ("--help",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show this message and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) diff --git a/server/venv/Lib/site-packages/click/exceptions.py b/server/venv/Lib/site-packages/click/exceptions.py deleted file mode 100644 index fe68a36..0000000 --- a/server/venv/Lib/site-packages/click/exceptions.py +++ /dev/null @@ -1,288 +0,0 @@ -import typing as t -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import get_text_stderr -from .utils import echo -from .utils import format_filename - -if t.TYPE_CHECKING: - from .core import Command - from .core import Context - from .core import Parameter - - -def _join_param_hints( - param_hint: t.Optional[t.Union[t.Sequence[str], str]] -) -> t.Optional[str]: - if param_hint is not None and not isinstance(param_hint, str): - return " / ".join(repr(x) for x in param_hint) - - return param_hint - - -class ClickException(Exception): - """An exception that Click can handle and show to the user.""" - - #: The exit code for this exception. - exit_code = 1 - - def __init__(self, message: str) -> None: - super().__init__(message) - self.message = message - - def format_message(self) -> str: - return self.message - - def __str__(self) -> str: - return self.message - - def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: - if file is None: - file = get_text_stderr() - - echo(_("Error: {message}").format(message=self.format_message()), file=file) - - -class UsageError(ClickException): - """An internal exception that signals a usage error. This typically - aborts any further handling. - - :param message: the error message to display. - :param ctx: optionally the context that caused this error. Click will - fill in the context automatically in some situations. - """ - - exit_code = 2 - - def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: - super().__init__(message) - self.ctx = ctx - self.cmd: t.Optional["Command"] = self.ctx.command if self.ctx else None - - def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: - if file is None: - file = get_text_stderr() - color = None - hint = "" - if ( - self.ctx is not None - and self.ctx.command.get_help_option(self.ctx) is not None - ): - hint = _("Try '{command} {option}' for help.").format( - command=self.ctx.command_path, option=self.ctx.help_option_names[0] - ) - hint = f"{hint}\n" - if self.ctx is not None: - color = self.ctx.color - echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=color, - ) - - -class BadParameter(UsageError): - """An exception that formats out a standardized error message for a - bad parameter. This is useful when thrown from a callback or type as - Click will attach contextual information to it (for instance, which - parameter it is). - - .. versionadded:: 2.0 - - :param param: the parameter object that caused this error. This can - be left out, and Click will attach this info itself - if possible. - :param param_hint: a string that shows up as parameter name. 
This - can be used as alternative to `param` in cases - where custom validation should happen. If it is - a string it's used as such, if it's a list then - each item is quoted and separated. - """ - - def __init__( - self, - message: str, - ctx: t.Optional["Context"] = None, - param: t.Optional["Parameter"] = None, - param_hint: t.Optional[str] = None, - ) -> None: - super().__init__(message, ctx) - self.param = param - self.param_hint = param_hint - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - return _("Invalid value: {message}").format(message=self.message) - - return _("Invalid value for {param_hint}: {message}").format( - param_hint=_join_param_hints(param_hint), message=self.message - ) - - -class MissingParameter(BadParameter): - """Raised if click required an option or argument but it was not - provided when invoking the script. - - .. versionadded:: 4.0 - - :param param_type: a string that indicates the type of the parameter. - The default is to inherit the parameter type from - the given `param`. Valid values are ``'parameter'``, - ``'option'`` or ``'argument'``. - """ - - def __init__( - self, - message: t.Optional[str] = None, - ctx: t.Optional["Context"] = None, - param: t.Optional["Parameter"] = None, - param_hint: t.Optional[str] = None, - param_type: t.Optional[str] = None, - ) -> None: - super().__init__(message or "", ctx, param, param_hint) - self.param_type = param_type - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint: t.Optional[str] = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - param_hint = None - - param_hint = _join_param_hints(param_hint) - param_hint = f" {param_hint}" if param_hint else "" - - param_type = self.param_type - if param_type is None and self.param is not None: - param_type = self.param.param_type_name - - msg = self.message - if self.param is not None: - msg_extra = self.param.type.get_missing_message(self.param) - if msg_extra: - if msg: - msg += f". {msg_extra}" - else: - msg = msg_extra - - msg = f" {msg}" if msg else "" - - # Translate param_type for known types. - if param_type == "argument": - missing = _("Missing argument") - elif param_type == "option": - missing = _("Missing option") - elif param_type == "parameter": - missing = _("Missing parameter") - else: - missing = _("Missing {param_type}").format(param_type=param_type) - - return f"{missing}{param_hint}.{msg}" - - def __str__(self) -> str: - if not self.message: - param_name = self.param.name if self.param else None - return _("Missing parameter: {param_name}").format(param_name=param_name) - else: - return self.message - - -class NoSuchOption(UsageError): - """Raised if click attempted to handle an option that does not - exist. - - .. 
versionadded:: 4.0 - """ - - def __init__( - self, - option_name: str, - message: t.Optional[str] = None, - possibilities: t.Optional[t.Sequence[str]] = None, - ctx: t.Optional["Context"] = None, - ) -> None: - if message is None: - message = _("No such option: {name}").format(name=option_name) - - super().__init__(message, ctx) - self.option_name = option_name - self.possibilities = possibilities - - def format_message(self) -> str: - if not self.possibilities: - return self.message - - possibility_str = ", ".join(sorted(self.possibilities)) - suggest = ngettext( - "Did you mean {possibility}?", - "(Possible options: {possibilities})", - len(self.possibilities), - ).format(possibility=possibility_str, possibilities=possibility_str) - return f"{self.message} {suggest}" - - -class BadOptionUsage(UsageError): - """Raised if an option is generally supplied but the use of the option - was incorrect. This is for instance raised if the number of arguments - for an option is not correct. - - .. versionadded:: 4.0 - - :param option_name: the name of the option being used incorrectly. - """ - - def __init__( - self, option_name: str, message: str, ctx: t.Optional["Context"] = None - ) -> None: - super().__init__(message, ctx) - self.option_name = option_name - - -class BadArgumentUsage(UsageError): - """Raised if an argument is generally supplied but the use of the argument - was incorrect. This is for instance raised if the number of values - for an argument is not correct. - - .. versionadded:: 6.0 - """ - - -class FileError(ClickException): - """Raised if a file cannot be opened.""" - - def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: - if hint is None: - hint = _("unknown error") - - super().__init__(hint) - self.ui_filename: str = format_filename(filename) - self.filename = filename - - def format_message(self) -> str: - return _("Could not open file {filename!r}: {message}").format( - filename=self.ui_filename, message=self.message - ) - - -class Abort(RuntimeError): - """An internal signalling exception that signals Click to abort.""" - - -class Exit(RuntimeError): - """An exception that indicates that the application should exit with some - status code. - - :param code: the status code to exit with. - """ - - __slots__ = ("exit_code",) - - def __init__(self, code: int = 0) -> None: - self.exit_code: int = code diff --git a/server/venv/Lib/site-packages/click/formatting.py b/server/venv/Lib/site-packages/click/formatting.py deleted file mode 100644 index ddd2a2f..0000000 --- a/server/venv/Lib/site-packages/click/formatting.py +++ /dev/null @@ -1,301 +0,0 @@ -import typing as t -from contextlib import contextmanager -from gettext import gettext as _ - -from ._compat import term_len -from .parser import split_opt - -# Can force a width. This is used by the test system -FORCED_WIDTH: t.Optional[int] = None - - -def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: - widths: t.Dict[int, int] = {} - - for row in rows: - for idx, col in enumerate(row): - widths[idx] = max(widths.get(idx, 0), term_len(col)) - - return tuple(y for x, y in sorted(widths.items())) - - -def iter_rows( - rows: t.Iterable[t.Tuple[str, str]], col_count: int -) -> t.Iterator[t.Tuple[str, ...]]: - for row in rows: - yield row + ("",) * (col_count - len(row)) - - -def wrap_text( - text: str, - width: int = 78, - initial_indent: str = "", - subsequent_indent: str = "", - preserve_paragraphs: bool = False, -) -> str: - """A helper function that intelligently wraps text. 
By default, it - assumes that it operates on a single paragraph of text but if the - `preserve_paragraphs` parameter is provided it will intelligently - handle paragraphs (defined by two empty lines). - - If paragraphs are handled, a paragraph can be prefixed with an empty - line containing the ``\\b`` character (``\\x08``) to indicate that - no rewrapping should happen in that block. - - :param text: the text that should be rewrapped. - :param width: the maximum width for the text. - :param initial_indent: the initial indent that should be placed on the - first line as a string. - :param subsequent_indent: the indent string that should be placed on - each consecutive line. - :param preserve_paragraphs: if this flag is set then the wrapping will - intelligently handle paragraphs. - """ - from ._textwrap import TextWrapper - - text = text.expandtabs() - wrapper = TextWrapper( - width, - initial_indent=initial_indent, - subsequent_indent=subsequent_indent, - replace_whitespace=False, - ) - if not preserve_paragraphs: - return wrapper.fill(text) - - p: t.List[t.Tuple[int, bool, str]] = [] - buf: t.List[str] = [] - indent = None - - def _flush_par() -> None: - if not buf: - return - if buf[0].strip() == "\b": - p.append((indent or 0, True, "\n".join(buf[1:]))) - else: - p.append((indent or 0, False, " ".join(buf))) - del buf[:] - - for line in text.splitlines(): - if not line: - _flush_par() - indent = None - else: - if indent is None: - orig_len = term_len(line) - line = line.lstrip() - indent = orig_len - term_len(line) - buf.append(line) - _flush_par() - - rv = [] - for indent, raw, text in p: - with wrapper.extra_indent(" " * indent): - if raw: - rv.append(wrapper.indent_only(text)) - else: - rv.append(wrapper.fill(text)) - - return "\n\n".join(rv) - - -class HelpFormatter: - """This class helps with formatting text-based help pages. It's - usually just needed for very special internal cases, but it's also - exposed so that developers can write their own fancy outputs. - - At present, it always writes into memory. - - :param indent_increment: the additional increment for each level. - :param width: the width for the text. This defaults to the terminal - width clamped to a maximum of 78. - """ - - def __init__( - self, - indent_increment: int = 2, - width: t.Optional[int] = None, - max_width: t.Optional[int] = None, - ) -> None: - import shutil - - self.indent_increment = indent_increment - if max_width is None: - max_width = 80 - if width is None: - width = FORCED_WIDTH - if width is None: - width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) - self.width = width - self.current_indent = 0 - self.buffer: t.List[str] = [] - - def write(self, string: str) -> None: - """Writes a unicode string into the internal buffer.""" - self.buffer.append(string) - - def indent(self) -> None: - """Increases the indentation.""" - self.current_indent += self.indent_increment - - def dedent(self) -> None: - """Decreases the indentation.""" - self.current_indent -= self.indent_increment - - def write_usage( - self, prog: str, args: str = "", prefix: t.Optional[str] = None - ) -> None: - """Writes a usage line into the buffer. - - :param prog: the program name. - :param args: whitespace separated list of arguments. - :param prefix: The prefix for the first line. Defaults to - ``"Usage: "``. 
- """ - if prefix is None: - prefix = f"{_('Usage:')} " - - usage_prefix = f"{prefix:>{self.current_indent}}{prog} " - text_width = self.width - self.current_indent - - if text_width >= (term_len(usage_prefix) + 20): - # The arguments will fit to the right of the prefix. - indent = " " * term_len(usage_prefix) - self.write( - wrap_text( - args, - text_width, - initial_indent=usage_prefix, - subsequent_indent=indent, - ) - ) - else: - # The prefix is too long, put the arguments on the next line. - self.write(usage_prefix) - self.write("\n") - indent = " " * (max(self.current_indent, term_len(prefix)) + 4) - self.write( - wrap_text( - args, text_width, initial_indent=indent, subsequent_indent=indent - ) - ) - - self.write("\n") - - def write_heading(self, heading: str) -> None: - """Writes a heading into the buffer.""" - self.write(f"{'':>{self.current_indent}}{heading}:\n") - - def write_paragraph(self) -> None: - """Writes a paragraph into the buffer.""" - if self.buffer: - self.write("\n") - - def write_text(self, text: str) -> None: - """Writes re-indented text into the buffer. This rewraps and - preserves paragraphs. - """ - indent = " " * self.current_indent - self.write( - wrap_text( - text, - self.width, - initial_indent=indent, - subsequent_indent=indent, - preserve_paragraphs=True, - ) - ) - self.write("\n") - - def write_dl( - self, - rows: t.Sequence[t.Tuple[str, str]], - col_max: int = 30, - col_spacing: int = 2, - ) -> None: - """Writes a definition list into the buffer. This is how options - and commands are usually formatted. - - :param rows: a list of two item tuples for the terms and values. - :param col_max: the maximum width of the first column. - :param col_spacing: the number of spaces between the first and - second column. - """ - rows = list(rows) - widths = measure_table(rows) - if len(widths) != 2: - raise TypeError("Expected two columns for definition list") - - first_col = min(widths[0], col_max) + col_spacing - - for first, second in iter_rows(rows, len(widths)): - self.write(f"{'':>{self.current_indent}}{first}") - if not second: - self.write("\n") - continue - if term_len(first) <= first_col - col_spacing: - self.write(" " * (first_col - term_len(first))) - else: - self.write("\n") - self.write(" " * (first_col + self.current_indent)) - - text_width = max(self.width - first_col - 2, 10) - wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) - lines = wrapped_text.splitlines() - - if lines: - self.write(f"{lines[0]}\n") - - for line in lines[1:]: - self.write(f"{'':>{first_col + self.current_indent}}{line}\n") - else: - self.write("\n") - - @contextmanager - def section(self, name: str) -> t.Iterator[None]: - """Helpful context manager that writes a paragraph, a heading, - and the indents. - - :param name: the section name that is written as heading. 
- """ - self.write_paragraph() - self.write_heading(name) - self.indent() - try: - yield - finally: - self.dedent() - - @contextmanager - def indentation(self) -> t.Iterator[None]: - """A context manager that increases the indentation.""" - self.indent() - try: - yield - finally: - self.dedent() - - def getvalue(self) -> str: - """Returns the buffer contents.""" - return "".join(self.buffer) - - -def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: - """Given a list of option strings this joins them in the most appropriate - way and returns them in the form ``(formatted_string, - any_prefix_is_slash)`` where the second item in the tuple is a flag that - indicates if any of the option prefixes was a slash. - """ - rv = [] - any_prefix_is_slash = False - - for opt in options: - prefix = split_opt(opt)[0] - - if prefix == "/": - any_prefix_is_slash = True - - rv.append((len(prefix), opt)) - - rv.sort(key=lambda x: x[0]) - return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/server/venv/Lib/site-packages/click/globals.py b/server/venv/Lib/site-packages/click/globals.py deleted file mode 100644 index 480058f..0000000 --- a/server/venv/Lib/site-packages/click/globals.py +++ /dev/null @@ -1,68 +0,0 @@ -import typing as t -from threading import local - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Context - -_local = local() - - -@t.overload -def get_current_context(silent: "te.Literal[False]" = False) -> "Context": - ... - - -@t.overload -def get_current_context(silent: bool = ...) -> t.Optional["Context"]: - ... - - -def get_current_context(silent: bool = False) -> t.Optional["Context"]: - """Returns the current click context. This can be used as a way to - access the current context object from anywhere. This is a more implicit - alternative to the :func:`pass_context` decorator. This function is - primarily useful for helpers such as :func:`echo` which might be - interested in changing its behavior based on the current context. - - To push the current context, :meth:`Context.scope` can be used. - - .. versionadded:: 5.0 - - :param silent: if set to `True` the return value is `None` if no context - is available. The default behavior is to raise a - :exc:`RuntimeError`. - """ - try: - return t.cast("Context", _local.stack[-1]) - except (AttributeError, IndexError) as e: - if not silent: - raise RuntimeError("There is no active click context.") from e - - return None - - -def push_context(ctx: "Context") -> None: - """Pushes a new context to the current stack.""" - _local.__dict__.setdefault("stack", []).append(ctx) - - -def pop_context() -> None: - """Removes the top level from the stack.""" - _local.stack.pop() - - -def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: - """Internal helper to get the default value of the color flag. If a - value is passed it's returned unchanged, otherwise it's looked up from - the current context. 
- """ - if color is not None: - return color - - ctx = get_current_context(silent=True) - - if ctx is not None: - return ctx.color - - return None diff --git a/server/venv/Lib/site-packages/click/parser.py b/server/venv/Lib/site-packages/click/parser.py deleted file mode 100644 index 5fa7adf..0000000 --- a/server/venv/Lib/site-packages/click/parser.py +++ /dev/null @@ -1,529 +0,0 @@ -""" -This module started out as largely a copy paste from the stdlib's -optparse module with the features removed that we do not need from -optparse because we implement them in Click on a higher level (for -instance type handling, help formatting and a lot more). - -The plan is to remove more and more from here over time. - -The reason this is a different module and not optparse from the stdlib -is that there are differences in 2.x and 3.x about the error messages -generated and optparse in the stdlib uses gettext for no good reason -and might cause us issues. - -Click uses parts of optparse written by Gregory P. Ward and maintained -by the Python Software Foundation. This is limited to code in parser.py. - -Copyright 2001-2006 Gregory P. Ward. All rights reserved. -Copyright 2002-2006 Python Software Foundation. All rights reserved. -""" -# This code uses parts of optparse written by Gregory P. Ward and -# maintained by the Python Software Foundation. -# Copyright 2001-2006 Gregory P. Ward -# Copyright 2002-2006 Python Software Foundation -import typing as t -from collections import deque -from gettext import gettext as _ -from gettext import ngettext - -from .exceptions import BadArgumentUsage -from .exceptions import BadOptionUsage -from .exceptions import NoSuchOption -from .exceptions import UsageError - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Argument as CoreArgument - from .core import Context - from .core import Option as CoreOption - from .core import Parameter as CoreParameter - -V = t.TypeVar("V") - -# Sentinel value that indicates an option was passed as a flag without a -# value but is not a flag option. Option.consume_value uses this to -# prompt or use the flag_value. -_flag_needs_value = object() - - -def _unpack_args( - args: t.Sequence[str], nargs_spec: t.Sequence[int] -) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: - """Given an iterable of arguments and an iterable of nargs specifications, - it returns a tuple with all the unpacked arguments at the first index - and all remaining arguments as the second. - - The nargs specification is the number of arguments that should be consumed - or `-1` to indicate that this position should eat up all the remainders. - - Missing items are filled with `None`. - """ - args = deque(args) - nargs_spec = deque(nargs_spec) - rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] - spos: t.Optional[int] = None - - def _fetch(c: "te.Deque[V]") -> t.Optional[V]: - try: - if spos is None: - return c.popleft() - else: - return c.pop() - except IndexError: - return None - - while nargs_spec: - nargs = _fetch(nargs_spec) - - if nargs is None: - continue - - if nargs == 1: - rv.append(_fetch(args)) - elif nargs > 1: - x = [_fetch(args) for _ in range(nargs)] - - # If we're reversed, we're pulling in the arguments in reverse, - # so we need to turn them around. 
- if spos is not None: - x.reverse() - - rv.append(tuple(x)) - elif nargs < 0: - if spos is not None: - raise TypeError("Cannot have two nargs < 0") - - spos = len(rv) - rv.append(None) - - # spos is the position of the wildcard (star). If it's not `None`, - # we fill it with the remainder. - if spos is not None: - rv[spos] = tuple(args) - args = [] - rv[spos + 1 :] = reversed(rv[spos + 1 :]) - - return tuple(rv), list(args) - - -def split_opt(opt: str) -> t.Tuple[str, str]: - first = opt[:1] - if first.isalnum(): - return "", opt - if opt[1:2] == first: - return opt[:2], opt[2:] - return first, opt[1:] - - -def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: - if ctx is None or ctx.token_normalize_func is None: - return opt - prefix, opt = split_opt(opt) - return f"{prefix}{ctx.token_normalize_func(opt)}" - - -def split_arg_string(string: str) -> t.List[str]: - """Split an argument string as with :func:`shlex.split`, but don't - fail if the string is incomplete. Ignores a missing closing quote or - incomplete escape sequence and uses the partial token as-is. - - .. code-block:: python - - split_arg_string("example 'my file") - ["example", "my file"] - - split_arg_string("example my\\") - ["example", "my"] - - :param string: String to split. - """ - import shlex - - lex = shlex.shlex(string, posix=True) - lex.whitespace_split = True - lex.commenters = "" - out = [] - - try: - for token in lex: - out.append(token) - except ValueError: - # Raised when end-of-string is reached in an invalid state. Use - # the partial token as-is. The quote or escape character is in - # lex.state, not lex.token. - out.append(lex.token) - - return out - - -class Option: - def __init__( - self, - obj: "CoreOption", - opts: t.Sequence[str], - dest: t.Optional[str], - action: t.Optional[str] = None, - nargs: int = 1, - const: t.Optional[t.Any] = None, - ): - self._short_opts = [] - self._long_opts = [] - self.prefixes: t.Set[str] = set() - - for opt in opts: - prefix, value = split_opt(opt) - if not prefix: - raise ValueError(f"Invalid start character for option ({opt})") - self.prefixes.add(prefix[0]) - if len(prefix) == 1 and len(value) == 1: - self._short_opts.append(opt) - else: - self._long_opts.append(opt) - self.prefixes.add(prefix) - - if action is None: - action = "store" - - self.dest = dest - self.action = action - self.nargs = nargs - self.const = const - self.obj = obj - - @property - def takes_value(self) -> bool: - return self.action in ("store", "append") - - def process(self, value: t.Any, state: "ParsingState") -> None: - if self.action == "store": - state.opts[self.dest] = value # type: ignore - elif self.action == "store_const": - state.opts[self.dest] = self.const # type: ignore - elif self.action == "append": - state.opts.setdefault(self.dest, []).append(value) # type: ignore - elif self.action == "append_const": - state.opts.setdefault(self.dest, []).append(self.const) # type: ignore - elif self.action == "count": - state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore - else: - raise ValueError(f"unknown action '{self.action}'") - state.order.append(self.obj) - - -class Argument: - def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): - self.dest = dest - self.nargs = nargs - self.obj = obj - - def process( - self, - value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], - state: "ParsingState", - ) -> None: - if self.nargs > 1: - assert value is not None - holes = sum(1 for x in value if x is None) - if holes == len(value): - 
value = None - elif holes != 0: - raise BadArgumentUsage( - _("Argument {name!r} takes {nargs} values.").format( - name=self.dest, nargs=self.nargs - ) - ) - - if self.nargs == -1 and self.obj.envvar is not None and value == (): - # Replace empty tuple with None so that a value from the - # environment may be tried. - value = None - - state.opts[self.dest] = value # type: ignore - state.order.append(self.obj) - - -class ParsingState: - def __init__(self, rargs: t.List[str]) -> None: - self.opts: t.Dict[str, t.Any] = {} - self.largs: t.List[str] = [] - self.rargs = rargs - self.order: t.List["CoreParameter"] = [] - - -class OptionParser: - """The option parser is an internal class that is ultimately used to - parse options and arguments. It's modelled after optparse and brings - a similar but vastly simplified API. It should generally not be used - directly as the high level Click classes wrap it for you. - - It's not nearly as extensible as optparse or argparse as it does not - implement features that are implemented on a higher level (such as - types or defaults). - - :param ctx: optionally the :class:`~click.Context` where this parser - should go with. - """ - - def __init__(self, ctx: t.Optional["Context"] = None) -> None: - #: The :class:`~click.Context` for this parser. This might be - #: `None` for some advanced use cases. - self.ctx = ctx - #: This controls how the parser deals with interspersed arguments. - #: If this is set to `False`, the parser will stop on the first - #: non-option. Click uses this to implement nested subcommands - #: safely. - self.allow_interspersed_args: bool = True - #: This tells the parser how to deal with unknown options. By - #: default it will error out (which is sensible), but there is a - #: second mode where it will ignore it and continue processing - #: after shifting all the unknown options into the resulting args. - self.ignore_unknown_options: bool = False - - if ctx is not None: - self.allow_interspersed_args = ctx.allow_interspersed_args - self.ignore_unknown_options = ctx.ignore_unknown_options - - self._short_opt: t.Dict[str, Option] = {} - self._long_opt: t.Dict[str, Option] = {} - self._opt_prefixes = {"-", "--"} - self._args: t.List[Argument] = [] - - def add_option( - self, - obj: "CoreOption", - opts: t.Sequence[str], - dest: t.Optional[str], - action: t.Optional[str] = None, - nargs: int = 1, - const: t.Optional[t.Any] = None, - ) -> None: - """Adds a new option named `dest` to the parser. The destination - is not inferred (unlike with optparse) and needs to be explicitly - provided. Action can be any of ``store``, ``store_const``, - ``append``, ``append_const`` or ``count``. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - opts = [normalize_opt(opt, self.ctx) for opt in opts] - option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) - self._opt_prefixes.update(option.prefixes) - for opt in option._short_opts: - self._short_opt[opt] = option - for opt in option._long_opts: - self._long_opt[opt] = option - - def add_argument( - self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 - ) -> None: - """Adds a positional argument named `dest` to the parser. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. 
- """ - self._args.append(Argument(obj, dest=dest, nargs=nargs)) - - def parse_args( - self, args: t.List[str] - ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: - """Parses positional arguments and returns ``(values, args, order)`` - for the parsed options and arguments as well as the leftover - arguments if there are any. The order is a list of objects as they - appear on the command line. If arguments appear multiple times they - will be memorized multiple times as well. - """ - state = ParsingState(args) - try: - self._process_args_for_options(state) - self._process_args_for_args(state) - except UsageError: - if self.ctx is None or not self.ctx.resilient_parsing: - raise - return state.opts, state.largs, state.order - - def _process_args_for_args(self, state: ParsingState) -> None: - pargs, args = _unpack_args( - state.largs + state.rargs, [x.nargs for x in self._args] - ) - - for idx, arg in enumerate(self._args): - arg.process(pargs[idx], state) - - state.largs = args - state.rargs = [] - - def _process_args_for_options(self, state: ParsingState) -> None: - while state.rargs: - arg = state.rargs.pop(0) - arglen = len(arg) - # Double dashes always handled explicitly regardless of what - # prefixes are valid. - if arg == "--": - return - elif arg[:1] in self._opt_prefixes and arglen > 1: - self._process_opts(arg, state) - elif self.allow_interspersed_args: - state.largs.append(arg) - else: - state.rargs.insert(0, arg) - return - - # Say this is the original argument list: - # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] - # ^ - # (we are about to process arg(i)). - # - # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of - # [arg0, ..., arg(i-1)] (any options and their arguments will have - # been removed from largs). - # - # The while loop will usually consume 1 or more arguments per pass. - # If it consumes 1 (eg. arg is an option that takes no arguments), - # then after _process_arg() is done the situation is: - # - # largs = subset of [arg0, ..., arg(i)] - # rargs = [arg(i+1), ..., arg(N-1)] - # - # If allow_interspersed_args is false, largs will always be - # *empty* -- still a subset of [arg0, ..., arg(i-1)], but - # not a very interesting subset! - - def _match_long_opt( - self, opt: str, explicit_value: t.Optional[str], state: ParsingState - ) -> None: - if opt not in self._long_opt: - from difflib import get_close_matches - - possibilities = get_close_matches(opt, self._long_opt) - raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) - - option = self._long_opt[opt] - if option.takes_value: - # At this point it's safe to modify rargs by injecting the - # explicit value, because no exception is raised in this - # branch. This means that the inserted value will be fully - # consumed. 
- if explicit_value is not None: - state.rargs.insert(0, explicit_value) - - value = self._get_value_from_state(opt, option, state) - - elif explicit_value is not None: - raise BadOptionUsage( - opt, _("Option {name!r} does not take a value.").format(name=opt) - ) - - else: - value = None - - option.process(value, state) - - def _match_short_opt(self, arg: str, state: ParsingState) -> None: - stop = False - i = 1 - prefix = arg[0] - unknown_options = [] - - for ch in arg[1:]: - opt = normalize_opt(f"{prefix}{ch}", self.ctx) - option = self._short_opt.get(opt) - i += 1 - - if not option: - if self.ignore_unknown_options: - unknown_options.append(ch) - continue - raise NoSuchOption(opt, ctx=self.ctx) - if option.takes_value: - # Any characters left in arg? Pretend they're the - # next arg, and stop consuming characters of arg. - if i < len(arg): - state.rargs.insert(0, arg[i:]) - stop = True - - value = self._get_value_from_state(opt, option, state) - - else: - value = None - - option.process(value, state) - - if stop: - break - - # If we got any unknown options we recombine the string of the - # remaining options and re-attach the prefix, then report that - # to the state as new larg. This way there is basic combinatorics - # that can be achieved while still ignoring unknown arguments. - if self.ignore_unknown_options and unknown_options: - state.largs.append(f"{prefix}{''.join(unknown_options)}") - - def _get_value_from_state( - self, option_name: str, option: Option, state: ParsingState - ) -> t.Any: - nargs = option.nargs - - if len(state.rargs) < nargs: - if option.obj._flag_needs_value: - # Option allows omitting the value. - value = _flag_needs_value - else: - raise BadOptionUsage( - option_name, - ngettext( - "Option {name!r} requires an argument.", - "Option {name!r} requires {nargs} arguments.", - nargs, - ).format(name=option_name, nargs=nargs), - ) - elif nargs == 1: - next_rarg = state.rargs[0] - - if ( - option.obj._flag_needs_value - and isinstance(next_rarg, str) - and next_rarg[:1] in self._opt_prefixes - and len(next_rarg) > 1 - ): - # The next arg looks like the start of an option, don't - # use it as the value if omitting the value is allowed. - value = _flag_needs_value - else: - value = state.rargs.pop(0) - else: - value = tuple(state.rargs[:nargs]) - del state.rargs[:nargs] - - return value - - def _process_opts(self, arg: str, state: ParsingState) -> None: - explicit_value = None - # Long option handling happens in two parts. The first part is - # supporting explicitly attached values. In any case, we will try - # to long match the option first. - if "=" in arg: - long_opt, explicit_value = arg.split("=", 1) - else: - long_opt = arg - norm_long_opt = normalize_opt(long_opt, self.ctx) - - # At this point we will match the (assumed) long option through - # the long option matching code. Note that this allows options - # like "-foo" to be matched as long options. - try: - self._match_long_opt(norm_long_opt, explicit_value, state) - except NoSuchOption: - # At this point the long option matching failed, and we need - # to try with short options. However there is a special rule - # which says, that if we have a two character options prefix - # (applies to "--foo" for instance), we do not dispatch to the - # short option code and will instead raise the no option - # error. 
- if arg[:2] not in self._opt_prefixes: - self._match_short_opt(arg, state) - return - - if not self.ignore_unknown_options: - raise - - state.largs.append(arg) diff --git a/server/venv/Lib/site-packages/click/py.typed b/server/venv/Lib/site-packages/click/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/click/shell_completion.py b/server/venv/Lib/site-packages/click/shell_completion.py deleted file mode 100644 index dc9e00b..0000000 --- a/server/venv/Lib/site-packages/click/shell_completion.py +++ /dev/null @@ -1,596 +0,0 @@ -import os -import re -import typing as t -from gettext import gettext as _ - -from .core import Argument -from .core import BaseCommand -from .core import Context -from .core import MultiCommand -from .core import Option -from .core import Parameter -from .core import ParameterSource -from .parser import split_arg_string -from .utils import echo - - -def shell_complete( - cli: BaseCommand, - ctx_args: t.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - instruction: str, -) -> int: - """Perform shell completion for the given CLI program. - - :param cli: Command being called. - :param ctx_args: Extra arguments to pass to - ``cli.make_context``. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - :param instruction: Value of ``complete_var`` with the completion - instruction and shell, in the form ``instruction_shell``. - :return: Status code to exit with. - """ - shell, _, instruction = instruction.partition("_") - comp_cls = get_completion_class(shell) - - if comp_cls is None: - return 1 - - comp = comp_cls(cli, ctx_args, prog_name, complete_var) - - if instruction == "source": - echo(comp.source()) - return 0 - - if instruction == "complete": - echo(comp.complete()) - return 0 - - return 1 - - -class CompletionItem: - """Represents a completion value and metadata about the value. The - default metadata is ``type`` to indicate special shell handling, - and ``help`` if a shell supports showing a help string next to the - value. - - Arbitrary parameters can be passed when creating the object, and - accessed using ``item.attr``. If an attribute wasn't passed, - accessing it returns ``None``. - - :param value: The completion suggestion. - :param type: Tells the shell script to provide special completion - support for the type. Click uses ``"dir"`` and ``"file"``. - :param help: String shown next to the value if supported. - :param kwargs: Arbitrary metadata. The built-in implementations - don't use this, but custom type completions paired with custom - shell support could use it. - """ - - __slots__ = ("value", "type", "help", "_info") - - def __init__( - self, - value: t.Any, - type: str = "plain", - help: t.Optional[str] = None, - **kwargs: t.Any, - ) -> None: - self.value: t.Any = value - self.type: str = type - self.help: t.Optional[str] = help - self._info = kwargs - - def __getattr__(self, name: str) -> t.Any: - return self._info.get(name) - - -# Only Bash >= 4.4 has the nosort option. 
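Application code rarely touches these internals directly; a parameter usually supplies completions by returning ``CompletionItem`` objects from a ``shell_complete`` callback, and the user activates the machinery with e.g. ``eval "$(_DEPLOY_COMPLETE=bash_source deploy)"``. A minimal sketch (the ``deploy`` command and its targets are invented)::

    import click
    from click.shell_completion import CompletionItem

    def complete_env(ctx, param, incomplete):
        # help= is displayed by shells that support descriptions (zsh, fish).
        targets = {"dev": "local sandbox", "prod": "production cluster"}
        return [
            CompletionItem(name, help=text)
            for name, text in targets.items()
            if name.startswith(incomplete)
        ]

    @click.command()
    @click.argument("env", shell_complete=complete_env)
    def deploy(env):
        click.echo(f"deploying to {env}")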
-_SOURCE_BASH = """\ -%(complete_func)s() { - local IFS=$'\\n' - local response - - response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ -%(complete_var)s=bash_complete $1) - - for completion in $response; do - IFS=',' read type value <<< "$completion" - - if [[ $type == 'dir' ]]; then - COMPREPLY=() - compopt -o dirnames - elif [[ $type == 'file' ]]; then - COMPREPLY=() - compopt -o default - elif [[ $type == 'plain' ]]; then - COMPREPLY+=($value) - fi - done - - return 0 -} - -%(complete_func)s_setup() { - complete -o nosort -F %(complete_func)s %(prog_name)s -} - -%(complete_func)s_setup; -""" - -_SOURCE_ZSH = """\ -#compdef %(prog_name)s - -%(complete_func)s() { - local -a completions - local -a completions_with_descriptions - local -a response - (( ! $+commands[%(prog_name)s] )) && return 1 - - response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ -%(complete_var)s=zsh_complete %(prog_name)s)}") - - for type key descr in ${response}; do - if [[ "$type" == "plain" ]]; then - if [[ "$descr" == "_" ]]; then - completions+=("$key") - else - completions_with_descriptions+=("$key":"$descr") - fi - elif [[ "$type" == "dir" ]]; then - _path_files -/ - elif [[ "$type" == "file" ]]; then - _path_files -f - fi - done - - if [ -n "$completions_with_descriptions" ]; then - _describe -V unsorted completions_with_descriptions -U - fi - - if [ -n "$completions" ]; then - compadd -U -V unsorted -a completions - fi -} - -if [[ $zsh_eval_context[-1] == loadautofunc ]]; then - # autoload from fpath, call function directly - %(complete_func)s "$@" -else - # eval/source/. command, register function for later - compdef %(complete_func)s %(prog_name)s -fi -""" - -_SOURCE_FISH = """\ -function %(complete_func)s; - set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ -COMP_CWORD=(commandline -t) %(prog_name)s); - - for completion in $response; - set -l metadata (string split "," $completion); - - if test $metadata[1] = "dir"; - __fish_complete_directories $metadata[2]; - else if test $metadata[1] = "file"; - __fish_complete_path $metadata[2]; - else if test $metadata[1] = "plain"; - echo $metadata[2]; - end; - end; -end; - -complete --no-files --command %(prog_name)s --arguments \ -"(%(complete_func)s)"; -""" - - -class ShellComplete: - """Base class for providing shell completion support. A subclass for - a given shell will override attributes and methods to implement the - completion instructions (``source`` and ``complete``). - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - - .. versionadded:: 8.0 - """ - - name: t.ClassVar[str] - """Name to register the shell as with :func:`add_completion_class`. - This is used in completion instructions (``{name}_source`` and - ``{name}_complete``). - """ - - source_template: t.ClassVar[str] - """Completion script template formatted by :meth:`source`. This must - be provided by subclasses. - """ - - def __init__( - self, - cli: BaseCommand, - ctx_args: t.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - ) -> None: - self.cli = cli - self.ctx_args = ctx_args - self.prog_name = prog_name - self.complete_var = complete_var - - @property - def func_name(self) -> str: - """The name of the shell function defined by the completion - script. 
- """ - safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) - return f"_{safe_name}_completion" - - def source_vars(self) -> t.Dict[str, t.Any]: - """Vars for formatting :attr:`source_template`. - - By default this provides ``complete_func``, ``complete_var``, - and ``prog_name``. - """ - return { - "complete_func": self.func_name, - "complete_var": self.complete_var, - "prog_name": self.prog_name, - } - - def source(self) -> str: - """Produce the shell script that defines the completion - function. By default this ``%``-style formats - :attr:`source_template` with the dict returned by - :meth:`source_vars`. - """ - return self.source_template % self.source_vars() - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - """Use the env vars defined by the shell script to return a - tuple of ``args, incomplete``. This must be implemented by - subclasses. - """ - raise NotImplementedError - - def get_completions( - self, args: t.List[str], incomplete: str - ) -> t.List[CompletionItem]: - """Determine the context and last complete command or parameter - from the complete args. Call that object's ``shell_complete`` - method to get the completions for the incomplete value. - - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) - obj, incomplete = _resolve_incomplete(ctx, args, incomplete) - return obj.shell_complete(ctx, incomplete) - - def format_completion(self, item: CompletionItem) -> str: - """Format a completion item into the form recognized by the - shell script. This must be implemented by subclasses. - - :param item: Completion item to format. - """ - raise NotImplementedError - - def complete(self) -> str: - """Produce the completion data to send back to the shell. - - By default this calls :meth:`get_completion_args`, gets the - completions, then calls :meth:`format_completion` for each - completion. - """ - args, incomplete = self.get_completion_args() - completions = self.get_completions(args, incomplete) - out = [self.format_completion(item) for item in completions] - return "\n".join(out) - - -class BashComplete(ShellComplete): - """Shell completion for Bash.""" - - name = "bash" - source_template = _SOURCE_BASH - - @staticmethod - def _check_version() -> None: - import subprocess - - output = subprocess.run( - ["bash", "-c", 'echo "${BASH_VERSION}"'], stdout=subprocess.PIPE - ) - match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) - - if match is not None: - major, minor = match.groups() - - if major < "4" or major == "4" and minor < "4": - echo( - _( - "Shell completion is not supported for Bash" - " versions older than 4.4." 
- ), - err=True, - ) - else: - echo( - _("Couldn't detect Bash version, shell completion is not supported."), - err=True, - ) - - def source(self) -> str: - self._check_version() - return super().source() - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type},{item.value}" - - -class ZshComplete(ShellComplete): - """Shell completion for Zsh.""" - - name = "zsh" - source_template = _SOURCE_ZSH - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" - - -class FishComplete(ShellComplete): - """Shell completion for Fish.""" - - name = "fish" - source_template = _SOURCE_FISH - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - incomplete = os.environ["COMP_CWORD"] - args = cwords[1:] - - # Fish stores the partial word in both COMP_WORDS and - # COMP_CWORD, remove it from complete args. - if incomplete and args and args[-1] == incomplete: - args.pop() - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - if item.help: - return f"{item.type},{item.value}\t{item.help}" - - return f"{item.type},{item.value}" - - -ShellCompleteType = t.TypeVar("ShellCompleteType", bound=t.Type[ShellComplete]) - - -_available_shells: t.Dict[str, t.Type[ShellComplete]] = { - "bash": BashComplete, - "fish": FishComplete, - "zsh": ZshComplete, -} - - -def add_completion_class( - cls: ShellCompleteType, name: t.Optional[str] = None -) -> ShellCompleteType: - """Register a :class:`ShellComplete` subclass under the given name. - The name will be provided by the completion instruction environment - variable during completion. - - :param cls: The completion class that will handle completion for the - shell. - :param name: Name to register the class under. Defaults to the - class's ``name`` attribute. - """ - if name is None: - name = cls.name - - _available_shells[name] = cls - - return cls - - -def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: - """Look up a registered :class:`ShellComplete` subclass by the name - provided by the completion instruction environment variable. If the - name isn't registered, returns ``None``. - - :param shell: Name the class is registered under. - """ - return _available_shells.get(shell) - - -def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: - """Determine if the given parameter is an argument that can still - accept values. - - :param ctx: Invocation context for the command represented by the - parsed complete args. - :param param: Argument object being checked. - """ - if not isinstance(param, Argument): - return False - - assert param.name is not None - # Will be None if expose_value is False. 
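Supporting an additional shell means subclassing ``ShellComplete`` and registering it with ``add_completion_class``, per the base-class contract above. A sketch; the ``null`` shell and its env-var protocol are invented stand-ins::

    import os

    from click.shell_completion import ShellComplete, add_completion_class

    class NullComplete(ShellComplete):
        """Hypothetical backend for a shell with no activation script."""

        name = "null"
        source_template = "# nothing to source for the 'null' shell\n"

        def get_completion_args(self):
            # Mirror BashComplete: words and cursor index arrive in env vars.
            cwords = os.environ.get("COMP_WORDS", "").split()
            cword = int(os.environ.get("COMP_CWORD", "0"))
            args = cwords[1:cword]
            incomplete = cwords[cword] if cword < len(cwords) else ""
            return args, incomplete

        def format_completion(self, item):
            return f"{item.type},{item.value}"

    add_completion_class(NullComplete)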
- value = ctx.params.get(param.name) - return ( - param.nargs == -1 - or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE - or ( - param.nargs > 1 - and isinstance(value, (tuple, list)) - and len(value) < param.nargs - ) - ) - - -def _start_of_option(ctx: Context, value: str) -> bool: - """Check if the value looks like the start of an option.""" - if not value: - return False - - c = value[0] - return c in ctx._opt_prefixes - - -def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool: - """Determine if the given parameter is an option that needs a value. - - :param args: List of complete args before the incomplete value. - :param param: Option object being checked. - """ - if not isinstance(param, Option): - return False - - if param.is_flag or param.count: - return False - - last_option = None - - for index, arg in enumerate(reversed(args)): - if index + 1 > param.nargs: - break - - if _start_of_option(ctx, arg): - last_option = arg - - return last_option is not None and last_option in param.opts - - -def _resolve_context( - cli: BaseCommand, - ctx_args: t.MutableMapping[str, t.Any], - prog_name: str, - args: t.List[str], -) -> Context: - """Produce the context hierarchy starting with the command and - traversing the complete arguments. This only follows the commands, - it doesn't trigger input prompts or callbacks. - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param args: List of complete args before the incomplete value. - """ - ctx_args["resilient_parsing"] = True - ctx = cli.make_context(prog_name, args.copy(), **ctx_args) - args = ctx.protected_args + ctx.args - - while args: - command = ctx.command - - if isinstance(command, MultiCommand): - if not command.chain: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) - args = ctx.protected_args + ctx.args - else: - sub_ctx = ctx - - while args: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - sub_ctx = cmd.make_context( - name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - resilient_parsing=True, - ) - args = sub_ctx.args - - ctx = sub_ctx - args = [*sub_ctx.protected_args, *sub_ctx.args] - else: - break - - return ctx - - -def _resolve_incomplete( - ctx: Context, args: t.List[str], incomplete: str -) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: - """Find the Click object that will handle the completion of the - incomplete value. Return the object and the incomplete value. - - :param ctx: Invocation context for the command represented by - the parsed complete args. - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - # Different shells treat an "=" between a long option name and - # value differently. Might keep the value joined, return the "=" - # as a separate item, or return the split name and value. Always - # split and discard the "=" to make completion easier. - if incomplete == "=": - incomplete = "" - elif "=" in incomplete and _start_of_option(ctx, incomplete): - name, _, incomplete = incomplete.partition("=") - args.append(name) - - # The "--" marker tells Click to stop treating values as options - # even if they start with the option character. 
If it hasn't been - # given and the incomplete arg looks like an option, the current - # command will provide option name completions. - if "--" not in args and _start_of_option(ctx, incomplete): - return ctx.command, incomplete - - params = ctx.command.get_params(ctx) - - # If the last complete arg is an option name with an incomplete - # value, the option will provide value completions. - for param in params: - if _is_incomplete_option(ctx, args, param): - return param, incomplete - - # It's not an option name or value. The first argument without a - # parsed value will provide value completions. - for param in params: - if _is_incomplete_argument(ctx, param): - return param, incomplete - - # There were no unparsed arguments, the command may be a group that - # will provide command name completions. - return ctx.command, incomplete diff --git a/server/venv/Lib/site-packages/click/termui.py b/server/venv/Lib/site-packages/click/termui.py deleted file mode 100644 index db7a4b2..0000000 --- a/server/venv/Lib/site-packages/click/termui.py +++ /dev/null @@ -1,784 +0,0 @@ -import inspect -import io -import itertools -import sys -import typing as t -from gettext import gettext as _ - -from ._compat import isatty -from ._compat import strip_ansi -from .exceptions import Abort -from .exceptions import UsageError -from .globals import resolve_color_default -from .types import Choice -from .types import convert_type -from .types import ParamType -from .utils import echo -from .utils import LazyFile - -if t.TYPE_CHECKING: - from ._termui_impl import ProgressBar - -V = t.TypeVar("V") - -# The prompt functions to use. The doc tools currently override these -# functions to customize how they work. -visible_prompt_func: t.Callable[[str], str] = input - -_ansi_colors = { - "black": 30, - "red": 31, - "green": 32, - "yellow": 33, - "blue": 34, - "magenta": 35, - "cyan": 36, - "white": 37, - "reset": 39, - "bright_black": 90, - "bright_red": 91, - "bright_green": 92, - "bright_yellow": 93, - "bright_blue": 94, - "bright_magenta": 95, - "bright_cyan": 96, - "bright_white": 97, -} -_ansi_reset_all = "\033[0m" - - -def hidden_prompt_func(prompt: str) -> str: - import getpass - - return getpass.getpass(prompt) - - -def _build_prompt( - text: str, - suffix: str, - show_default: bool = False, - default: t.Optional[t.Any] = None, - show_choices: bool = True, - type: t.Optional[ParamType] = None, -) -> str: - prompt = text - if type is not None and show_choices and isinstance(type, Choice): - prompt += f" ({', '.join(map(str, type.choices))})" - if default is not None and show_default: - prompt = f"{prompt} [{_format_default(default)}]" - return f"{prompt}{suffix}" - - -def _format_default(default: t.Any) -> t.Any: - if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): - return default.name - - return default - - -def prompt( - text: str, - default: t.Optional[t.Any] = None, - hide_input: bool = False, - confirmation_prompt: t.Union[bool, str] = False, - type: t.Optional[t.Union[ParamType, t.Any]] = None, - value_proc: t.Optional[t.Callable[[str], t.Any]] = None, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, - show_choices: bool = True, -) -> t.Any: - """Prompts a user for input. This is a convenience function that can - be used to prompt a user for input later. - - If the user aborts the input by sending an interrupt signal, this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the text to show for the prompt. 
- :param default: the default value to use if no input happens. If this - is not given it will prompt until it's aborted. - :param hide_input: if this is set to true then the input value will - be hidden. - :param confirmation_prompt: Prompt a second time to confirm the - value. Can be set to a string instead of ``True`` to customize - the message. - :param type: the type to use to check the value against. - :param value_proc: if this parameter is provided it's a function that - is invoked instead of the type conversion to - convert a value. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - :param show_choices: Show or hide choices if the passed type is a Choice. - For example if type is a Choice of either day or week, - show_choices is true and text is "Group by" then the - prompt will be "Group by (day, week): ". - - .. versionadded:: 8.0 - ``confirmation_prompt`` can be a custom string. - - .. versionadded:: 7.0 - Added the ``show_choices`` parameter. - - .. versionadded:: 6.0 - Added unicode support for cmd.exe on Windows. - - .. versionadded:: 4.0 - Added the `err` parameter. - - """ - - def prompt_func(text: str) -> str: - f = hidden_prompt_func if hide_input else visible_prompt_func - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(text.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. - return f(" ") - except (KeyboardInterrupt, EOFError): - # getpass doesn't print a newline if the user aborts input with ^C. - # Allegedly this behavior is inherited from getpass(3). - # A doc bug has been filed at https://bugs.python.org/issue24711 - if hide_input: - echo(None, err=err) - raise Abort() from None - - if value_proc is None: - value_proc = convert_type(type, default) - - prompt = _build_prompt( - text, prompt_suffix, show_default, default, show_choices, type - ) - - if confirmation_prompt: - if confirmation_prompt is True: - confirmation_prompt = _("Repeat for confirmation") - - confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) - - while True: - while True: - value = prompt_func(prompt) - if value: - break - elif default is not None: - value = default - break - try: - result = value_proc(value) - except UsageError as e: - if hide_input: - echo(_("Error: The value you entered was invalid."), err=err) - else: - echo(_("Error: {e.message}").format(e=e), err=err) # noqa: B306 - continue - if not confirmation_prompt: - return result - while True: - value2 = prompt_func(confirmation_prompt) - is_empty = not value and not value2 - if value2 or is_empty: - break - if value == value2: - return result - echo(_("Error: The two entered values do not match."), err=err) - - -def confirm( - text: str, - default: t.Optional[bool] = False, - abort: bool = False, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, -) -> bool: - """Prompts for confirmation (yes/no question). - - If the user aborts the input by sending a interrupt signal this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the question to ask. - :param default: The default value to use when no input is given. If - ``None``, repeat until input is given. 
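Putting ``prompt()`` together: type conversion failures re-prompt, an empty answer falls back to ``default``, and ``confirmation_prompt=True`` asks a second time. A small sketch (the ``signup`` command is invented)::

    import click

    @click.command()
    def signup():
        age = click.prompt("Age", type=click.IntRange(0, 120), default=18)
        password = click.prompt(
            "Password", hide_input=True, confirmation_prompt=True
        )
        click.echo(f"age={age}, password of {len(password)} characters")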
- :param abort: if this is set to `True` a negative answer aborts the - exception by raising :exc:`Abort`. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - - .. versionchanged:: 8.0 - Repeat until input is given if ``default`` is ``None``. - - .. versionadded:: 4.0 - Added the ``err`` parameter. - """ - prompt = _build_prompt( - text, - prompt_suffix, - show_default, - "y/n" if default is None else ("Y/n" if default else "y/N"), - ) - - while True: - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(prompt.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. - value = visible_prompt_func(" ").lower().strip() - except (KeyboardInterrupt, EOFError): - raise Abort() from None - if value in ("y", "yes"): - rv = True - elif value in ("n", "no"): - rv = False - elif default is not None and value == "": - rv = default - else: - echo(_("Error: invalid input"), err=err) - continue - break - if abort and not rv: - raise Abort() - return rv - - -def echo_via_pager( - text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], - color: t.Optional[bool] = None, -) -> None: - """This function takes a text and shows it via an environment specific - pager on stdout. - - .. versionchanged:: 3.0 - Added the `color` flag. - - :param text_or_generator: the text to page, or alternatively, a - generator emitting the text to page. - :param color: controls if the pager supports ANSI colors or not. The - default is autodetection. - """ - color = resolve_color_default(color) - - if inspect.isgeneratorfunction(text_or_generator): - i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() - elif isinstance(text_or_generator, str): - i = [text_or_generator] - else: - i = iter(t.cast(t.Iterable[str], text_or_generator)) - - # convert every element of i to a text type if necessary - text_generator = (el if isinstance(el, str) else str(el) for el in i) - - from ._termui_impl import pager - - return pager(itertools.chain(text_generator, "\n"), color) - - -def progressbar( - iterable: t.Optional[t.Iterable[V]] = None, - length: t.Optional[int] = None, - label: t.Optional[str] = None, - show_eta: bool = True, - show_percent: t.Optional[bool] = None, - show_pos: bool = False, - item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.Optional[t.TextIO] = None, - color: t.Optional[bool] = None, - update_min_steps: int = 1, -) -> "ProgressBar[V]": - """This function creates an iterable context manager that can be used - to iterate over something while showing a progress bar. It will - either iterate over the `iterable` or `length` items (that are counted - up). While iteration happens, this function will print a rendered - progress bar to the given `file` (defaults to stdout) and will attempt - to calculate remaining time and more. By default, this progress bar - will not be rendered if the file is not a terminal. - - The context manager creates the progress bar. When the context - manager is entered the progress bar is already created. 
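With ``abort=True``, a negative answer to ``confirm()`` raises :exc:`Abort` and the program exits instead of continuing. A minimal sketch (the ``drop`` command is invented)::

    import click

    @click.command()
    def drop():
        # Answering "n" (the default here) raises Abort; execution stops.
        click.confirm("Really drop the database?", default=False, abort=True)
        click.echo("dropped")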
With every - iteration over the progress bar, the iterable passed to the bar is - advanced and the bar is updated. When the context manager exits, - a newline is printed and the progress bar is finalized on screen. - - Note: The progress bar is currently designed for use cases where the - total progress can be expected to take at least several seconds. - Because of this, the ProgressBar class object won't display - progress that is considered too fast, and progress where the time - between steps is less than a second. - - No printing must happen or the progress bar will be unintentionally - destroyed. - - Example usage:: - - with progressbar(items) as bar: - for item in bar: - do_something_with(item) - - Alternatively, if no iterable is specified, one can manually update the - progress bar through the `update()` method instead of directly - iterating over the progress bar. The update method accepts the number - of steps to increment the bar with:: - - with progressbar(length=chunks.total_bytes) as bar: - for chunk in chunks: - process_chunk(chunk) - bar.update(chunks.bytes) - - The ``update()`` method also takes an optional value specifying the - ``current_item`` at the new position. This is useful when used - together with ``item_show_func`` to customize the output for each - manual step:: - - with click.progressbar( - length=total_size, - label='Unzipping archive', - item_show_func=lambda a: a.filename - ) as bar: - for archive in zip_file: - archive.extract() - bar.update(archive.size, archive) - - :param iterable: an iterable to iterate over. If not provided the length - is required. - :param length: the number of items to iterate over. By default the - progressbar will attempt to ask the iterator about its - length, which might or might not work. If an iterable is - also provided this parameter can be used to override the - length. If an iterable is not provided the progress bar - will iterate over a range of that length. - :param label: the label to show next to the progress bar. - :param show_eta: enables or disables the estimated time display. This is - automatically disabled if the length cannot be - determined. - :param show_percent: enables or disables the percentage display. The - default is `True` if the iterable has a length or - `False` if not. - :param show_pos: enables or disables the absolute position display. The - default is `False`. - :param item_show_func: A function called with the current item which - can return a string to show next to the progress bar. If the - function returns ``None`` nothing is shown. The current item can - be ``None``, such as when entering and exiting the bar. - :param fill_char: the character to use to show the filled part of the - progress bar. - :param empty_char: the character to use to show the non-filled part of - the progress bar. - :param bar_template: the format string to use as template for the bar. - The parameters in it are ``label`` for the label, - ``bar`` for the progress bar and ``info`` for the - info section. - :param info_sep: the separator between multiple info items (eta etc.) - :param width: the width of the progress bar in characters, 0 means full - terminal width - :param file: The file to write to. If this is not a terminal then - only the label is printed. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are included anywhere in the progress bar output - which is not the case by default. 
- :param update_min_steps: Render only when this many updates have - completed. This allows tuning for very fast iterators. - - .. versionchanged:: 8.0 - Output is shown even if execution time is less than 0.5 seconds. - - .. versionchanged:: 8.0 - ``item_show_func`` shows the current item, not the previous one. - - .. versionchanged:: 8.0 - Labels are echoed if the output is not a TTY. Reverts a change - in 7.0 that removed all output. - - .. versionadded:: 8.0 - Added the ``update_min_steps`` parameter. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. Added the ``update`` method to - the object. - - .. versionadded:: 2.0 - """ - from ._termui_impl import ProgressBar - - color = resolve_color_default(color) - return ProgressBar( - iterable=iterable, - length=length, - show_eta=show_eta, - show_percent=show_percent, - show_pos=show_pos, - item_show_func=item_show_func, - fill_char=fill_char, - empty_char=empty_char, - bar_template=bar_template, - info_sep=info_sep, - file=file, - label=label, - width=width, - color=color, - update_min_steps=update_min_steps, - ) - - -def clear() -> None: - """Clears the terminal screen. This will have the effect of clearing - the whole visible space of the terminal and moving the cursor to the - top left. This does not do anything if not connected to a terminal. - - .. versionadded:: 2.0 - """ - if not isatty(sys.stdout): - return - - # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor - echo("\033[2J\033[1;1H", nl=False) - - -def _interpret_color( - color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 -) -> str: - if isinstance(color, int): - return f"{38 + offset};5;{color:d}" - - if isinstance(color, (tuple, list)): - r, g, b = color - return f"{38 + offset};2;{r:d};{g:d};{b:d}" - - return str(_ansi_colors[color] + offset) - - -def style( - text: t.Any, - fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, - bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, - bold: t.Optional[bool] = None, - dim: t.Optional[bool] = None, - underline: t.Optional[bool] = None, - overline: t.Optional[bool] = None, - italic: t.Optional[bool] = None, - blink: t.Optional[bool] = None, - reverse: t.Optional[bool] = None, - strikethrough: t.Optional[bool] = None, - reset: bool = True, -) -> str: - """Styles a text with ANSI styles and returns the new string. By - default the styling is self contained which means that at the end - of the string a reset code is issued. This can be prevented by - passing ``reset=False``. - - Examples:: - - click.echo(click.style('Hello World!', fg='green')) - click.echo(click.style('ATTENTION!', blink=True)) - click.echo(click.style('Some things', reverse=True, fg='cyan')) - click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) - - Supported color names: - - * ``black`` (might be a gray) - * ``red`` - * ``green`` - * ``yellow`` (might be an orange) - * ``blue`` - * ``magenta`` - * ``cyan`` - * ``white`` (might be light gray) - * ``bright_black`` - * ``bright_red`` - * ``bright_green`` - * ``bright_yellow`` - * ``bright_blue`` - * ``bright_magenta`` - * ``bright_cyan`` - * ``bright_white`` - * ``reset`` (reset the color code only) - - If the terminal supports it, color may also be specified as: - - - An integer in the interval [0, 255]. The terminal must support - 8-bit/256-color mode. - - An RGB tuple of three integers in [0, 255]. The terminal must - support 24-bit/true-color mode. 
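The three ways of naming a color accepted by ``style()``, per the docstring above, look like this in practice (actual rendering depends on the terminal's color support)::

    import click

    click.echo(click.style("named color", fg="bright_cyan"))
    click.echo(click.style("8-bit palette index", fg=39))
    click.echo(click.style("24-bit RGB tuple", fg=(255, 12, 128)))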
- - See https://en.wikipedia.org/wiki/ANSI_color and - https://gist.github.com/XVilka/8346728 for more information. - - :param text: the string to style with ansi codes. - :param fg: if provided this will become the foreground color. - :param bg: if provided this will become the background color. - :param bold: if provided this will enable or disable bold mode. - :param dim: if provided this will enable or disable dim mode. This is - badly supported. - :param underline: if provided this will enable or disable underline. - :param overline: if provided this will enable or disable overline. - :param italic: if provided this will enable or disable italic. - :param blink: if provided this will enable or disable blinking. - :param reverse: if provided this will enable or disable inverse - rendering (foreground becomes background and the - other way round). - :param strikethrough: if provided this will enable or disable - striking through text. - :param reset: by default a reset-all code is added at the end of the - string which means that styles do not carry over. This - can be disabled to compose styles. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. - - .. versionchanged:: 8.0 - Added support for 256 and RGB color codes. - - .. versionchanged:: 8.0 - Added the ``strikethrough``, ``italic``, and ``overline`` - parameters. - - .. versionchanged:: 7.0 - Added support for bright colors. - - .. versionadded:: 2.0 - """ - if not isinstance(text, str): - text = str(text) - - bits = [] - - if fg: - try: - bits.append(f"\033[{_interpret_color(fg)}m") - except KeyError: - raise TypeError(f"Unknown color {fg!r}") from None - - if bg: - try: - bits.append(f"\033[{_interpret_color(bg, 10)}m") - except KeyError: - raise TypeError(f"Unknown color {bg!r}") from None - - if bold is not None: - bits.append(f"\033[{1 if bold else 22}m") - if dim is not None: - bits.append(f"\033[{2 if dim else 22}m") - if underline is not None: - bits.append(f"\033[{4 if underline else 24}m") - if overline is not None: - bits.append(f"\033[{53 if overline else 55}m") - if italic is not None: - bits.append(f"\033[{3 if italic else 23}m") - if blink is not None: - bits.append(f"\033[{5 if blink else 25}m") - if reverse is not None: - bits.append(f"\033[{7 if reverse else 27}m") - if strikethrough is not None: - bits.append(f"\033[{9 if strikethrough else 29}m") - bits.append(text) - if reset: - bits.append(_ansi_reset_all) - return "".join(bits) - - -def unstyle(text: str) -> str: - """Removes ANSI styling information from a string. Usually it's not - necessary to use this function as Click's echo function will - automatically remove styling if necessary. - - .. versionadded:: 2.0 - - :param text: the text to remove style information from. - """ - return strip_ansi(text) - - -def secho( - message: t.Optional[t.Any] = None, - file: t.Optional[t.IO[t.AnyStr]] = None, - nl: bool = True, - err: bool = False, - color: t.Optional[bool] = None, - **styles: t.Any, -) -> None: - """This function combines :func:`echo` and :func:`style` into one - call. As such the following two calls are the same:: - - click.secho('Hello World!', fg='green') - click.echo(click.style('Hello World!', fg='green')) - - All keyword arguments are forwarded to the underlying functions - depending on which one they go with. - - Non-string types will be converted to :class:`str`. However, - :class:`bytes` are passed directly to :meth:`echo` without applying - style. 
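``style()``, ``unstyle()``, and ``secho()`` round-trip as follows; a short sketch::

    import click

    styled = click.style("WARNING", fg="bright_yellow", bold=True)
    click.echo(f"{styled}: disk nearly full")

    # secho() is the one-call shorthand for echo(style(...)):
    click.secho("WARNING: disk nearly full", fg="bright_yellow", bold=True)

    # unstyle() strips the ANSI codes again, e.g. before writing to a log:
    assert click.unstyle(styled) == "WARNING"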
If you want to style bytes that represent text, call - :meth:`bytes.decode` first. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. Bytes are - passed through without style applied. - - .. versionadded:: 2.0 - """ - if message is not None and not isinstance(message, (bytes, bytearray)): - message = style(message, **styles) - - return echo(message, file=file, nl=nl, err=err, color=color) - - -def edit( - text: t.Optional[t.AnyStr] = None, - editor: t.Optional[str] = None, - env: t.Optional[t.Mapping[str, str]] = None, - require_save: bool = True, - extension: str = ".txt", - filename: t.Optional[str] = None, -) -> t.Optional[t.AnyStr]: - r"""Edits the given text in the defined editor. If an editor is given - (should be the full path to the executable but the regular operating - system search path is used for finding the executable) it overrides - the detected editor. Optionally, some environment variables can be - used. If the editor is closed without changes, `None` is returned. In - case a file is edited directly the return value is always `None` and - `require_save` and `extension` are ignored. - - If the editor cannot be opened a :exc:`UsageError` is raised. - - Note for Windows: to simplify cross-platform usage, the newlines are - automatically converted from POSIX to Windows and vice versa. As such, - the message here will have ``\n`` as newline markers. - - :param text: the text to edit. - :param editor: optionally the editor to use. Defaults to automatic - detection. - :param env: environment variables to forward to the editor. - :param require_save: if this is true, then not saving in the editor - will make the return value become `None`. - :param extension: the extension to tell the editor about. This defaults - to `.txt` but changing this might change syntax - highlighting. - :param filename: if provided it will edit this file instead of the - provided text contents. It will not use a temporary - file as an indirection in that case. - """ - from ._termui_impl import Editor - - ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) - - if filename is None: - return ed.edit(text) - - ed.edit_file(filename) - return None - - -def launch(url: str, wait: bool = False, locate: bool = False) -> int: - """This function launches the given URL (or filename) in the default - viewer application for this file type. If this is an executable, it - might launch the executable in a new session. The return value is - the exit code of the launched application. Usually, ``0`` indicates - success. - - Examples:: - - click.launch('https://click.palletsprojects.com/') - click.launch('/my/downloaded/file', locate=True) - - .. versionadded:: 2.0 - - :param url: URL or filename of the thing to launch. - :param wait: Wait for the program to exit before returning. This - only works if the launched program blocks. In particular, - ``xdg-open`` on Linux does not block. - :param locate: if this is set to `True` then instead of launching the - application associated with the URL it will attempt to - launch a file manager with the file located. This - might have weird effects if the URL does not point to - the filesystem. - """ - from ._termui_impl import open_url - - return open_url(url, wait=wait, locate=locate) - - -# If this is provided, getchar() calls into this instead. This is used -# for unittesting purposes. 
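Typical calls for the two launchers above: ``edit()`` returns ``None`` when the user quits without saving, and ``launch()`` defers to the platform's default handler (the seed text and URL are illustrative)::

    import click

    text = click.edit("# enter a commit message\n", extension=".txt")
    if text is not None:
        click.echo(text)

    # locate=True would open a file manager at the file instead.
    click.launch("https://click.palletsprojects.com/")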
-_getchar: t.Optional[t.Callable[[bool], str]] = None - - -def getchar(echo: bool = False) -> str: - """Fetches a single character from the terminal and returns it. This - will always return a unicode character and under certain rare - circumstances this might return more than one character. The - situations which more than one character is returned is when for - whatever reason multiple characters end up in the terminal buffer or - standard input was not actually a terminal. - - Note that this will always read from the terminal, even if something - is piped into the standard input. - - Note for Windows: in rare cases when typing non-ASCII characters, this - function might wait for a second character and then return both at once. - This is because certain Unicode characters look like special-key markers. - - .. versionadded:: 2.0 - - :param echo: if set to `True`, the character read will also show up on - the terminal. The default is to not show it. - """ - global _getchar - - if _getchar is None: - from ._termui_impl import getchar as f - - _getchar = f - - return _getchar(echo) - - -def raw_terminal() -> t.ContextManager[int]: - from ._termui_impl import raw_terminal as f - - return f() - - -def pause(info: t.Optional[str] = None, err: bool = False) -> None: - """This command stops execution and waits for the user to press any - key to continue. This is similar to the Windows batch "pause" - command. If the program is not run through a terminal, this command - will instead do nothing. - - .. versionadded:: 2.0 - - .. versionadded:: 4.0 - Added the `err` parameter. - - :param info: The message to print before pausing. Defaults to - ``"Press any key to continue..."``. - :param err: if set to message goes to ``stderr`` instead of - ``stdout``, the same as with echo. - """ - if not isatty(sys.stdin) or not isatty(sys.stdout): - return - - if info is None: - info = _("Press any key to continue...") - - try: - if info: - echo(info, nl=False, err=err) - try: - getchar() - except (KeyboardInterrupt, EOFError): - pass - finally: - if info: - echo(err=err) diff --git a/server/venv/Lib/site-packages/click/testing.py b/server/venv/Lib/site-packages/click/testing.py deleted file mode 100644 index e0df0d2..0000000 --- a/server/venv/Lib/site-packages/click/testing.py +++ /dev/null @@ -1,479 +0,0 @@ -import contextlib -import io -import os -import shlex -import shutil -import sys -import tempfile -import typing as t -from types import TracebackType - -from . import formatting -from . import termui -from . 
import utils -from ._compat import _find_binary_reader - -if t.TYPE_CHECKING: - from .core import BaseCommand - - -class EchoingStdin: - def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: - self._input = input - self._output = output - self._paused = False - - def __getattr__(self, x: str) -> t.Any: - return getattr(self._input, x) - - def _echo(self, rv: bytes) -> bytes: - if not self._paused: - self._output.write(rv) - - return rv - - def read(self, n: int = -1) -> bytes: - return self._echo(self._input.read(n)) - - def read1(self, n: int = -1) -> bytes: - return self._echo(self._input.read1(n)) # type: ignore - - def readline(self, n: int = -1) -> bytes: - return self._echo(self._input.readline(n)) - - def readlines(self) -> t.List[bytes]: - return [self._echo(x) for x in self._input.readlines()] - - def __iter__(self) -> t.Iterator[bytes]: - return iter(self._echo(x) for x in self._input) - - def __repr__(self) -> str: - return repr(self._input) - - -@contextlib.contextmanager -def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: - if stream is None: - yield - else: - stream._paused = True - yield - stream._paused = False - - -class _NamedTextIOWrapper(io.TextIOWrapper): - def __init__( - self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any - ) -> None: - super().__init__(buffer, **kwargs) - self._name = name - self._mode = mode - - @property - def name(self) -> str: - return self._name - - @property - def mode(self) -> str: - return self._mode - - -def make_input_stream( - input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]], charset: str -) -> t.BinaryIO: - # Is already an input stream. - if hasattr(input, "read"): - rv = _find_binary_reader(t.cast(t.IO[t.Any], input)) - - if rv is not None: - return rv - - raise TypeError("Could not find binary reader for input stream.") - - if input is None: - input = b"" - elif isinstance(input, str): - input = input.encode(charset) - - return io.BytesIO(input) - - -class Result: - """Holds the captured result of an invoked CLI script.""" - - def __init__( - self, - runner: "CliRunner", - stdout_bytes: bytes, - stderr_bytes: t.Optional[bytes], - return_value: t.Any, - exit_code: int, - exception: t.Optional[BaseException], - exc_info: t.Optional[ - t.Tuple[t.Type[BaseException], BaseException, TracebackType] - ] = None, - ): - #: The runner that created the result - self.runner = runner - #: The standard output as bytes. - self.stdout_bytes = stdout_bytes - #: The standard error as bytes, or None if not available - self.stderr_bytes = stderr_bytes - #: The value returned from the invoked command. - #: - #: .. versionadded:: 8.0 - self.return_value = return_value - #: The exit code as integer. - self.exit_code = exit_code - #: The exception that happened if one did. 
- self.exception = exception - #: The traceback - self.exc_info = exc_info - - @property - def output(self) -> str: - """The (standard) output as unicode string.""" - return self.stdout - - @property - def stdout(self) -> str: - """The standard output as unicode string.""" - return self.stdout_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - @property - def stderr(self) -> str: - """The standard error as unicode string.""" - if self.stderr_bytes is None: - raise ValueError("stderr not separately captured") - return self.stderr_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - def __repr__(self) -> str: - exc_str = repr(self.exception) if self.exception else "okay" - return f"<{type(self).__name__} {exc_str}>" - - -class CliRunner: - """The CLI runner provides functionality to invoke a Click command line - script for unittesting purposes in a isolated environment. This only - works in single-threaded systems without any concurrency as it changes the - global interpreter state. - - :param charset: the character set for the input and output data. - :param env: a dictionary with environment variables for overriding. - :param echo_stdin: if this is set to `True`, then reading from stdin writes - to stdout. This is useful for showing examples in - some circumstances. Note that regular prompts - will automatically echo the input. - :param mix_stderr: if this is set to `False`, then stdout and stderr are - preserved as independent streams. This is useful for - Unix-philosophy apps that have predictable stdout and - noisy stderr, such that each may be measured - independently - """ - - def __init__( - self, - charset: str = "utf-8", - env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, - echo_stdin: bool = False, - mix_stderr: bool = True, - ) -> None: - self.charset = charset - self.env: t.Mapping[str, t.Optional[str]] = env or {} - self.echo_stdin = echo_stdin - self.mix_stderr = mix_stderr - - def get_default_prog_name(self, cli: "BaseCommand") -> str: - """Given a command object it will return the default program name - for it. The default is the `name` attribute or ``"root"`` if not - set. - """ - return cli.name or "root" - - def make_env( - self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None - ) -> t.Mapping[str, t.Optional[str]]: - """Returns the environment overrides for invoking a script.""" - rv = dict(self.env) - if overrides: - rv.update(overrides) - return rv - - @contextlib.contextmanager - def isolation( - self, - input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, - env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, - color: bool = False, - ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]: - """A context manager that sets up the isolation for invoking of a - command line tool. This sets up stdin with the given input data - and `os.environ` with the overrides from the given dictionary. - This also rebinds some internals in Click to be mocked (like the - prompt functionality). - - This is automatically done in the :meth:`invoke` method. - - :param input: the input stream to put into sys.stdin. - :param env: the environment overrides as dictionary. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionchanged:: 8.0 - ``stderr`` is opened with ``errors="backslashreplace"`` - instead of the default ``"strict"``. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. 
- """ - bytes_input = make_input_stream(input, self.charset) - echo_input = None - - old_stdin = sys.stdin - old_stdout = sys.stdout - old_stderr = sys.stderr - old_forced_width = formatting.FORCED_WIDTH - formatting.FORCED_WIDTH = 80 - - env = self.make_env(env) - - bytes_output = io.BytesIO() - - if self.echo_stdin: - bytes_input = echo_input = t.cast( - t.BinaryIO, EchoingStdin(bytes_input, bytes_output) - ) - - sys.stdin = text_input = _NamedTextIOWrapper( - bytes_input, encoding=self.charset, name="", mode="r" - ) - - if self.echo_stdin: - # Force unbuffered reads, otherwise TextIOWrapper reads a - # large chunk which is echoed early. - text_input._CHUNK_SIZE = 1 # type: ignore - - sys.stdout = _NamedTextIOWrapper( - bytes_output, encoding=self.charset, name="", mode="w" - ) - - bytes_error = None - if self.mix_stderr: - sys.stderr = sys.stdout - else: - bytes_error = io.BytesIO() - sys.stderr = _NamedTextIOWrapper( - bytes_error, - encoding=self.charset, - name="", - mode="w", - errors="backslashreplace", - ) - - @_pause_echo(echo_input) # type: ignore - def visible_input(prompt: t.Optional[str] = None) -> str: - sys.stdout.write(prompt or "") - val = text_input.readline().rstrip("\r\n") - sys.stdout.write(f"{val}\n") - sys.stdout.flush() - return val - - @_pause_echo(echo_input) # type: ignore - def hidden_input(prompt: t.Optional[str] = None) -> str: - sys.stdout.write(f"{prompt or ''}\n") - sys.stdout.flush() - return text_input.readline().rstrip("\r\n") - - @_pause_echo(echo_input) # type: ignore - def _getchar(echo: bool) -> str: - char = sys.stdin.read(1) - - if echo: - sys.stdout.write(char) - - sys.stdout.flush() - return char - - default_color = color - - def should_strip_ansi( - stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None - ) -> bool: - if color is None: - return not default_color - return not color - - old_visible_prompt_func = termui.visible_prompt_func - old_hidden_prompt_func = termui.hidden_prompt_func - old__getchar_func = termui._getchar - old_should_strip_ansi = utils.should_strip_ansi # type: ignore - termui.visible_prompt_func = visible_input - termui.hidden_prompt_func = hidden_input - termui._getchar = _getchar - utils.should_strip_ansi = should_strip_ansi # type: ignore - - old_env = {} - try: - for key, value in env.items(): - old_env[key] = os.environ.get(key) - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - yield (bytes_output, bytes_error) - finally: - for key, value in old_env.items(): - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - sys.stdout = old_stdout - sys.stderr = old_stderr - sys.stdin = old_stdin - termui.visible_prompt_func = old_visible_prompt_func - termui.hidden_prompt_func = old_hidden_prompt_func - termui._getchar = old__getchar_func - utils.should_strip_ansi = old_should_strip_ansi # type: ignore - formatting.FORCED_WIDTH = old_forced_width - - def invoke( - self, - cli: "BaseCommand", - args: t.Optional[t.Union[str, t.Sequence[str]]] = None, - input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, - env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, - catch_exceptions: bool = True, - color: bool = False, - **extra: t.Any, - ) -> Result: - """Invokes a command in an isolated environment. The arguments are - forwarded directly to the command line script, the `extra` keyword - arguments are passed to the :meth:`~clickpkg.Command.main` function of - the command. 
- - This returns a :class:`Result` object. - - :param cli: the command to invoke - :param args: the arguments to invoke. It may be given as an iterable - or a string. When given as string it will be interpreted - as a Unix shell command. More details at - :func:`shlex.split`. - :param input: the input data for `sys.stdin`. - :param env: the environment overrides. - :param catch_exceptions: Whether to catch any other exceptions than - ``SystemExit``. - :param extra: the keyword arguments to pass to :meth:`main`. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionchanged:: 8.0 - The result object has the ``return_value`` attribute with - the value returned from the invoked command. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionchanged:: 3.0 - Added the ``catch_exceptions`` parameter. - - .. versionchanged:: 3.0 - The result object has the ``exc_info`` attribute with the - traceback if available. - """ - exc_info = None - with self.isolation(input=input, env=env, color=color) as outstreams: - return_value = None - exception: t.Optional[BaseException] = None - exit_code = 0 - - if isinstance(args, str): - args = shlex.split(args) - - try: - prog_name = extra.pop("prog_name") - except KeyError: - prog_name = self.get_default_prog_name(cli) - - try: - return_value = cli.main(args=args or (), prog_name=prog_name, **extra) - except SystemExit as e: - exc_info = sys.exc_info() - e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) - - if e_code is None: - e_code = 0 - - if e_code != 0: - exception = e - - if not isinstance(e_code, int): - sys.stdout.write(str(e_code)) - sys.stdout.write("\n") - e_code = 1 - - exit_code = e_code - - except Exception as e: - if not catch_exceptions: - raise - exception = e - exit_code = 1 - exc_info = sys.exc_info() - finally: - sys.stdout.flush() - stdout = outstreams[0].getvalue() - if self.mix_stderr: - stderr = None - else: - stderr = outstreams[1].getvalue() # type: ignore - - return Result( - runner=self, - stdout_bytes=stdout, - stderr_bytes=stderr, - return_value=return_value, - exit_code=exit_code, - exception=exception, - exc_info=exc_info, # type: ignore - ) - - @contextlib.contextmanager - def isolated_filesystem( - self, temp_dir: t.Optional[t.Union[str, "os.PathLike[str]"]] = None - ) -> t.Iterator[str]: - """A context manager that creates a temporary directory and - changes the current working directory to it. This isolates tests - that affect the contents of the CWD to prevent them from - interfering with each other. - - :param temp_dir: Create the temporary directory under this - directory. If given, the created directory is not removed - when exiting. - - .. versionchanged:: 8.0 - Added the ``temp_dir`` parameter. 
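A typical test combines ``invoke()`` with ``isolated_filesystem()``; a minimal sketch (the ``hello`` command is invented)::

    import click
    from click.testing import CliRunner

    @click.command()
    @click.argument("name")
    def hello(name):
        click.echo(f"Hello {name}!")

    def test_hello():
        runner = CliRunner()
        with runner.isolated_filesystem():
            result = runner.invoke(hello, ["World"])
        assert result.exit_code == 0
        assert result.output == "Hello World!\n"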
- """ - cwd = os.getcwd() - dt = tempfile.mkdtemp(dir=temp_dir) - os.chdir(dt) - - try: - yield dt - finally: - os.chdir(cwd) - - if temp_dir is None: - try: - shutil.rmtree(dt) - except OSError: # noqa: B014 - pass diff --git a/server/venv/Lib/site-packages/click/types.py b/server/venv/Lib/site-packages/click/types.py deleted file mode 100644 index 2b1d179..0000000 --- a/server/venv/Lib/site-packages/click/types.py +++ /dev/null @@ -1,1089 +0,0 @@ -import os -import stat -import sys -import typing as t -from datetime import datetime -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import _get_argv_encoding -from ._compat import open_stream -from .exceptions import BadParameter -from .utils import format_filename -from .utils import LazyFile -from .utils import safecall - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Context - from .core import Parameter - from .shell_completion import CompletionItem - - -class ParamType: - """Represents the type of a parameter. Validates and converts values - from the command line or Python into the correct type. - - To implement a custom type, subclass and implement at least the - following: - - - The :attr:`name` class attribute must be set. - - Calling an instance of the type with ``None`` must return - ``None``. This is already implemented by default. - - :meth:`convert` must convert string values to the correct type. - - :meth:`convert` must accept values that are already the correct - type. - - It must be able to convert a value if the ``ctx`` and ``param`` - arguments are ``None``. This can occur when converting prompt - input. - """ - - is_composite: t.ClassVar[bool] = False - arity: t.ClassVar[int] = 1 - - #: the descriptive name of this type - name: str - - #: if a list of this type is expected and the value is pulled from a - #: string environment variable, this is what splits it up. `None` - #: means any whitespace. For all parameters the general rule is that - #: whitespace splits them up. The exception are paths and files which - #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on - #: Windows). - envvar_list_splitter: t.ClassVar[t.Optional[str]] = None - - def to_info_dict(self) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionadded:: 8.0 - """ - # The class name without the "ParamType" suffix. - param_type = type(self).__name__.partition("ParamType")[0] - param_type = param_type.partition("ParameterType")[0] - - # Custom subclasses might not remember to set a name. - if hasattr(self, "name"): - name = self.name - else: - name = param_type - - return {"param_type": param_type, "name": name} - - def __call__( - self, - value: t.Any, - param: t.Optional["Parameter"] = None, - ctx: t.Optional["Context"] = None, - ) -> t.Any: - if value is not None: - return self.convert(value, param, ctx) - - def get_metavar(self, param: "Parameter") -> t.Optional[str]: - """Returns the metavar default for this param if it provides one.""" - - def get_missing_message(self, param: "Parameter") -> t.Optional[str]: - """Optionally might return extra information about a missing - parameter. - - .. versionadded:: 2.0 - """ - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - """Convert the value to the correct type. 
This is not called if - the value is ``None`` (the missing value). - - This must accept string values from the command line, as well as - values that are already the correct type. It may also convert - other compatible types. - - The ``param`` and ``ctx`` arguments may be ``None`` in certain - situations, such as when converting prompt input. - - If the value cannot be converted, call :meth:`fail` with a - descriptive message. - - :param value: The value to convert. - :param param: The parameter that is using this type to convert - its value. May be ``None``. - :param ctx: The current context that arrived at this value. May - be ``None``. - """ - return value - - def split_envvar_value(self, rv: str) -> t.Sequence[str]: - """Given a value from an environment variable this splits it up - into small chunks depending on the defined envvar list splitter. - - If the splitter is set to `None`, which means that whitespace splits, - then leading and trailing whitespace is ignored. Otherwise, leading - and trailing splitters usually lead to empty items being included. - """ - return (rv or "").split(self.envvar_list_splitter) - - def fail( - self, - message: str, - param: t.Optional["Parameter"] = None, - ctx: t.Optional["Context"] = None, - ) -> "t.NoReturn": - """Helper method to fail with an invalid value message.""" - raise BadParameter(message, ctx=ctx, param=param) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a list of - :class:`~click.shell_completion.CompletionItem` objects for the - incomplete value. Most types do not provide completions, but - some do, and this allows custom types to provide custom - completions as well. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - return [] - - -class CompositeParamType(ParamType): - is_composite = True - - @property - def arity(self) -> int: # type: ignore - raise NotImplementedError() - - -class FuncParamType(ParamType): - def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: - self.name: str = func.__name__ - self.func = func - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["func"] = self.func - return info_dict - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - try: - return self.func(value) - except ValueError: - try: - value = str(value) - except UnicodeError: - value = value.decode("utf-8", "replace") - - self.fail(value, param, ctx) - - -class UnprocessedParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - return value - - def __repr__(self) -> str: - return "UNPROCESSED" - - -class StringParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if isinstance(value, bytes): - enc = _get_argv_encoding() - try: - value = value.decode(enc) - except UnicodeError: - fs_enc = sys.getfilesystemencoding() - if fs_enc != enc: - try: - value = value.decode(fs_enc) - except UnicodeError: - value = value.decode("utf-8", "replace") - else: - value = value.decode("utf-8", "replace") - return value - return str(value) - - def __repr__(self) -> str: - return "STRING" - - -class Choice(ParamType): - """The choice type allows a value to be checked against a fixed set - of supported values. All of these values have to be strings. - - You should only pass a list or tuple of choices. Other iterables - (like generators) may lead to surprising results. - - The resulting value will always be one of the originally passed choices - regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` - being specified. - - See :ref:`choice-opts` for an example. - - :param case_sensitive: Set to false to make choices case - insensitive. Defaults to true. - """ - - name = "choice" - - def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: - self.choices = choices - self.case_sensitive = case_sensitive - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["choices"] = self.choices - info_dict["case_sensitive"] = self.case_sensitive - return info_dict - - def get_metavar(self, param: "Parameter") -> str: - choices_str = "|".join(self.choices) - - # Use curly braces to indicate a required argument. - if param.required and param.param_type_name == "argument": - return f"{{{choices_str}}}" - - # Use square braces to indicate an option or optional argument. 
- return f"[{choices_str}]" - - def get_missing_message(self, param: "Parameter") -> str: - return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - # Match through normalization and case sensitivity - # first do token_normalize_func, then lowercase - # preserve original `value` to produce an accurate message in - # `self.fail` - normed_value = value - normed_choices = {choice: choice for choice in self.choices} - - if ctx is not None and ctx.token_normalize_func is not None: - normed_value = ctx.token_normalize_func(value) - normed_choices = { - ctx.token_normalize_func(normed_choice): original - for normed_choice, original in normed_choices.items() - } - - if not self.case_sensitive: - normed_value = normed_value.casefold() - normed_choices = { - normed_choice.casefold(): original - for normed_choice, original in normed_choices.items() - } - - if normed_value in normed_choices: - return normed_choices[normed_value] - - choices_str = ", ".join(map(repr, self.choices)) - self.fail( - ngettext( - "{value!r} is not {choice}.", - "{value!r} is not one of {choices}.", - len(self.choices), - ).format(value=value, choice=choices_str, choices=choices_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return f"Choice({list(self.choices)})" - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Complete choices that start with the incomplete value. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - str_choices = map(str, self.choices) - - if self.case_sensitive: - matched = (c for c in str_choices if c.startswith(incomplete)) - else: - incomplete = incomplete.lower() - matched = (c for c in str_choices if c.lower().startswith(incomplete)) - - return [CompletionItem(c) for c in matched] - - -class DateTime(ParamType): - """The DateTime type converts date strings into `datetime` objects. - - The format strings which are checked are configurable, but default to some - common (non-timezone aware) ISO 8601 formats. - - When specifying *DateTime* formats, you should only pass a list or a tuple. - Other iterables, like generators, may lead to surprising results. - - The format strings are processed using ``datetime.strptime``, and this - consequently defines the format strings which are allowed. - - Parsing is tried using each format, in order, and the first format which - parses successfully is used. - - :param formats: A list or tuple of date format strings, in the order in - which they should be tried. Defaults to - ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, - ``'%Y-%m-%d %H:%M:%S'``. 
- """ - - name = "datetime" - - def __init__(self, formats: t.Optional[t.Sequence[str]] = None): - self.formats: t.Sequence[str] = formats or [ - "%Y-%m-%d", - "%Y-%m-%dT%H:%M:%S", - "%Y-%m-%d %H:%M:%S", - ] - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["formats"] = self.formats - return info_dict - - def get_metavar(self, param: "Parameter") -> str: - return f"[{'|'.join(self.formats)}]" - - def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: - try: - return datetime.strptime(value, format) - except ValueError: - return None - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if isinstance(value, datetime): - return value - - for format in self.formats: - converted = self._try_to_convert_date(value, format) - - if converted is not None: - return converted - - formats_str = ", ".join(map(repr, self.formats)) - self.fail( - ngettext( - "{value!r} does not match the format {format}.", - "{value!r} does not match the formats {formats}.", - len(self.formats), - ).format(value=value, format=formats_str, formats=formats_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return "DateTime" - - -class _NumberParamTypeBase(ParamType): - _number_class: t.ClassVar[t.Type[t.Any]] - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - try: - return self._number_class(value) - except ValueError: - self.fail( - _("{value!r} is not a valid {number_type}.").format( - value=value, number_type=self.name - ), - param, - ctx, - ) - - -class _NumberRangeBase(_NumberParamTypeBase): - def __init__( - self, - min: t.Optional[float] = None, - max: t.Optional[float] = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - self.min = min - self.max = max - self.min_open = min_open - self.max_open = max_open - self.clamp = clamp - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - min=self.min, - max=self.max, - min_open=self.min_open, - max_open=self.max_open, - clamp=self.clamp, - ) - return info_dict - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - import operator - - rv = super().convert(value, param, ctx) - lt_min: bool = self.min is not None and ( - operator.le if self.min_open else operator.lt - )(rv, self.min) - gt_max: bool = self.max is not None and ( - operator.ge if self.max_open else operator.gt - )(rv, self.max) - - if self.clamp: - if lt_min: - return self._clamp(self.min, 1, self.min_open) # type: ignore - - if gt_max: - return self._clamp(self.max, -1, self.max_open) # type: ignore - - if lt_min or gt_max: - self.fail( - _("{value} is not in the range {range}.").format( - value=rv, range=self._describe_range() - ), - param, - ctx, - ) - - return rv - - def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: - """Find the valid value to clamp to bound in the given - direction. - - :param bound: The boundary value. - :param dir: 1 or -1 indicating the direction to move. - :param open: If true, the range does not include the bound. 
- """ - raise NotImplementedError - - def _describe_range(self) -> str: - """Describe the range for use in help text.""" - if self.min is None: - op = "<" if self.max_open else "<=" - return f"x{op}{self.max}" - - if self.max is None: - op = ">" if self.min_open else ">=" - return f"x{op}{self.min}" - - lop = "<" if self.min_open else "<=" - rop = "<" if self.max_open else "<=" - return f"{self.min}{lop}x{rop}{self.max}" - - def __repr__(self) -> str: - clamp = " clamped" if self.clamp else "" - return f"<{type(self).__name__} {self._describe_range()}{clamp}>" - - -class IntParamType(_NumberParamTypeBase): - name = "integer" - _number_class = int - - def __repr__(self) -> str: - return "INT" - - -class IntRange(_NumberRangeBase, IntParamType): - """Restrict an :data:`click.INT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "integer range" - - def _clamp( # type: ignore - self, bound: int, dir: "te.Literal[1, -1]", open: bool - ) -> int: - if not open: - return bound - - return bound + dir - - -class FloatParamType(_NumberParamTypeBase): - name = "float" - _number_class = float - - def __repr__(self) -> str: - return "FLOAT" - - -class FloatRange(_NumberRangeBase, FloatParamType): - """Restrict a :data:`click.FLOAT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. This is not supported if either - boundary is marked ``open``. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "float range" - - def __init__( - self, - min: t.Optional[float] = None, - max: t.Optional[float] = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - super().__init__( - min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp - ) - - if (min_open or max_open) and clamp: - raise TypeError("Clamping is not supported for open bounds.") - - def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: - if not open: - return bound - - # Could use Python 3.9's math.nextafter here, but clamping an - # open float range doesn't seem to be particularly useful. It's - # left up to the user to write a callback to do it if needed. 
- raise RuntimeError("Clamping is not supported for open bounds.") - - -class BoolParamType(ParamType): - name = "boolean" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if value in {False, True}: - return bool(value) - - norm = value.strip().lower() - - if norm in {"1", "true", "t", "yes", "y", "on"}: - return True - - if norm in {"0", "false", "f", "no", "n", "off"}: - return False - - self.fail( - _("{value!r} is not a valid boolean.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "BOOL" - - -class UUIDParameterType(ParamType): - name = "uuid" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - import uuid - - if isinstance(value, uuid.UUID): - return value - - value = value.strip() - - try: - return uuid.UUID(value) - except ValueError: - self.fail( - _("{value!r} is not a valid UUID.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "UUID" - - -class File(ParamType): - """Declares a parameter to be a file for reading or writing. The file - is automatically closed once the context tears down (after the command - finished working). - - Files can be opened for reading or writing. The special value ``-`` - indicates stdin or stdout depending on the mode. - - By default, the file is opened for reading text data, but it can also be - opened in binary mode or for writing. The encoding parameter can be used - to force a specific encoding. - - The `lazy` flag controls if the file should be opened immediately or upon - first IO. The default is to be non-lazy for standard input and output - streams as well as files opened for reading, `lazy` otherwise. When opening a - file lazily for reading, it is still opened temporarily for validation, but - will not be held open until first IO. lazy is mainly useful when opening - for writing to avoid creating the file until it is needed. - - Starting with Click 2.0, files can also be opened atomically in which - case all writes go into a separate file in the same folder and upon - completion the file will be moved over to the original location. This - is useful if a file regularly read by other users is modified. - - See :ref:`file-args` for more information. 
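A brief sketch of ``File`` in use via the public ``click.File`` alias (the command and argument names are illustrative):

.. code-block:: python

    import click

    @click.command()
    @click.argument("src", type=click.File("rb"))
    @click.argument("dst", type=click.File("wb", lazy=True))
    def copy(src, dst):
        # ``-`` for SRC/DST means stdin/stdout; DST is only created
        # on first write because it is lazy.
        dst.write(src.read())

    if __name__ == "__main__":
        copy()

Both files are closed for you when the context tears down, matching the description above.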
- """ - - name = "filename" - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - lazy: t.Optional[bool] = None, - atomic: bool = False, - ) -> None: - self.mode = mode - self.encoding = encoding - self.errors = errors - self.lazy = lazy - self.atomic = atomic - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update(mode=self.mode, encoding=self.encoding) - return info_dict - - def resolve_lazy_flag(self, value: "t.Union[str, os.PathLike[str]]") -> bool: - if self.lazy is not None: - return self.lazy - if os.fspath(value) == "-": - return False - elif "w" in self.mode: - return True - return False - - def convert( - self, - value: t.Union[str, "os.PathLike[str]", t.IO[t.Any]], - param: t.Optional["Parameter"], - ctx: t.Optional["Context"], - ) -> t.IO[t.Any]: - if _is_file_like(value): - return value - - value = t.cast("t.Union[str, os.PathLike[str]]", value) - - try: - lazy = self.resolve_lazy_flag(value) - - if lazy: - lf = LazyFile( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - if ctx is not None: - ctx.call_on_close(lf.close_intelligently) - - return t.cast(t.IO[t.Any], lf) - - f, should_close = open_stream( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - # If a context is provided, we automatically close the file - # at the end of the context execution (or flush out). If a - # context does not exist, it's the caller's responsibility to - # properly close the file. This for instance happens when the - # type is used with prompts. - if ctx is not None: - if should_close: - ctx.call_on_close(safecall(f.close)) - else: - ctx.call_on_close(safecall(f.flush)) - - return f - except OSError as e: # noqa: B014 - self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a special completion marker that tells the completion - system to use the shell to provide file path completions. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - return [CompletionItem(incomplete, type="file")] - - -def _is_file_like(value: t.Any) -> "te.TypeGuard[t.IO[t.Any]]": - return hasattr(value, "read") or hasattr(value, "write") - - -class Path(ParamType): - """The ``Path`` type is similar to the :class:`File` type, but - returns the filename instead of an open file. Various checks can be - enabled to validate the type of file and permissions. - - :param exists: The file or directory needs to exist for the value to - be valid. If this is not set to ``True``, and the file does not - exist, then all further checks are silently skipped. - :param file_okay: Allow a file as a value. - :param dir_okay: Allow a directory as a value. - :param readable: if true, a readable check is performed. - :param writable: if true, a writable check is performed. - :param executable: if true, an executable check is performed. - :param resolve_path: Make the value absolute and resolve any - symlinks. A ``~`` is not expanded, as this is supposed to be - done by the shell only. 
- :param allow_dash: Allow a single dash as a value, which indicates - a standard stream (but does not open it). Use - :func:`~click.open_file` to handle opening this value. - :param path_type: Convert the incoming path value to this type. If - ``None``, keep Python's default, which is ``str``. Useful to - convert to :class:`pathlib.Path`. - - .. versionchanged:: 8.1 - Added the ``executable`` parameter. - - .. versionchanged:: 8.0 - Allow passing ``path_type=pathlib.Path``. - - .. versionchanged:: 6.0 - Added the ``allow_dash`` parameter. - """ - - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - exists: bool = False, - file_okay: bool = True, - dir_okay: bool = True, - writable: bool = False, - readable: bool = True, - resolve_path: bool = False, - allow_dash: bool = False, - path_type: t.Optional[t.Type[t.Any]] = None, - executable: bool = False, - ): - self.exists = exists - self.file_okay = file_okay - self.dir_okay = dir_okay - self.readable = readable - self.writable = writable - self.executable = executable - self.resolve_path = resolve_path - self.allow_dash = allow_dash - self.type = path_type - - if self.file_okay and not self.dir_okay: - self.name: str = _("file") - elif self.dir_okay and not self.file_okay: - self.name = _("directory") - else: - self.name = _("path") - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - exists=self.exists, - file_okay=self.file_okay, - dir_okay=self.dir_okay, - writable=self.writable, - readable=self.readable, - allow_dash=self.allow_dash, - ) - return info_dict - - def coerce_path_result( - self, value: "t.Union[str, os.PathLike[str]]" - ) -> "t.Union[str, bytes, os.PathLike[str]]": - if self.type is not None and not isinstance(value, self.type): - if self.type is str: - return os.fsdecode(value) - elif self.type is bytes: - return os.fsencode(value) - else: - return t.cast("os.PathLike[str]", self.type(value)) - - return value - - def convert( - self, - value: "t.Union[str, os.PathLike[str]]", - param: t.Optional["Parameter"], - ctx: t.Optional["Context"], - ) -> "t.Union[str, bytes, os.PathLike[str]]": - rv = value - - is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") - - if not is_dash: - if self.resolve_path: - # os.path.realpath doesn't resolve symlinks on Windows - # until Python 3.8. Use pathlib for now. 
- import pathlib - - rv = os.fsdecode(pathlib.Path(rv).resolve()) - - try: - st = os.stat(rv) - except OSError: - if not self.exists: - return self.coerce_path_result(rv) - self.fail( - _("{name} {filename!r} does not exist.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if not self.file_okay and stat.S_ISREG(st.st_mode): - self.fail( - _("{name} {filename!r} is a file.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - if not self.dir_okay and stat.S_ISDIR(st.st_mode): - self.fail( - _("{name} '{filename}' is a directory.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.readable and not os.access(rv, os.R_OK): - self.fail( - _("{name} {filename!r} is not readable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.writable and not os.access(rv, os.W_OK): - self.fail( - _("{name} {filename!r} is not writable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.executable and not os.access(value, os.X_OK): - self.fail( - _("{name} {filename!r} is not executable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - return self.coerce_path_result(rv) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a special completion marker that tells the completion - system to use the shell to provide path completions for only - directories or any paths. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - type = "dir" if self.dir_okay and not self.file_okay else "file" - return [CompletionItem(incomplete, type=type)] - - -class Tuple(CompositeParamType): - """The default behavior of Click is to apply a type on a value directly. - This works well in most cases, except for when `nargs` is set to a fixed - count and different types should be used for different items. In this - case the :class:`Tuple` type can be used. This type can only be used - if `nargs` is set to a fixed number. - - For more information see :ref:`tuple-type`. - - This can be selected by using a Python tuple literal as a type. - - :param types: a list of types that should be used for the tuple items. 
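A sketch of the tuple-literal selection mentioned above, assuming the standard ``click`` decorators (the option name and values are illustrative):

.. code-block:: python

    import click

    @click.command()
    @click.option("--item", nargs=2, type=(str, int))
    def put(item):
        name, count = item  # e.g. ("peanuts", 42)
        click.echo(f"{name}={count}")

    if __name__ == "__main__":
        put()

Passing ``type=(str, int)`` works because ``convert_type`` below wraps a tuple literal in this class.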
- """ - - def __init__(self, types: t.Sequence[t.Union[t.Type[t.Any], ParamType]]) -> None: - self.types: t.Sequence[ParamType] = [convert_type(ty) for ty in types] - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["types"] = [t.to_info_dict() for t in self.types] - return info_dict - - @property - def name(self) -> str: # type: ignore - return f"<{' '.join(ty.name for ty in self.types)}>" - - @property - def arity(self) -> int: # type: ignore - return len(self.types) - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - len_type = len(self.types) - len_value = len(value) - - if len_value != len_type: - self.fail( - ngettext( - "{len_type} values are required, but {len_value} was given.", - "{len_type} values are required, but {len_value} were given.", - len_value, - ).format(len_type=len_type, len_value=len_value), - param=param, - ctx=ctx, - ) - - return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) - - -def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: - """Find the most appropriate :class:`ParamType` for the given Python - type. If the type isn't provided, it can be inferred from a default - value. - """ - guessed_type = False - - if ty is None and default is not None: - if isinstance(default, (tuple, list)): - # If the default is empty, ty will remain None and will - # return STRING. - if default: - item = default[0] - - # A tuple of tuples needs to detect the inner types. - # Can't call convert recursively because that would - # incorrectly unwind the tuple to a single type. - if isinstance(item, (tuple, list)): - ty = tuple(map(type, item)) - else: - ty = type(item) - else: - ty = type(default) - - guessed_type = True - - if isinstance(ty, tuple): - return Tuple(ty) - - if isinstance(ty, ParamType): - return ty - - if ty is str or ty is None: - return STRING - - if ty is int: - return INT - - if ty is float: - return FLOAT - - if ty is bool: - return BOOL - - if guessed_type: - return STRING - - if __debug__: - try: - if issubclass(ty, ParamType): - raise AssertionError( - f"Attempted to use an uninstantiated parameter type ({ty})." - ) - except TypeError: - # ty is an instance (correct), so issubclass fails. - pass - - return FuncParamType(ty) - - -#: A dummy parameter type that just does nothing. From a user's -#: perspective this appears to just be the same as `STRING` but -#: internally no string conversion takes place if the input was bytes. -#: This is usually useful when working with file paths as they can -#: appear in bytes and unicode. -#: -#: For path related uses the :class:`Path` type is a better choice but -#: there are situations where an unprocessed type is useful which is why -#: it is is provided. -#: -#: .. versionadded:: 4.0 -UNPROCESSED = UnprocessedParamType() - -#: A unicode string parameter type which is the implicit default. This -#: can also be selected by using ``str`` as type. -STRING = StringParamType() - -#: An integer parameter. This can also be selected by using ``int`` as -#: type. -INT = IntParamType() - -#: A floating point value parameter. This can also be selected by using -#: ``float`` as type. -FLOAT = FloatParamType() - -#: A boolean parameter. This is the default for boolean flags. This can -#: also be selected by using ``bool`` as a type. -BOOL = BoolParamType() - -#: A UUID parameter. 
-UUID = UUIDParameterType() diff --git a/server/venv/Lib/site-packages/click/utils.py b/server/venv/Lib/site-packages/click/utils.py deleted file mode 100644 index d536434..0000000 --- a/server/venv/Lib/site-packages/click/utils.py +++ /dev/null @@ -1,624 +0,0 @@ -import os -import re -import sys -import typing as t -from functools import update_wrapper -from types import ModuleType -from types import TracebackType - -from ._compat import _default_text_stderr -from ._compat import _default_text_stdout -from ._compat import _find_binary_writer -from ._compat import auto_wrap_for_ansi -from ._compat import binary_streams -from ._compat import open_stream -from ._compat import should_strip_ansi -from ._compat import strip_ansi -from ._compat import text_streams -from ._compat import WIN -from .globals import resolve_color_default - -if t.TYPE_CHECKING: - import typing_extensions as te - - P = te.ParamSpec("P") - -R = t.TypeVar("R") - - -def _posixify(name: str) -> str: - return "-".join(name.split()).lower() - - -def safecall(func: "t.Callable[P, R]") -> "t.Callable[P, t.Optional[R]]": - """Wraps a function so that it swallows exceptions.""" - - def wrapper(*args: "P.args", **kwargs: "P.kwargs") -> t.Optional[R]: - try: - return func(*args, **kwargs) - except Exception: - pass - return None - - return update_wrapper(wrapper, func) - - -def make_str(value: t.Any) -> str: - """Converts a value into a valid string.""" - if isinstance(value, bytes): - try: - return value.decode(sys.getfilesystemencoding()) - except UnicodeError: - return value.decode("utf-8", "replace") - return str(value) - - -def make_default_short_help(help: str, max_length: int = 45) -> str: - """Returns a condensed version of help string.""" - # Consider only the first paragraph. - paragraph_end = help.find("\n\n") - - if paragraph_end != -1: - help = help[:paragraph_end] - - # Collapse newlines, tabs, and spaces. - words = help.split() - - if not words: - return "" - - # The first paragraph started with a "no rewrap" marker, ignore it. - if words[0] == "\b": - words = words[1:] - - total_length = 0 - last_index = len(words) - 1 - - for i, word in enumerate(words): - total_length += len(word) + (i > 0) - - if total_length > max_length: # too long, truncate - break - - if word[-1] == ".": # sentence end, truncate without "..." - return " ".join(words[: i + 1]) - - if total_length == max_length and i != last_index: - break # not at sentence end, truncate with "..." - else: - return " ".join(words) # no truncation needed - - # Account for the length of the suffix. - total_length += len("...") - - # remove words until the length is short enough - while i > 0: - total_length -= len(words[i]) + (i > 0) - - if total_length <= max_length: - break - - i -= 1 - - return " ".join(words[:i]) + "..." - - -class LazyFile: - """A lazy file works like a regular file but it does not fully open - the file but it does perform some basic checks early to see if the - filename parameter does make sense. This is useful for safely opening - files for writing. 
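A minimal sketch of the lazy behaviour described above, assuming ``LazyFile`` is imported from ``click.utils`` as defined here (the file name is illustrative):

.. code-block:: python

    from click.utils import LazyFile

    lf = LazyFile("report.txt", "w")
    # Nothing has been created on disk yet; the first attribute
    # access opens the underlying file via ``open()``.
    lf.write("one line\n")
    lf.close_intelligently()  # closes only files the wrapper itself opened

Because ``close_intelligently`` only closes what the wrapper opened, wrapping ``-`` (stdin/stdout) never closes the standard stream.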
- """ - - def __init__( - self, - filename: t.Union[str, "os.PathLike[str]"], - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - atomic: bool = False, - ): - self.name: str = os.fspath(filename) - self.mode = mode - self.encoding = encoding - self.errors = errors - self.atomic = atomic - self._f: t.Optional[t.IO[t.Any]] - self.should_close: bool - - if self.name == "-": - self._f, self.should_close = open_stream(filename, mode, encoding, errors) - else: - if "r" in mode: - # Open and close the file in case we're opening it for - # reading so that we can catch at least some errors in - # some cases early. - open(filename, mode).close() - self._f = None - self.should_close = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self.open(), name) - - def __repr__(self) -> str: - if self._f is not None: - return repr(self._f) - return f"" - - def open(self) -> t.IO[t.Any]: - """Opens the file if it's not yet open. This call might fail with - a :exc:`FileError`. Not handling this error will produce an error - that Click shows. - """ - if self._f is not None: - return self._f - try: - rv, self.should_close = open_stream( - self.name, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - except OSError as e: # noqa: E402 - from .exceptions import FileError - - raise FileError(self.name, hint=e.strerror) from e - self._f = rv - return rv - - def close(self) -> None: - """Closes the underlying file, no matter what.""" - if self._f is not None: - self._f.close() - - def close_intelligently(self) -> None: - """This function only closes the file if it was opened by the lazy - file wrapper. For instance this will never close stdin. - """ - if self.should_close: - self.close() - - def __enter__(self) -> "LazyFile": - return self - - def __exit__( - self, - exc_type: t.Optional[t.Type[BaseException]], - exc_value: t.Optional[BaseException], - tb: t.Optional[TracebackType], - ) -> None: - self.close_intelligently() - - def __iter__(self) -> t.Iterator[t.AnyStr]: - self.open() - return iter(self._f) # type: ignore - - -class KeepOpenFile: - def __init__(self, file: t.IO[t.Any]) -> None: - self._file: t.IO[t.Any] = file - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._file, name) - - def __enter__(self) -> "KeepOpenFile": - return self - - def __exit__( - self, - exc_type: t.Optional[t.Type[BaseException]], - exc_value: t.Optional[BaseException], - tb: t.Optional[TracebackType], - ) -> None: - pass - - def __repr__(self) -> str: - return repr(self._file) - - def __iter__(self) -> t.Iterator[t.AnyStr]: - return iter(self._file) - - -def echo( - message: t.Optional[t.Any] = None, - file: t.Optional[t.IO[t.Any]] = None, - nl: bool = True, - err: bool = False, - color: t.Optional[bool] = None, -) -> None: - """Print a message and newline to stdout or a file. This should be - used instead of :func:`print` because it provides better support - for different data, files, and environments. - - Compared to :func:`print`, this does the following: - - - Ensures that the output encoding is not misconfigured on Linux. - - Supports Unicode in the Windows console. - - Supports writing to binary outputs, and supports writing bytes - to text outputs. - - Supports colors and styles on Windows. - - Removes ANSI color and style codes if the output does not look - like an interactive terminal. - - Always flushes the output. - - :param message: The string or bytes to output. Other objects are - converted to strings. 
- :param file: The file to write to. Defaults to ``stdout``. - :param err: Write to ``stderr`` instead of ``stdout``. - :param nl: Print a newline after the message. Enabled by default. - :param color: Force showing or hiding colors and other styles. By - default Click will remove color if the output does not look like - an interactive terminal. - - .. versionchanged:: 6.0 - Support Unicode output on the Windows console. Click does not - modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` - will still not support Unicode. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionadded:: 3.0 - Added the ``err`` parameter. - - .. versionchanged:: 2.0 - Support colors on Windows if colorama is installed. - """ - if file is None: - if err: - file = _default_text_stderr() - else: - file = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if file is None: - return - - # Convert non bytes/text into the native string type. - if message is not None and not isinstance(message, (str, bytes, bytearray)): - out: t.Optional[t.Union[str, bytes]] = str(message) - else: - out = message - - if nl: - out = out or "" - if isinstance(out, str): - out += "\n" - else: - out += b"\n" - - if not out: - file.flush() - return - - # If there is a message and the value looks like bytes, we manually - # need to find the binary stream and write the message in there. - # This is done separately so that most stream types will work as you - # would expect. Eg: you can write to StringIO for other cases. - if isinstance(out, (bytes, bytearray)): - binary_file = _find_binary_writer(file) - - if binary_file is not None: - file.flush() - binary_file.write(out) - binary_file.flush() - return - - # ANSI style code support. For no message or bytes, nothing happens. - # When outputting to a file instead of a terminal, strip codes. - else: - color = resolve_color_default(color) - - if should_strip_ansi(file, color): - out = strip_ansi(out) - elif WIN: - if auto_wrap_for_ansi is not None: - file = auto_wrap_for_ansi(file) # type: ignore - elif not color: - out = strip_ansi(out) - - file.write(out) # type: ignore - file.flush() - - -def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: - """Returns a system stream for byte processing. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - """ - opener = binary_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener() - - -def get_text_stream( - name: "te.Literal['stdin', 'stdout', 'stderr']", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", -) -> t.TextIO: - """Returns a system stream for text processing. This usually returns - a wrapped stream around a binary stream returned from - :func:`get_binary_stream` but it also can take shortcuts for already - correctly configured streams. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - :param encoding: overrides the detected default encoding. - :param errors: overrides the default error mode. 
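To make the stream handling concrete, a small sketch using only the public helpers documented here:

.. code-block:: python

    import click

    click.echo("regular text", err=True)   # text message, routed to stderr
    click.echo(b"\x1b[31mbytes\x1b[0m")    # bytes go to the binary stream unchanged

    out = click.get_binary_stream("stdout")
    out.write(b"raw bytes, no processing\n")
    out.flush()

Note how byte messages bypass the text stream entirely, which corresponds to the ``_find_binary_writer`` branch in ``echo`` above.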
- """ - opener = text_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener(encoding, errors) - - -def open_file( - filename: str, - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - lazy: bool = False, - atomic: bool = False, -) -> t.IO[t.Any]: - """Open a file, with extra behavior to handle ``'-'`` to indicate - a standard stream, lazy open on write, and atomic write. Similar to - the behavior of the :class:`~click.File` param type. - - If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is - wrapped so that using it in a context manager will not close it. - This makes it possible to use the function without accidentally - closing a standard stream: - - .. code-block:: python - - with open_file(filename) as f: - ... - - :param filename: The name of the file to open, or ``'-'`` for - ``stdin``/``stdout``. - :param mode: The mode in which to open the file. - :param encoding: The encoding to decode or encode a file opened in - text mode. - :param errors: The error handling mode. - :param lazy: Wait to open the file until it is accessed. For read - mode, the file is temporarily opened to raise access errors - early, then closed until it is read again. - :param atomic: Write to a temporary file and replace the given file - on close. - - .. versionadded:: 3.0 - """ - if lazy: - return t.cast( - t.IO[t.Any], LazyFile(filename, mode, encoding, errors, atomic=atomic) - ) - - f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) - - if not should_close: - f = t.cast(t.IO[t.Any], KeepOpenFile(f)) - - return f - - -def format_filename( - filename: "t.Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]", - shorten: bool = False, -) -> str: - """Format a filename as a string for display. Ensures the filename can be - displayed by replacing any invalid bytes or surrogate escapes in the name - with the replacement character ``�``. - - Invalid bytes or surrogate escapes will raise an error when written to a - stream with ``errors="strict". This will typically happen with ``stdout`` - when the locale is something like ``en_GB.UTF-8``. - - Many scenarios *are* safe to write surrogates though, due to PEP 538 and - PEP 540, including: - - - Writing to ``stderr``, which uses ``errors="backslashreplace"``. - - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens - stdout and stderr with ``errors="surrogateescape"``. - - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``. - - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``. - Python opens stdout and stderr with ``errors="surrogateescape"``. - - :param filename: formats a filename for UI display. This will also convert - the filename into unicode without failing. - :param shorten: this optionally shortens the filename to strip of the - path that leads up to it. - """ - if shorten: - filename = os.path.basename(filename) - else: - filename = os.fspath(filename) - - if isinstance(filename, bytes): - filename = filename.decode(sys.getfilesystemencoding(), "replace") - else: - filename = filename.encode("utf-8", "surrogateescape").decode( - "utf-8", "replace" - ) - - return filename - - -def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: - r"""Returns the config folder for the application. The default behavior - is to return whatever is most appropriate for the operating system. 
- - To give you an idea, for an app called ``"Foo Bar"``, something like - the following folders could be returned: - - Mac OS X: - ``~/Library/Application Support/Foo Bar`` - Mac OS X (POSIX): - ``~/.foo-bar`` - Unix: - ``~/.config/foo-bar`` - Unix (POSIX): - ``~/.foo-bar`` - Windows (roaming): - ``C:\Users\<user>\AppData\Roaming\Foo Bar`` - Windows (not roaming): - ``C:\Users\<user>\AppData\Local\Foo Bar`` - - .. versionadded:: 2.0 - - :param app_name: the application name. This should be properly capitalized - and can contain whitespace. - :param roaming: controls whether the folder should be roaming or not on Windows. - Has no effect otherwise. - :param force_posix: if this is set to `True` then on any POSIX system the - folder will be stored in the home folder with a leading - dot instead of the XDG config home or darwin's - application support folder. - """ - if WIN: - key = "APPDATA" if roaming else "LOCALAPPDATA" - folder = os.environ.get(key) - if folder is None: - folder = os.path.expanduser("~") - return os.path.join(folder, app_name) - if force_posix: - return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) - if sys.platform == "darwin": - return os.path.join( - os.path.expanduser("~/Library/Application Support"), app_name - ) - return os.path.join( - os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), - _posixify(app_name), - ) - - -class PacifyFlushWrapper: - """This wrapper is used to catch and suppress BrokenPipeErrors resulting - from ``.flush()`` being called on a broken pipe during the shutdown/final-GC - of the Python interpreter. Notably ``.flush()`` is always called on - ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any - other cleanup code, and the case where the underlying file is not a broken - pipe, all calls and attributes are proxied. - """ - - def __init__(self, wrapped: t.IO[t.Any]) -> None: - self.wrapped = wrapped - - def flush(self) -> None: - try: - self.wrapped.flush() - except OSError as e: - import errno - - if e.errno != errno.EPIPE: - raise - - def __getattr__(self, attr: str) -> t.Any: - return getattr(self.wrapped, attr) - - -def _detect_program_name( - path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None -) -> str: - """Determine the command used to run the program, for use in help - text. If a file or entry point was executed, the file name is - returned. If ``python -m`` was used to execute a module or package, - ``python -m name`` is returned. - - This doesn't try to be too precise, the goal is to give a concise - name for help text. Files are only shown as their name without the - path. ``python`` is only shown for modules, and the full path to - ``sys.executable`` is not shown. - - :param path: The Python file being executed. Python puts this in - ``sys.argv[0]``, which is used by default. - :param _main: The ``__main__`` module. This should only be passed - during internal testing. - - .. versionadded:: 8.0 - Based on command args detection in the Werkzeug reloader. - - :meta private: - """ - if _main is None: - _main = sys.modules["__main__"] - - if not path: - path = sys.argv[0] - - # The value of __package__ indicates how Python was called. It may - # not exist if a setuptools script is installed as an egg. It may be - # set incorrectly for entry points created with pip on Windows. - # It is set to "" inside a Shiv or PEX zipapp.
- if getattr(_main, "__package__", None) in {None, ""} or ( - os.name == "nt" - and _main.__package__ == "" - and not os.path.exists(path) - and os.path.exists(f"{path}.exe") - ): - # Executed a file, like "python app.py". - return os.path.basename(path) - - # Executed a module, like "python -m example". - # Rewritten by Python from "-m script" to "/path/to/script.py". - # Need to look at main module to determine how it was executed. - py_module = t.cast(str, _main.__package__) - name = os.path.splitext(os.path.basename(path))[0] - - # A submodule like "example.cli". - if name != "__main__": - py_module = f"{py_module}.{name}" - - return f"python -m {py_module.lstrip('.')}" - - -def _expand_args( - args: t.Iterable[str], - *, - user: bool = True, - env: bool = True, - glob_recursive: bool = True, -) -> t.List[str]: - """Simulate Unix shell expansion with Python functions. - - See :func:`glob.glob`, :func:`os.path.expanduser`, and - :func:`os.path.expandvars`. - - This is intended for use on Windows, where the shell does not do any - expansion. It may not exactly match what a Unix shell would do. - - :param args: List of command line arguments to expand. - :param user: Expand user home directory. - :param env: Expand environment variables. - :param glob_recursive: ``**`` matches directories recursively. - - .. versionchanged:: 8.1 - Invalid glob patterns are treated as empty expansions rather - than raising an error. - - .. versionadded:: 8.0 - - :meta private: - """ - from glob import glob - - out = [] - - for arg in args: - if user: - arg = os.path.expanduser(arg) - - if env: - arg = os.path.expandvars(arg) - - try: - matches = glob(arg, recursive=glob_recursive) - except re.error: - matches = [] - - if not matches: - out.append(arg) - else: - out.extend(matches) - - return out diff --git a/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER b/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/METADATA b/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/METADATA deleted file mode 100644 index a1b5c57..0000000 --- a/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/METADATA +++ /dev/null @@ -1,441 +0,0 @@ -Metadata-Version: 2.1 -Name: colorama -Version: 0.4.6 -Summary: Cross-platform colored terminal text. 
-Project-URL: Homepage, https://github.com/tartley/colorama -Author-email: Jonathan Hartley -License-File: LICENSE.txt -Keywords: ansi,color,colour,crossplatform,terminal,text,windows,xplatform -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Terminals -Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7 -Description-Content-Type: text/x-rst - -.. image:: https://img.shields.io/pypi/v/colorama.svg - :target: https://pypi.org/project/colorama/ - :alt: Latest Version - -.. image:: https://img.shields.io/pypi/pyversions/colorama.svg - :target: https://pypi.org/project/colorama/ - :alt: Supported Python versions - -.. image:: https://github.com/tartley/colorama/actions/workflows/test.yml/badge.svg - :target: https://github.com/tartley/colorama/actions/workflows/test.yml - :alt: Build Status - -Colorama -======== - -Makes ANSI escape character sequences (for producing colored terminal text and -cursor positioning) work under MS Windows. - -.. |donate| image:: https://www.paypalobjects.com/en_US/i/btn/btn_donate_SM.gif - :target: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=2MZ9D2GMLYCUJ&item_name=Colorama&currency_code=USD - :alt: Donate with Paypal - -`PyPI for releases `_ | -`Github for source `_ | -`Colorama for enterprise on Tidelift `_ - -If you find Colorama useful, please |donate| to the authors. Thank you! - -Installation ------------- - -Tested on CPython 2.7, 3.7, 3.8, 3.9 and 3.10 and Pypy 2.7 and 3.8. - -No requirements other than the standard library. - -.. code-block:: bash - - pip install colorama - # or - conda install -c anaconda colorama - -Description ------------ - -ANSI escape character sequences have long been used to produce colored terminal -text and cursor positioning on Unix and Macs. Colorama makes this work on -Windows, too, by wrapping ``stdout``, stripping ANSI sequences it finds (which -would appear as gobbledygook in the output), and converting them into the -appropriate win32 calls to modify the state of the terminal. On other platforms, -Colorama does nothing. - -This has the upshot of providing a simple cross-platform API for printing -colored terminal text from Python, and has the happy side-effect that existing -applications or libraries which use ANSI sequences to produce colored output on -Linux or Macs can now also work on Windows, simply by calling -``colorama.just_fix_windows_console()`` (since v0.4.6) or ``colorama.init()`` -(all versions, but may have other side-effects – see below). - -An alternative approach is to install ``ansi.sys`` on Windows machines, which -provides the same behaviour for all applications running in terminals. Colorama -is intended for situations where that isn't easy (e.g., maybe your app doesn't -have an installer.)
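A minimal sketch of that "simple cross-platform API", using only the calls named above:

.. code-block:: python

    import colorama

    colorama.just_fix_windows_console()  # no-op except on a legacy Windows console

    print("\033[31m" + "red text on every platform" + "\033[0m")

On Unix the call does nothing and the raw escape sequence passes through; on an old Windows console it is translated into the equivalent win32 calls.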
- -Demo scripts in the source code repository print some colored text using -ANSI sequences. Compare their output under Gnome-terminal's built in ANSI -handling, versus on Windows Command-Prompt using Colorama: - -.. image:: https://github.com/tartley/colorama/raw/master/screenshots/ubuntu-demo.png - :width: 661 - :height: 357 - :alt: ANSI sequences on Ubuntu under gnome-terminal. - -.. image:: https://github.com/tartley/colorama/raw/master/screenshots/windows-demo.png - :width: 668 - :height: 325 - :alt: Same ANSI sequences on Windows, using Colorama. - -These screenshots show that, on Windows, Colorama does not support ANSI 'dim -text'; it looks the same as 'normal text'. - -Usage ------ - -Initialisation -.............. - -If the only thing you want from Colorama is to get ANSI escapes to work on -Windows, then run: - -.. code-block:: python - - from colorama import just_fix_windows_console - just_fix_windows_console() - -If you're on a recent version of Windows 10 or better, and your stdout/stderr -are pointing to a Windows console, then this will flip the magic configuration -switch to enable Windows' built-in ANSI support. - -If you're on an older version of Windows, and your stdout/stderr are pointing to -a Windows console, then this will wrap ``sys.stdout`` and/or ``sys.stderr`` in a -magic file object that intercepts ANSI escape sequences and issues the -appropriate Win32 calls to emulate them. - -In all other circumstances, it does nothing whatsoever. Basically the idea is -that this makes Windows act like Unix with respect to ANSI escape handling. - -It's safe to call this function multiple times. It's safe to call this function -on non-Windows platforms, but it won't do anything. It's safe to call this -function when one or both of your stdout/stderr are redirected to a file – it -won't do anything to those streams. - -Alternatively, you can use the older interface with more features (but also more -potential footguns): - -.. code-block:: python - - from colorama import init - init() - -This does the same thing as ``just_fix_windows_console``, except for the -following differences: - -- It's not safe to call ``init`` multiple times; you can end up with multiple - layers of wrapping and broken ANSI support. - -- Colorama will apply a heuristic to guess whether stdout/stderr support ANSI, - and if it thinks they don't, then it will wrap ``sys.stdout`` and - ``sys.stderr`` in a magic file object that strips out ANSI escape sequences - before printing them. This happens on all platforms, and can be convenient if - you want to write your code to emit ANSI escape sequences unconditionally, and - let Colorama decide whether they should actually be output. But note that - Colorama's heuristic is not particularly clever. - -- ``init`` also accepts explicit keyword args to enable/disable various - functionality – see below. - -To stop using Colorama before your program exits, simply call ``deinit()``. -This will restore ``stdout`` and ``stderr`` to their original values, so that -Colorama is disabled. To resume using Colorama again, call ``reinit()``; it is -cheaper than calling ``init()`` again (but does the same thing). - -Most users should depend on ``colorama >= 0.4.6``, and use -``just_fix_windows_console``. The old ``init`` interface will be supported -indefinitely for backwards compatibility, but we don't plan to fix any issues -with it, also for backwards compatibility. - -Colored Output -.............. 
- -Cross-platform printing of colored text can then be done using Colorama's -constant shorthand for ANSI escape sequences. These are deliberately -rudimentary, see below. - -.. code-block:: python - - from colorama import Fore, Back, Style - print(Fore.RED + 'some red text') - print(Back.GREEN + 'and with a green background') - print(Style.DIM + 'and in dim text') - print(Style.RESET_ALL) - print('back to normal now') - -...or simply by manually printing ANSI sequences from your own code: - -.. code-block:: python - - print('\033[31m' + 'some red text') - print('\033[39m') # and reset to default color - -...or, Colorama can be used in conjunction with existing ANSI libraries -such as the venerable `Termcolor `_ -the fabulous `Blessings `_, -or the incredible `_Rich `_. - -If you wish Colorama's Fore, Back and Style constants were more capable, -then consider using one of the above highly capable libraries to generate -colors, etc, and use Colorama just for its primary purpose: to convert -those ANSI sequences to also work on Windows: - -SIMILARLY, do not send PRs adding the generation of new ANSI types to Colorama. -We are only interested in converting ANSI codes to win32 API calls, not -shortcuts like the above to generate ANSI characters. - -.. code-block:: python - - from colorama import just_fix_windows_console - from termcolor import colored - - # use Colorama to make Termcolor work on Windows too - just_fix_windows_console() - - # then use Termcolor for all colored text output - print(colored('Hello, World!', 'green', 'on_red')) - -Available formatting constants are:: - - Fore: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET. - Back: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET. - Style: DIM, NORMAL, BRIGHT, RESET_ALL - -``Style.RESET_ALL`` resets foreground, background, and brightness. Colorama will -perform this reset automatically on program exit. - -These are fairly well supported, but not part of the standard:: - - Fore: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX - Back: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX - -Cursor Positioning -.................. - -ANSI codes to reposition the cursor are supported. See ``demos/demo06.py`` for -an example of how to generate them. - -Init Keyword Args -................. - -``init()`` accepts some ``**kwargs`` to override default behaviour. - -init(autoreset=False): - If you find yourself repeatedly sending reset sequences to turn off color - changes at the end of every print, then ``init(autoreset=True)`` will - automate that: - - .. code-block:: python - - from colorama import init - init(autoreset=True) - print(Fore.RED + 'some red text') - print('automatically back to default color again') - -init(strip=None): - Pass ``True`` or ``False`` to override whether ANSI codes should be - stripped from the output. The default behaviour is to strip if on Windows - or if output is redirected (not a tty). - -init(convert=None): - Pass ``True`` or ``False`` to override whether to convert ANSI codes in the - output into win32 calls. The default behaviour is to convert if on Windows - and output is to a tty (terminal). - -init(wrap=True): - On Windows, Colorama works by replacing ``sys.stdout`` and ``sys.stderr`` - with proxy objects, which override the ``.write()`` method to do their work. 
-    If this wrapping causes you problems, then it can be disabled by passing
-    ``init(wrap=False)``. The default behaviour is to wrap if ``autoreset`` or
-    ``strip`` or ``convert`` are True.
-
-    When wrapping is disabled, colored printing on non-Windows platforms will
-    continue to work as normal. To do cross-platform colored output, you can
-    use Colorama's ``AnsiToWin32`` proxy directly:
-
-    .. code-block:: python
-
-        import sys
-        from colorama import init, AnsiToWin32, Fore
-        init(wrap=False)
-        stream = AnsiToWin32(sys.stderr).stream
-
-        # Python 2
-        print >>stream, Fore.BLUE + 'blue text on stderr'
-
-        # Python 3
-        print(Fore.BLUE + 'blue text on stderr', file=stream)
-
-Recognised ANSI Sequences
-.........................
-
-ANSI sequences generally take the form::
-
-    ESC [ <param> ; <param> ... <command>
-
-Where ``<param>`` is an integer, and ``<command>`` is a single letter. Zero or
-more params are passed to a ``<command>``. If no params are passed, it is
-generally synonymous with passing a single zero. No spaces exist in the
-sequence; they have been inserted here simply to read more easily.
-
-The only ANSI sequences that Colorama converts into win32 calls are::
-
-    ESC [ 0 m       # reset all (colors and brightness)
-    ESC [ 1 m       # bright
-    ESC [ 2 m       # dim (looks same as normal brightness)
-    ESC [ 22 m      # normal brightness
-
-    # FOREGROUND:
-    ESC [ 30 m      # black
-    ESC [ 31 m      # red
-    ESC [ 32 m      # green
-    ESC [ 33 m      # yellow
-    ESC [ 34 m      # blue
-    ESC [ 35 m      # magenta
-    ESC [ 36 m      # cyan
-    ESC [ 37 m      # white
-    ESC [ 39 m      # reset
-
-    # BACKGROUND:
-    ESC [ 40 m      # black
-    ESC [ 41 m      # red
-    ESC [ 42 m      # green
-    ESC [ 43 m      # yellow
-    ESC [ 44 m      # blue
-    ESC [ 45 m      # magenta
-    ESC [ 46 m      # cyan
-    ESC [ 47 m      # white
-    ESC [ 49 m      # reset
-
-    # cursor positioning
-    ESC [ y;x H     # position cursor at x across, y down
-    ESC [ y;x f     # position cursor at x across, y down
-    ESC [ n A       # move cursor n lines up
-    ESC [ n B       # move cursor n lines down
-    ESC [ n C       # move cursor n characters forward
-    ESC [ n D       # move cursor n characters backward
-
-    # clear the screen
-    ESC [ mode J    # clear the screen
-
-    # clear the line
-    ESC [ mode K    # clear the line
-
-Multiple numeric params to the ``'m'`` command can be combined into a single
-sequence::
-
-    ESC [ 36 ; 45 ; 1 m     # bright cyan text on magenta background
-
-All other ANSI sequences of the form ``ESC [ <param> ; ... <command>`` are
-silently stripped from the output on Windows.
-
-Any other form of ANSI sequence, such as single-character codes or alternative
-initial characters, is not recognised or stripped. It would be cool to add
-them though. Let me know if it would be useful for you, via the Issues on
-GitHub.
-
-Status & Known Problems
------------------------
-
-I've personally only tested it on Windows XP (CMD, Console2), Ubuntu
-(gnome-terminal, xterm), and OS X.
-
-Some valid ANSI sequences aren't recognised.
-
-If you're hacking on the code, see `README-hacking.md`_. ESPECIALLY, see the
-explanation there of why we do not want PRs that allow Colorama to generate new
-types of ANSI codes.
-
-See outstanding issues and the wish-list:
-https://github.com/tartley/colorama/issues
-
-If anything doesn't work for you, or doesn't do what you expected or hoped for,
-I'd love to hear about it on that issues list, would be delighted by patches,
-and would be happy to grant commit access to anyone who submits a working patch
-or two.
-
-.. _README-hacking.md: README-hacking.md
-
-License
--------
-
-Copyright Jonathan Hartley & Arnon Yaari, 2013-2020. BSD 3-Clause license; see
-LICENSE file.
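-
-As a concrete check of the combined-parameter sequences described under
-"Recognised ANSI Sequences" above, the following minimal sketch (assuming
-``colorama >= 0.4.6`` is installed) shows that the constant shorthand is
-nothing more than pre-built escape strings:
-
-.. code-block:: python
-
-    from colorama import Back, Fore, Style, just_fix_windows_console
-
-    # Enable ANSI handling on Windows; a no-op everywhere else.
-    just_fix_windows_console()
-
-    # Each constant is a pre-built escape string...
-    assert Fore.CYAN + Back.MAGENTA + Style.BRIGHT == '\033[36m\033[45m\033[1m'
-
-    # ...and the equivalent single combined-parameter sequence renders the
-    # same bright cyan text on a magenta background.
-    print('\033[36;45;1m' + 'bright cyan text on magenta background')
-    print(Style.RESET_ALL + 'back to normal')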
-
-Professional support
---------------------
-
-.. |tideliftlogo| image:: https://cdn2.hubspot.net/hubfs/4008838/website/logos/logos_for_download/Tidelift_primary-shorthand-logo.png
-    :alt: Tidelift
-    :target: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme
-
-.. list-table::
-    :widths: 10 100
-
-    * - |tideliftlogo|
-      - Professional support for colorama is available as part of the
-        `Tidelift Subscription`_.
-        Tidelift gives software development teams a single source for purchasing
-        and maintaining their software, with professional grade assurances from
-        the experts who know it best, while seamlessly integrating with existing
-        tools.
-
-.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme
-
-Thanks
-------
-
-See the CHANGELOG for more thanks!
-
-* Marc Schlaich (schlamar) for a ``setup.py`` fix for Python2.5.
-* Marc Abramowitz, reported & fixed a crash on exit with closed ``stdout``,
-  providing a solution to issue #7's setuptools/distutils debate,
-  and other fixes.
-* User 'eryksun', for guidance on correctly instantiating ``ctypes.windll``.
-* Matthew McCormick for politely pointing out a longstanding crash on non-Win.
-* Ben Hoyt, for a magnificent fix under 64-bit Windows.
-* Jesse at Empty Square for submitting a fix for examples in the README.
-* User 'jamessp', an observant documentation fix for cursor positioning.
-* User 'vaal1239', Dave Mckee & Lackner Kristof for a tiny but much-needed Win7
-  fix.
-* Julien Stuyck, for wisely suggesting Python3 compatible updates to README.
-* Daniel Griffith for multiple fabulous patches.
-* Oscar Lesta for a valuable fix to stop ANSI chars being sent to non-tty
-  output.
-* Roger Binns, for many suggestions, valuable feedback, & bug reports.
-* Tim Golden for thought and much appreciated feedback on the initial idea.
-* User 'Zearin' for updates to the README file.
-* John Szakmeister for adding support for light colors.
-* Charles Merriam for adding documentation to demos.
-* Jurko for a fix on 64-bit Windows CPython2.5 w/o ctypes.
-* Florian Bruhin for a fix when stdout or stderr are None.
-* Thomas Weininger for fixing ValueError on Windows.
-* Remi Rampin for better GitHub integration and fixes to the README file.
-* Simeon Visser for closing a file handle using 'with' and updating classifiers
-  to include Python 3.3 and 3.4.
-* Andy Neff for fixing RESET of LIGHT_EX colors.
-* Jonathan Hartley for the initial idea and implementation.
diff --git a/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/RECORD b/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/RECORD deleted file mode 100644 index 0e0eb30..0000000 --- a/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/RECORD +++ /dev/null @@ -1,32 +0,0 @@ -colorama-0.4.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -colorama-0.4.6.dist-info/METADATA,sha256=e67SnrUMOym9sz_4TjF3vxvAV4T3aF7NyqRHHH3YEMw,17158 -colorama-0.4.6.dist-info/RECORD,, -colorama-0.4.6.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -colorama-0.4.6.dist-info/WHEEL,sha256=cdcF4Fbd0FPtw2EMIOwH-3rSOTUdTCeOSXRMD1iLUb8,105 -colorama-0.4.6.dist-info/licenses/LICENSE.txt,sha256=ysNcAmhuXQSlpxQL-zs25zrtSWZW6JEQLkKIhteTAxg,1491 -colorama/__init__.py,sha256=wePQA4U20tKgYARySLEC047ucNX-g8pRLpYBuiHlLb8,266 -colorama/__pycache__/__init__.cpython-311.pyc,, -colorama/__pycache__/ansi.cpython-311.pyc,, -colorama/__pycache__/ansitowin32.cpython-311.pyc,, -colorama/__pycache__/initialise.cpython-311.pyc,, -colorama/__pycache__/win32.cpython-311.pyc,, -colorama/__pycache__/winterm.cpython-311.pyc,, -colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522 -colorama/ansitowin32.py,sha256=vPNYa3OZbxjbuFyaVo0Tmhmy1FZ1lKMWCnT7odXpItk,11128 -colorama/initialise.py,sha256=-hIny86ClXo39ixh5iSCfUIa2f_h_bgKRDW7gqs-KLU,3325 -colorama/tests/__init__.py,sha256=MkgPAEzGQd-Rq0w0PZXSX2LadRWhUECcisJY8lSrm4Q,75 -colorama/tests/__pycache__/__init__.cpython-311.pyc,, -colorama/tests/__pycache__/ansi_test.cpython-311.pyc,, -colorama/tests/__pycache__/ansitowin32_test.cpython-311.pyc,, -colorama/tests/__pycache__/initialise_test.cpython-311.pyc,, -colorama/tests/__pycache__/isatty_test.cpython-311.pyc,, -colorama/tests/__pycache__/utils.cpython-311.pyc,, -colorama/tests/__pycache__/winterm_test.cpython-311.pyc,, -colorama/tests/ansi_test.py,sha256=FeViDrUINIZcr505PAxvU4AjXz1asEiALs9GXMhwRaE,2839 -colorama/tests/ansitowin32_test.py,sha256=RN7AIhMJ5EqDsYaCjVo-o4u8JzDD4ukJbmevWKS70rY,10678 -colorama/tests/initialise_test.py,sha256=BbPy-XfyHwJ6zKozuQOvNvQZzsx9vdb_0bYXn7hsBTc,6741 -colorama/tests/isatty_test.py,sha256=Pg26LRpv0yQDB5Ac-sxgVXG7hsA1NYvapFgApZfYzZg,1866 -colorama/tests/utils.py,sha256=1IIRylG39z5-dzq09R_ngufxyPZxgldNbrxKxUGwGKE,1079 -colorama/tests/winterm_test.py,sha256=qoWFPEjym5gm2RuMwpf3pOis3a5r_PJZFCzK254JL8A,3709 -colorama/win32.py,sha256=YQOKwMTwtGBbsY4dL5HYTvwTeP9wIQra5MvPNddpxZs,6181 -colorama/winterm.py,sha256=XCQFDHjPi6AHYNdZwy0tA02H-Jh48Jp-HvCjeLeLp3U,7134 diff --git a/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/REQUESTED b/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL b/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL deleted file mode 100644 index d79189f..0000000 --- a/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.11.1 -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any diff --git a/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt b/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt deleted file mode 100644 index 3105888..0000000 --- a/server/venv/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2010 Jonathan Hartley -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holders, nor those of its contributors - may be used to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/server/venv/Lib/site-packages/colorama/__init__.py b/server/venv/Lib/site-packages/colorama/__init__.py deleted file mode 100644 index 383101c..0000000 --- a/server/venv/Lib/site-packages/colorama/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -from .initialise import init, deinit, reinit, colorama_text, just_fix_windows_console -from .ansi import Fore, Back, Style, Cursor -from .ansitowin32 import AnsiToWin32 - -__version__ = '0.4.6' - diff --git a/server/venv/Lib/site-packages/colorama/ansi.py b/server/venv/Lib/site-packages/colorama/ansi.py deleted file mode 100644 index 11ec695..0000000 --- a/server/venv/Lib/site-packages/colorama/ansi.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -''' -This module generates ANSI character codes to printing colors to terminals. -See: http://en.wikipedia.org/wiki/ANSI_escape_code -''' - -CSI = '\033[' -OSC = '\033]' -BEL = '\a' - - -def code_to_chars(code): - return CSI + str(code) + 'm' - -def set_title(title): - return OSC + '2;' + title + BEL - -def clear_screen(mode=2): - return CSI + str(mode) + 'J' - -def clear_line(mode=2): - return CSI + str(mode) + 'K' - - -class AnsiCodes(object): - def __init__(self): - # the subclasses declare class attributes which are numbers. 
- # Upon instantiation we define instance attributes, which are the same - # as the class attributes but wrapped with the ANSI escape sequence - for name in dir(self): - if not name.startswith('_'): - value = getattr(self, name) - setattr(self, name, code_to_chars(value)) - - -class AnsiCursor(object): - def UP(self, n=1): - return CSI + str(n) + 'A' - def DOWN(self, n=1): - return CSI + str(n) + 'B' - def FORWARD(self, n=1): - return CSI + str(n) + 'C' - def BACK(self, n=1): - return CSI + str(n) + 'D' - def POS(self, x=1, y=1): - return CSI + str(y) + ';' + str(x) + 'H' - - -class AnsiFore(AnsiCodes): - BLACK = 30 - RED = 31 - GREEN = 32 - YELLOW = 33 - BLUE = 34 - MAGENTA = 35 - CYAN = 36 - WHITE = 37 - RESET = 39 - - # These are fairly well supported, but not part of the standard. - LIGHTBLACK_EX = 90 - LIGHTRED_EX = 91 - LIGHTGREEN_EX = 92 - LIGHTYELLOW_EX = 93 - LIGHTBLUE_EX = 94 - LIGHTMAGENTA_EX = 95 - LIGHTCYAN_EX = 96 - LIGHTWHITE_EX = 97 - - -class AnsiBack(AnsiCodes): - BLACK = 40 - RED = 41 - GREEN = 42 - YELLOW = 43 - BLUE = 44 - MAGENTA = 45 - CYAN = 46 - WHITE = 47 - RESET = 49 - - # These are fairly well supported, but not part of the standard. - LIGHTBLACK_EX = 100 - LIGHTRED_EX = 101 - LIGHTGREEN_EX = 102 - LIGHTYELLOW_EX = 103 - LIGHTBLUE_EX = 104 - LIGHTMAGENTA_EX = 105 - LIGHTCYAN_EX = 106 - LIGHTWHITE_EX = 107 - - -class AnsiStyle(AnsiCodes): - BRIGHT = 1 - DIM = 2 - NORMAL = 22 - RESET_ALL = 0 - -Fore = AnsiFore() -Back = AnsiBack() -Style = AnsiStyle() -Cursor = AnsiCursor() diff --git a/server/venv/Lib/site-packages/colorama/ansitowin32.py b/server/venv/Lib/site-packages/colorama/ansitowin32.py deleted file mode 100644 index abf209e..0000000 --- a/server/venv/Lib/site-packages/colorama/ansitowin32.py +++ /dev/null @@ -1,277 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import re -import sys -import os - -from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL -from .winterm import enable_vt_processing, WinTerm, WinColor, WinStyle -from .win32 import windll, winapi_test - - -winterm = None -if windll is not None: - winterm = WinTerm() - - -class StreamWrapper(object): - ''' - Wraps a stream (such as stdout), acting as a transparent proxy for all - attribute access apart from method 'write()', which is delegated to our - Converter instance. - ''' - def __init__(self, wrapped, converter): - # double-underscore everything to prevent clashes with names of - # attributes on the wrapped stream object. 
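-        # (Python name-mangles these to _StreamWrapper__wrapped and
-        # _StreamWrapper__convertor, so a wrapped stream that itself has a
-        # 'wrapped' or 'convertor' attribute remains reachable via __getattr__.)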
- self.__wrapped = wrapped - self.__convertor = converter - - def __getattr__(self, name): - return getattr(self.__wrapped, name) - - def __enter__(self, *args, **kwargs): - # special method lookup bypasses __getattr__/__getattribute__, see - # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit - # thus, contextlib magic methods are not proxied via __getattr__ - return self.__wrapped.__enter__(*args, **kwargs) - - def __exit__(self, *args, **kwargs): - return self.__wrapped.__exit__(*args, **kwargs) - - def __setstate__(self, state): - self.__dict__ = state - - def __getstate__(self): - return self.__dict__ - - def write(self, text): - self.__convertor.write(text) - - def isatty(self): - stream = self.__wrapped - if 'PYCHARM_HOSTED' in os.environ: - if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__): - return True - try: - stream_isatty = stream.isatty - except AttributeError: - return False - else: - return stream_isatty() - - @property - def closed(self): - stream = self.__wrapped - try: - return stream.closed - # AttributeError in the case that the stream doesn't support being closed - # ValueError for the case that the stream has already been detached when atexit runs - except (AttributeError, ValueError): - return True - - -class AnsiToWin32(object): - ''' - Implements a 'write()' method which, on Windows, will strip ANSI character - sequences from the text, and if outputting to a tty, will convert them into - win32 function calls. - ''' - ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer - ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command - - def __init__(self, wrapped, convert=None, strip=None, autoreset=False): - # The wrapped stream (normally sys.stdout or sys.stderr) - self.wrapped = wrapped - - # should we reset colors to defaults after every .write() - self.autoreset = autoreset - - # create the proxy wrapping our output stream - self.stream = StreamWrapper(wrapped, self) - - on_windows = os.name == 'nt' - # We test if the WinAPI works, because even if we are on Windows - # we may be using a terminal that doesn't support the WinAPI - # (e.g. Cygwin Terminal). In this case it's up to the terminal - # to support the ANSI codes. - conversion_supported = on_windows and winapi_test() - try: - fd = wrapped.fileno() - except Exception: - fd = -1 - system_has_native_ansi = not on_windows or enable_vt_processing(fd) - have_tty = not self.stream.closed and self.stream.isatty() - need_conversion = conversion_supported and not system_has_native_ansi - - # should we strip ANSI sequences from our output? - if strip is None: - strip = need_conversion or not have_tty - self.strip = strip - - # should we should convert ANSI sequences into win32 calls? - if convert is None: - convert = need_conversion and have_tty - self.convert = convert - - # dict of ansi codes to win32 functions and parameters - self.win32_calls = self.get_win32_calls() - - # are we wrapping stderr? - self.on_stderr = self.wrapped is sys.stderr - - def should_wrap(self): - ''' - True if this class is actually needed. If false, then the output - stream will not be affected, nor will win32 calls be issued, so - wrapping stdout is not actually required. 
This will generally be - False on non-Windows platforms, unless optional functionality like - autoreset has been requested using kwargs to init() - ''' - return self.convert or self.strip or self.autoreset - - def get_win32_calls(self): - if self.convert and winterm: - return { - AnsiStyle.RESET_ALL: (winterm.reset_all, ), - AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), - AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), - AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), - AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), - AnsiFore.RED: (winterm.fore, WinColor.RED), - AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), - AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), - AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), - AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), - AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), - AnsiFore.WHITE: (winterm.fore, WinColor.GREY), - AnsiFore.RESET: (winterm.fore, ), - AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), - AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), - AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), - AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), - AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), - AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), - AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), - AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), - AnsiBack.BLACK: (winterm.back, WinColor.BLACK), - AnsiBack.RED: (winterm.back, WinColor.RED), - AnsiBack.GREEN: (winterm.back, WinColor.GREEN), - AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), - AnsiBack.BLUE: (winterm.back, WinColor.BLUE), - AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), - AnsiBack.CYAN: (winterm.back, WinColor.CYAN), - AnsiBack.WHITE: (winterm.back, WinColor.GREY), - AnsiBack.RESET: (winterm.back, ), - AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), - AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), - AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), - AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), - AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), - AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), - AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), - AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), - } - return dict() - - def write(self, text): - if self.strip or self.convert: - self.write_and_convert(text) - else: - self.wrapped.write(text) - self.wrapped.flush() - if self.autoreset: - self.reset_all() - - - def reset_all(self): - if self.convert: - self.call_win32('m', (0,)) - elif not self.strip and not self.stream.closed: - self.wrapped.write(Style.RESET_ALL) - - - def write_and_convert(self, text): - ''' - Write the given text to our wrapped stream, stripping any ANSI - sequences from the text, and optionally converting them into win32 - calls. 
- ''' - cursor = 0 - text = self.convert_osc(text) - for match in self.ANSI_CSI_RE.finditer(text): - start, end = match.span() - self.write_plain_text(text, cursor, start) - self.convert_ansi(*match.groups()) - cursor = end - self.write_plain_text(text, cursor, len(text)) - - - def write_plain_text(self, text, start, end): - if start < end: - self.wrapped.write(text[start:end]) - self.wrapped.flush() - - - def convert_ansi(self, paramstring, command): - if self.convert: - params = self.extract_params(command, paramstring) - self.call_win32(command, params) - - - def extract_params(self, command, paramstring): - if command in 'Hf': - params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) - while len(params) < 2: - # defaults: - params = params + (1,) - else: - params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) - if len(params) == 0: - # defaults: - if command in 'JKm': - params = (0,) - elif command in 'ABCD': - params = (1,) - - return params - - - def call_win32(self, command, params): - if command == 'm': - for param in params: - if param in self.win32_calls: - func_args = self.win32_calls[param] - func = func_args[0] - args = func_args[1:] - kwargs = dict(on_stderr=self.on_stderr) - func(*args, **kwargs) - elif command in 'J': - winterm.erase_screen(params[0], on_stderr=self.on_stderr) - elif command in 'K': - winterm.erase_line(params[0], on_stderr=self.on_stderr) - elif command in 'Hf': # cursor position - absolute - winterm.set_cursor_position(params, on_stderr=self.on_stderr) - elif command in 'ABCD': # cursor position - relative - n = params[0] - # A - up, B - down, C - forward, D - back - x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] - winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) - - - def convert_osc(self, text): - for match in self.ANSI_OSC_RE.finditer(text): - start, end = match.span() - text = text[:start] + text[end:] - paramstring, command = match.groups() - if command == BEL: - if paramstring.count(";") == 1: - params = paramstring.split(";") - # 0 - change title and icon (we will only change title) - # 1 - change icon (we don't support this) - # 2 - change title - if params[0] in '02': - winterm.set_title(params[1]) - return text - - - def flush(self): - self.wrapped.flush() diff --git a/server/venv/Lib/site-packages/colorama/initialise.py b/server/venv/Lib/site-packages/colorama/initialise.py deleted file mode 100644 index d5fd4b7..0000000 --- a/server/venv/Lib/site-packages/colorama/initialise.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import atexit -import contextlib -import sys - -from .ansitowin32 import AnsiToWin32 - - -def _wipe_internal_state_for_tests(): - global orig_stdout, orig_stderr - orig_stdout = None - orig_stderr = None - - global wrapped_stdout, wrapped_stderr - wrapped_stdout = None - wrapped_stderr = None - - global atexit_done - atexit_done = False - - global fixed_windows_console - fixed_windows_console = False - - try: - # no-op if it wasn't registered - atexit.unregister(reset_all) - except AttributeError: - # python 2: no atexit.unregister. Oh well, we did our best. 
- pass - - -def reset_all(): - if AnsiToWin32 is not None: # Issue #74: objects might become None at exit - AnsiToWin32(orig_stdout).reset_all() - - -def init(autoreset=False, convert=None, strip=None, wrap=True): - - if not wrap and any([autoreset, convert, strip]): - raise ValueError('wrap=False conflicts with any other arg=True') - - global wrapped_stdout, wrapped_stderr - global orig_stdout, orig_stderr - - orig_stdout = sys.stdout - orig_stderr = sys.stderr - - if sys.stdout is None: - wrapped_stdout = None - else: - sys.stdout = wrapped_stdout = \ - wrap_stream(orig_stdout, convert, strip, autoreset, wrap) - if sys.stderr is None: - wrapped_stderr = None - else: - sys.stderr = wrapped_stderr = \ - wrap_stream(orig_stderr, convert, strip, autoreset, wrap) - - global atexit_done - if not atexit_done: - atexit.register(reset_all) - atexit_done = True - - -def deinit(): - if orig_stdout is not None: - sys.stdout = orig_stdout - if orig_stderr is not None: - sys.stderr = orig_stderr - - -def just_fix_windows_console(): - global fixed_windows_console - - if sys.platform != "win32": - return - if fixed_windows_console: - return - if wrapped_stdout is not None or wrapped_stderr is not None: - # Someone already ran init() and it did stuff, so we won't second-guess them - return - - # On newer versions of Windows, AnsiToWin32.__init__ will implicitly enable the - # native ANSI support in the console as a side-effect. We only need to actually - # replace sys.stdout/stderr if we're in the old-style conversion mode. - new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False) - if new_stdout.convert: - sys.stdout = new_stdout - new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False) - if new_stderr.convert: - sys.stderr = new_stderr - - fixed_windows_console = True - -@contextlib.contextmanager -def colorama_text(*args, **kwargs): - init(*args, **kwargs) - try: - yield - finally: - deinit() - - -def reinit(): - if wrapped_stdout is not None: - sys.stdout = wrapped_stdout - if wrapped_stderr is not None: - sys.stderr = wrapped_stderr - - -def wrap_stream(stream, convert, strip, autoreset, wrap): - if wrap: - wrapper = AnsiToWin32(stream, - convert=convert, strip=strip, autoreset=autoreset) - if wrapper.should_wrap(): - stream = wrapper.stream - return stream - - -# Use this for initial setup as well, to reduce code duplication -_wipe_internal_state_for_tests() diff --git a/server/venv/Lib/site-packages/colorama/tests/__init__.py b/server/venv/Lib/site-packages/colorama/tests/__init__.py deleted file mode 100644 index 8c5661e..0000000 --- a/server/venv/Lib/site-packages/colorama/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. diff --git a/server/venv/Lib/site-packages/colorama/tests/ansi_test.py b/server/venv/Lib/site-packages/colorama/tests/ansi_test.py deleted file mode 100644 index 0a20c80..0000000 --- a/server/venv/Lib/site-packages/colorama/tests/ansi_test.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import sys -from unittest import TestCase, main - -from ..ansi import Back, Fore, Style -from ..ansitowin32 import AnsiToWin32 - -stdout_orig = sys.stdout -stderr_orig = sys.stderr - - -class AnsiTest(TestCase): - - def setUp(self): - # sanity check: stdout should be a file or StringIO object. 
- # It will only be AnsiToWin32 if init() has previously wrapped it - self.assertNotEqual(type(sys.stdout), AnsiToWin32) - self.assertNotEqual(type(sys.stderr), AnsiToWin32) - - def tearDown(self): - sys.stdout = stdout_orig - sys.stderr = stderr_orig - - - def testForeAttributes(self): - self.assertEqual(Fore.BLACK, '\033[30m') - self.assertEqual(Fore.RED, '\033[31m') - self.assertEqual(Fore.GREEN, '\033[32m') - self.assertEqual(Fore.YELLOW, '\033[33m') - self.assertEqual(Fore.BLUE, '\033[34m') - self.assertEqual(Fore.MAGENTA, '\033[35m') - self.assertEqual(Fore.CYAN, '\033[36m') - self.assertEqual(Fore.WHITE, '\033[37m') - self.assertEqual(Fore.RESET, '\033[39m') - - # Check the light, extended versions. - self.assertEqual(Fore.LIGHTBLACK_EX, '\033[90m') - self.assertEqual(Fore.LIGHTRED_EX, '\033[91m') - self.assertEqual(Fore.LIGHTGREEN_EX, '\033[92m') - self.assertEqual(Fore.LIGHTYELLOW_EX, '\033[93m') - self.assertEqual(Fore.LIGHTBLUE_EX, '\033[94m') - self.assertEqual(Fore.LIGHTMAGENTA_EX, '\033[95m') - self.assertEqual(Fore.LIGHTCYAN_EX, '\033[96m') - self.assertEqual(Fore.LIGHTWHITE_EX, '\033[97m') - - - def testBackAttributes(self): - self.assertEqual(Back.BLACK, '\033[40m') - self.assertEqual(Back.RED, '\033[41m') - self.assertEqual(Back.GREEN, '\033[42m') - self.assertEqual(Back.YELLOW, '\033[43m') - self.assertEqual(Back.BLUE, '\033[44m') - self.assertEqual(Back.MAGENTA, '\033[45m') - self.assertEqual(Back.CYAN, '\033[46m') - self.assertEqual(Back.WHITE, '\033[47m') - self.assertEqual(Back.RESET, '\033[49m') - - # Check the light, extended versions. - self.assertEqual(Back.LIGHTBLACK_EX, '\033[100m') - self.assertEqual(Back.LIGHTRED_EX, '\033[101m') - self.assertEqual(Back.LIGHTGREEN_EX, '\033[102m') - self.assertEqual(Back.LIGHTYELLOW_EX, '\033[103m') - self.assertEqual(Back.LIGHTBLUE_EX, '\033[104m') - self.assertEqual(Back.LIGHTMAGENTA_EX, '\033[105m') - self.assertEqual(Back.LIGHTCYAN_EX, '\033[106m') - self.assertEqual(Back.LIGHTWHITE_EX, '\033[107m') - - - def testStyleAttributes(self): - self.assertEqual(Style.DIM, '\033[2m') - self.assertEqual(Style.NORMAL, '\033[22m') - self.assertEqual(Style.BRIGHT, '\033[1m') - - -if __name__ == '__main__': - main() diff --git a/server/venv/Lib/site-packages/colorama/tests/ansitowin32_test.py b/server/venv/Lib/site-packages/colorama/tests/ansitowin32_test.py deleted file mode 100644 index 91ca551..0000000 --- a/server/venv/Lib/site-packages/colorama/tests/ansitowin32_test.py +++ /dev/null @@ -1,294 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-from io import StringIO, TextIOWrapper -from unittest import TestCase, main -try: - from contextlib import ExitStack -except ImportError: - # python 2 - from contextlib2 import ExitStack - -try: - from unittest.mock import MagicMock, Mock, patch -except ImportError: - from mock import MagicMock, Mock, patch - -from ..ansitowin32 import AnsiToWin32, StreamWrapper -from ..win32 import ENABLE_VIRTUAL_TERMINAL_PROCESSING -from .utils import osname - - -class StreamWrapperTest(TestCase): - - def testIsAProxy(self): - mockStream = Mock() - wrapper = StreamWrapper(mockStream, None) - self.assertTrue( wrapper.random_attr is mockStream.random_attr ) - - def testDelegatesWrite(self): - mockStream = Mock() - mockConverter = Mock() - wrapper = StreamWrapper(mockStream, mockConverter) - wrapper.write('hello') - self.assertTrue(mockConverter.write.call_args, (('hello',), {})) - - def testDelegatesContext(self): - mockConverter = Mock() - s = StringIO() - with StreamWrapper(s, mockConverter) as fp: - fp.write(u'hello') - self.assertTrue(s.closed) - - def testProxyNoContextManager(self): - mockStream = MagicMock() - mockStream.__enter__.side_effect = AttributeError() - mockConverter = Mock() - with self.assertRaises(AttributeError) as excinfo: - with StreamWrapper(mockStream, mockConverter) as wrapper: - wrapper.write('hello') - - def test_closed_shouldnt_raise_on_closed_stream(self): - stream = StringIO() - stream.close() - wrapper = StreamWrapper(stream, None) - self.assertEqual(wrapper.closed, True) - - def test_closed_shouldnt_raise_on_detached_stream(self): - stream = TextIOWrapper(StringIO()) - stream.detach() - wrapper = StreamWrapper(stream, None) - self.assertEqual(wrapper.closed, True) - -class AnsiToWin32Test(TestCase): - - def testInit(self): - mockStdout = Mock() - auto = Mock() - stream = AnsiToWin32(mockStdout, autoreset=auto) - self.assertEqual(stream.wrapped, mockStdout) - self.assertEqual(stream.autoreset, auto) - - @patch('colorama.ansitowin32.winterm', None) - @patch('colorama.ansitowin32.winapi_test', lambda *_: True) - def testStripIsTrueOnWindows(self): - with osname('nt'): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout) - self.assertTrue(stream.strip) - - def testStripIsFalseOffWindows(self): - with osname('posix'): - mockStdout = Mock(closed=False) - stream = AnsiToWin32(mockStdout) - self.assertFalse(stream.strip) - - def testWriteStripsAnsi(self): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout) - stream.wrapped = Mock() - stream.write_and_convert = Mock() - stream.strip = True - - stream.write('abc') - - self.assertFalse(stream.wrapped.write.called) - self.assertEqual(stream.write_and_convert.call_args, (('abc',), {})) - - def testWriteDoesNotStripAnsi(self): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout) - stream.wrapped = Mock() - stream.write_and_convert = Mock() - stream.strip = False - stream.convert = False - - stream.write('abc') - - self.assertFalse(stream.write_and_convert.called) - self.assertEqual(stream.wrapped.write.call_args, (('abc',), {})) - - def assert_autoresets(self, convert, autoreset=True): - stream = AnsiToWin32(Mock()) - stream.convert = convert - stream.reset_all = Mock() - stream.autoreset = autoreset - stream.winterm = Mock() - - stream.write('abc') - - self.assertEqual(stream.reset_all.called, autoreset) - - def testWriteAutoresets(self): - self.assert_autoresets(convert=True) - self.assert_autoresets(convert=False) - self.assert_autoresets(convert=True, autoreset=False) - self.assert_autoresets(convert=False, 
autoreset=False) - - def testWriteAndConvertWritesPlainText(self): - stream = AnsiToWin32(Mock()) - stream.write_and_convert( 'abc' ) - self.assertEqual( stream.wrapped.write.call_args, (('abc',), {}) ) - - def testWriteAndConvertStripsAllValidAnsi(self): - stream = AnsiToWin32(Mock()) - stream.call_win32 = Mock() - data = [ - 'abc\033[mdef', - 'abc\033[0mdef', - 'abc\033[2mdef', - 'abc\033[02mdef', - 'abc\033[002mdef', - 'abc\033[40mdef', - 'abc\033[040mdef', - 'abc\033[0;1mdef', - 'abc\033[40;50mdef', - 'abc\033[50;30;40mdef', - 'abc\033[Adef', - 'abc\033[0Gdef', - 'abc\033[1;20;128Hdef', - ] - for datum in data: - stream.wrapped.write.reset_mock() - stream.write_and_convert( datum ) - self.assertEqual( - [args[0] for args in stream.wrapped.write.call_args_list], - [ ('abc',), ('def',) ] - ) - - def testWriteAndConvertSkipsEmptySnippets(self): - stream = AnsiToWin32(Mock()) - stream.call_win32 = Mock() - stream.write_and_convert( '\033[40m\033[41m' ) - self.assertFalse( stream.wrapped.write.called ) - - def testWriteAndConvertCallsWin32WithParamsAndCommand(self): - stream = AnsiToWin32(Mock()) - stream.convert = True - stream.call_win32 = Mock() - stream.extract_params = Mock(return_value='params') - data = { - 'abc\033[adef': ('a', 'params'), - 'abc\033[;;bdef': ('b', 'params'), - 'abc\033[0cdef': ('c', 'params'), - 'abc\033[;;0;;Gdef': ('G', 'params'), - 'abc\033[1;20;128Hdef': ('H', 'params'), - } - for datum, expected in data.items(): - stream.call_win32.reset_mock() - stream.write_and_convert( datum ) - self.assertEqual( stream.call_win32.call_args[0], expected ) - - def test_reset_all_shouldnt_raise_on_closed_orig_stdout(self): - stream = StringIO() - converter = AnsiToWin32(stream) - stream.close() - - converter.reset_all() - - def test_wrap_shouldnt_raise_on_closed_orig_stdout(self): - stream = StringIO() - stream.close() - with \ - patch("colorama.ansitowin32.os.name", "nt"), \ - patch("colorama.ansitowin32.winapi_test", lambda: True): - converter = AnsiToWin32(stream) - self.assertTrue(converter.strip) - self.assertFalse(converter.convert) - - def test_wrap_shouldnt_raise_on_missing_closed_attr(self): - with \ - patch("colorama.ansitowin32.os.name", "nt"), \ - patch("colorama.ansitowin32.winapi_test", lambda: True): - converter = AnsiToWin32(object()) - self.assertTrue(converter.strip) - self.assertFalse(converter.convert) - - def testExtractParams(self): - stream = AnsiToWin32(Mock()) - data = { - '': (0,), - ';;': (0,), - '2': (2,), - ';;002;;': (2,), - '0;1': (0, 1), - ';;003;;456;;': (3, 456), - '11;22;33;44;55': (11, 22, 33, 44, 55), - } - for datum, expected in data.items(): - self.assertEqual(stream.extract_params('m', datum), expected) - - def testCallWin32UsesLookup(self): - listener = Mock() - stream = AnsiToWin32(listener) - stream.win32_calls = { - 1: (lambda *_, **__: listener(11),), - 2: (lambda *_, **__: listener(22),), - 3: (lambda *_, **__: listener(33),), - } - stream.call_win32('m', (3, 1, 99, 2)) - self.assertEqual( - [a[0][0] for a in listener.call_args_list], - [33, 11, 22] ) - - def test_osc_codes(self): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout, convert=True) - with patch('colorama.ansitowin32.winterm') as winterm: - data = [ - '\033]0\x07', # missing arguments - '\033]0;foo\x08', # wrong OSC command - '\033]0;colorama_test_title\x07', # should work - '\033]1;colorama_test_title\x07', # wrong set command - '\033]2;colorama_test_title\x07', # should work - '\033]' + ';' * 64 + '\x08', # see issue #247 - ] - for code in data: - 
stream.write(code) - self.assertEqual(winterm.set_title.call_count, 2) - - def test_native_windows_ansi(self): - with ExitStack() as stack: - def p(a, b): - stack.enter_context(patch(a, b, create=True)) - # Pretend to be on Windows - p("colorama.ansitowin32.os.name", "nt") - p("colorama.ansitowin32.winapi_test", lambda: True) - p("colorama.win32.winapi_test", lambda: True) - p("colorama.winterm.win32.windll", "non-None") - p("colorama.winterm.get_osfhandle", lambda _: 1234) - - # Pretend that our mock stream has native ANSI support - p( - "colorama.winterm.win32.GetConsoleMode", - lambda _: ENABLE_VIRTUAL_TERMINAL_PROCESSING, - ) - SetConsoleMode = Mock() - p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode) - - stdout = Mock() - stdout.closed = False - stdout.isatty.return_value = True - stdout.fileno.return_value = 1 - - # Our fake console says it has native vt support, so AnsiToWin32 should - # enable that support and do nothing else. - stream = AnsiToWin32(stdout) - SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING) - self.assertFalse(stream.strip) - self.assertFalse(stream.convert) - self.assertFalse(stream.should_wrap()) - - # Now let's pretend we're on an old Windows console, that doesn't have - # native ANSI support. - p("colorama.winterm.win32.GetConsoleMode", lambda _: 0) - SetConsoleMode = Mock() - p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode) - - stream = AnsiToWin32(stdout) - SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING) - self.assertTrue(stream.strip) - self.assertTrue(stream.convert) - self.assertTrue(stream.should_wrap()) - - -if __name__ == '__main__': - main() diff --git a/server/venv/Lib/site-packages/colorama/tests/initialise_test.py b/server/venv/Lib/site-packages/colorama/tests/initialise_test.py deleted file mode 100644 index 89f9b07..0000000 --- a/server/venv/Lib/site-packages/colorama/tests/initialise_test.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-import sys -from unittest import TestCase, main, skipUnless - -try: - from unittest.mock import patch, Mock -except ImportError: - from mock import patch, Mock - -from ..ansitowin32 import StreamWrapper -from ..initialise import init, just_fix_windows_console, _wipe_internal_state_for_tests -from .utils import osname, replace_by - -orig_stdout = sys.stdout -orig_stderr = sys.stderr - - -class InitTest(TestCase): - - @skipUnless(sys.stdout.isatty(), "sys.stdout is not a tty") - def setUp(self): - # sanity check - self.assertNotWrapped() - - def tearDown(self): - _wipe_internal_state_for_tests() - sys.stdout = orig_stdout - sys.stderr = orig_stderr - - def assertWrapped(self): - self.assertIsNot(sys.stdout, orig_stdout, 'stdout should be wrapped') - self.assertIsNot(sys.stderr, orig_stderr, 'stderr should be wrapped') - self.assertTrue(isinstance(sys.stdout, StreamWrapper), - 'bad stdout wrapper') - self.assertTrue(isinstance(sys.stderr, StreamWrapper), - 'bad stderr wrapper') - - def assertNotWrapped(self): - self.assertIs(sys.stdout, orig_stdout, 'stdout should not be wrapped') - self.assertIs(sys.stderr, orig_stderr, 'stderr should not be wrapped') - - @patch('colorama.initialise.reset_all') - @patch('colorama.ansitowin32.winapi_test', lambda *_: True) - @patch('colorama.ansitowin32.enable_vt_processing', lambda *_: False) - def testInitWrapsOnWindows(self, _): - with osname("nt"): - init() - self.assertWrapped() - - @patch('colorama.initialise.reset_all') - @patch('colorama.ansitowin32.winapi_test', lambda *_: False) - def testInitDoesntWrapOnEmulatedWindows(self, _): - with osname("nt"): - init() - self.assertNotWrapped() - - def testInitDoesntWrapOnNonWindows(self): - with osname("posix"): - init() - self.assertNotWrapped() - - def testInitDoesntWrapIfNone(self): - with replace_by(None): - init() - # We can't use assertNotWrapped here because replace_by(None) - # changes stdout/stderr already. 
- self.assertIsNone(sys.stdout) - self.assertIsNone(sys.stderr) - - def testInitAutoresetOnWrapsOnAllPlatforms(self): - with osname("posix"): - init(autoreset=True) - self.assertWrapped() - - def testInitWrapOffDoesntWrapOnWindows(self): - with osname("nt"): - init(wrap=False) - self.assertNotWrapped() - - def testInitWrapOffIncompatibleWithAutoresetOn(self): - self.assertRaises(ValueError, lambda: init(autoreset=True, wrap=False)) - - @patch('colorama.win32.SetConsoleTextAttribute') - @patch('colorama.initialise.AnsiToWin32') - def testAutoResetPassedOn(self, mockATW32, _): - with osname("nt"): - init(autoreset=True) - self.assertEqual(len(mockATW32.call_args_list), 2) - self.assertEqual(mockATW32.call_args_list[1][1]['autoreset'], True) - self.assertEqual(mockATW32.call_args_list[0][1]['autoreset'], True) - - @patch('colorama.initialise.AnsiToWin32') - def testAutoResetChangeable(self, mockATW32): - with osname("nt"): - init() - - init(autoreset=True) - self.assertEqual(len(mockATW32.call_args_list), 4) - self.assertEqual(mockATW32.call_args_list[2][1]['autoreset'], True) - self.assertEqual(mockATW32.call_args_list[3][1]['autoreset'], True) - - init() - self.assertEqual(len(mockATW32.call_args_list), 6) - self.assertEqual( - mockATW32.call_args_list[4][1]['autoreset'], False) - self.assertEqual( - mockATW32.call_args_list[5][1]['autoreset'], False) - - - @patch('colorama.initialise.atexit.register') - def testAtexitRegisteredOnlyOnce(self, mockRegister): - init() - self.assertTrue(mockRegister.called) - mockRegister.reset_mock() - init() - self.assertFalse(mockRegister.called) - - -class JustFixWindowsConsoleTest(TestCase): - def _reset(self): - _wipe_internal_state_for_tests() - sys.stdout = orig_stdout - sys.stderr = orig_stderr - - def tearDown(self): - self._reset() - - @patch("colorama.ansitowin32.winapi_test", lambda: True) - def testJustFixWindowsConsole(self): - if sys.platform != "win32": - # just_fix_windows_console should be a no-op - just_fix_windows_console() - self.assertIs(sys.stdout, orig_stdout) - self.assertIs(sys.stderr, orig_stderr) - else: - def fake_std(): - # Emulate stdout=not a tty, stderr=tty - # to check that we handle both cases correctly - stdout = Mock() - stdout.closed = False - stdout.isatty.return_value = False - stdout.fileno.return_value = 1 - sys.stdout = stdout - - stderr = Mock() - stderr.closed = False - stderr.isatty.return_value = True - stderr.fileno.return_value = 2 - sys.stderr = stderr - - for native_ansi in [False, True]: - with patch( - 'colorama.ansitowin32.enable_vt_processing', - lambda *_: native_ansi - ): - self._reset() - fake_std() - - # Regular single-call test - prev_stdout = sys.stdout - prev_stderr = sys.stderr - just_fix_windows_console() - self.assertIs(sys.stdout, prev_stdout) - if native_ansi: - self.assertIs(sys.stderr, prev_stderr) - else: - self.assertIsNot(sys.stderr, prev_stderr) - - # second call without resetting is always a no-op - prev_stdout = sys.stdout - prev_stderr = sys.stderr - just_fix_windows_console() - self.assertIs(sys.stdout, prev_stdout) - self.assertIs(sys.stderr, prev_stderr) - - self._reset() - fake_std() - - # If init() runs first, just_fix_windows_console should be a no-op - init() - prev_stdout = sys.stdout - prev_stderr = sys.stderr - just_fix_windows_console() - self.assertIs(prev_stdout, sys.stdout) - self.assertIs(prev_stderr, sys.stderr) - - -if __name__ == '__main__': - main() diff --git a/server/venv/Lib/site-packages/colorama/tests/isatty_test.py 
b/server/venv/Lib/site-packages/colorama/tests/isatty_test.py deleted file mode 100644 index 0f84e4b..0000000 --- a/server/venv/Lib/site-packages/colorama/tests/isatty_test.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import sys -from unittest import TestCase, main - -from ..ansitowin32 import StreamWrapper, AnsiToWin32 -from .utils import pycharm, replace_by, replace_original_by, StreamTTY, StreamNonTTY - - -def is_a_tty(stream): - return StreamWrapper(stream, None).isatty() - -class IsattyTest(TestCase): - - def test_TTY(self): - tty = StreamTTY() - self.assertTrue(is_a_tty(tty)) - with pycharm(): - self.assertTrue(is_a_tty(tty)) - - def test_nonTTY(self): - non_tty = StreamNonTTY() - self.assertFalse(is_a_tty(non_tty)) - with pycharm(): - self.assertFalse(is_a_tty(non_tty)) - - def test_withPycharm(self): - with pycharm(): - self.assertTrue(is_a_tty(sys.stderr)) - self.assertTrue(is_a_tty(sys.stdout)) - - def test_withPycharmTTYOverride(self): - tty = StreamTTY() - with pycharm(), replace_by(tty): - self.assertTrue(is_a_tty(tty)) - - def test_withPycharmNonTTYOverride(self): - non_tty = StreamNonTTY() - with pycharm(), replace_by(non_tty): - self.assertFalse(is_a_tty(non_tty)) - - def test_withPycharmNoneOverride(self): - with pycharm(): - with replace_by(None), replace_original_by(None): - self.assertFalse(is_a_tty(None)) - self.assertFalse(is_a_tty(StreamNonTTY())) - self.assertTrue(is_a_tty(StreamTTY())) - - def test_withPycharmStreamWrapped(self): - with pycharm(): - self.assertTrue(AnsiToWin32(StreamTTY()).stream.isatty()) - self.assertFalse(AnsiToWin32(StreamNonTTY()).stream.isatty()) - self.assertTrue(AnsiToWin32(sys.stdout).stream.isatty()) - self.assertTrue(AnsiToWin32(sys.stderr).stream.isatty()) - - -if __name__ == '__main__': - main() diff --git a/server/venv/Lib/site-packages/colorama/tests/utils.py b/server/venv/Lib/site-packages/colorama/tests/utils.py deleted file mode 100644 index 472fafb..0000000 --- a/server/venv/Lib/site-packages/colorama/tests/utils.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -from contextlib import contextmanager -from io import StringIO -import sys -import os - - -class StreamTTY(StringIO): - def isatty(self): - return True - -class StreamNonTTY(StringIO): - def isatty(self): - return False - -@contextmanager -def osname(name): - orig = os.name - os.name = name - yield - os.name = orig - -@contextmanager -def replace_by(stream): - orig_stdout = sys.stdout - orig_stderr = sys.stderr - sys.stdout = stream - sys.stderr = stream - yield - sys.stdout = orig_stdout - sys.stderr = orig_stderr - -@contextmanager -def replace_original_by(stream): - orig_stdout = sys.__stdout__ - orig_stderr = sys.__stderr__ - sys.__stdout__ = stream - sys.__stderr__ = stream - yield - sys.__stdout__ = orig_stdout - sys.__stderr__ = orig_stderr - -@contextmanager -def pycharm(): - os.environ["PYCHARM_HOSTED"] = "1" - non_tty = StreamNonTTY() - with replace_by(non_tty), replace_original_by(non_tty): - yield - del os.environ["PYCHARM_HOSTED"] diff --git a/server/venv/Lib/site-packages/colorama/tests/winterm_test.py b/server/venv/Lib/site-packages/colorama/tests/winterm_test.py deleted file mode 100644 index d0955f9..0000000 --- a/server/venv/Lib/site-packages/colorama/tests/winterm_test.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-import sys -from unittest import TestCase, main, skipUnless - -try: - from unittest.mock import Mock, patch -except ImportError: - from mock import Mock, patch - -from ..winterm import WinColor, WinStyle, WinTerm - - -class WinTermTest(TestCase): - - @patch('colorama.winterm.win32') - def testInit(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 7 + 6 * 16 + 8 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - self.assertEqual(term._fore, 7) - self.assertEqual(term._back, 6) - self.assertEqual(term._style, 8) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testGetAttrs(self): - term = WinTerm() - - term._fore = 0 - term._back = 0 - term._style = 0 - self.assertEqual(term.get_attrs(), 0) - - term._fore = WinColor.YELLOW - self.assertEqual(term.get_attrs(), WinColor.YELLOW) - - term._back = WinColor.MAGENTA - self.assertEqual( - term.get_attrs(), - WinColor.YELLOW + WinColor.MAGENTA * 16) - - term._style = WinStyle.BRIGHT - self.assertEqual( - term.get_attrs(), - WinColor.YELLOW + WinColor.MAGENTA * 16 + WinStyle.BRIGHT) - - @patch('colorama.winterm.win32') - def testResetAll(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 1 + 2 * 16 + 8 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - - term.set_console = Mock() - term._fore = -1 - term._back = -1 - term._style = -1 - - term.reset_all() - - self.assertEqual(term._fore, 1) - self.assertEqual(term._back, 2) - self.assertEqual(term._style, 8) - self.assertEqual(term.set_console.called, True) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testFore(self): - term = WinTerm() - term.set_console = Mock() - term._fore = 0 - - term.fore(5) - - self.assertEqual(term._fore, 5) - self.assertEqual(term.set_console.called, True) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testBack(self): - term = WinTerm() - term.set_console = Mock() - term._back = 0 - - term.back(5) - - self.assertEqual(term._back, 5) - self.assertEqual(term.set_console.called, True) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testStyle(self): - term = WinTerm() - term.set_console = Mock() - term._style = 0 - - term.style(22) - - self.assertEqual(term._style, 22) - self.assertEqual(term.set_console.called, True) - - @patch('colorama.winterm.win32') - def testSetConsole(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 0 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - term.windll = Mock() - - term.set_console() - - self.assertEqual( - mockWin32.SetConsoleTextAttribute.call_args, - ((mockWin32.STDOUT, term.get_attrs()), {}) - ) - - @patch('colorama.winterm.win32') - def testSetConsoleOnStderr(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 0 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - term.windll = Mock() - - term.set_console(on_stderr=True) - - self.assertEqual( - mockWin32.SetConsoleTextAttribute.call_args, - ((mockWin32.STDERR, term.get_attrs()), {}) - ) - - -if __name__ == '__main__': - main() diff --git a/server/venv/Lib/site-packages/colorama/win32.py b/server/venv/Lib/site-packages/colorama/win32.py deleted file mode 100644 index 841b0e2..0000000 --- a/server/venv/Lib/site-packages/colorama/win32.py +++ /dev/null @@ -1,180 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
- -# from winbase.h -STDOUT = -11 -STDERR = -12 - -ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004 - -try: - import ctypes - from ctypes import LibraryLoader - windll = LibraryLoader(ctypes.WinDLL) - from ctypes import wintypes -except (AttributeError, ImportError): - windll = None - SetConsoleTextAttribute = lambda *_: None - winapi_test = lambda *_: None -else: - from ctypes import byref, Structure, c_char, POINTER - - COORD = wintypes._COORD - - class CONSOLE_SCREEN_BUFFER_INFO(Structure): - """struct in wincon.h.""" - _fields_ = [ - ("dwSize", COORD), - ("dwCursorPosition", COORD), - ("wAttributes", wintypes.WORD), - ("srWindow", wintypes.SMALL_RECT), - ("dwMaximumWindowSize", COORD), - ] - def __str__(self): - return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( - self.dwSize.Y, self.dwSize.X - , self.dwCursorPosition.Y, self.dwCursorPosition.X - , self.wAttributes - , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right - , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X - ) - - _GetStdHandle = windll.kernel32.GetStdHandle - _GetStdHandle.argtypes = [ - wintypes.DWORD, - ] - _GetStdHandle.restype = wintypes.HANDLE - - _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo - _GetConsoleScreenBufferInfo.argtypes = [ - wintypes.HANDLE, - POINTER(CONSOLE_SCREEN_BUFFER_INFO), - ] - _GetConsoleScreenBufferInfo.restype = wintypes.BOOL - - _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute - _SetConsoleTextAttribute.argtypes = [ - wintypes.HANDLE, - wintypes.WORD, - ] - _SetConsoleTextAttribute.restype = wintypes.BOOL - - _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition - _SetConsoleCursorPosition.argtypes = [ - wintypes.HANDLE, - COORD, - ] - _SetConsoleCursorPosition.restype = wintypes.BOOL - - _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA - _FillConsoleOutputCharacterA.argtypes = [ - wintypes.HANDLE, - c_char, - wintypes.DWORD, - COORD, - POINTER(wintypes.DWORD), - ] - _FillConsoleOutputCharacterA.restype = wintypes.BOOL - - _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute - _FillConsoleOutputAttribute.argtypes = [ - wintypes.HANDLE, - wintypes.WORD, - wintypes.DWORD, - COORD, - POINTER(wintypes.DWORD), - ] - _FillConsoleOutputAttribute.restype = wintypes.BOOL - - _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW - _SetConsoleTitleW.argtypes = [ - wintypes.LPCWSTR - ] - _SetConsoleTitleW.restype = wintypes.BOOL - - _GetConsoleMode = windll.kernel32.GetConsoleMode - _GetConsoleMode.argtypes = [ - wintypes.HANDLE, - POINTER(wintypes.DWORD) - ] - _GetConsoleMode.restype = wintypes.BOOL - - _SetConsoleMode = windll.kernel32.SetConsoleMode - _SetConsoleMode.argtypes = [ - wintypes.HANDLE, - wintypes.DWORD - ] - _SetConsoleMode.restype = wintypes.BOOL - - def _winapi_test(handle): - csbi = CONSOLE_SCREEN_BUFFER_INFO() - success = _GetConsoleScreenBufferInfo( - handle, byref(csbi)) - return bool(success) - - def winapi_test(): - return any(_winapi_test(h) for h in - (_GetStdHandle(STDOUT), _GetStdHandle(STDERR))) - - def GetConsoleScreenBufferInfo(stream_id=STDOUT): - handle = _GetStdHandle(stream_id) - csbi = CONSOLE_SCREEN_BUFFER_INFO() - success = _GetConsoleScreenBufferInfo( - handle, byref(csbi)) - return csbi - - def SetConsoleTextAttribute(stream_id, attrs): - handle = _GetStdHandle(stream_id) - return _SetConsoleTextAttribute(handle, attrs) - - def SetConsoleCursorPosition(stream_id, position, adjust=True): - position = COORD(*position) - # If the 
position is out of range, do nothing. - if position.Y <= 0 or position.X <= 0: - return - # Adjust for Windows' SetConsoleCursorPosition: - # 1. being 0-based, while ANSI is 1-based. - # 2. expecting (x,y), while ANSI uses (y,x). - adjusted_position = COORD(position.Y - 1, position.X - 1) - if adjust: - # Adjust for viewport's scroll position - sr = GetConsoleScreenBufferInfo(STDOUT).srWindow - adjusted_position.Y += sr.Top - adjusted_position.X += sr.Left - # Resume normal processing - handle = _GetStdHandle(stream_id) - return _SetConsoleCursorPosition(handle, adjusted_position) - - def FillConsoleOutputCharacter(stream_id, char, length, start): - handle = _GetStdHandle(stream_id) - char = c_char(char.encode()) - length = wintypes.DWORD(length) - num_written = wintypes.DWORD(0) - # Note that this is hard-coded for ANSI (vs wide) bytes. - success = _FillConsoleOutputCharacterA( - handle, char, length, start, byref(num_written)) - return num_written.value - - def FillConsoleOutputAttribute(stream_id, attr, length, start): - ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' - handle = _GetStdHandle(stream_id) - attribute = wintypes.WORD(attr) - length = wintypes.DWORD(length) - num_written = wintypes.DWORD(0) - # Note that this is hard-coded for ANSI (vs wide) bytes. - return _FillConsoleOutputAttribute( - handle, attribute, length, start, byref(num_written)) - - def SetConsoleTitle(title): - return _SetConsoleTitleW(title) - - def GetConsoleMode(handle): - mode = wintypes.DWORD() - success = _GetConsoleMode(handle, byref(mode)) - if not success: - raise ctypes.WinError() - return mode.value - - def SetConsoleMode(handle, mode): - success = _SetConsoleMode(handle, mode) - if not success: - raise ctypes.WinError() diff --git a/server/venv/Lib/site-packages/colorama/winterm.py b/server/venv/Lib/site-packages/colorama/winterm.py deleted file mode 100644 index aad867e..0000000 --- a/server/venv/Lib/site-packages/colorama/winterm.py +++ /dev/null @@ -1,195 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -try: - from msvcrt import get_osfhandle -except ImportError: - def get_osfhandle(_): - raise OSError("This isn't windows!") - - -from . import win32 - -# from wincon.h -class WinColor(object): - BLACK = 0 - BLUE = 1 - GREEN = 2 - CYAN = 3 - RED = 4 - MAGENTA = 5 - YELLOW = 6 - GREY = 7 - -# from wincon.h -class WinStyle(object): - NORMAL = 0x00 # dim text, dim background - BRIGHT = 0x08 # bright text, dim background - BRIGHT_BACKGROUND = 0x80 # dim text, bright background - -class WinTerm(object): - - def __init__(self): - self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes - self.set_attrs(self._default) - self._default_fore = self._fore - self._default_back = self._back - self._default_style = self._style - # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. - # So that LIGHT_EX colors and BRIGHT style do not clobber each other, - # we track them separately, since LIGHT_EX is overwritten by Fore/Back - # and BRIGHT is overwritten by Style codes. 
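-        # For example, Fore.LIGHTRED_EX sets the BRIGHT bit via _light, so a
-        # later Style.NORMAL (which only rewrites _style) leaves the light
-        # foreground intact until the next Fore code clears or resets it.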
- self._light = 0 - - def get_attrs(self): - return self._fore + self._back * 16 + (self._style | self._light) - - def set_attrs(self, value): - self._fore = value & 7 - self._back = (value >> 4) & 7 - self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) - - def reset_all(self, on_stderr=None): - self.set_attrs(self._default) - self.set_console(attrs=self._default) - self._light = 0 - - def fore(self, fore=None, light=False, on_stderr=False): - if fore is None: - fore = self._default_fore - self._fore = fore - # Emulate LIGHT_EX with BRIGHT Style - if light: - self._light |= WinStyle.BRIGHT - else: - self._light &= ~WinStyle.BRIGHT - self.set_console(on_stderr=on_stderr) - - def back(self, back=None, light=False, on_stderr=False): - if back is None: - back = self._default_back - self._back = back - # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style - if light: - self._light |= WinStyle.BRIGHT_BACKGROUND - else: - self._light &= ~WinStyle.BRIGHT_BACKGROUND - self.set_console(on_stderr=on_stderr) - - def style(self, style=None, on_stderr=False): - if style is None: - style = self._default_style - self._style = style - self.set_console(on_stderr=on_stderr) - - def set_console(self, attrs=None, on_stderr=False): - if attrs is None: - attrs = self.get_attrs() - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - win32.SetConsoleTextAttribute(handle, attrs) - - def get_position(self, handle): - position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition - # Because Windows coordinates are 0-based, - # and win32.SetConsoleCursorPosition expects 1-based. - position.X += 1 - position.Y += 1 - return position - - def set_cursor_position(self, position=None, on_stderr=False): - if position is None: - # I'm not currently tracking the position, so there is no default. - # position = self.get_position() - return - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - win32.SetConsoleCursorPosition(handle, position) - - def cursor_adjust(self, x, y, on_stderr=False): - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - position = self.get_position(handle) - adjusted_position = (position.Y + y, position.X + x) - win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) - - def erase_screen(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the screen. - # 1 should clear from the cursor to the beginning of the screen. 
- # 2 should clear the entire screen, and move cursor to (1,1) - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - csbi = win32.GetConsoleScreenBufferInfo(handle) - # get the number of character cells in the current buffer - cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y - # get number of character cells before current cursor position - cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X - if mode == 0: - from_coord = csbi.dwCursorPosition - cells_to_erase = cells_in_screen - cells_before_cursor - elif mode == 1: - from_coord = win32.COORD(0, 0) - cells_to_erase = cells_before_cursor - elif mode == 2: - from_coord = win32.COORD(0, 0) - cells_to_erase = cells_in_screen - else: - # invalid mode - return - # fill the entire screen with blanks - win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) - # now set the buffer's attributes accordingly - win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) - if mode == 2: - # put the cursor where needed - win32.SetConsoleCursorPosition(handle, (1, 1)) - - def erase_line(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the line. - # 1 should clear from the cursor to the beginning of the line. - # 2 should clear the entire line. - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - csbi = win32.GetConsoleScreenBufferInfo(handle) - if mode == 0: - from_coord = csbi.dwCursorPosition - cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X - elif mode == 1: - from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) - cells_to_erase = csbi.dwCursorPosition.X - elif mode == 2: - from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) - cells_to_erase = csbi.dwSize.X - else: - # invalid mode - return - # fill the entire screen with blanks - win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) - # now set the buffer's attributes accordingly - win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) - - def set_title(self, title): - win32.SetConsoleTitle(title) - - -def enable_vt_processing(fd): - if win32.windll is None or not win32.winapi_test(): - return False - - try: - handle = get_osfhandle(fd) - mode = win32.GetConsoleMode(handle) - win32.SetConsoleMode( - handle, - mode | win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING, - ) - - mode = win32.GetConsoleMode(handle) - if mode & win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING: - return True - # Can get TypeError in testsuite where 'fd' is a Mock() - except (OSError, TypeError): - return False diff --git a/server/venv/Lib/site-packages/distutils-precedence.pth b/server/venv/Lib/site-packages/distutils-precedence.pth deleted file mode 100644 index 7f009fe..0000000 --- a/server/venv/Lib/site-packages/distutils-precedence.pth +++ /dev/null @@ -1 +0,0 @@ -import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/server/venv/Lib/site-packages/dns/__init__.py b/server/venv/Lib/site-packages/dns/__init__.py deleted file mode 100644 index a4249b9..0000000 --- a/server/venv/Lib/site-packages/dns/__init__.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""dnspython DNS toolkit""" - -__all__ = [ - "asyncbackend", - "asyncquery", - "asyncresolver", - "dnssec", - "dnssecalgs", - "dnssectypes", - "e164", - "edns", - "entropy", - "exception", - "flags", - "immutable", - "inet", - "ipv4", - "ipv6", - "message", - "name", - "namedict", - "node", - "opcode", - "query", - "quic", - "rcode", - "rdata", - "rdataclass", - "rdataset", - "rdatatype", - "renderer", - "resolver", - "reversename", - "rrset", - "serial", - "set", - "tokenizer", - "transaction", - "tsig", - "tsigkeyring", - "ttl", - "rdtypes", - "update", - "version", - "versioned", - "wire", - "xfr", - "zone", - "zonetypes", - "zonefile", -] - -from dns.version import version as __version__ # noqa diff --git a/server/venv/Lib/site-packages/dns/_asyncbackend.py b/server/venv/Lib/site-packages/dns/_asyncbackend.py deleted file mode 100644 index 49f14fe..0000000 --- a/server/venv/Lib/site-packages/dns/_asyncbackend.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# This is a nullcontext for both sync and async. 3.7 has a nullcontext, -# but it is only for sync use. - - -class NullContext: - def __init__(self, enter_result=None): - self.enter_result = enter_result - - def __enter__(self): - return self.enter_result - - def __exit__(self, exc_type, exc_value, traceback): - pass - - async def __aenter__(self): - return self.enter_result - - async def __aexit__(self, exc_type, exc_value, traceback): - pass - - -# These are declared here so backends can import them without creating -# circular dependencies with dns.asyncbackend. 
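# A small sketch of why NullContext is useful: the same code path can wrap
# either a caller-provided socket or a freshly created one, in both sync and
# async code (assumes dnspython is installed so the import resolves):

import asyncio

from dns._asyncbackend import NullContext

async def demo():
    provided = object()  # stands in for a caller-supplied socket
    async with NullContext(provided) as sock:
        assert sock is provided  # enter/exit hand the object back untouched

def sync_demo():
    with NullContext(42) as value:
        assert value == 42  # the same class also works synchronously

asyncio.run(demo())
sync_demo()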
- - -class Socket: # pragma: no cover - async def close(self): - pass - - async def getpeername(self): - raise NotImplementedError - - async def getsockname(self): - raise NotImplementedError - - async def getpeercert(self, timeout): - raise NotImplementedError - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_value, traceback): - await self.close() - - -class DatagramSocket(Socket): # pragma: no cover - def __init__(self, family: int): - self.family = family - - async def sendto(self, what, destination, timeout): - raise NotImplementedError - - async def recvfrom(self, size, timeout): - raise NotImplementedError - - -class StreamSocket(Socket): # pragma: no cover - async def sendall(self, what, timeout): - raise NotImplementedError - - async def recv(self, size, timeout): - raise NotImplementedError - - -class NullTransport: - async def connect_tcp(self, host, port, timeout, local_address): - raise NotImplementedError - - -class Backend: # pragma: no cover - def name(self): - return "unknown" - - async def make_socket( - self, - af, - socktype, - proto=0, - source=None, - destination=None, - timeout=None, - ssl_context=None, - server_hostname=None, - ): - raise NotImplementedError - - def datagram_connection_required(self): - return False - - async def sleep(self, interval): - raise NotImplementedError - - def get_transport_class(self): - raise NotImplementedError - - async def wait_for(self, awaitable, timeout): - raise NotImplementedError diff --git a/server/venv/Lib/site-packages/dns/_asyncio_backend.py b/server/venv/Lib/site-packages/dns/_asyncio_backend.py deleted file mode 100644 index 2631228..0000000 --- a/server/venv/Lib/site-packages/dns/_asyncio_backend.py +++ /dev/null @@ -1,275 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -"""asyncio library query support""" - -import asyncio -import socket -import sys - -import dns._asyncbackend -import dns.exception - -_is_win32 = sys.platform == "win32" - - -def _get_running_loop(): - try: - return asyncio.get_running_loop() - except AttributeError: # pragma: no cover - return asyncio.get_event_loop() - - -class _DatagramProtocol: - def __init__(self): - self.transport = None - self.recvfrom = None - - def connection_made(self, transport): - self.transport = transport - - def datagram_received(self, data, addr): - if self.recvfrom and not self.recvfrom.done(): - self.recvfrom.set_result((data, addr)) - - def error_received(self, exc): # pragma: no cover - if self.recvfrom and not self.recvfrom.done(): - self.recvfrom.set_exception(exc) - - def connection_lost(self, exc): - if self.recvfrom and not self.recvfrom.done(): - if exc is None: - # EOF we triggered. Is there a better way to do this? 
- try: - raise EOFError - except EOFError as e: - self.recvfrom.set_exception(e) - else: - self.recvfrom.set_exception(exc) - - def close(self): - self.transport.close() - - -async def _maybe_wait_for(awaitable, timeout): - if timeout is not None: - try: - return await asyncio.wait_for(awaitable, timeout) - except asyncio.TimeoutError: - raise dns.exception.Timeout(timeout=timeout) - else: - return await awaitable - - -class DatagramSocket(dns._asyncbackend.DatagramSocket): - def __init__(self, family, transport, protocol): - super().__init__(family) - self.transport = transport - self.protocol = protocol - - async def sendto(self, what, destination, timeout): # pragma: no cover - # no timeout for asyncio sendto - self.transport.sendto(what, destination) - return len(what) - - async def recvfrom(self, size, timeout): - # ignore size as there's no way I know to tell protocol about it - done = _get_running_loop().create_future() - try: - assert self.protocol.recvfrom is None - self.protocol.recvfrom = done - await _maybe_wait_for(done, timeout) - return done.result() - finally: - self.protocol.recvfrom = None - - async def close(self): - self.protocol.close() - - async def getpeername(self): - return self.transport.get_extra_info("peername") - - async def getsockname(self): - return self.transport.get_extra_info("sockname") - - async def getpeercert(self, timeout): - raise NotImplementedError - - -class StreamSocket(dns._asyncbackend.StreamSocket): - def __init__(self, af, reader, writer): - self.family = af - self.reader = reader - self.writer = writer - - async def sendall(self, what, timeout): - self.writer.write(what) - return await _maybe_wait_for(self.writer.drain(), timeout) - - async def recv(self, size, timeout): - return await _maybe_wait_for(self.reader.read(size), timeout) - - async def close(self): - self.writer.close() - - async def getpeername(self): - return self.writer.get_extra_info("peername") - - async def getsockname(self): - return self.writer.get_extra_info("sockname") - - async def getpeercert(self, timeout): - return self.writer.get_extra_info("peercert") - - -try: - import anyio - import httpcore - import httpcore._backends.anyio - import httpx - - _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend - _CoreAnyIOStream = httpcore._backends.anyio.AnyIOStream - - from dns.query import _compute_times, _expiration_for_this_attempt, _remaining - - class _NetworkBackend(_CoreAsyncNetworkBackend): - def __init__(self, resolver, local_port, bootstrap_address, family): - super().__init__() - self._local_port = local_port - self._resolver = resolver - self._bootstrap_address = bootstrap_address - self._family = family - if local_port != 0: - raise NotImplementedError( - "the asyncio transport for HTTPX cannot set the local port" - ) - - async def connect_tcp( - self, host, port, timeout, local_address, socket_options=None - ): # pylint: disable=signature-differs - addresses = [] - _, expiration = _compute_times(timeout) - if dns.inet.is_address(host): - addresses.append(host) - elif self._bootstrap_address is not None: - addresses.append(self._bootstrap_address) - else: - timeout = _remaining(expiration) - family = self._family - if local_address: - family = dns.inet.af_for_address(local_address) - answers = await self._resolver.resolve_name( - host, family=family, lifetime=timeout - ) - addresses = answers.addresses() - for address in addresses: - try: - attempt_expiration = _expiration_for_this_attempt(2.0, expiration) - timeout = _remaining(attempt_expiration) - with 
anyio.fail_after(timeout): - stream = await anyio.connect_tcp( - remote_host=address, - remote_port=port, - local_host=local_address, - ) - return _CoreAnyIOStream(stream) - except Exception: - pass - raise httpcore.ConnectError - - async def connect_unix_socket( - self, path, timeout, socket_options=None - ): # pylint: disable=signature-differs - raise NotImplementedError - - async def sleep(self, seconds): # pylint: disable=signature-differs - await anyio.sleep(seconds) - - class _HTTPTransport(httpx.AsyncHTTPTransport): - def __init__( - self, - *args, - local_port=0, - bootstrap_address=None, - resolver=None, - family=socket.AF_UNSPEC, - **kwargs, - ): - if resolver is None: - # pylint: disable=import-outside-toplevel,redefined-outer-name - import dns.asyncresolver - - resolver = dns.asyncresolver.Resolver() - super().__init__(*args, **kwargs) - self._pool._network_backend = _NetworkBackend( - resolver, local_port, bootstrap_address, family - ) - -except ImportError: - _HTTPTransport = dns._asyncbackend.NullTransport # type: ignore - - -class Backend(dns._asyncbackend.Backend): - def name(self): - return "asyncio" - - async def make_socket( - self, - af, - socktype, - proto=0, - source=None, - destination=None, - timeout=None, - ssl_context=None, - server_hostname=None, - ): - if destination is None and socktype == socket.SOCK_DGRAM and _is_win32: - raise NotImplementedError( - "destinationless datagram sockets " - "are not supported by asyncio " - "on Windows" - ) - loop = _get_running_loop() - if socktype == socket.SOCK_DGRAM: - transport, protocol = await loop.create_datagram_endpoint( - _DatagramProtocol, - source, - family=af, - proto=proto, - remote_addr=destination, - ) - return DatagramSocket(af, transport, protocol) - elif socktype == socket.SOCK_STREAM: - if destination is None: - # This shouldn't happen, but we check to make code analysis software - # happier. - raise ValueError("destination required for stream sockets") - (r, w) = await _maybe_wait_for( - asyncio.open_connection( - destination[0], - destination[1], - ssl=ssl_context, - family=af, - proto=proto, - local_addr=source, - server_hostname=server_hostname, - ), - timeout, - ) - return StreamSocket(af, r, w) - raise NotImplementedError( - "unsupported socket " + f"type {socktype}" - ) # pragma: no cover - - async def sleep(self, interval): - await asyncio.sleep(interval) - - def datagram_connection_required(self): - return _is_win32 - - def get_transport_class(self): - return _HTTPTransport - - async def wait_for(self, awaitable, timeout): - return await _maybe_wait_for(awaitable, timeout) diff --git a/server/venv/Lib/site-packages/dns/_ddr.py b/server/venv/Lib/site-packages/dns/_ddr.py deleted file mode 100644 index bf5c11e..0000000 --- a/server/venv/Lib/site-packages/dns/_ddr.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license -# -# Support for Discovery of Designated Resolvers - -import socket -import time -from urllib.parse import urlparse - -import dns.asyncbackend -import dns.inet -import dns.name -import dns.nameserver -import dns.query -import dns.rdtypes.svcbbase - -# The special name of the local resolver when using DDR -_local_resolver_name = dns.name.from_text("_dns.resolver.arpa") - - -# -# Processing is split up into I/O independent and I/O dependent parts to -# make supporting sync and async versions easy. 
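# A short sketch of how the asyncio backend above is normally reached from
# user code: ask dns.asyncbackend (shown later in this diff) for a backend
# by name, or let it sniff the running library, and use its small API
# surface (assumes dnspython is installed):

import asyncio
import dns.asyncbackend

async def main():
    backend = dns.asyncbackend.get_backend("asyncio")   # explicit choice
    assert backend.name() == "asyncio"
    default = dns.asyncbackend.get_default_backend()    # sniffs the running loop
    assert default.name() == "asyncio"
    await backend.sleep(0.01)                           # backend-neutral sleep

asyncio.run(main())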
-# - - -class _SVCBInfo: - def __init__(self, bootstrap_address, port, hostname, nameservers): - self.bootstrap_address = bootstrap_address - self.port = port - self.hostname = hostname - self.nameservers = nameservers - - def ddr_check_certificate(self, cert): - """Verify that the _SVCBInfo's address is in the cert's subjectAltName (SAN)""" - for name, value in cert["subjectAltName"]: - if name == "IP Address" and value == self.bootstrap_address: - return True - return False - - def make_tls_context(self): - ssl = dns.query.ssl - ctx = ssl.create_default_context() - ctx.minimum_version = ssl.TLSVersion.TLSv1_2 - return ctx - - def ddr_tls_check_sync(self, lifetime): - ctx = self.make_tls_context() - expiration = time.time() + lifetime - with socket.create_connection( - (self.bootstrap_address, self.port), lifetime - ) as s: - with ctx.wrap_socket(s, server_hostname=self.hostname) as ts: - ts.settimeout(dns.query._remaining(expiration)) - ts.do_handshake() - cert = ts.getpeercert() - return self.ddr_check_certificate(cert) - - async def ddr_tls_check_async(self, lifetime, backend=None): - if backend is None: - backend = dns.asyncbackend.get_default_backend() - ctx = self.make_tls_context() - expiration = time.time() + lifetime - async with await backend.make_socket( - dns.inet.af_for_address(self.bootstrap_address), - socket.SOCK_STREAM, - 0, - None, - (self.bootstrap_address, self.port), - lifetime, - ctx, - self.hostname, - ) as ts: - cert = await ts.getpeercert(dns.query._remaining(expiration)) - return self.ddr_check_certificate(cert) - - -def _extract_nameservers_from_svcb(answer): - bootstrap_address = answer.nameserver - if not dns.inet.is_address(bootstrap_address): - return [] - infos = [] - for rr in answer.rrset.processing_order(): - nameservers = [] - param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.ALPN) - if param is None: - continue - alpns = set(param.ids) - host = rr.target.to_text(omit_final_dot=True) - port = None - param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.PORT) - if param is not None: - port = param.port - # For now we ignore address hints and address resolution and always use the - # bootstrap address - if b"h2" in alpns: - param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.DOHPATH) - if param is None or not param.value.endswith(b"{?dns}"): - continue - path = param.value[:-6].decode() - if not path.startswith("/"): - path = "/" + path - if port is None: - port = 443 - url = f"https://{host}:{port}{path}" - # check the URL - try: - urlparse(url) - nameservers.append(dns.nameserver.DoHNameserver(url, bootstrap_address)) - except Exception: - # continue processing other ALPN types - pass - if b"dot" in alpns: - if port is None: - port = 853 - nameservers.append( - dns.nameserver.DoTNameserver(bootstrap_address, port, host) - ) - if b"doq" in alpns: - if port is None: - port = 853 - nameservers.append( - dns.nameserver.DoQNameserver(bootstrap_address, port, True, host) - ) - if len(nameservers) > 0: - infos.append(_SVCBInfo(bootstrap_address, port, host, nameservers)) - return infos - - -def _get_nameservers_sync(answer, lifetime): - """Return a list of TLS-validated resolver nameservers extracted from an SVCB - answer.""" - nameservers = [] - infos = _extract_nameservers_from_svcb(answer) - for info in infos: - try: - if info.ddr_tls_check_sync(lifetime): - nameservers.extend(info.nameservers) - except Exception: - pass - return nameservers - - -async def _get_nameservers_async(answer, lifetime): - """Return a list of TLS-validated resolver nameservers 
extracted from an SVCB - answer.""" - nameservers = [] - infos = _extract_nameservers_from_svcb(answer) - for info in infos: - try: - if await info.ddr_tls_check_async(lifetime): - nameservers.extend(info.nameservers) - except Exception: - pass - return nameservers diff --git a/server/venv/Lib/site-packages/dns/_immutable_ctx.py b/server/venv/Lib/site-packages/dns/_immutable_ctx.py deleted file mode 100644 index ae7a33b..0000000 --- a/server/venv/Lib/site-packages/dns/_immutable_ctx.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# This implementation of the immutable decorator requires python >= -# 3.7, and is significantly more storage efficient when making classes -# with slots immutable. It's also faster. - -import contextvars -import inspect - -_in__init__ = contextvars.ContextVar("_immutable_in__init__", default=False) - - -class _Immutable: - """Immutable mixin class""" - - # We set slots to the empty list to say "we don't have any attributes". - # We do this so that if we're mixed in with a class with __slots__, we - # don't cause a __dict__ to be added which would waste space. - - __slots__ = () - - def __setattr__(self, name, value): - if _in__init__.get() is not self: - raise TypeError("object doesn't support attribute assignment") - else: - super().__setattr__(name, value) - - def __delattr__(self, name): - if _in__init__.get() is not self: - raise TypeError("object doesn't support attribute assignment") - else: - super().__delattr__(name) - - -def _immutable_init(f): - def nf(*args, **kwargs): - previous = _in__init__.set(args[0]) - try: - # call the actual __init__ - f(*args, **kwargs) - finally: - _in__init__.reset(previous) - - nf.__signature__ = inspect.signature(f) - return nf - - -def immutable(cls): - if _Immutable in cls.__mro__: - # Some ancestor already has the mixin, so just make sure we keep - # following the __init__ protocol. - cls.__init__ = _immutable_init(cls.__init__) - if hasattr(cls, "__setstate__"): - cls.__setstate__ = _immutable_init(cls.__setstate__) - ncls = cls - else: - # Mixin the Immutable class and follow the __init__ protocol. - class ncls(_Immutable, cls): - # We have to do the __slots__ declaration here too! 
- __slots__ = () - - @_immutable_init - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - if hasattr(cls, "__setstate__"): - - @_immutable_init - def __setstate__(self, *args, **kwargs): - super().__setstate__(*args, **kwargs) - - # make ncls have the same name and module as cls - ncls.__name__ = cls.__name__ - ncls.__qualname__ = cls.__qualname__ - ncls.__module__ = cls.__module__ - return ncls diff --git a/server/venv/Lib/site-packages/dns/_trio_backend.py b/server/venv/Lib/site-packages/dns/_trio_backend.py deleted file mode 100644 index 4d9fb82..0000000 --- a/server/venv/Lib/site-packages/dns/_trio_backend.py +++ /dev/null @@ -1,246 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -"""trio async I/O library query support""" - -import socket - -import trio -import trio.socket # type: ignore - -import dns._asyncbackend -import dns.exception -import dns.inet - - -def _maybe_timeout(timeout): - if timeout is not None: - return trio.move_on_after(timeout) - else: - return dns._asyncbackend.NullContext() - - -# for brevity -_lltuple = dns.inet.low_level_address_tuple - -# pylint: disable=redefined-outer-name - - -class DatagramSocket(dns._asyncbackend.DatagramSocket): - def __init__(self, socket): - super().__init__(socket.family) - self.socket = socket - - async def sendto(self, what, destination, timeout): - with _maybe_timeout(timeout): - return await self.socket.sendto(what, destination) - raise dns.exception.Timeout( - timeout=timeout - ) # pragma: no cover lgtm[py/unreachable-statement] - - async def recvfrom(self, size, timeout): - with _maybe_timeout(timeout): - return await self.socket.recvfrom(size) - raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] - - async def close(self): - self.socket.close() - - async def getpeername(self): - return self.socket.getpeername() - - async def getsockname(self): - return self.socket.getsockname() - - async def getpeercert(self, timeout): - raise NotImplementedError - - -class StreamSocket(dns._asyncbackend.StreamSocket): - def __init__(self, family, stream, tls=False): - self.family = family - self.stream = stream - self.tls = tls - - async def sendall(self, what, timeout): - with _maybe_timeout(timeout): - return await self.stream.send_all(what) - raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] - - async def recv(self, size, timeout): - with _maybe_timeout(timeout): - return await self.stream.receive_some(size) - raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] - - async def close(self): - await self.stream.aclose() - - async def getpeername(self): - if self.tls: - return self.stream.transport_stream.socket.getpeername() - else: - return self.stream.socket.getpeername() - - async def getsockname(self): - if self.tls: - return self.stream.transport_stream.socket.getsockname() - else: - return self.stream.socket.getsockname() - - async def getpeercert(self, timeout): - if self.tls: - with _maybe_timeout(timeout): - await self.stream.do_handshake() - return self.stream.getpeercert() - else: - raise NotImplementedError - - -try: - import httpcore - import httpcore._backends.trio - import httpx - - _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend - _CoreTrioStream = httpcore._backends.trio.TrioStream - - from dns.query import _compute_times, _expiration_for_this_attempt, _remaining - - class _NetworkBackend(_CoreAsyncNetworkBackend): - def __init__(self, resolver, local_port, bootstrap_address, 
family): - super().__init__() - self._local_port = local_port - self._resolver = resolver - self._bootstrap_address = bootstrap_address - self._family = family - - async def connect_tcp( - self, host, port, timeout, local_address, socket_options=None - ): # pylint: disable=signature-differs - addresses = [] - _, expiration = _compute_times(timeout) - if dns.inet.is_address(host): - addresses.append(host) - elif self._bootstrap_address is not None: - addresses.append(self._bootstrap_address) - else: - timeout = _remaining(expiration) - family = self._family - if local_address: - family = dns.inet.af_for_address(local_address) - answers = await self._resolver.resolve_name( - host, family=family, lifetime=timeout - ) - addresses = answers.addresses() - for address in addresses: - try: - af = dns.inet.af_for_address(address) - if local_address is not None or self._local_port != 0: - source = (local_address, self._local_port) - else: - source = None - destination = (address, port) - attempt_expiration = _expiration_for_this_attempt(2.0, expiration) - timeout = _remaining(attempt_expiration) - sock = await Backend().make_socket( - af, socket.SOCK_STREAM, 0, source, destination, timeout - ) - return _CoreTrioStream(sock.stream) - except Exception: - continue - raise httpcore.ConnectError - - async def connect_unix_socket( - self, path, timeout, socket_options=None - ): # pylint: disable=signature-differs - raise NotImplementedError - - async def sleep(self, seconds): # pylint: disable=signature-differs - await trio.sleep(seconds) - - class _HTTPTransport(httpx.AsyncHTTPTransport): - def __init__( - self, - *args, - local_port=0, - bootstrap_address=None, - resolver=None, - family=socket.AF_UNSPEC, - **kwargs, - ): - if resolver is None: - # pylint: disable=import-outside-toplevel,redefined-outer-name - import dns.asyncresolver - - resolver = dns.asyncresolver.Resolver() - super().__init__(*args, **kwargs) - self._pool._network_backend = _NetworkBackend( - resolver, local_port, bootstrap_address, family - ) - -except ImportError: - _HTTPTransport = dns._asyncbackend.NullTransport # type: ignore - - -class Backend(dns._asyncbackend.Backend): - def name(self): - return "trio" - - async def make_socket( - self, - af, - socktype, - proto=0, - source=None, - destination=None, - timeout=None, - ssl_context=None, - server_hostname=None, - ): - s = trio.socket.socket(af, socktype, proto) - stream = None - try: - if source: - await s.bind(_lltuple(source, af)) - if socktype == socket.SOCK_STREAM: - connected = False - with _maybe_timeout(timeout): - await s.connect(_lltuple(destination, af)) - connected = True - if not connected: - raise dns.exception.Timeout( - timeout=timeout - ) # lgtm[py/unreachable-statement] - except Exception: # pragma: no cover - s.close() - raise - if socktype == socket.SOCK_DGRAM: - return DatagramSocket(s) - elif socktype == socket.SOCK_STREAM: - stream = trio.SocketStream(s) - tls = False - if ssl_context: - tls = True - try: - stream = trio.SSLStream( - stream, ssl_context, server_hostname=server_hostname - ) - except Exception: # pragma: no cover - await stream.aclose() - raise - return StreamSocket(af, stream, tls) - raise NotImplementedError( - "unsupported socket " + f"type {socktype}" - ) # pragma: no cover - - async def sleep(self, interval): - await trio.sleep(interval) - - def get_transport_class(self): - return _HTTPTransport - - async def wait_for(self, awaitable, timeout): - with _maybe_timeout(timeout): - return await awaitable - raise dns.exception.Timeout( - 
-            timeout=timeout
-        )  # pragma: no cover lgtm[py/unreachable-statement]
diff --git a/server/venv/Lib/site-packages/dns/asyncbackend.py b/server/venv/Lib/site-packages/dns/asyncbackend.py
deleted file mode 100644
index 07d50e1..0000000
--- a/server/venv/Lib/site-packages/dns/asyncbackend.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
-
-from typing import Dict
-
-import dns.exception
-
-# pylint: disable=unused-import
-from dns._asyncbackend import (  # noqa: F401 lgtm[py/unused-import]
-    Backend,
-    DatagramSocket,
-    Socket,
-    StreamSocket,
-)
-
-# pylint: enable=unused-import
-
-_default_backend = None
-
-_backends: Dict[str, Backend] = {}
-
-# Allow sniffio import to be disabled for testing purposes
-_no_sniffio = False
-
-
-class AsyncLibraryNotFoundError(dns.exception.DNSException):
-    pass
-
-
-def get_backend(name: str) -> Backend:
-    """Get the specified asynchronous backend.
-
-    *name*, a ``str``, the name of the backend. Currently the "trio"
-    and "asyncio" backends are available.
-
-    Raises NotImplementedError if an unknown backend name is specified.
-    """
-    # pylint: disable=import-outside-toplevel,redefined-outer-name
-    backend = _backends.get(name)
-    if backend:
-        return backend
-    if name == "trio":
-        import dns._trio_backend
-
-        backend = dns._trio_backend.Backend()
-    elif name == "asyncio":
-        import dns._asyncio_backend
-
-        backend = dns._asyncio_backend.Backend()
-    else:
-        raise NotImplementedError(f"unimplemented async backend {name}")
-    _backends[name] = backend
-    return backend
-
-
-def sniff() -> str:
-    """Attempt to determine the in-use asynchronous I/O library by using
-    the ``sniffio`` module if it is available.
-
-    Returns the name of the library, or raises AsyncLibraryNotFoundError
-    if the library cannot be determined.
-    """
-    # pylint: disable=import-outside-toplevel
-    try:
-        if _no_sniffio:
-            raise ImportError
-        import sniffio
-
-        try:
-            return sniffio.current_async_library()
-        except sniffio.AsyncLibraryNotFoundError:
-            raise AsyncLibraryNotFoundError("sniffio cannot determine async library")
-    except ImportError:
-        import asyncio
-
-        try:
-            asyncio.get_running_loop()
-            return "asyncio"
-        except RuntimeError:
-            raise AsyncLibraryNotFoundError("no async library detected")
-
-
-def get_default_backend() -> Backend:
-    """Get the default backend, initializing it if necessary."""
-    if _default_backend:
-        return _default_backend
-
-    return set_default_backend(sniff())
-
-
-def set_default_backend(name: str) -> Backend:
-    """Set the default backend.
-
-    It's not normally necessary to call this method, as
-    ``get_default_backend()`` will initialize the backend
-    appropriately in many cases. If ``sniffio`` is not installed, or
-    in testing situations, this function allows the backend to be set
-    explicitly.
-    """
-    global _default_backend
-    _default_backend = get_backend(name)
-    return _default_backend
diff --git a/server/venv/Lib/site-packages/dns/asyncquery.py b/server/venv/Lib/site-packages/dns/asyncquery.py
deleted file mode 100644
index ecf9c1a..0000000
--- a/server/venv/Lib/site-packages/dns/asyncquery.py
+++ /dev/null
@@ -1,758 +0,0 @@
-# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
-
-# Copyright (C) 2003-2017 Nominum, Inc.
-#
-# Permission to use, copy, modify, and distribute this software and its
-# documentation for any purpose with or without fee is hereby granted,
-# provided that the above copyright notice and this permission notice
-# appear in all copies.
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Talk to a DNS server.""" - -import base64 -import contextlib -import socket -import struct -import time -from typing import Any, Dict, Optional, Tuple, Union - -import dns.asyncbackend -import dns.exception -import dns.inet -import dns.message -import dns.name -import dns.quic -import dns.rcode -import dns.rdataclass -import dns.rdatatype -import dns.transaction -from dns._asyncbackend import NullContext -from dns.query import ( - BadResponse, - NoDOH, - NoDOQ, - UDPMode, - _compute_times, - _have_http2, - _matches_destination, - _remaining, - have_doh, - ssl, -) - -if have_doh: - import httpx - -# for brevity -_lltuple = dns.inet.low_level_address_tuple - - -def _source_tuple(af, address, port): - # Make a high level source tuple, or return None if address and port - # are both None - if address or port: - if address is None: - if af == socket.AF_INET: - address = "0.0.0.0" - elif af == socket.AF_INET6: - address = "::" - else: - raise NotImplementedError(f"unknown address family {af}") - return (address, port) - else: - return None - - -def _timeout(expiration, now=None): - if expiration is not None: - if not now: - now = time.time() - return max(expiration - now, 0) - else: - return None - - -async def send_udp( - sock: dns.asyncbackend.DatagramSocket, - what: Union[dns.message.Message, bytes], - destination: Any, - expiration: Optional[float] = None, -) -> Tuple[int, float]: - """Send a DNS message to the specified UDP socket. - - *sock*, a ``dns.asyncbackend.DatagramSocket``. - - *what*, a ``bytes`` or ``dns.message.Message``, the message to send. - - *destination*, a destination tuple appropriate for the address family - of the socket, specifying where to send the query. - - *expiration*, a ``float`` or ``None``, the absolute time at which - a timeout exception should be raised. If ``None``, no timeout will - occur. The expiration value is meaningless for the asyncio backend, as - asyncio's transport sendto() never blocks. - - Returns an ``(int, float)`` tuple of bytes sent and the sent time. - """ - - if isinstance(what, dns.message.Message): - what = what.to_wire() - sent_time = time.time() - n = await sock.sendto(what, destination, _timeout(expiration, sent_time)) - return (n, sent_time) - - -async def receive_udp( - sock: dns.asyncbackend.DatagramSocket, - destination: Optional[Any] = None, - expiration: Optional[float] = None, - ignore_unexpected: bool = False, - one_rr_per_rrset: bool = False, - keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, - request_mac: Optional[bytes] = b"", - ignore_trailing: bool = False, - raise_on_truncation: bool = False, -) -> Any: - """Read a DNS message from a UDP socket. - - *sock*, a ``dns.asyncbackend.DatagramSocket``. - - See :py:func:`dns.query.receive_udp()` for the documentation of the other - parameters, and exceptions. - - Returns a ``(dns.message.Message, float, tuple)`` tuple of the received message, the - received time, and the address where the message arrived from. 
- """ - - wire = b"" - while 1: - (wire, from_address) = await sock.recvfrom(65535, _timeout(expiration)) - if _matches_destination( - sock.family, from_address, destination, ignore_unexpected - ): - break - received_time = time.time() - r = dns.message.from_wire( - wire, - keyring=keyring, - request_mac=request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - raise_on_truncation=raise_on_truncation, - ) - return (r, received_time, from_address) - - -async def udp( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 53, - source: Optional[str] = None, - source_port: int = 0, - ignore_unexpected: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - raise_on_truncation: bool = False, - sock: Optional[dns.asyncbackend.DatagramSocket] = None, - backend: Optional[dns.asyncbackend.Backend] = None, -) -> dns.message.Message: - """Return the response obtained after sending a query via UDP. - - *sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, - the socket to use for the query. If ``None``, the default, a - socket is created. Note that if a socket is provided, the - *source*, *source_port*, and *backend* are ignored. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend. - - See :py:func:`dns.query.udp()` for the documentation of the other - parameters, exceptions, and return type of this method. - """ - wire = q.to_wire() - (begin_time, expiration) = _compute_times(timeout) - af = dns.inet.af_for_address(where) - destination = _lltuple((where, port), af) - if sock: - cm: contextlib.AbstractAsyncContextManager = NullContext(sock) - else: - if not backend: - backend = dns.asyncbackend.get_default_backend() - stuple = _source_tuple(af, source, source_port) - if backend.datagram_connection_required(): - dtuple = (where, port) - else: - dtuple = None - cm = await backend.make_socket(af, socket.SOCK_DGRAM, 0, stuple, dtuple) - async with cm as s: - await send_udp(s, wire, destination, expiration) - (r, received_time, _) = await receive_udp( - s, - destination, - expiration, - ignore_unexpected, - one_rr_per_rrset, - q.keyring, - q.mac, - ignore_trailing, - raise_on_truncation, - ) - r.time = received_time - begin_time - if not q.is_response(r): - raise BadResponse - return r - - -async def udp_with_fallback( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 53, - source: Optional[str] = None, - source_port: int = 0, - ignore_unexpected: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - udp_sock: Optional[dns.asyncbackend.DatagramSocket] = None, - tcp_sock: Optional[dns.asyncbackend.StreamSocket] = None, - backend: Optional[dns.asyncbackend.Backend] = None, -) -> Tuple[dns.message.Message, bool]: - """Return the response to the query, trying UDP first and falling back - to TCP if UDP results in a truncated response. - - *udp_sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, - the socket to use for the UDP query. If ``None``, the default, a - socket is created. Note that if a socket is provided the *source*, - *source_port*, and *backend* are ignored for the UDP query. - - *tcp_sock*, a ``dns.asyncbackend.StreamSocket``, or ``None``, the - socket to use for the TCP query. If ``None``, the default, a - socket is created. Note that if a socket is provided *where*, - *source*, *source_port*, and *backend* are ignored for the TCP query. 
-
-    *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``,
-    the default, then dnspython will use the default backend.
-
-    See :py:func:`dns.query.udp_with_fallback()` for the documentation
-    of the other parameters, exceptions, and return type of this
-    method.
-    """
-    try:
-        response = await udp(
-            q,
-            where,
-            timeout,
-            port,
-            source,
-            source_port,
-            ignore_unexpected,
-            one_rr_per_rrset,
-            ignore_trailing,
-            True,
-            udp_sock,
-            backend,
-        )
-        return (response, False)
-    except dns.message.Truncated:
-        response = await tcp(
-            q,
-            where,
-            timeout,
-            port,
-            source,
-            source_port,
-            one_rr_per_rrset,
-            ignore_trailing,
-            tcp_sock,
-            backend,
-        )
-        return (response, True)
-
-
-async def send_tcp(
-    sock: dns.asyncbackend.StreamSocket,
-    what: Union[dns.message.Message, bytes],
-    expiration: Optional[float] = None,
-) -> Tuple[int, float]:
-    """Send a DNS message to the specified TCP socket.
-
-    *sock*, a ``dns.asyncbackend.StreamSocket``.
-
-    See :py:func:`dns.query.send_tcp()` for the documentation of the other
-    parameters, exceptions, and return type of this method.
-    """
-
-    if isinstance(what, dns.message.Message):
-        wire = what.to_wire()
-    else:
-        wire = what
-    l = len(wire)
-    # copying the wire into tcpmsg is inefficient, but lets us
-    # avoid writev() or doing a short write that would get pushed
-    # onto the net
-    tcpmsg = struct.pack("!H", l) + wire
-    sent_time = time.time()
-    await sock.sendall(tcpmsg, _timeout(expiration, sent_time))
-    return (len(tcpmsg), sent_time)
-
-
-async def _read_exactly(sock, count, expiration):
-    """Read the specified number of bytes from stream. Keep trying until we
-    either get the desired amount, or we hit EOF.
-    """
-    s = b""
-    while count > 0:
-        n = await sock.recv(count, _timeout(expiration))
-        if n == b"":
-            raise EOFError
-        count = count - len(n)
-        s = s + n
-    return s
-
-
-async def receive_tcp(
-    sock: dns.asyncbackend.StreamSocket,
-    expiration: Optional[float] = None,
-    one_rr_per_rrset: bool = False,
-    keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None,
-    request_mac: Optional[bytes] = b"",
-    ignore_trailing: bool = False,
-) -> Tuple[dns.message.Message, float]:
-    """Read a DNS message from a TCP socket.
-
-    *sock*, a ``dns.asyncbackend.StreamSocket``.
-
-    See :py:func:`dns.query.receive_tcp()` for the documentation of the other
-    parameters, exceptions, and return type of this method.
-    """
-
-    ldata = await _read_exactly(sock, 2, expiration)
-    (l,) = struct.unpack("!H", ldata)
-    wire = await _read_exactly(sock, l, expiration)
-    received_time = time.time()
-    r = dns.message.from_wire(
-        wire,
-        keyring=keyring,
-        request_mac=request_mac,
-        one_rr_per_rrset=one_rr_per_rrset,
-        ignore_trailing=ignore_trailing,
-    )
-    return (r, received_time)
-
-
-async def tcp(
-    q: dns.message.Message,
-    where: str,
-    timeout: Optional[float] = None,
-    port: int = 53,
-    source: Optional[str] = None,
-    source_port: int = 0,
-    one_rr_per_rrset: bool = False,
-    ignore_trailing: bool = False,
-    sock: Optional[dns.asyncbackend.StreamSocket] = None,
-    backend: Optional[dns.asyncbackend.Backend] = None,
-) -> dns.message.Message:
-    """Return the response obtained after sending a query via TCP.
-
-    *sock*, a ``dns.asyncbackend.StreamSocket``, or ``None``, the
-    socket to use for the query. If ``None``, the default, a socket
-    is created. Note that if a socket is provided
-    *where*, *port*, *source*, *source_port*, and *backend* are ignored.
-
-    *backend*, a ``dns.asyncbackend.Backend``, or ``None``.
If ``None``, - the default, then dnspython will use the default backend. - - See :py:func:`dns.query.tcp()` for the documentation of the other - parameters, exceptions, and return type of this method. - """ - - wire = q.to_wire() - (begin_time, expiration) = _compute_times(timeout) - if sock: - # Verify that the socket is connected, as if it's not connected, - # it's not writable, and the polling in send_tcp() will time out or - # hang forever. - await sock.getpeername() - cm: contextlib.AbstractAsyncContextManager = NullContext(sock) - else: - # These are simple (address, port) pairs, not family-dependent tuples - # you pass to low-level socket code. - af = dns.inet.af_for_address(where) - stuple = _source_tuple(af, source, source_port) - dtuple = (where, port) - if not backend: - backend = dns.asyncbackend.get_default_backend() - cm = await backend.make_socket( - af, socket.SOCK_STREAM, 0, stuple, dtuple, timeout - ) - async with cm as s: - await send_tcp(s, wire, expiration) - (r, received_time) = await receive_tcp( - s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing - ) - r.time = received_time - begin_time - if not q.is_response(r): - raise BadResponse - return r - - -async def tls( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 853, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - sock: Optional[dns.asyncbackend.StreamSocket] = None, - backend: Optional[dns.asyncbackend.Backend] = None, - ssl_context: Optional[ssl.SSLContext] = None, - server_hostname: Optional[str] = None, -) -> dns.message.Message: - """Return the response obtained after sending a query via TLS. - - *sock*, an ``asyncbackend.StreamSocket``, or ``None``, the socket - to use for the query. If ``None``, the default, a socket is - created. Note that if a socket is provided, it must be a - connected SSL stream socket, and *where*, *port*, - *source*, *source_port*, *backend*, *ssl_context*, and *server_hostname* - are ignored. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend. - - See :py:func:`dns.query.tls()` for the documentation of the other - parameters, exceptions, and return type of this method. 
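    A minimal usage sketch (the nameserver address below is a placeholder
    from the TEST-NET-1 documentation range, not a real DoT server)::

        import asyncio
        import dns.asyncquery
        import dns.message

        async def main():
            q = dns.message.make_query("example.com.", "A")
            # 853 is the default DoT port; the address is a placeholder
            r = await dns.asyncquery.tls(q, "192.0.2.1", timeout=2.0)
            print(r.answer)

        asyncio.run(main())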
- """ - (begin_time, expiration) = _compute_times(timeout) - if sock: - cm: contextlib.AbstractAsyncContextManager = NullContext(sock) - else: - if ssl_context is None: - # See the comment about ssl.create_default_context() in query.py - ssl_context = ssl.create_default_context() # lgtm[py/insecure-protocol] - ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2 - if server_hostname is None: - ssl_context.check_hostname = False - af = dns.inet.af_for_address(where) - stuple = _source_tuple(af, source, source_port) - dtuple = (where, port) - if not backend: - backend = dns.asyncbackend.get_default_backend() - cm = await backend.make_socket( - af, - socket.SOCK_STREAM, - 0, - stuple, - dtuple, - timeout, - ssl_context, - server_hostname, - ) - async with cm as s: - timeout = _timeout(expiration) - response = await tcp( - q, - where, - timeout, - port, - source, - source_port, - one_rr_per_rrset, - ignore_trailing, - s, - backend, - ) - end_time = time.time() - response.time = end_time - begin_time - return response - - -async def https( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 443, - source: Optional[str] = None, - source_port: int = 0, # pylint: disable=W0613 - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - client: Optional["httpx.AsyncClient"] = None, - path: str = "/dns-query", - post: bool = True, - verify: Union[bool, str] = True, - bootstrap_address: Optional[str] = None, - resolver: Optional["dns.asyncresolver.Resolver"] = None, - family: Optional[int] = socket.AF_UNSPEC, -) -> dns.message.Message: - """Return the response obtained after sending a query via DNS-over-HTTPS. - - *client*, a ``httpx.AsyncClient``. If provided, the client to use for - the query. - - Unlike the other dnspython async functions, a backend cannot be provided - in this function because httpx always auto-detects the async backend. - - See :py:func:`dns.query.https()` for the documentation of the other - parameters, exceptions, and return type of this method. 
- """ - - if not have_doh: - raise NoDOH # pragma: no cover - if client and not isinstance(client, httpx.AsyncClient): - raise ValueError("session parameter must be an httpx.AsyncClient") - - wire = q.to_wire() - try: - af = dns.inet.af_for_address(where) - except ValueError: - af = None - transport = None - headers = {"accept": "application/dns-message"} - if af is not None and dns.inet.is_address(where): - if af == socket.AF_INET: - url = "https://{}:{}{}".format(where, port, path) - elif af == socket.AF_INET6: - url = "https://[{}]:{}{}".format(where, port, path) - else: - url = where - - backend = dns.asyncbackend.get_default_backend() - - if source is None: - local_address = None - local_port = 0 - else: - local_address = source - local_port = source_port - transport = backend.get_transport_class()( - local_address=local_address, - http1=True, - http2=_have_http2, - verify=verify, - local_port=local_port, - bootstrap_address=bootstrap_address, - resolver=resolver, - family=family, - ) - - if client: - cm: contextlib.AbstractAsyncContextManager = NullContext(client) - else: - cm = httpx.AsyncClient( - http1=True, http2=_have_http2, verify=verify, transport=transport - ) - - async with cm as the_client: - # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH - # GET and POST examples - if post: - headers.update( - { - "content-type": "application/dns-message", - "content-length": str(len(wire)), - } - ) - response = await backend.wait_for( - the_client.post(url, headers=headers, content=wire), timeout - ) - else: - wire = base64.urlsafe_b64encode(wire).rstrip(b"=") - twire = wire.decode() # httpx does a repr() if we give it bytes - response = await backend.wait_for( - the_client.get(url, headers=headers, params={"dns": twire}), timeout - ) - - # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH - # status codes - if response.status_code < 200 or response.status_code > 299: - raise ValueError( - "{} responded with status code {}" - "\nResponse body: {!r}".format( - where, response.status_code, response.content - ) - ) - r = dns.message.from_wire( - response.content, - keyring=q.keyring, - request_mac=q.request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - r.time = response.elapsed.total_seconds() - if not q.is_response(r): - raise BadResponse - return r - - -async def inbound_xfr( - where: str, - txn_manager: dns.transaction.TransactionManager, - query: Optional[dns.message.Message] = None, - port: int = 53, - timeout: Optional[float] = None, - lifetime: Optional[float] = None, - source: Optional[str] = None, - source_port: int = 0, - udp_mode: UDPMode = UDPMode.NEVER, - backend: Optional[dns.asyncbackend.Backend] = None, -) -> None: - """Conduct an inbound transfer and apply it via a transaction from the - txn_manager. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend. - - See :py:func:`dns.query.inbound_xfr()` for the documentation of - the other parameters, exceptions, and return type of this method. 
- """ - if query is None: - (query, serial) = dns.xfr.make_query(txn_manager) - else: - serial = dns.xfr.extract_serial_from_query(query) - rdtype = query.question[0].rdtype - is_ixfr = rdtype == dns.rdatatype.IXFR - origin = txn_manager.from_wire_origin() - wire = query.to_wire() - af = dns.inet.af_for_address(where) - stuple = _source_tuple(af, source, source_port) - dtuple = (where, port) - (_, expiration) = _compute_times(lifetime) - retry = True - while retry: - retry = False - if is_ixfr and udp_mode != UDPMode.NEVER: - sock_type = socket.SOCK_DGRAM - is_udp = True - else: - sock_type = socket.SOCK_STREAM - is_udp = False - if not backend: - backend = dns.asyncbackend.get_default_backend() - s = await backend.make_socket( - af, sock_type, 0, stuple, dtuple, _timeout(expiration) - ) - async with s: - if is_udp: - await s.sendto(wire, dtuple, _timeout(expiration)) - else: - tcpmsg = struct.pack("!H", len(wire)) + wire - await s.sendall(tcpmsg, expiration) - with dns.xfr.Inbound(txn_manager, rdtype, serial, is_udp) as inbound: - done = False - tsig_ctx = None - while not done: - (_, mexpiration) = _compute_times(timeout) - if mexpiration is None or ( - expiration is not None and mexpiration > expiration - ): - mexpiration = expiration - if is_udp: - destination = _lltuple((where, port), af) - while True: - timeout = _timeout(mexpiration) - (rwire, from_address) = await s.recvfrom(65535, timeout) - if _matches_destination( - af, from_address, destination, True - ): - break - else: - ldata = await _read_exactly(s, 2, mexpiration) - (l,) = struct.unpack("!H", ldata) - rwire = await _read_exactly(s, l, mexpiration) - is_ixfr = rdtype == dns.rdatatype.IXFR - r = dns.message.from_wire( - rwire, - keyring=query.keyring, - request_mac=query.mac, - xfr=True, - origin=origin, - tsig_ctx=tsig_ctx, - multi=(not is_udp), - one_rr_per_rrset=is_ixfr, - ) - try: - done = inbound.process_message(r) - except dns.xfr.UseTCP: - assert is_udp # should not happen if we used TCP! - if udp_mode == UDPMode.ONLY: - raise - done = True - retry = True - udp_mode = UDPMode.NEVER - continue - tsig_ctx = r.tsig_ctx - if not retry and query.keyring and not r.had_tsig: - raise dns.exception.FormError("missing TSIG") - - -async def quic( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 853, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - connection: Optional[dns.quic.AsyncQuicConnection] = None, - verify: Union[bool, str] = True, - backend: Optional[dns.asyncbackend.Backend] = None, - server_hostname: Optional[str] = None, -) -> dns.message.Message: - """Return the response obtained after sending an asynchronous query via - DNS-over-QUIC. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend. - - See :py:func:`dns.query.quic()` for the documentation of the other - parameters, exceptions, and return type of this method. 
- """ - - if not dns.quic.have_quic: - raise NoDOQ("DNS-over-QUIC is not available.") # pragma: no cover - - q.id = 0 - wire = q.to_wire() - the_connection: dns.quic.AsyncQuicConnection - if connection: - cfactory = dns.quic.null_factory - mfactory = dns.quic.null_factory - the_connection = connection - else: - (cfactory, mfactory) = dns.quic.factories_for_backend(backend) - - async with cfactory() as context: - async with mfactory( - context, verify_mode=verify, server_name=server_hostname - ) as the_manager: - if not connection: - the_connection = the_manager.connect(where, port, source, source_port) - (start, expiration) = _compute_times(timeout) - stream = await the_connection.make_stream(timeout) - async with stream: - await stream.send(wire, True) - wire = await stream.receive(_remaining(expiration)) - finish = time.time() - r = dns.message.from_wire( - wire, - keyring=q.keyring, - request_mac=q.request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - r.time = max(finish - start, 0.0) - if not q.is_response(r): - raise BadResponse - return r diff --git a/server/venv/Lib/site-packages/dns/asyncresolver.py b/server/venv/Lib/site-packages/dns/asyncresolver.py deleted file mode 100644 index 8f5e062..0000000 --- a/server/venv/Lib/site-packages/dns/asyncresolver.py +++ /dev/null @@ -1,475 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Asynchronous DNS stub resolver.""" - -import socket -import time -from typing import Any, Dict, List, Optional, Union - -import dns._ddr -import dns.asyncbackend -import dns.asyncquery -import dns.exception -import dns.name -import dns.query -import dns.rdataclass -import dns.rdatatype -import dns.resolver # lgtm[py/import-and-import-from] - -# import some resolver symbols for brevity -from dns.resolver import NXDOMAIN, NoAnswer, NoRootSOA, NotAbsolute - -# for indentation purposes below -_udp = dns.asyncquery.udp -_tcp = dns.asyncquery.tcp - - -class Resolver(dns.resolver.BaseResolver): - """Asynchronous DNS stub resolver.""" - - async def resolve( - self, - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - search: Optional[bool] = None, - backend: Optional[dns.asyncbackend.Backend] = None, - ) -> dns.resolver.Answer: - """Query nameservers asynchronously to find the answer to the question. - - *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, - the default, then dnspython will use the default backend. 
- - See :py:func:`dns.resolver.Resolver.resolve()` for the - documentation of the other parameters, exceptions, and return - type of this method. - """ - - resolution = dns.resolver._Resolution( - self, qname, rdtype, rdclass, tcp, raise_on_no_answer, search - ) - if not backend: - backend = dns.asyncbackend.get_default_backend() - start = time.time() - while True: - (request, answer) = resolution.next_request() - # Note we need to say "if answer is not None" and not just - # "if answer" because answer implements __len__, and python - # will call that. We want to return if we have an answer - # object, including in cases where its length is 0. - if answer is not None: - # cache hit! - return answer - assert request is not None # needed for type checking - done = False - while not done: - (nameserver, tcp, backoff) = resolution.next_nameserver() - if backoff: - await backend.sleep(backoff) - timeout = self._compute_timeout(start, lifetime, resolution.errors) - try: - response = await nameserver.async_query( - request, - timeout=timeout, - source=source, - source_port=source_port, - max_size=tcp, - backend=backend, - ) - except Exception as ex: - (_, done) = resolution.query_result(None, ex) - continue - (answer, done) = resolution.query_result(response, None) - # Note we need to say "if answer is not None" and not just - # "if answer" because answer implements __len__, and python - # will call that. We want to return if we have an answer - # object, including in cases where its length is 0. - if answer is not None: - return answer - - async def resolve_address( - self, ipaddr: str, *args: Any, **kwargs: Any - ) -> dns.resolver.Answer: - """Use an asynchronous resolver to run a reverse query for PTR - records. - - This utilizes the resolve() method to perform a PTR lookup on the - specified IP address. - - *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get - the PTR record for. - - All other arguments that can be passed to the resolve() function - except for rdtype and rdclass are also supported by this - function. - - """ - # We make a modified kwargs for type checking happiness, as otherwise - # we get a legit warning about possibly having rdtype and rdclass - # in the kwargs more than once. - modified_kwargs: Dict[str, Any] = {} - modified_kwargs.update(kwargs) - modified_kwargs["rdtype"] = dns.rdatatype.PTR - modified_kwargs["rdclass"] = dns.rdataclass.IN - return await self.resolve( - dns.reversename.from_address(ipaddr), *args, **modified_kwargs - ) - - async def resolve_name( - self, - name: Union[dns.name.Name, str], - family: int = socket.AF_UNSPEC, - **kwargs: Any, - ) -> dns.resolver.HostAnswers: - """Use an asynchronous resolver to query for address records. - - This utilizes the resolve() method to perform A and/or AAAA lookups on - the specified name. - - *qname*, a ``dns.name.Name`` or ``str``, the name to resolve. - - *family*, an ``int``, the address family. If socket.AF_UNSPEC - (the default), both A and AAAA records will be retrieved. - - All other arguments that can be passed to the resolve() function - except for rdtype and rdclass are also supported by this - function. - """ - # We make a modified kwargs for type checking happiness, as otherwise - # we get a legit warning about possibly having rdtype and rdclass - # in the kwargs more than once. 
-        modified_kwargs: Dict[str, Any] = {}
-        modified_kwargs.update(kwargs)
-        modified_kwargs.pop("rdtype", None)
-        modified_kwargs["rdclass"] = dns.rdataclass.IN
-
-        if family == socket.AF_INET:
-            v4 = await self.resolve(name, dns.rdatatype.A, **modified_kwargs)
-            return dns.resolver.HostAnswers.make(v4=v4)
-        elif family == socket.AF_INET6:
-            v6 = await self.resolve(name, dns.rdatatype.AAAA, **modified_kwargs)
-            return dns.resolver.HostAnswers.make(v6=v6)
-        elif family != socket.AF_UNSPEC:
-            raise NotImplementedError(f"unknown address family {family}")
-
-        raise_on_no_answer = modified_kwargs.pop("raise_on_no_answer", True)
-        lifetime = modified_kwargs.pop("lifetime", None)
-        start = time.time()
-        v6 = await self.resolve(
-            name,
-            dns.rdatatype.AAAA,
-            raise_on_no_answer=False,
-            lifetime=self._compute_timeout(start, lifetime),
-            **modified_kwargs,
-        )
-        # Note that setting name ensures we query the same name
-        # for A as we did for AAAA.  (This is just in case search lists
-        # are active by default in the resolver configuration and
-        # we might be talking to a server that says NXDOMAIN when it
-        # wants to say NOERROR no data.)
-        name = v6.qname
-        v4 = await self.resolve(
-            name,
-            dns.rdatatype.A,
-            raise_on_no_answer=False,
-            lifetime=self._compute_timeout(start, lifetime),
-            **modified_kwargs,
-        )
-        answers = dns.resolver.HostAnswers.make(
-            v6=v6, v4=v4, add_empty=not raise_on_no_answer
-        )
-        if not answers:
-            raise NoAnswer(response=v6.response)
-        return answers
-
-    # pylint: disable=redefined-outer-name
-
-    async def canonical_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name:
-        """Determine the canonical name of *name*.
-
-        The canonical name is the name the resolver uses for queries
-        after all CNAME and DNAME renamings have been applied.
-
-        *name*, a ``dns.name.Name`` or ``str``, the query name.
-
-        This method can raise any exception that ``resolve()`` can
-        raise, other than ``dns.resolver.NoAnswer`` and
-        ``dns.resolver.NXDOMAIN``.
-
-        Returns a ``dns.name.Name``.
-        """
-        try:
-            answer = await self.resolve(name, raise_on_no_answer=False)
-            canonical_name = answer.canonical_name
-        except dns.resolver.NXDOMAIN as e:
-            canonical_name = e.canonical_name
-        return canonical_name
-
-    async def try_ddr(self, lifetime: float = 5.0) -> None:
-        """Try to update the resolver's nameservers using Discovery of Designated
-        Resolvers (DDR).  If successful, the resolver will subsequently use
-        DNS-over-HTTPS or DNS-over-TLS for future queries.
-
-        *lifetime*, a float, is the maximum time to spend attempting DDR.  The default
-        is 5 seconds.
-
-        If the SVCB query is successful and results in a non-empty list of nameservers,
-        then the resolver's nameservers are set to the returned servers in priority
-        order.
-
-        The current implementation does not use any address hints from the SVCB record,
-        nor does it resolve addresses for the SVCB target name; rather, it assumes that
-        the bootstrap nameserver will always be one of the addresses and uses it.
-        A future revision to the code may offer fuller support.  The code verifies that
-        the bootstrap nameserver is in the Subject Alternative Name field of the
-        TLS certificate.
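A hedged usage sketch: because failures inside ``try_ddr()`` are swallowed, the call is safe to attempt unconditionally before querying.

.. code-block:: python

    import asyncio

    import dns.asyncresolver

    async def main():
        res = dns.asyncresolver.Resolver()
        # Attempt to upgrade to a designated encrypted resolver; on failure
        # the resolver silently keeps its original nameservers.
        await res.try_ddr(lifetime=5.0)
        answer = await res.resolve("example.com.", "A")
        print(answer.rrset)

    asyncio.run(main())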
- """ - try: - expiration = time.time() + lifetime - answer = await self.resolve( - dns._ddr._local_resolver_name, "svcb", lifetime=lifetime - ) - timeout = dns.query._remaining(expiration) - nameservers = await dns._ddr._get_nameservers_async(answer, timeout) - if len(nameservers) > 0: - self.nameservers = nameservers - except Exception: - pass - - -default_resolver = None - - -def get_default_resolver() -> Resolver: - """Get the default asynchronous resolver, initializing it if necessary.""" - if default_resolver is None: - reset_default_resolver() - assert default_resolver is not None - return default_resolver - - -def reset_default_resolver() -> None: - """Re-initialize default asynchronous resolver. - - Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX - systems) will be re-read immediately. - """ - - global default_resolver - default_resolver = Resolver() - - -async def resolve( - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - search: Optional[bool] = None, - backend: Optional[dns.asyncbackend.Backend] = None, -) -> dns.resolver.Answer: - """Query nameservers asynchronously to find the answer to the question. - - This is a convenience function that uses the default resolver - object to make the query. - - See :py:func:`dns.asyncresolver.Resolver.resolve` for more - information on the parameters. - """ - - return await get_default_resolver().resolve( - qname, - rdtype, - rdclass, - tcp, - source, - raise_on_no_answer, - source_port, - lifetime, - search, - backend, - ) - - -async def resolve_address( - ipaddr: str, *args: Any, **kwargs: Any -) -> dns.resolver.Answer: - """Use a resolver to run a reverse query for PTR records. - - See :py:func:`dns.asyncresolver.Resolver.resolve_address` for more - information on the parameters. - """ - - return await get_default_resolver().resolve_address(ipaddr, *args, **kwargs) - - -async def resolve_name( - name: Union[dns.name.Name, str], family: int = socket.AF_UNSPEC, **kwargs: Any -) -> dns.resolver.HostAnswers: - """Use a resolver to asynchronously query for address records. - - See :py:func:`dns.asyncresolver.Resolver.resolve_name` for more - information on the parameters. - """ - - return await get_default_resolver().resolve_name(name, family, **kwargs) - - -async def canonical_name(name: Union[dns.name.Name, str]) -> dns.name.Name: - """Determine the canonical name of *name*. - - See :py:func:`dns.resolver.Resolver.canonical_name` for more - information on the parameters and possible exceptions. - """ - - return await get_default_resolver().canonical_name(name) - - -async def try_ddr(timeout: float = 5.0) -> None: - """Try to update the default resolver's nameservers using Discovery of Designated - Resolvers (DDR). If successful, the resolver will subsequently use - DNS-over-HTTPS or DNS-over-TLS for future queries. - - See :py:func:`dns.resolver.Resolver.try_ddr` for more information. 
- """ - return await get_default_resolver().try_ddr(timeout) - - -async def zone_for_name( - name: Union[dns.name.Name, str], - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, - tcp: bool = False, - resolver: Optional[Resolver] = None, - backend: Optional[dns.asyncbackend.Backend] = None, -) -> dns.name.Name: - """Find the name of the zone which contains the specified name. - - See :py:func:`dns.resolver.Resolver.zone_for_name` for more - information on the parameters and possible exceptions. - """ - - if isinstance(name, str): - name = dns.name.from_text(name, dns.name.root) - if resolver is None: - resolver = get_default_resolver() - if not name.is_absolute(): - raise NotAbsolute(name) - while True: - try: - answer = await resolver.resolve( - name, dns.rdatatype.SOA, rdclass, tcp, backend=backend - ) - assert answer.rrset is not None - if answer.rrset.name == name: - return name - # otherwise we were CNAMEd or DNAMEd and need to look higher - except (NXDOMAIN, NoAnswer): - pass - try: - name = name.parent() - except dns.name.NoParent: # pragma: no cover - raise NoRootSOA - - -async def make_resolver_at( - where: Union[dns.name.Name, str], - port: int = 53, - family: int = socket.AF_UNSPEC, - resolver: Optional[Resolver] = None, -) -> Resolver: - """Make a stub resolver using the specified destination as the full resolver. - - *where*, a ``dns.name.Name`` or ``str`` the domain name or IP address of the - full resolver. - - *port*, an ``int``, the port to use. If not specified, the default is 53. - - *family*, an ``int``, the address family to use. This parameter is used if - *where* is not an address. The default is ``socket.AF_UNSPEC`` in which case - the first address returned by ``resolve_name()`` will be used, otherwise the - first address of the specified family will be used. - - *resolver*, a ``dns.asyncresolver.Resolver`` or ``None``, the resolver to use for - resolution of hostnames. If not specified, the default resolver will be used. - - Returns a ``dns.resolver.Resolver`` or raises an exception. - """ - if resolver is None: - resolver = get_default_resolver() - nameservers: List[Union[str, dns.nameserver.Nameserver]] = [] - if isinstance(where, str) and dns.inet.is_address(where): - nameservers.append(dns.nameserver.Do53Nameserver(where, port)) - else: - answers = await resolver.resolve_name(where, family) - for address in answers.addresses(): - nameservers.append(dns.nameserver.Do53Nameserver(address, port)) - res = dns.asyncresolver.Resolver(configure=False) - res.nameservers = nameservers - return res - - -async def resolve_at( - where: Union[dns.name.Name, str], - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - search: Optional[bool] = None, - backend: Optional[dns.asyncbackend.Backend] = None, - port: int = 53, - family: int = socket.AF_UNSPEC, - resolver: Optional[Resolver] = None, -) -> dns.resolver.Answer: - """Query nameservers to find the answer to the question. - - This is a convenience function that calls ``dns.asyncresolver.make_resolver_at()`` - to make a resolver, and then uses it to resolve the query. 
- - See ``dns.asyncresolver.Resolver.resolve`` for more information on the resolution - parameters, and ``dns.asyncresolver.make_resolver_at`` for information about the - resolver parameters *where*, *port*, *family*, and *resolver*. - - If making more than one query, it is more efficient to call - ``dns.asyncresolver.make_resolver_at()`` and then use that resolver for the queries - instead of calling ``resolve_at()`` multiple times. - """ - res = await make_resolver_at(where, port, family, resolver) - return await res.resolve( - qname, - rdtype, - rdclass, - tcp, - source, - raise_on_no_answer, - source_port, - lifetime, - search, - backend, - ) diff --git a/server/venv/Lib/site-packages/dns/dnssec.py b/server/venv/Lib/site-packages/dns/dnssec.py deleted file mode 100644 index 2949f61..0000000 --- a/server/venv/Lib/site-packages/dns/dnssec.py +++ /dev/null @@ -1,1222 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Common DNSSEC-related functions and constants.""" - - -import base64 -import contextlib -import functools -import hashlib -import struct -import time -from datetime import datetime -from typing import Callable, Dict, List, Optional, Set, Tuple, Union, cast - -import dns.exception -import dns.name -import dns.node -import dns.rdata -import dns.rdataclass -import dns.rdataset -import dns.rdatatype -import dns.rrset -import dns.transaction -import dns.zone -from dns.dnssectypes import Algorithm, DSDigest, NSEC3Hash -from dns.exception import ( # pylint: disable=W0611 - AlgorithmKeyMismatch, - DeniedByPolicy, - UnsupportedAlgorithm, - ValidationFailure, -) -from dns.rdtypes.ANY.CDNSKEY import CDNSKEY -from dns.rdtypes.ANY.CDS import CDS -from dns.rdtypes.ANY.DNSKEY import DNSKEY -from dns.rdtypes.ANY.DS import DS -from dns.rdtypes.ANY.NSEC import NSEC, Bitmap -from dns.rdtypes.ANY.NSEC3PARAM import NSEC3PARAM -from dns.rdtypes.ANY.RRSIG import RRSIG, sigtime_to_posixtime -from dns.rdtypes.dnskeybase import Flag - -PublicKey = Union[ - "GenericPublicKey", - "rsa.RSAPublicKey", - "ec.EllipticCurvePublicKey", - "ed25519.Ed25519PublicKey", - "ed448.Ed448PublicKey", -] - -PrivateKey = Union[ - "GenericPrivateKey", - "rsa.RSAPrivateKey", - "ec.EllipticCurvePrivateKey", - "ed25519.Ed25519PrivateKey", - "ed448.Ed448PrivateKey", -] - -RRsetSigner = Callable[[dns.transaction.Transaction, dns.rrset.RRset], None] - - -def algorithm_from_text(text: str) -> Algorithm: - """Convert text into a DNSSEC algorithm value. - - *text*, a ``str``, the text to convert to into an algorithm value. - - Returns an ``int``. 
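A quick sketch of the mnemonic/number round trip provided by this helper and ``algorithm_to_text()`` below:

.. code-block:: python

    import dns.dnssec

    alg = dns.dnssec.algorithm_from_text("ECDSAP256SHA256")
    assert int(alg) == 13
    assert dns.dnssec.algorithm_to_text(13) == "ECDSAP256SHA256"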
- """ - - return Algorithm.from_text(text) - - -def algorithm_to_text(value: Union[Algorithm, int]) -> str: - """Convert a DNSSEC algorithm value to text - - *value*, a ``dns.dnssec.Algorithm``. - - Returns a ``str``, the name of a DNSSEC algorithm. - """ - - return Algorithm.to_text(value) - - -def to_timestamp(value: Union[datetime, str, float, int]) -> int: - """Convert various format to a timestamp""" - if isinstance(value, datetime): - return int(value.timestamp()) - elif isinstance(value, str): - return sigtime_to_posixtime(value) - elif isinstance(value, float): - return int(value) - elif isinstance(value, int): - return value - else: - raise TypeError("Unsupported timestamp type") - - -def key_id(key: Union[DNSKEY, CDNSKEY]) -> int: - """Return the key id (a 16-bit number) for the specified key. - - *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` - - Returns an ``int`` between 0 and 65535 - """ - - rdata = key.to_wire() - if key.algorithm == Algorithm.RSAMD5: - return (rdata[-3] << 8) + rdata[-2] - else: - total = 0 - for i in range(len(rdata) // 2): - total += (rdata[2 * i] << 8) + rdata[2 * i + 1] - if len(rdata) % 2 != 0: - total += rdata[len(rdata) - 1] << 8 - total += (total >> 16) & 0xFFFF - return total & 0xFFFF - - -class Policy: - def __init__(self): - pass - - def ok_to_sign(self, _: DNSKEY) -> bool: # pragma: no cover - return False - - def ok_to_validate(self, _: DNSKEY) -> bool: # pragma: no cover - return False - - def ok_to_create_ds(self, _: DSDigest) -> bool: # pragma: no cover - return False - - def ok_to_validate_ds(self, _: DSDigest) -> bool: # pragma: no cover - return False - - -class SimpleDeny(Policy): - def __init__(self, deny_sign, deny_validate, deny_create_ds, deny_validate_ds): - super().__init__() - self._deny_sign = deny_sign - self._deny_validate = deny_validate - self._deny_create_ds = deny_create_ds - self._deny_validate_ds = deny_validate_ds - - def ok_to_sign(self, key: DNSKEY) -> bool: - return key.algorithm not in self._deny_sign - - def ok_to_validate(self, key: DNSKEY) -> bool: - return key.algorithm not in self._deny_validate - - def ok_to_create_ds(self, algorithm: DSDigest) -> bool: - return algorithm not in self._deny_create_ds - - def ok_to_validate_ds(self, algorithm: DSDigest) -> bool: - return algorithm not in self._deny_validate_ds - - -rfc_8624_policy = SimpleDeny( - {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1, Algorithm.ECCGOST}, - {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1}, - {DSDigest.NULL, DSDigest.SHA1, DSDigest.GOST}, - {DSDigest.NULL}, -) - -allow_all_policy = SimpleDeny(set(), set(), set(), set()) - - -default_policy = rfc_8624_policy - - -def make_ds( - name: Union[dns.name.Name, str], - key: dns.rdata.Rdata, - algorithm: Union[DSDigest, str], - origin: Optional[dns.name.Name] = None, - policy: Optional[Policy] = None, - validating: bool = False, -) -> DS: - """Create a DS record for a DNSSEC key. - - *name*, a ``dns.name.Name`` or ``str``, the owner name of the DS record. - - *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` or ``dns.rdtypes.ANY.DNSKEY.CDNSKEY``, - the key the DS is about. - - *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. - The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case - does not matter for these strings. - - *origin*, a ``dns.name.Name`` or ``None``. If *key* is a relative name, - then it will be made absolute using the specified origin. - - *policy*, a ``dns.dnssec.Policy`` or ``None``. 
If ``None``, the default policy, - ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. - - *validating*, a ``bool``. If ``True``, then policy is checked in - validating mode, i.e. "Is it ok to validate using this digest algorithm?". - Otherwise the policy is checked in creating mode, i.e. "Is it ok to create a DS with - this digest algorithm?". - - Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. - - Raises ``DeniedByPolicy`` if the algorithm is denied by policy. - - Returns a ``dns.rdtypes.ANY.DS.DS`` - """ - - if policy is None: - policy = default_policy - try: - if isinstance(algorithm, str): - algorithm = DSDigest[algorithm.upper()] - except Exception: - raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm) - if validating: - check = policy.ok_to_validate_ds - else: - check = policy.ok_to_create_ds - if not check(algorithm): - raise DeniedByPolicy - if not isinstance(key, (DNSKEY, CDNSKEY)): - raise ValueError("key is not a DNSKEY/CDNSKEY") - if algorithm == DSDigest.SHA1: - dshash = hashlib.sha1() - elif algorithm == DSDigest.SHA256: - dshash = hashlib.sha256() - elif algorithm == DSDigest.SHA384: - dshash = hashlib.sha384() - else: - raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm) - - if isinstance(name, str): - name = dns.name.from_text(name, origin) - wire = name.canonicalize().to_wire() - assert wire is not None - dshash.update(wire) - dshash.update(key.to_wire(origin=origin)) - digest = dshash.digest() - - dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, algorithm) + digest - ds = dns.rdata.from_wire( - dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0, len(dsrdata) - ) - return cast(DS, ds) - - -def make_cds( - name: Union[dns.name.Name, str], - key: dns.rdata.Rdata, - algorithm: Union[DSDigest, str], - origin: Optional[dns.name.Name] = None, -) -> CDS: - """Create a CDS record for a DNSSEC key. - - *name*, a ``dns.name.Name`` or ``str``, the owner name of the DS record. - - *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` or ``dns.rdtypes.ANY.DNSKEY.CDNSKEY``, - the key the DS is about. - - *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. - The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case - does not matter for these strings. - - *origin*, a ``dns.name.Name`` or ``None``. If *key* is a relative name, - then it will be made absolute using the specified origin. - - Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. 
- - Returns a ``dns.rdtypes.ANY.DS.CDS`` - """ - - ds = make_ds(name, key, algorithm, origin) - return CDS( - rdclass=ds.rdclass, - rdtype=dns.rdatatype.CDS, - key_tag=ds.key_tag, - algorithm=ds.algorithm, - digest_type=ds.digest_type, - digest=ds.digest, - ) - - -def _find_candidate_keys( - keys: Dict[dns.name.Name, Union[dns.rdataset.Rdataset, dns.node.Node]], rrsig: RRSIG -) -> Optional[List[DNSKEY]]: - value = keys.get(rrsig.signer) - if isinstance(value, dns.node.Node): - rdataset = value.get_rdataset(dns.rdataclass.IN, dns.rdatatype.DNSKEY) - else: - rdataset = value - if rdataset is None: - return None - return [ - cast(DNSKEY, rd) - for rd in rdataset - if rd.algorithm == rrsig.algorithm - and key_id(rd) == rrsig.key_tag - and (rd.flags & Flag.ZONE) == Flag.ZONE # RFC 4034 2.1.1 - and rd.protocol == 3 # RFC 4034 2.1.2 - ] - - -def _get_rrname_rdataset( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], -) -> Tuple[dns.name.Name, dns.rdataset.Rdataset]: - if isinstance(rrset, tuple): - return rrset[0], rrset[1] - else: - return rrset.name, rrset - - -def _validate_signature(sig: bytes, data: bytes, key: DNSKEY) -> None: - public_cls = get_algorithm_cls_from_dnskey(key).public_cls - try: - public_key = public_cls.from_dnskey(key) - except ValueError: - raise ValidationFailure("invalid public key") - public_key.verify(sig, data) - - -def _validate_rrsig( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - rrsig: RRSIG, - keys: Dict[dns.name.Name, Union[dns.node.Node, dns.rdataset.Rdataset]], - origin: Optional[dns.name.Name] = None, - now: Optional[float] = None, - policy: Optional[Policy] = None, -) -> None: - """Validate an RRset against a single signature rdata, throwing an - exception if validation is not successful. - - *rrset*, the RRset to validate. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *rrsig*, a ``dns.rdata.Rdata``, the signature to validate. - - *keys*, the key dictionary, used to find the DNSKEY associated - with a given name. The dictionary is keyed by a - ``dns.name.Name``, and has ``dns.node.Node`` or - ``dns.rdataset.Rdataset`` values. - - *origin*, a ``dns.name.Name`` or ``None``, the origin to use for relative - names. - - *now*, a ``float`` or ``None``, the time, in seconds since the epoch, to - use as the current time when validating. If ``None``, the actual current - time is used. - - *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, - ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. - - Raises ``ValidationFailure`` if the signature is expired, not yet valid, - the public key is invalid, the algorithm is unknown, the verification - fails, etc. - - Raises ``UnsupportedAlgorithm`` if the algorithm is recognized by - dnspython but not implemented. 
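As a sketch of the DS helpers defined above (the key pair is freshly generated throwaway material, not real zone data):

.. code-block:: python

    import dns.dnssec
    from cryptography.hazmat.primitives.asymmetric import ec

    # Generate a throwaway ECDSA P-256 key and express it as a DNSKEY.
    private_key = ec.generate_private_key(ec.SECP256R1())
    dnskey = dns.dnssec.make_dnskey(private_key.public_key(), "ECDSAP256SHA256")
    # Derive the corresponding DS record using a SHA-256 digest.
    ds = dns.dnssec.make_ds("example.", dnskey, "SHA256")
    print(dns.dnssec.key_id(dnskey), ds.to_text())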
- """ - - if policy is None: - policy = default_policy - - candidate_keys = _find_candidate_keys(keys, rrsig) - if candidate_keys is None: - raise ValidationFailure("unknown key") - - if now is None: - now = time.time() - if rrsig.expiration < now: - raise ValidationFailure("expired") - if rrsig.inception > now: - raise ValidationFailure("not yet valid") - - data = _make_rrsig_signature_data(rrset, rrsig, origin) - - for candidate_key in candidate_keys: - if not policy.ok_to_validate(candidate_key): - continue - try: - _validate_signature(rrsig.signature, data, candidate_key) - return - except (InvalidSignature, ValidationFailure): - # this happens on an individual validation failure - continue - # nothing verified -- raise failure: - raise ValidationFailure("verify failure") - - -def _validate( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - rrsigset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - keys: Dict[dns.name.Name, Union[dns.node.Node, dns.rdataset.Rdataset]], - origin: Optional[dns.name.Name] = None, - now: Optional[float] = None, - policy: Optional[Policy] = None, -) -> None: - """Validate an RRset against a signature RRset, throwing an exception - if none of the signatures validate. - - *rrset*, the RRset to validate. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *rrsigset*, the signature RRset. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *keys*, the key dictionary, used to find the DNSKEY associated - with a given name. The dictionary is keyed by a - ``dns.name.Name``, and has ``dns.node.Node`` or - ``dns.rdataset.Rdataset`` values. - - *origin*, a ``dns.name.Name``, the origin to use for relative names; - defaults to None. - - *now*, an ``int`` or ``None``, the time, in seconds since the epoch, to - use as the current time when validating. If ``None``, the actual current - time is used. - - *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, - ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. - - Raises ``ValidationFailure`` if the signature is expired, not yet valid, - the public key is invalid, the algorithm is unknown, the verification - fails, etc. 
- """ - - if policy is None: - policy = default_policy - - if isinstance(origin, str): - origin = dns.name.from_text(origin, dns.name.root) - - if isinstance(rrset, tuple): - rrname = rrset[0] - else: - rrname = rrset.name - - if isinstance(rrsigset, tuple): - rrsigname = rrsigset[0] - rrsigrdataset = rrsigset[1] - else: - rrsigname = rrsigset.name - rrsigrdataset = rrsigset - - rrname = rrname.choose_relativity(origin) - rrsigname = rrsigname.choose_relativity(origin) - if rrname != rrsigname: - raise ValidationFailure("owner names do not match") - - for rrsig in rrsigrdataset: - if not isinstance(rrsig, RRSIG): - raise ValidationFailure("expected an RRSIG") - try: - _validate_rrsig(rrset, rrsig, keys, origin, now, policy) - return - except (ValidationFailure, UnsupportedAlgorithm): - pass - raise ValidationFailure("no RRSIGs validated") - - -def _sign( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - private_key: PrivateKey, - signer: dns.name.Name, - dnskey: DNSKEY, - inception: Optional[Union[datetime, str, int, float]] = None, - expiration: Optional[Union[datetime, str, int, float]] = None, - lifetime: Optional[int] = None, - verify: bool = False, - policy: Optional[Policy] = None, - origin: Optional[dns.name.Name] = None, -) -> RRSIG: - """Sign RRset using private key. - - *rrset*, the RRset to validate. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *private_key*, the private key to use for signing, a - ``cryptography.hazmat.primitives.asymmetric`` private key class applicable - for DNSSEC. - - *signer*, a ``dns.name.Name``, the Signer's name. - - *dnskey*, a ``DNSKEY`` matching ``private_key``. - - *inception*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the - signature inception time. If ``None``, the current time is used. If a ``str``, the - format is "YYYYMMDDHHMMSS" or alternatively the number of seconds since the UNIX - epoch in text form; this is the same the RRSIG rdata's text form. - Values of type `int` or `float` are interpreted as seconds since the UNIX epoch. - - *expiration*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature - expiration time. If ``None``, the expiration time will be the inception time plus - the value of the *lifetime* parameter. See the description of *inception* above - for how the various parameter types are interpreted. - - *lifetime*, an ``int`` or ``None``, the signature lifetime in seconds. This - parameter is only meaningful if *expiration* is ``None``. - - *verify*, a ``bool``. If set to ``True``, the signer will verify signatures - after they are created; the default is ``False``. - - *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, - ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. - - *origin*, a ``dns.name.Name`` or ``None``. If ``None``, the default, then all - names in the rrset (including its owner name) must be absolute; otherwise the - specified origin will be used to make names absolute when signing. - - Raises ``DeniedByPolicy`` if the signature is denied by policy. 
- """ - - if policy is None: - policy = default_policy - if not policy.ok_to_sign(dnskey): - raise DeniedByPolicy - - if isinstance(rrset, tuple): - rdclass = rrset[1].rdclass - rdtype = rrset[1].rdtype - rrname = rrset[0] - original_ttl = rrset[1].ttl - else: - rdclass = rrset.rdclass - rdtype = rrset.rdtype - rrname = rrset.name - original_ttl = rrset.ttl - - if inception is not None: - rrsig_inception = to_timestamp(inception) - else: - rrsig_inception = int(time.time()) - - if expiration is not None: - rrsig_expiration = to_timestamp(expiration) - elif lifetime is not None: - rrsig_expiration = rrsig_inception + lifetime - else: - raise ValueError("expiration or lifetime must be specified") - - # Derelativize now because we need a correct labels length for the - # rrsig_template. - if origin is not None: - rrname = rrname.derelativize(origin) - labels = len(rrname) - 1 - - # Adjust labels appropriately for wildcards. - if rrname.is_wild(): - labels -= 1 - - rrsig_template = RRSIG( - rdclass=rdclass, - rdtype=dns.rdatatype.RRSIG, - type_covered=rdtype, - algorithm=dnskey.algorithm, - labels=labels, - original_ttl=original_ttl, - expiration=rrsig_expiration, - inception=rrsig_inception, - key_tag=key_id(dnskey), - signer=signer, - signature=b"", - ) - - data = dns.dnssec._make_rrsig_signature_data(rrset, rrsig_template, origin) - - if isinstance(private_key, GenericPrivateKey): - signing_key = private_key - else: - try: - private_cls = get_algorithm_cls_from_dnskey(dnskey) - signing_key = private_cls(key=private_key) - except UnsupportedAlgorithm: - raise TypeError("Unsupported key algorithm") - - signature = signing_key.sign(data, verify) - - return cast(RRSIG, rrsig_template.replace(signature=signature)) - - -def _make_rrsig_signature_data( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - rrsig: RRSIG, - origin: Optional[dns.name.Name] = None, -) -> bytes: - """Create signature rdata. - - *rrset*, the RRset to sign/validate. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *rrsig*, a ``dns.rdata.Rdata``, the signature to validate, or the - signature template used when signing. - - *origin*, a ``dns.name.Name`` or ``None``, the origin to use for relative - names. - - Raises ``UnsupportedAlgorithm`` if the algorithm is recognized by - dnspython but not implemented. - """ - - if isinstance(origin, str): - origin = dns.name.from_text(origin, dns.name.root) - - signer = rrsig.signer - if not signer.is_absolute(): - if origin is None: - raise ValidationFailure("relative RR name without an origin specified") - signer = signer.derelativize(origin) - - # For convenience, allow the rrset to be specified as a (name, - # rdataset) tuple as well as a proper rrset - rrname, rdataset = _get_rrname_rdataset(rrset) - - data = b"" - data += rrsig.to_wire(origin=signer)[:18] - data += rrsig.signer.to_digestable(signer) - - # Derelativize the name before considering labels. 
- if not rrname.is_absolute(): - if origin is None: - raise ValidationFailure("relative RR name without an origin specified") - rrname = rrname.derelativize(origin) - - name_len = len(rrname) - if rrname.is_wild() and rrsig.labels != name_len - 2: - raise ValidationFailure("wild owner name has wrong label length") - if name_len - 1 < rrsig.labels: - raise ValidationFailure("owner name longer than RRSIG labels") - elif rrsig.labels < name_len - 1: - suffix = rrname.split(rrsig.labels + 1)[1] - rrname = dns.name.from_text("*", suffix) - rrnamebuf = rrname.to_digestable() - rrfixed = struct.pack("!HHI", rdataset.rdtype, rdataset.rdclass, rrsig.original_ttl) - rdatas = [rdata.to_digestable(origin) for rdata in rdataset] - for rdata in sorted(rdatas): - data += rrnamebuf - data += rrfixed - rrlen = struct.pack("!H", len(rdata)) - data += rrlen - data += rdata - - return data - - -def _make_dnskey( - public_key: PublicKey, - algorithm: Union[int, str], - flags: int = Flag.ZONE, - protocol: int = 3, -) -> DNSKEY: - """Convert a public key to DNSKEY Rdata - - *public_key*, a ``PublicKey`` (``GenericPublicKey`` or - ``cryptography.hazmat.primitives.asymmetric``) to convert. - - *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. - - *flags*: DNSKEY flags field as an integer. - - *protocol*: DNSKEY protocol field as an integer. - - Raises ``ValueError`` if the specified key algorithm parameters are not - unsupported, ``TypeError`` if the key type is unsupported, - `UnsupportedAlgorithm` if the algorithm is unknown and - `AlgorithmKeyMismatch` if the algorithm does not match the key type. - - Return DNSKEY ``Rdata``. - """ - - algorithm = Algorithm.make(algorithm) - - if isinstance(public_key, GenericPublicKey): - return public_key.to_dnskey(flags=flags, protocol=protocol) - else: - public_cls = get_algorithm_cls(algorithm).public_cls - return public_cls(key=public_key).to_dnskey(flags=flags, protocol=protocol) - - -def _make_cdnskey( - public_key: PublicKey, - algorithm: Union[int, str], - flags: int = Flag.ZONE, - protocol: int = 3, -) -> CDNSKEY: - """Convert a public key to CDNSKEY Rdata - - *public_key*, the public key to convert, a - ``cryptography.hazmat.primitives.asymmetric`` public key class applicable - for DNSSEC. - - *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. - - *flags*: DNSKEY flags field as an integer. - - *protocol*: DNSKEY protocol field as an integer. - - Raises ``ValueError`` if the specified key algorithm parameters are not - unsupported, ``TypeError`` if the key type is unsupported, - `UnsupportedAlgorithm` if the algorithm is unknown and - `AlgorithmKeyMismatch` if the algorithm does not match the key type. - - Return CDNSKEY ``Rdata``. - """ - - dnskey = _make_dnskey(public_key, algorithm, flags, protocol) - - return CDNSKEY( - rdclass=dnskey.rdclass, - rdtype=dns.rdatatype.CDNSKEY, - flags=dnskey.flags, - protocol=dnskey.protocol, - algorithm=dnskey.algorithm, - key=dnskey.key, - ) - - -def nsec3_hash( - domain: Union[dns.name.Name, str], - salt: Optional[Union[str, bytes]], - iterations: int, - algorithm: Union[int, str], -) -> str: - """ - Calculate the NSEC3 hash, according to - https://tools.ietf.org/html/rfc5155#section-5 - - *domain*, a ``dns.name.Name`` or ``str``, the name to hash. - - *salt*, a ``str``, ``bytes``, or ``None``, the hash salt. If a - string, it is decoded as a hex string. - - *iterations*, an ``int``, the number of iterations. - - *algorithm*, a ``str`` or ``int``, the hash algorithm. 
- The only defined algorithm is SHA1. - - Returns a ``str``, the encoded NSEC3 hash. - """ - - b32_conversion = str.maketrans( - "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", "0123456789ABCDEFGHIJKLMNOPQRSTUV" - ) - - try: - if isinstance(algorithm, str): - algorithm = NSEC3Hash[algorithm.upper()] - except Exception: - raise ValueError("Wrong hash algorithm (only SHA1 is supported)") - - if algorithm != NSEC3Hash.SHA1: - raise ValueError("Wrong hash algorithm (only SHA1 is supported)") - - if salt is None: - salt_encoded = b"" - elif isinstance(salt, str): - if len(salt) % 2 == 0: - salt_encoded = bytes.fromhex(salt) - else: - raise ValueError("Invalid salt length") - else: - salt_encoded = salt - - if not isinstance(domain, dns.name.Name): - domain = dns.name.from_text(domain) - domain_encoded = domain.canonicalize().to_wire() - assert domain_encoded is not None - - digest = hashlib.sha1(domain_encoded + salt_encoded).digest() - for _ in range(iterations): - digest = hashlib.sha1(digest + salt_encoded).digest() - - output = base64.b32encode(digest).decode("utf-8") - output = output.translate(b32_conversion) - - return output - - -def make_ds_rdataset( - rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], - algorithms: Set[Union[DSDigest, str]], - origin: Optional[dns.name.Name] = None, -) -> dns.rdataset.Rdataset: - """Create a DS record from DNSKEY/CDNSKEY/CDS. - - *rrset*, the RRset to create DS Rdataset for. This can be a - ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) - tuple. - - *algorithms*, a set of ``str`` or ``int`` specifying the hash algorithms. - The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case - does not matter for these strings. If the RRset is a CDS, only digest - algorithms matching algorithms are accepted. - - *origin*, a ``dns.name.Name`` or ``None``. If `key` is a relative name, - then it will be made absolute using the specified origin. - - Raises ``UnsupportedAlgorithm`` if any of the algorithms are unknown and - ``ValueError`` if the given RRset is not usable. - - Returns a ``dns.rdataset.Rdataset`` - """ - - rrname, rdataset = _get_rrname_rdataset(rrset) - - if rdataset.rdtype not in ( - dns.rdatatype.DNSKEY, - dns.rdatatype.CDNSKEY, - dns.rdatatype.CDS, - ): - raise ValueError("rrset not a DNSKEY/CDNSKEY/CDS") - - _algorithms = set() - for algorithm in algorithms: - try: - if isinstance(algorithm, str): - algorithm = DSDigest[algorithm.upper()] - except Exception: - raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm) - _algorithms.add(algorithm) - - if rdataset.rdtype == dns.rdatatype.CDS: - res = [] - for rdata in cds_rdataset_to_ds_rdataset(rdataset): - if rdata.digest_type in _algorithms: - res.append(rdata) - if len(res) == 0: - raise ValueError("no acceptable CDS rdata found") - return dns.rdataset.from_rdata_list(rdataset.ttl, res) - - res = [] - for algorithm in _algorithms: - res.extend(dnskey_rdataset_to_cds_rdataset(rrname, rdataset, algorithm, origin)) - return dns.rdataset.from_rdata_list(rdataset.ttl, res) - - -def cds_rdataset_to_ds_rdataset( - rdataset: dns.rdataset.Rdataset, -) -> dns.rdataset.Rdataset: - """Create a CDS record from DS. - - *rdataset*, a ``dns.rdataset.Rdataset``, to create DS Rdataset for. - - Raises ``ValueError`` if the rdataset is not CDS. 
- - Returns a ``dns.rdataset.Rdataset`` - """ - - if rdataset.rdtype != dns.rdatatype.CDS: - raise ValueError("rdataset not a CDS") - res = [] - for rdata in rdataset: - res.append( - CDS( - rdclass=rdata.rdclass, - rdtype=dns.rdatatype.DS, - key_tag=rdata.key_tag, - algorithm=rdata.algorithm, - digest_type=rdata.digest_type, - digest=rdata.digest, - ) - ) - return dns.rdataset.from_rdata_list(rdataset.ttl, res) - - -def dnskey_rdataset_to_cds_rdataset( - name: Union[dns.name.Name, str], - rdataset: dns.rdataset.Rdataset, - algorithm: Union[DSDigest, str], - origin: Optional[dns.name.Name] = None, -) -> dns.rdataset.Rdataset: - """Create a CDS record from DNSKEY/CDNSKEY. - - *name*, a ``dns.name.Name`` or ``str``, the owner name of the CDS record. - - *rdataset*, a ``dns.rdataset.Rdataset``, to create DS Rdataset for. - - *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. - The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case - does not matter for these strings. - - *origin*, a ``dns.name.Name`` or ``None``. If `key` is a relative name, - then it will be made absolute using the specified origin. - - Raises ``UnsupportedAlgorithm`` if the algorithm is unknown or - ``ValueError`` if the rdataset is not DNSKEY/CDNSKEY. - - Returns a ``dns.rdataset.Rdataset`` - """ - - if rdataset.rdtype not in (dns.rdatatype.DNSKEY, dns.rdatatype.CDNSKEY): - raise ValueError("rdataset not a DNSKEY/CDNSKEY") - res = [] - for rdata in rdataset: - res.append(make_cds(name, rdata, algorithm, origin)) - return dns.rdataset.from_rdata_list(rdataset.ttl, res) - - -def dnskey_rdataset_to_cdnskey_rdataset( - rdataset: dns.rdataset.Rdataset, -) -> dns.rdataset.Rdataset: - """Create a CDNSKEY record from DNSKEY. - - *rdataset*, a ``dns.rdataset.Rdataset``, to create CDNSKEY Rdataset for. 
- - Returns a ``dns.rdataset.Rdataset`` - """ - - if rdataset.rdtype != dns.rdatatype.DNSKEY: - raise ValueError("rdataset not a DNSKEY") - res = [] - for rdata in rdataset: - res.append( - CDNSKEY( - rdclass=rdataset.rdclass, - rdtype=rdataset.rdtype, - flags=rdata.flags, - protocol=rdata.protocol, - algorithm=rdata.algorithm, - key=rdata.key, - ) - ) - return dns.rdataset.from_rdata_list(rdataset.ttl, res) - - -def default_rrset_signer( - txn: dns.transaction.Transaction, - rrset: dns.rrset.RRset, - signer: dns.name.Name, - ksks: List[Tuple[PrivateKey, DNSKEY]], - zsks: List[Tuple[PrivateKey, DNSKEY]], - inception: Optional[Union[datetime, str, int, float]] = None, - expiration: Optional[Union[datetime, str, int, float]] = None, - lifetime: Optional[int] = None, - policy: Optional[Policy] = None, - origin: Optional[dns.name.Name] = None, -) -> None: - """Default RRset signer""" - - if rrset.rdtype in set( - [ - dns.rdatatype.RdataType.DNSKEY, - dns.rdatatype.RdataType.CDS, - dns.rdatatype.RdataType.CDNSKEY, - ] - ): - keys = ksks - else: - keys = zsks - - for private_key, dnskey in keys: - rrsig = dns.dnssec.sign( - rrset=rrset, - private_key=private_key, - dnskey=dnskey, - inception=inception, - expiration=expiration, - lifetime=lifetime, - signer=signer, - policy=policy, - origin=origin, - ) - txn.add(rrset.name, rrset.ttl, rrsig) - - -def sign_zone( - zone: dns.zone.Zone, - txn: Optional[dns.transaction.Transaction] = None, - keys: Optional[List[Tuple[PrivateKey, DNSKEY]]] = None, - add_dnskey: bool = True, - dnskey_ttl: Optional[int] = None, - inception: Optional[Union[datetime, str, int, float]] = None, - expiration: Optional[Union[datetime, str, int, float]] = None, - lifetime: Optional[int] = None, - nsec3: Optional[NSEC3PARAM] = None, - rrset_signer: Optional[RRsetSigner] = None, - policy: Optional[Policy] = None, -) -> None: - """Sign zone. - - *zone*, a ``dns.zone.Zone``, the zone to sign. - - *txn*, a ``dns.transaction.Transaction``, an optional transaction to use for - signing. - - *keys*, a list of (``PrivateKey``, ``DNSKEY``) tuples, to use for signing. KSK/ZSK - roles are assigned automatically if the SEP flag is used, otherwise all RRsets are - signed by all keys. - - *add_dnskey*, a ``bool``. If ``True``, the default, all specified DNSKEYs are - automatically added to the zone on signing. - - *dnskey_ttl*, a``int``, specifies the TTL for DNSKEY RRs. If not specified the TTL - of the existing DNSKEY RRset used or the TTL of the SOA RRset. - - *inception*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature - inception time. If ``None``, the current time is used. If a ``str``, the format is - "YYYYMMDDHHMMSS" or alternatively the number of seconds since the UNIX epoch in text - form; this is the same the RRSIG rdata's text form. Values of type `int` or `float` - are interpreted as seconds since the UNIX epoch. - - *expiration*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature - expiration time. If ``None``, the expiration time will be the inception time plus - the value of the *lifetime* parameter. See the description of *inception* above for - how the various parameter types are interpreted. - - *lifetime*, an ``int`` or ``None``, the signature lifetime in seconds. This - parameter is only meaningful if *expiration* is ``None``. - - *nsec3*, a ``NSEC3PARAM`` Rdata, configures signing using NSEC3. Not yet - implemented. - - *rrset_signer*, a ``Callable``, an optional function for signing RRsets. 
The - function requires two arguments: transaction and RRset. If the not specified, - ``dns.dnssec.default_rrset_signer`` will be used. - - Returns ``None``. - """ - - ksks = [] - zsks = [] - - # if we have both KSKs and ZSKs, split by SEP flag. if not, sign all - # records with all keys - if keys: - for key in keys: - if key[1].flags & Flag.SEP: - ksks.append(key) - else: - zsks.append(key) - if not ksks: - ksks = keys - if not zsks: - zsks = keys - else: - keys = [] - - if txn: - cm: contextlib.AbstractContextManager = contextlib.nullcontext(txn) - else: - cm = zone.writer() - - with cm as _txn: - if add_dnskey: - if dnskey_ttl is None: - dnskey = _txn.get(zone.origin, dns.rdatatype.DNSKEY) - if dnskey: - dnskey_ttl = dnskey.ttl - else: - soa = _txn.get(zone.origin, dns.rdatatype.SOA) - dnskey_ttl = soa.ttl - for _, dnskey in keys: - _txn.add(zone.origin, dnskey_ttl, dnskey) - - if nsec3: - raise NotImplementedError("Signing with NSEC3 not yet implemented") - else: - _rrset_signer = rrset_signer or functools.partial( - default_rrset_signer, - signer=zone.origin, - ksks=ksks, - zsks=zsks, - inception=inception, - expiration=expiration, - lifetime=lifetime, - policy=policy, - origin=zone.origin, - ) - return _sign_zone_nsec(zone, _txn, _rrset_signer) - - -def _sign_zone_nsec( - zone: dns.zone.Zone, - txn: dns.transaction.Transaction, - rrset_signer: Optional[RRsetSigner] = None, -) -> None: - """NSEC zone signer""" - - def _txn_add_nsec( - txn: dns.transaction.Transaction, - name: dns.name.Name, - next_secure: Optional[dns.name.Name], - rdclass: dns.rdataclass.RdataClass, - ttl: int, - rrset_signer: Optional[RRsetSigner] = None, - ) -> None: - """NSEC zone signer helper""" - mandatory_types = set( - [dns.rdatatype.RdataType.RRSIG, dns.rdatatype.RdataType.NSEC] - ) - node = txn.get_node(name) - if node and next_secure: - types = ( - set([rdataset.rdtype for rdataset in node.rdatasets]) | mandatory_types - ) - windows = Bitmap.from_rdtypes(list(types)) - rrset = dns.rrset.from_rdata( - name, - ttl, - NSEC( - rdclass=rdclass, - rdtype=dns.rdatatype.RdataType.NSEC, - next=next_secure, - windows=windows, - ), - ) - txn.add(rrset) - if rrset_signer: - rrset_signer(txn, rrset) - - rrsig_ttl = zone.get_soa().minimum - delegation = None - last_secure = None - - for name in sorted(txn.iterate_names()): - if delegation and name.is_subdomain(delegation): - # names below delegations are not secure - continue - elif txn.get(name, dns.rdatatype.NS) and name != zone.origin: - # inside delegation - delegation = name - else: - # outside delegation - delegation = None - - if rrset_signer: - node = txn.get_node(name) - if node: - for rdataset in node.rdatasets: - if rdataset.rdtype == dns.rdatatype.RRSIG: - # do not sign RRSIGs - continue - elif delegation and rdataset.rdtype != dns.rdatatype.DS: - # do not sign delegations except DS records - continue - else: - rrset = dns.rrset.from_rdata(name, rdataset.ttl, *rdataset) - rrset_signer(txn, rrset) - - # We need "is not None" as the empty name is False because its length is 0. 
- if last_secure is not None: - _txn_add_nsec(txn, last_secure, name, zone.rdclass, rrsig_ttl, rrset_signer) - last_secure = name - - if last_secure: - _txn_add_nsec( - txn, last_secure, zone.origin, zone.rdclass, rrsig_ttl, rrset_signer - ) - - -def _need_pyca(*args, **kwargs): - raise ImportError( - "DNSSEC validation requires python cryptography" - ) # pragma: no cover - - -try: - from cryptography.exceptions import InvalidSignature - from cryptography.hazmat.primitives.asymmetric import dsa # pylint: disable=W0611 - from cryptography.hazmat.primitives.asymmetric import ec # pylint: disable=W0611 - from cryptography.hazmat.primitives.asymmetric import ed448 # pylint: disable=W0611 - from cryptography.hazmat.primitives.asymmetric import rsa # pylint: disable=W0611 - from cryptography.hazmat.primitives.asymmetric import ( # pylint: disable=W0611 - ed25519, - ) - - from dns.dnssecalgs import ( # pylint: disable=C0412 - get_algorithm_cls, - get_algorithm_cls_from_dnskey, - ) - from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey -except ImportError: # pragma: no cover - validate = _need_pyca - validate_rrsig = _need_pyca - sign = _need_pyca - make_dnskey = _need_pyca - make_cdnskey = _need_pyca - _have_pyca = False -else: - validate = _validate # type: ignore - validate_rrsig = _validate_rrsig # type: ignore - sign = _sign - make_dnskey = _make_dnskey - make_cdnskey = _make_cdnskey - _have_pyca = True - -### BEGIN generated Algorithm constants - -RSAMD5 = Algorithm.RSAMD5 -DH = Algorithm.DH -DSA = Algorithm.DSA -ECC = Algorithm.ECC -RSASHA1 = Algorithm.RSASHA1 -DSANSEC3SHA1 = Algorithm.DSANSEC3SHA1 -RSASHA1NSEC3SHA1 = Algorithm.RSASHA1NSEC3SHA1 -RSASHA256 = Algorithm.RSASHA256 -RSASHA512 = Algorithm.RSASHA512 -ECCGOST = Algorithm.ECCGOST -ECDSAP256SHA256 = Algorithm.ECDSAP256SHA256 -ECDSAP384SHA384 = Algorithm.ECDSAP384SHA384 -ED25519 = Algorithm.ED25519 -ED448 = Algorithm.ED448 -INDIRECT = Algorithm.INDIRECT -PRIVATEDNS = Algorithm.PRIVATEDNS -PRIVATEOID = Algorithm.PRIVATEOID - -### END generated Algorithm constants diff --git a/server/venv/Lib/site-packages/dns/dnssecalgs/__init__.py b/server/venv/Lib/site-packages/dns/dnssecalgs/__init__.py deleted file mode 100644 index d1ffd51..0000000 --- a/server/venv/Lib/site-packages/dns/dnssecalgs/__init__.py +++ /dev/null @@ -1,121 +0,0 @@ -from typing import Dict, Optional, Tuple, Type, Union - -import dns.name - -try: - from dns.dnssecalgs.base import GenericPrivateKey - from dns.dnssecalgs.dsa import PrivateDSA, PrivateDSANSEC3SHA1 - from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256, PrivateECDSAP384SHA384 - from dns.dnssecalgs.eddsa import PrivateED448, PrivateED25519 - from dns.dnssecalgs.rsa import ( - PrivateRSAMD5, - PrivateRSASHA1, - PrivateRSASHA1NSEC3SHA1, - PrivateRSASHA256, - PrivateRSASHA512, - ) - - _have_cryptography = True -except ImportError: - _have_cryptography = False - -from dns.dnssectypes import Algorithm -from dns.exception import UnsupportedAlgorithm -from dns.rdtypes.ANY.DNSKEY import DNSKEY - -AlgorithmPrefix = Optional[Union[bytes, dns.name.Name]] - -algorithms: Dict[Tuple[Algorithm, AlgorithmPrefix], Type[GenericPrivateKey]] = {} -if _have_cryptography: - algorithms.update( - { - (Algorithm.RSAMD5, None): PrivateRSAMD5, - (Algorithm.DSA, None): PrivateDSA, - (Algorithm.RSASHA1, None): PrivateRSASHA1, - (Algorithm.DSANSEC3SHA1, None): PrivateDSANSEC3SHA1, - (Algorithm.RSASHA1NSEC3SHA1, None): PrivateRSASHA1NSEC3SHA1, - (Algorithm.RSASHA256, None): PrivateRSASHA256, - (Algorithm.RSASHA512, None): 
PrivateRSASHA512,
-            (Algorithm.ECDSAP256SHA256, None): PrivateECDSAP256SHA256,
-            (Algorithm.ECDSAP384SHA384, None): PrivateECDSAP384SHA384,
-            (Algorithm.ED25519, None): PrivateED25519,
-            (Algorithm.ED448, None): PrivateED448,
-        }
-    )
-
-
-def get_algorithm_cls(
-    algorithm: Union[int, str], prefix: AlgorithmPrefix = None
-) -> Type[GenericPrivateKey]:
-    """Get Private Key class from Algorithm.
-
-    *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm.
-
-    Raises ``UnsupportedAlgorithm`` if the algorithm is unknown.
-
-    Returns a ``dns.dnssecalgs.GenericPrivateKey``
-    """
-    algorithm = Algorithm.make(algorithm)
-    cls = algorithms.get((algorithm, prefix))
-    if cls:
-        return cls
-    raise UnsupportedAlgorithm(
-        'algorithm "%s" not supported by dnspython' % Algorithm.to_text(algorithm)
-    )
-
-
-def get_algorithm_cls_from_dnskey(dnskey: DNSKEY) -> Type[GenericPrivateKey]:
-    """Get Private Key class from DNSKEY.
-
-    *dnskey*, a ``DNSKEY`` to get Algorithm class for.
-
-    Raises ``UnsupportedAlgorithm`` if the algorithm is unknown.
-
-    Returns a ``dns.dnssecalgs.GenericPrivateKey``
-    """
-    prefix: AlgorithmPrefix = None
-    if dnskey.algorithm == Algorithm.PRIVATEDNS:
-        prefix, _ = dns.name.from_wire(dnskey.key, 0)
-    elif dnskey.algorithm == Algorithm.PRIVATEOID:
-        length = int(dnskey.key[0])
-        prefix = dnskey.key[0 : length + 1]
-    return get_algorithm_cls(dnskey.algorithm, prefix)
-
-
-def register_algorithm_cls(
-    algorithm: Union[int, str],
-    algorithm_cls: Type[GenericPrivateKey],
-    name: Optional[Union[dns.name.Name, str]] = None,
-    oid: Optional[bytes] = None,
-) -> None:
-    """Register Algorithm Private Key class.
-
-    *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm.
-
-    *algorithm_cls*: A `GenericPrivateKey` class.
-
-    *name*, an optional ``dns.name.Name`` or ``str``, for PRIVATEDNS algorithms.
-
-    *oid*: an optional BER-encoded `bytes` for PRIVATEOID algorithms.
-
-    Raises ``ValueError`` if a name or oid is specified incorrectly.
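A sketch of registering a private-use algorithm; the classes and the ``alg.example.`` suffix are hypothetical, and the Ed25519 implementation is reused purely for illustration:

.. code-block:: python

    import dns.dnssecalgs
    from dns.dnssecalgs.eddsa import PrivateED25519, PublicED25519
    from dns.dnssectypes import Algorithm

    class PublicExampleAlg(PublicED25519):
        algorithm = Algorithm.PRIVATEDNS  # hypothetical private-use algorithm

    class PrivateExampleAlg(PrivateED25519):
        public_cls = PublicExampleAlg

    dns.dnssecalgs.register_algorithm_cls(
        Algorithm.PRIVATEDNS, PrivateExampleAlg, name="alg.example."
    )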
- """ - if not issubclass(algorithm_cls, GenericPrivateKey): - raise TypeError("Invalid algorithm class") - algorithm = Algorithm.make(algorithm) - prefix: AlgorithmPrefix = None - if algorithm == Algorithm.PRIVATEDNS: - if name is None: - raise ValueError("Name required for PRIVATEDNS algorithms") - if isinstance(name, str): - name = dns.name.from_text(name) - prefix = name - elif algorithm == Algorithm.PRIVATEOID: - if oid is None: - raise ValueError("OID required for PRIVATEOID algorithms") - prefix = bytes([len(oid)]) + oid - elif name: - raise ValueError("Name only supported for PRIVATEDNS algorithm") - elif oid: - raise ValueError("OID only supported for PRIVATEOID algorithm") - algorithms[(algorithm, prefix)] = algorithm_cls diff --git a/server/venv/Lib/site-packages/dns/dnssecalgs/base.py b/server/venv/Lib/site-packages/dns/dnssecalgs/base.py deleted file mode 100644 index e990575..0000000 --- a/server/venv/Lib/site-packages/dns/dnssecalgs/base.py +++ /dev/null @@ -1,84 +0,0 @@ -from abc import ABC, abstractmethod # pylint: disable=no-name-in-module -from typing import Any, Optional, Type - -import dns.rdataclass -import dns.rdatatype -from dns.dnssectypes import Algorithm -from dns.exception import AlgorithmKeyMismatch -from dns.rdtypes.ANY.DNSKEY import DNSKEY -from dns.rdtypes.dnskeybase import Flag - - -class GenericPublicKey(ABC): - algorithm: Algorithm - - @abstractmethod - def __init__(self, key: Any) -> None: - pass - - @abstractmethod - def verify(self, signature: bytes, data: bytes) -> None: - """Verify signed DNSSEC data""" - - @abstractmethod - def encode_key_bytes(self) -> bytes: - """Encode key as bytes for DNSKEY""" - - @classmethod - def _ensure_algorithm_key_combination(cls, key: DNSKEY) -> None: - if key.algorithm != cls.algorithm: - raise AlgorithmKeyMismatch - - def to_dnskey(self, flags: int = Flag.ZONE, protocol: int = 3) -> DNSKEY: - """Return public key as DNSKEY""" - return DNSKEY( - rdclass=dns.rdataclass.IN, - rdtype=dns.rdatatype.DNSKEY, - flags=flags, - protocol=protocol, - algorithm=self.algorithm, - key=self.encode_key_bytes(), - ) - - @classmethod - @abstractmethod - def from_dnskey(cls, key: DNSKEY) -> "GenericPublicKey": - """Create public key from DNSKEY""" - - @classmethod - @abstractmethod - def from_pem(cls, public_pem: bytes) -> "GenericPublicKey": - """Create public key from PEM-encoded SubjectPublicKeyInfo as specified - in RFC 5280""" - - @abstractmethod - def to_pem(self) -> bytes: - """Return public-key as PEM-encoded SubjectPublicKeyInfo as specified - in RFC 5280""" - - -class GenericPrivateKey(ABC): - public_cls: Type[GenericPublicKey] - - @abstractmethod - def __init__(self, key: Any) -> None: - pass - - @abstractmethod - def sign(self, data: bytes, verify: bool = False) -> bytes: - """Sign DNSSEC data""" - - @abstractmethod - def public_key(self) -> "GenericPublicKey": - """Return public key instance""" - - @classmethod - @abstractmethod - def from_pem( - cls, private_pem: bytes, password: Optional[bytes] = None - ) -> "GenericPrivateKey": - """Create private key from PEM-encoded PKCS#8""" - - @abstractmethod - def to_pem(self, password: Optional[bytes] = None) -> bytes: - """Return private key as PEM-encoded PKCS#8""" diff --git a/server/venv/Lib/site-packages/dns/dnssecalgs/cryptography.py b/server/venv/Lib/site-packages/dns/dnssecalgs/cryptography.py deleted file mode 100644 index 5a31a81..0000000 --- a/server/venv/Lib/site-packages/dns/dnssecalgs/cryptography.py +++ /dev/null @@ -1,68 +0,0 @@ -from typing import Any, Optional, 
Type - -from cryptography.hazmat.primitives import serialization - -from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey -from dns.exception import AlgorithmKeyMismatch - - -class CryptographyPublicKey(GenericPublicKey): - key: Any = None - key_cls: Any = None - - def __init__(self, key: Any) -> None: # pylint: disable=super-init-not-called - if self.key_cls is None: - raise TypeError("Undefined private key class") - if not isinstance( # pylint: disable=isinstance-second-argument-not-valid-type - key, self.key_cls - ): - raise AlgorithmKeyMismatch - self.key = key - - @classmethod - def from_pem(cls, public_pem: bytes) -> "GenericPublicKey": - key = serialization.load_pem_public_key(public_pem) - return cls(key=key) - - def to_pem(self) -> bytes: - return self.key.public_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo, - ) - - -class CryptographyPrivateKey(GenericPrivateKey): - key: Any = None - key_cls: Any = None - public_cls: Type[CryptographyPublicKey] - - def __init__(self, key: Any) -> None: # pylint: disable=super-init-not-called - if self.key_cls is None: - raise TypeError("Undefined private key class") - if not isinstance( # pylint: disable=isinstance-second-argument-not-valid-type - key, self.key_cls - ): - raise AlgorithmKeyMismatch - self.key = key - - def public_key(self) -> "CryptographyPublicKey": - return self.public_cls(key=self.key.public_key()) - - @classmethod - def from_pem( - cls, private_pem: bytes, password: Optional[bytes] = None - ) -> "GenericPrivateKey": - key = serialization.load_pem_private_key(private_pem, password=password) - return cls(key=key) - - def to_pem(self, password: Optional[bytes] = None) -> bytes: - encryption_algorithm: serialization.KeySerializationEncryption - if password: - encryption_algorithm = serialization.BestAvailableEncryption(password) - else: - encryption_algorithm = serialization.NoEncryption() - return self.key.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=encryption_algorithm, - ) diff --git a/server/venv/Lib/site-packages/dns/dnssecalgs/dsa.py b/server/venv/Lib/site-packages/dns/dnssecalgs/dsa.py deleted file mode 100644 index 0fe4690..0000000 --- a/server/venv/Lib/site-packages/dns/dnssecalgs/dsa.py +++ /dev/null @@ -1,101 +0,0 @@ -import struct - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import dsa, utils - -from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey -from dns.dnssectypes import Algorithm -from dns.rdtypes.ANY.DNSKEY import DNSKEY - - -class PublicDSA(CryptographyPublicKey): - key: dsa.DSAPublicKey - key_cls = dsa.DSAPublicKey - algorithm = Algorithm.DSA - chosen_hash = hashes.SHA1() - - def verify(self, signature: bytes, data: bytes) -> None: - sig_r = signature[1:21] - sig_s = signature[21:] - sig = utils.encode_dss_signature( - int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big") - ) - self.key.verify(sig, data, self.chosen_hash) - - def encode_key_bytes(self) -> bytes: - """Encode a public key per RFC 2536, section 2.""" - pn = self.key.public_numbers() - dsa_t = (self.key.key_size // 8 - 64) // 8 - if dsa_t > 8: - raise ValueError("unsupported DSA key size") - octets = 64 + dsa_t * 8 - res = struct.pack("!B", dsa_t) - res += pn.parameter_numbers.q.to_bytes(20, "big") - res += pn.parameter_numbers.p.to_bytes(octets, 
"big") - res += pn.parameter_numbers.g.to_bytes(octets, "big") - res += pn.y.to_bytes(octets, "big") - return res - - @classmethod - def from_dnskey(cls, key: DNSKEY) -> "PublicDSA": - cls._ensure_algorithm_key_combination(key) - keyptr = key.key - (t,) = struct.unpack("!B", keyptr[0:1]) - keyptr = keyptr[1:] - octets = 64 + t * 8 - dsa_q = keyptr[0:20] - keyptr = keyptr[20:] - dsa_p = keyptr[0:octets] - keyptr = keyptr[octets:] - dsa_g = keyptr[0:octets] - keyptr = keyptr[octets:] - dsa_y = keyptr[0:octets] - return cls( - key=dsa.DSAPublicNumbers( # type: ignore - int.from_bytes(dsa_y, "big"), - dsa.DSAParameterNumbers( - int.from_bytes(dsa_p, "big"), - int.from_bytes(dsa_q, "big"), - int.from_bytes(dsa_g, "big"), - ), - ).public_key(default_backend()), - ) - - -class PrivateDSA(CryptographyPrivateKey): - key: dsa.DSAPrivateKey - key_cls = dsa.DSAPrivateKey - public_cls = PublicDSA - - def sign(self, data: bytes, verify: bool = False) -> bytes: - """Sign using a private key per RFC 2536, section 3.""" - public_dsa_key = self.key.public_key() - if public_dsa_key.key_size > 1024: - raise ValueError("DSA key size overflow") - der_signature = self.key.sign(data, self.public_cls.chosen_hash) - dsa_r, dsa_s = utils.decode_dss_signature(der_signature) - dsa_t = (public_dsa_key.key_size // 8 - 64) // 8 - octets = 20 - signature = ( - struct.pack("!B", dsa_t) - + int.to_bytes(dsa_r, length=octets, byteorder="big") - + int.to_bytes(dsa_s, length=octets, byteorder="big") - ) - if verify: - self.public_key().verify(signature, data) - return signature - - @classmethod - def generate(cls, key_size: int) -> "PrivateDSA": - return cls( - key=dsa.generate_private_key(key_size=key_size), - ) - - -class PublicDSANSEC3SHA1(PublicDSA): - algorithm = Algorithm.DSANSEC3SHA1 - - -class PrivateDSANSEC3SHA1(PrivateDSA): - public_cls = PublicDSANSEC3SHA1 diff --git a/server/venv/Lib/site-packages/dns/dnssecalgs/ecdsa.py b/server/venv/Lib/site-packages/dns/dnssecalgs/ecdsa.py deleted file mode 100644 index a31d79f..0000000 --- a/server/venv/Lib/site-packages/dns/dnssecalgs/ecdsa.py +++ /dev/null @@ -1,89 +0,0 @@ -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import ec, utils - -from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey -from dns.dnssectypes import Algorithm -from dns.rdtypes.ANY.DNSKEY import DNSKEY - - -class PublicECDSA(CryptographyPublicKey): - key: ec.EllipticCurvePublicKey - key_cls = ec.EllipticCurvePublicKey - algorithm: Algorithm - chosen_hash: hashes.HashAlgorithm - curve: ec.EllipticCurve - octets: int - - def verify(self, signature: bytes, data: bytes) -> None: - sig_r = signature[0 : self.octets] - sig_s = signature[self.octets :] - sig = utils.encode_dss_signature( - int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big") - ) - self.key.verify(sig, data, ec.ECDSA(self.chosen_hash)) - - def encode_key_bytes(self) -> bytes: - """Encode a public key per RFC 6605, section 4.""" - pn = self.key.public_numbers() - return pn.x.to_bytes(self.octets, "big") + pn.y.to_bytes(self.octets, "big") - - @classmethod - def from_dnskey(cls, key: DNSKEY) -> "PublicECDSA": - cls._ensure_algorithm_key_combination(key) - ecdsa_x = key.key[0 : cls.octets] - ecdsa_y = key.key[cls.octets : cls.octets * 2] - return cls( - key=ec.EllipticCurvePublicNumbers( - curve=cls.curve, - x=int.from_bytes(ecdsa_x, "big"), - y=int.from_bytes(ecdsa_y, "big"), - 
).public_key(default_backend()), - ) - - -class PrivateECDSA(CryptographyPrivateKey): - key: ec.EllipticCurvePrivateKey - key_cls = ec.EllipticCurvePrivateKey - public_cls = PublicECDSA - - def sign(self, data: bytes, verify: bool = False) -> bytes: - """Sign using a private key per RFC 6605, section 4.""" - der_signature = self.key.sign(data, ec.ECDSA(self.public_cls.chosen_hash)) - dsa_r, dsa_s = utils.decode_dss_signature(der_signature) - signature = int.to_bytes( - dsa_r, length=self.public_cls.octets, byteorder="big" - ) + int.to_bytes(dsa_s, length=self.public_cls.octets, byteorder="big") - if verify: - self.public_key().verify(signature, data) - return signature - - @classmethod - def generate(cls) -> "PrivateECDSA": - return cls( - key=ec.generate_private_key( - curve=cls.public_cls.curve, backend=default_backend() - ), - ) - - -class PublicECDSAP256SHA256(PublicECDSA): - algorithm = Algorithm.ECDSAP256SHA256 - chosen_hash = hashes.SHA256() - curve = ec.SECP256R1() - octets = 32 - - -class PrivateECDSAP256SHA256(PrivateECDSA): - public_cls = PublicECDSAP256SHA256 - - -class PublicECDSAP384SHA384(PublicECDSA): - algorithm = Algorithm.ECDSAP384SHA384 - chosen_hash = hashes.SHA384() - curve = ec.SECP384R1() - octets = 48 - - -class PrivateECDSAP384SHA384(PrivateECDSA): - public_cls = PublicECDSAP384SHA384 diff --git a/server/venv/Lib/site-packages/dns/dnssecalgs/eddsa.py b/server/venv/Lib/site-packages/dns/dnssecalgs/eddsa.py deleted file mode 100644 index 7050534..0000000 --- a/server/venv/Lib/site-packages/dns/dnssecalgs/eddsa.py +++ /dev/null @@ -1,65 +0,0 @@ -from typing import Type - -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.asymmetric import ed448, ed25519 - -from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey -from dns.dnssectypes import Algorithm -from dns.rdtypes.ANY.DNSKEY import DNSKEY - - -class PublicEDDSA(CryptographyPublicKey): - def verify(self, signature: bytes, data: bytes) -> None: - self.key.verify(signature, data) - - def encode_key_bytes(self) -> bytes: - """Encode a public key per RFC 8080, section 3.""" - return self.key.public_bytes( - encoding=serialization.Encoding.Raw, format=serialization.PublicFormat.Raw - ) - - @classmethod - def from_dnskey(cls, key: DNSKEY) -> "PublicEDDSA": - cls._ensure_algorithm_key_combination(key) - return cls( - key=cls.key_cls.from_public_bytes(key.key), - ) - - -class PrivateEDDSA(CryptographyPrivateKey): - public_cls: Type[PublicEDDSA] - - def sign(self, data: bytes, verify: bool = False) -> bytes: - """Sign using a private key per RFC 8080, section 4.""" - signature = self.key.sign(data) - if verify: - self.public_key().verify(signature, data) - return signature - - @classmethod - def generate(cls) -> "PrivateEDDSA": - return cls(key=cls.key_cls.generate()) - - -class PublicED25519(PublicEDDSA): - key: ed25519.Ed25519PublicKey - key_cls = ed25519.Ed25519PublicKey - algorithm = Algorithm.ED25519 - - -class PrivateED25519(PrivateEDDSA): - key: ed25519.Ed25519PrivateKey - key_cls = ed25519.Ed25519PrivateKey - public_cls = PublicED25519 - - -class PublicED448(PublicEDDSA): - key: ed448.Ed448PublicKey - key_cls = ed448.Ed448PublicKey - algorithm = Algorithm.ED448 - - -class PrivateED448(PrivateEDDSA): - key: ed448.Ed448PrivateKey - key_cls = ed448.Ed448PrivateKey - public_cls = PublicED448 diff --git a/server/venv/Lib/site-packages/dns/dnssecalgs/rsa.py b/server/venv/Lib/site-packages/dns/dnssecalgs/rsa.py deleted file mode 100644 index 
e95dcf1..0000000 --- a/server/venv/Lib/site-packages/dns/dnssecalgs/rsa.py +++ /dev/null @@ -1,119 +0,0 @@ -import math -import struct - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import padding, rsa - -from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey -from dns.dnssectypes import Algorithm -from dns.rdtypes.ANY.DNSKEY import DNSKEY - - -class PublicRSA(CryptographyPublicKey): - key: rsa.RSAPublicKey - key_cls = rsa.RSAPublicKey - algorithm: Algorithm - chosen_hash: hashes.HashAlgorithm - - def verify(self, signature: bytes, data: bytes) -> None: - self.key.verify(signature, data, padding.PKCS1v15(), self.chosen_hash) - - def encode_key_bytes(self) -> bytes: - """Encode a public key per RFC 3110, section 2.""" - pn = self.key.public_numbers() - _exp_len = math.ceil(int.bit_length(pn.e) / 8) - exp = int.to_bytes(pn.e, length=_exp_len, byteorder="big") - if _exp_len > 255: - exp_header = b"\0" + struct.pack("!H", _exp_len) - else: - exp_header = struct.pack("!B", _exp_len) - if pn.n.bit_length() < 512 or pn.n.bit_length() > 4096: - raise ValueError("unsupported RSA key length") - return exp_header + exp + pn.n.to_bytes((pn.n.bit_length() + 7) // 8, "big") - - @classmethod - def from_dnskey(cls, key: DNSKEY) -> "PublicRSA": - cls._ensure_algorithm_key_combination(key) - keyptr = key.key - (bytes_,) = struct.unpack("!B", keyptr[0:1]) - keyptr = keyptr[1:] - if bytes_ == 0: - (bytes_,) = struct.unpack("!H", keyptr[0:2]) - keyptr = keyptr[2:] - rsa_e = keyptr[0:bytes_] - rsa_n = keyptr[bytes_:] - return cls( - key=rsa.RSAPublicNumbers( - int.from_bytes(rsa_e, "big"), int.from_bytes(rsa_n, "big") - ).public_key(default_backend()) - ) - - -class PrivateRSA(CryptographyPrivateKey): - key: rsa.RSAPrivateKey - key_cls = rsa.RSAPrivateKey - public_cls = PublicRSA - default_public_exponent = 65537 - - def sign(self, data: bytes, verify: bool = False) -> bytes: - """Sign using a private key per RFC 3110, section 3.""" - signature = self.key.sign(data, padding.PKCS1v15(), self.public_cls.chosen_hash) - if verify: - self.public_key().verify(signature, data) - return signature - - @classmethod - def generate(cls, key_size: int) -> "PrivateRSA": - return cls( - key=rsa.generate_private_key( - public_exponent=cls.default_public_exponent, - key_size=key_size, - backend=default_backend(), - ) - ) - - -class PublicRSAMD5(PublicRSA): - algorithm = Algorithm.RSAMD5 - chosen_hash = hashes.MD5() - - -class PrivateRSAMD5(PrivateRSA): - public_cls = PublicRSAMD5 - - -class PublicRSASHA1(PublicRSA): - algorithm = Algorithm.RSASHA1 - chosen_hash = hashes.SHA1() - - -class PrivateRSASHA1(PrivateRSA): - public_cls = PublicRSASHA1 - - -class PublicRSASHA1NSEC3SHA1(PublicRSA): - algorithm = Algorithm.RSASHA1NSEC3SHA1 - chosen_hash = hashes.SHA1() - - -class PrivateRSASHA1NSEC3SHA1(PrivateRSA): - public_cls = PublicRSASHA1NSEC3SHA1 - - -class PublicRSASHA256(PublicRSA): - algorithm = Algorithm.RSASHA256 - chosen_hash = hashes.SHA256() - - -class PrivateRSASHA256(PrivateRSA): - public_cls = PublicRSASHA256 - - -class PublicRSASHA512(PublicRSA): - algorithm = Algorithm.RSASHA512 - chosen_hash = hashes.SHA512() - - -class PrivateRSASHA512(PrivateRSA): - public_cls = PublicRSASHA512 diff --git a/server/venv/Lib/site-packages/dns/dnssectypes.py b/server/venv/Lib/site-packages/dns/dnssectypes.py deleted file mode 100644 index 02131e0..0000000 --- 
a/server/venv/Lib/site-packages/dns/dnssectypes.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Common DNSSEC-related types.""" - -# This is a separate file to avoid import circularity between dns.dnssec and -# the implementations of the DS and DNSKEY types. - -import dns.enum - - -class Algorithm(dns.enum.IntEnum): - RSAMD5 = 1 - DH = 2 - DSA = 3 - ECC = 4 - RSASHA1 = 5 - DSANSEC3SHA1 = 6 - RSASHA1NSEC3SHA1 = 7 - RSASHA256 = 8 - RSASHA512 = 10 - ECCGOST = 12 - ECDSAP256SHA256 = 13 - ECDSAP384SHA384 = 14 - ED25519 = 15 - ED448 = 16 - INDIRECT = 252 - PRIVATEDNS = 253 - PRIVATEOID = 254 - - @classmethod - def _maximum(cls): - return 255 - - -class DSDigest(dns.enum.IntEnum): - """DNSSEC Delegation Signer Digest Algorithm""" - - NULL = 0 - SHA1 = 1 - SHA256 = 2 - GOST = 3 - SHA384 = 4 - - @classmethod - def _maximum(cls): - return 255 - - -class NSEC3Hash(dns.enum.IntEnum): - """NSEC3 hash algorithm""" - - SHA1 = 1 - - @classmethod - def _maximum(cls): - return 255 diff --git a/server/venv/Lib/site-packages/dns/e164.py b/server/venv/Lib/site-packages/dns/e164.py deleted file mode 100644 index 453736d..0000000 --- a/server/venv/Lib/site-packages/dns/e164.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS E.164 helpers.""" - -from typing import Iterable, Optional, Union - -import dns.exception -import dns.name -import dns.resolver - -#: The public E.164 domain. -public_enum_domain = dns.name.from_text("e164.arpa.") - - -def from_e164( - text: str, origin: Optional[dns.name.Name] = public_enum_domain -) -> dns.name.Name: - """Convert an E.164 number in textual form into a Name object whose - value is the ENUM domain name for that number. - - Non-digits in the text are ignored, i.e. "16505551212", - "+1.650.555.1212" and "1 (650) 555-1212" are all the same. 
- - *text*, a ``str``, is an E.164 number in textual form. - - *origin*, a ``dns.name.Name``, the domain in which the number - should be constructed. The default is ``e164.arpa.``. - - Returns a ``dns.name.Name``. - """ - - parts = [d for d in text if d.isdigit()] - parts.reverse() - return dns.name.from_text(".".join(parts), origin=origin) - - -def to_e164( - name: dns.name.Name, - origin: Optional[dns.name.Name] = public_enum_domain, - want_plus_prefix: bool = True, -) -> str: - """Convert an ENUM domain name into an E.164 number. - - Note that dnspython does not have any information about preferred - number formats within national numbering plans, so all numbers are - emitted as a simple string of digits, prefixed by a '+' (unless - *want_plus_prefix* is ``False``). - - *name* is a ``dns.name.Name``, the ENUM domain name. - - *origin* is a ``dns.name.Name``, a domain containing the ENUM - domain name. The name is relativized to this domain before being - converted to text. If ``None``, no relativization is done. - - *want_plus_prefix* is a ``bool``. If True, add a '+' to the beginning of - the returned number. - - Returns a ``str``. - - """ - if origin is not None: - name = name.relativize(origin) - dlabels = [d for d in name.labels if d.isdigit() and len(d) == 1] - if len(dlabels) != len(name.labels): - raise dns.exception.SyntaxError("non-digit labels in ENUM domain name") - dlabels.reverse() - text = b"".join(dlabels) - if want_plus_prefix: - text = b"+" + text - return text.decode() - - -def query( - number: str, - domains: Iterable[Union[dns.name.Name, str]], - resolver: Optional[dns.resolver.Resolver] = None, -) -> dns.resolver.Answer: - """Look for NAPTR RRs for the specified number in the specified domains. - - e.g. query('16505551212', ['e164.dnspython.org.', 'e164.arpa.']) - - *number*, a ``str``, is the number to look for. - - *domains* is an iterable containing ``dns.name.Name`` values. - - *resolver*, a ``dns.resolver.Resolver``, is the resolver to use. If - ``None``, the default resolver is used. - """ - - if resolver is None: - resolver = dns.resolver.get_default_resolver() - e_nx = dns.resolver.NXDOMAIN() - for domain in domains: - if isinstance(domain, str): - domain = dns.name.from_text(domain) - qname = dns.e164.from_e164(number, domain) - try: - return resolver.resolve(qname, "NAPTR") - except dns.resolver.NXDOMAIN as e: - e_nx += e - raise e_nx diff --git a/server/venv/Lib/site-packages/dns/edns.py b/server/venv/Lib/site-packages/dns/edns.py deleted file mode 100644 index f05baac..0000000 --- a/server/venv/Lib/site-packages/dns/edns.py +++ /dev/null @@ -1,477 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2009-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
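[Editor's note] A minimal round-trip sketch for the ``dns.e164`` helpers above (the phone number is a placeholder):

    import dns.e164

    name = dns.e164.from_e164("+1 650 555 1212")
    # name is 2.1.2.1.5.5.5.0.5.6.1.e164.arpa.
    assert dns.e164.to_e164(name) == "+16505551212"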
- -"""EDNS Options""" - -import math -import socket -import struct -from typing import Any, Dict, Optional, Union - -import dns.enum -import dns.inet -import dns.rdata -import dns.wire - - -class OptionType(dns.enum.IntEnum): - #: NSID - NSID = 3 - #: DAU - DAU = 5 - #: DHU - DHU = 6 - #: N3U - N3U = 7 - #: ECS (client-subnet) - ECS = 8 - #: EXPIRE - EXPIRE = 9 - #: COOKIE - COOKIE = 10 - #: KEEPALIVE - KEEPALIVE = 11 - #: PADDING - PADDING = 12 - #: CHAIN - CHAIN = 13 - #: EDE (extended-dns-error) - EDE = 15 - - @classmethod - def _maximum(cls): - return 65535 - - -class Option: - - """Base class for all EDNS option types.""" - - def __init__(self, otype: Union[OptionType, str]): - """Initialize an option. - - *otype*, a ``dns.edns.OptionType``, is the option type. - """ - self.otype = OptionType.make(otype) - - def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: - """Convert an option to wire format. - - Returns a ``bytes`` or ``None``. - - """ - raise NotImplementedError # pragma: no cover - - @classmethod - def from_wire_parser(cls, otype: OptionType, parser: "dns.wire.Parser") -> "Option": - """Build an EDNS option object from wire format. - - *otype*, a ``dns.edns.OptionType``, is the option type. - - *parser*, a ``dns.wire.Parser``, the parser, which should be - restructed to the option length. - - Returns a ``dns.edns.Option``. - """ - raise NotImplementedError # pragma: no cover - - def _cmp(self, other): - """Compare an EDNS option with another option of the same type. - - Returns < 0 if < *other*, 0 if == *other*, and > 0 if > *other*. - """ - wire = self.to_wire() - owire = other.to_wire() - if wire == owire: - return 0 - if wire > owire: - return 1 - return -1 - - def __eq__(self, other): - if not isinstance(other, Option): - return False - if self.otype != other.otype: - return False - return self._cmp(other) == 0 - - def __ne__(self, other): - if not isinstance(other, Option): - return True - if self.otype != other.otype: - return True - return self._cmp(other) != 0 - - def __lt__(self, other): - if not isinstance(other, Option) or self.otype != other.otype: - return NotImplemented - return self._cmp(other) < 0 - - def __le__(self, other): - if not isinstance(other, Option) or self.otype != other.otype: - return NotImplemented - return self._cmp(other) <= 0 - - def __ge__(self, other): - if not isinstance(other, Option) or self.otype != other.otype: - return NotImplemented - return self._cmp(other) >= 0 - - def __gt__(self, other): - if not isinstance(other, Option) or self.otype != other.otype: - return NotImplemented - return self._cmp(other) > 0 - - def __str__(self): - return self.to_text() - - -class GenericOption(Option): # lgtm[py/missing-equals] - - """Generic Option Class - - This class is used for EDNS option types for which we have no better - implementation. 
- """ - - def __init__(self, otype: Union[OptionType, str], data: Union[bytes, str]): - super().__init__(otype) - self.data = dns.rdata.Rdata._as_bytes(data, True) - - def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: - if file: - file.write(self.data) - return None - else: - return self.data - - def to_text(self) -> str: - return "Generic %d" % self.otype - - @classmethod - def from_wire_parser( - cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" - ) -> Option: - return cls(otype, parser.get_remaining()) - - -class ECSOption(Option): # lgtm[py/missing-equals] - """EDNS Client Subnet (ECS, RFC7871)""" - - def __init__(self, address: str, srclen: Optional[int] = None, scopelen: int = 0): - """*address*, a ``str``, is the client address information. - - *srclen*, an ``int``, the source prefix length, which is the - leftmost number of bits of the address to be used for the - lookup. The default is 24 for IPv4 and 56 for IPv6. - - *scopelen*, an ``int``, the scope prefix length. This value - must be 0 in queries, and should be set in responses. - """ - - super().__init__(OptionType.ECS) - af = dns.inet.af_for_address(address) - - if af == socket.AF_INET6: - self.family = 2 - if srclen is None: - srclen = 56 - address = dns.rdata.Rdata._as_ipv6_address(address) - srclen = dns.rdata.Rdata._as_int(srclen, 0, 128) - scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 128) - elif af == socket.AF_INET: - self.family = 1 - if srclen is None: - srclen = 24 - address = dns.rdata.Rdata._as_ipv4_address(address) - srclen = dns.rdata.Rdata._as_int(srclen, 0, 32) - scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 32) - else: # pragma: no cover (this will never happen) - raise ValueError("Bad address family") - - assert srclen is not None - self.address = address - self.srclen = srclen - self.scopelen = scopelen - - addrdata = dns.inet.inet_pton(af, address) - nbytes = int(math.ceil(srclen / 8.0)) - - # Truncate to srclen and pad to the end of the last octet needed - # See RFC section 6 - self.addrdata = addrdata[:nbytes] - nbits = srclen % 8 - if nbits != 0: - last = struct.pack("B", ord(self.addrdata[-1:]) & (0xFF << (8 - nbits))) - self.addrdata = self.addrdata[:-1] + last - - def to_text(self) -> str: - return "ECS {}/{} scope/{}".format(self.address, self.srclen, self.scopelen) - - @staticmethod - def from_text(text: str) -> Option: - """Convert a string into a `dns.edns.ECSOption` - - *text*, a `str`, the text form of the option. - - Returns a `dns.edns.ECSOption`. 
- - Examples: - - >>> import dns.edns - >>> - >>> # basic example - >>> dns.edns.ECSOption.from_text('1.2.3.4/24') - >>> - >>> # also understands scope - >>> dns.edns.ECSOption.from_text('1.2.3.4/24/32') - >>> - >>> # IPv6 - >>> dns.edns.ECSOption.from_text('2001:4b98::1/64/64') - >>> - >>> # it understands results from `dns.edns.ECSOption.to_text()` - >>> dns.edns.ECSOption.from_text('ECS 1.2.3.4/24/32') - """ - optional_prefix = "ECS" - tokens = text.split() - ecs_text = None - if len(tokens) == 1: - ecs_text = tokens[0] - elif len(tokens) == 2: - if tokens[0] != optional_prefix: - raise ValueError('could not parse ECS from "{}"'.format(text)) - ecs_text = tokens[1] - else: - raise ValueError('could not parse ECS from "{}"'.format(text)) - n_slashes = ecs_text.count("/") - if n_slashes == 1: - address, tsrclen = ecs_text.split("/") - tscope = "0" - elif n_slashes == 2: - address, tsrclen, tscope = ecs_text.split("/") - else: - raise ValueError('could not parse ECS from "{}"'.format(text)) - try: - scope = int(tscope) - except ValueError: - raise ValueError( - "invalid scope " + '"{}": scope must be an integer'.format(tscope) - ) - try: - srclen = int(tsrclen) - except ValueError: - raise ValueError( - "invalid srclen " + '"{}": srclen must be an integer'.format(tsrclen) - ) - return ECSOption(address, srclen, scope) - - def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: - value = ( - struct.pack("!HBB", self.family, self.srclen, self.scopelen) + self.addrdata - ) - if file: - file.write(value) - return None - else: - return value - - @classmethod - def from_wire_parser( - cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" - ) -> Option: - family, src, scope = parser.get_struct("!HBB") - addrlen = int(math.ceil(src / 8.0)) - prefix = parser.get_bytes(addrlen) - if family == 1: - pad = 4 - addrlen - addr = dns.ipv4.inet_ntoa(prefix + b"\x00" * pad) - elif family == 2: - pad = 16 - addrlen - addr = dns.ipv6.inet_ntoa(prefix + b"\x00" * pad) - else: - raise ValueError("unsupported family") - - return cls(addr, src, scope) - - -class EDECode(dns.enum.IntEnum): - OTHER = 0 - UNSUPPORTED_DNSKEY_ALGORITHM = 1 - UNSUPPORTED_DS_DIGEST_TYPE = 2 - STALE_ANSWER = 3 - FORGED_ANSWER = 4 - DNSSEC_INDETERMINATE = 5 - DNSSEC_BOGUS = 6 - SIGNATURE_EXPIRED = 7 - SIGNATURE_NOT_YET_VALID = 8 - DNSKEY_MISSING = 9 - RRSIGS_MISSING = 10 - NO_ZONE_KEY_BIT_SET = 11 - NSEC_MISSING = 12 - CACHED_ERROR = 13 - NOT_READY = 14 - BLOCKED = 15 - CENSORED = 16 - FILTERED = 17 - PROHIBITED = 18 - STALE_NXDOMAIN_ANSWER = 19 - NOT_AUTHORITATIVE = 20 - NOT_SUPPORTED = 21 - NO_REACHABLE_AUTHORITY = 22 - NETWORK_ERROR = 23 - INVALID_DATA = 24 - - @classmethod - def _maximum(cls): - return 65535 - - -class EDEOption(Option): # lgtm[py/missing-equals] - """Extended DNS Error (EDE, RFC8914)""" - - def __init__(self, code: Union[EDECode, str], text: Optional[str] = None): - """*code*, a ``dns.edns.EDECode`` or ``str``, the info code of the - extended error. - - *text*, a ``str`` or ``None``, specifying additional information about - the error. 
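    Example (editor's illustrative sketch; the message text is a
    placeholder):

        opt = EDEOption(EDECode.STALE_ANSWER, "serving stale data")
        opt.to_text()   # -> 'EDE 3: serving stale data'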
- """ - - super().__init__(OptionType.EDE) - - self.code = EDECode.make(code) - if text is not None and not isinstance(text, str): - raise ValueError("text must be string or None") - self.text = text - - def to_text(self) -> str: - output = f"EDE {self.code}" - if self.text is not None: - output += f": {self.text}" - return output - - def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: - value = struct.pack("!H", self.code) - if self.text is not None: - value += self.text.encode("utf8") - - if file: - file.write(value) - return None - else: - return value - - @classmethod - def from_wire_parser( - cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" - ) -> Option: - code = EDECode.make(parser.get_uint16()) - text = parser.get_remaining() - - if text: - if text[-1] == 0: # text MAY be null-terminated - text = text[:-1] - btext = text.decode("utf8") - else: - btext = None - - return cls(code, btext) - - -_type_to_class: Dict[OptionType, Any] = { - OptionType.ECS: ECSOption, - OptionType.EDE: EDEOption, -} - - -def get_option_class(otype: OptionType) -> Any: - """Return the class for the specified option type. - - The GenericOption class is used if a more specific class is not - known. - """ - - cls = _type_to_class.get(otype) - if cls is None: - cls = GenericOption - return cls - - -def option_from_wire_parser( - otype: Union[OptionType, str], parser: "dns.wire.Parser" -) -> Option: - """Build an EDNS option object from wire format. - - *otype*, an ``int``, is the option type. - - *parser*, a ``dns.wire.Parser``, the parser, which should be - restricted to the option length. - - Returns an instance of a subclass of ``dns.edns.Option``. - """ - otype = OptionType.make(otype) - cls = get_option_class(otype) - return cls.from_wire_parser(otype, parser) - - -def option_from_wire( - otype: Union[OptionType, str], wire: bytes, current: int, olen: int -) -> Option: - """Build an EDNS option object from wire format. - - *otype*, an ``int``, is the option type. - - *wire*, a ``bytes``, is the wire-format message. - - *current*, an ``int``, is the offset in *wire* of the beginning - of the rdata. - - *olen*, an ``int``, is the length of the wire-format option data - - Returns an instance of a subclass of ``dns.edns.Option``. - """ - parser = dns.wire.Parser(wire, current) - with parser.restrict_to(olen): - return option_from_wire_parser(otype, parser) - - -def register_type(implementation: Any, otype: OptionType) -> None: - """Register the implementation of an option type. - - *implementation*, a ``class``, is a subclass of ``dns.edns.Option``. - - *otype*, an ``int``, is the option type. - """ - - _type_to_class[otype] = implementation - - -### BEGIN generated OptionType constants - -NSID = OptionType.NSID -DAU = OptionType.DAU -DHU = OptionType.DHU -N3U = OptionType.N3U -ECS = OptionType.ECS -EXPIRE = OptionType.EXPIRE -COOKIE = OptionType.COOKIE -KEEPALIVE = OptionType.KEEPALIVE -PADDING = OptionType.PADDING -CHAIN = OptionType.CHAIN -EDE = OptionType.EDE - -### END generated OptionType constants diff --git a/server/venv/Lib/site-packages/dns/entropy.py b/server/venv/Lib/site-packages/dns/entropy.py deleted file mode 100644 index 4dcdc62..0000000 --- a/server/venv/Lib/site-packages/dns/entropy.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2009-2017 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import hashlib -import os -import random -import threading -import time -from typing import Any, Optional - - -class EntropyPool: - # This is an entropy pool for Python implementations that do not - # have a working SystemRandom. I'm not sure there are any, but - # leaving this code doesn't hurt anything as the library code - # is used if present. - - def __init__(self, seed: Optional[bytes] = None): - self.pool_index = 0 - self.digest: Optional[bytearray] = None - self.next_byte = 0 - self.lock = threading.Lock() - self.hash = hashlib.sha1() - self.hash_len = 20 - self.pool = bytearray(b"\0" * self.hash_len) - if seed is not None: - self._stir(seed) - self.seeded = True - self.seed_pid = os.getpid() - else: - self.seeded = False - self.seed_pid = 0 - - def _stir(self, entropy: bytes) -> None: - for c in entropy: - if self.pool_index == self.hash_len: - self.pool_index = 0 - b = c & 0xFF - self.pool[self.pool_index] ^= b - self.pool_index += 1 - - def stir(self, entropy: bytes) -> None: - with self.lock: - self._stir(entropy) - - def _maybe_seed(self) -> None: - if not self.seeded or self.seed_pid != os.getpid(): - try: - seed = os.urandom(16) - except Exception: # pragma: no cover - try: - with open("/dev/urandom", "rb", 0) as r: - seed = r.read(16) - except Exception: - seed = str(time.time()).encode() - self.seeded = True - self.seed_pid = os.getpid() - self.digest = None - seed = bytearray(seed) - self._stir(seed) - - def random_8(self) -> int: - with self.lock: - self._maybe_seed() - if self.digest is None or self.next_byte == self.hash_len: - self.hash.update(bytes(self.pool)) - self.digest = bytearray(self.hash.digest()) - self._stir(self.digest) - self.next_byte = 0 - value = self.digest[self.next_byte] - self.next_byte += 1 - return value - - def random_16(self) -> int: - return self.random_8() * 256 + self.random_8() - - def random_32(self) -> int: - return self.random_16() * 65536 + self.random_16() - - def random_between(self, first: int, last: int) -> int: - size = last - first + 1 - if size > 4294967296: - raise ValueError("too big") - if size > 65536: - rand = self.random_32 - max = 4294967295 - elif size > 256: - rand = self.random_16 - max = 65535 - else: - rand = self.random_8 - max = 255 - return first + size * rand() // (max + 1) - - -pool = EntropyPool() - -system_random: Optional[Any] -try: - system_random = random.SystemRandom() -except Exception: # pragma: no cover - system_random = None - - -def random_16() -> int: - if system_random is not None: - return system_random.randrange(0, 65536) - else: - return pool.random_16() - - -def between(first: int, last: int) -> int: - if system_random is not None: - return system_random.randrange(first, last + 1) - else: - return pool.random_between(first, last) diff --git 
a/server/venv/Lib/site-packages/dns/enum.py b/server/venv/Lib/site-packages/dns/enum.py deleted file mode 100644 index 71461f1..0000000 --- a/server/venv/Lib/site-packages/dns/enum.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import enum -from typing import Type, TypeVar, Union - -TIntEnum = TypeVar("TIntEnum", bound="IntEnum") - - -class IntEnum(enum.IntEnum): - @classmethod - def _missing_(cls, value): - cls._check_value(value) - val = int.__new__(cls, value) - val._name_ = cls._extra_to_text(value, None) or f"{cls._prefix()}{value}" - val._value_ = value - return val - - @classmethod - def _check_value(cls, value): - max = cls._maximum() - if not isinstance(value, int): - raise TypeError - if value < 0 or value > max: - name = cls._short_name() - raise ValueError(f"{name} must be an int between >= 0 and <= {max}") - - @classmethod - def from_text(cls: Type[TIntEnum], text: str) -> TIntEnum: - text = text.upper() - try: - return cls[text] - except KeyError: - pass - value = cls._extra_from_text(text) - if value: - return value - prefix = cls._prefix() - if text.startswith(prefix) and text[len(prefix) :].isdigit(): - value = int(text[len(prefix) :]) - cls._check_value(value) - try: - return cls(value) - except ValueError: - return value - raise cls._unknown_exception_class() - - @classmethod - def to_text(cls: Type[TIntEnum], value: int) -> str: - cls._check_value(value) - try: - text = cls(value).name - except ValueError: - text = None - text = cls._extra_to_text(value, text) - if text is None: - text = f"{cls._prefix()}{value}" - return text - - @classmethod - def make(cls: Type[TIntEnum], value: Union[int, str]) -> TIntEnum: - """Convert text or a value into an enumerated type, if possible. - - *value*, the ``int`` or ``str`` to convert. - - Raises a class-specific exception if a ``str`` is provided that - cannot be converted. - - Raises ``ValueError`` if the value is out of range. - - Returns an enumeration from the calling class corresponding to the - value, if one is defined, or an ``int`` otherwise. 
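    Example (editor's illustrative sketch, using the ``Algorithm`` enum
    defined in dns.dnssectypes):

        Algorithm.make("RSASHA256")   # -> Algorithm.RSASHA256
        Algorithm.make(8)             # -> Algorithm.RSASHA256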
- """ - - if isinstance(value, str): - return cls.from_text(value) - cls._check_value(value) - return cls(value) - - @classmethod - def _maximum(cls): - raise NotImplementedError # pragma: no cover - - @classmethod - def _short_name(cls): - return cls.__name__.lower() - - @classmethod - def _prefix(cls): - return "" - - @classmethod - def _extra_from_text(cls, text): # pylint: disable=W0613 - return None - - @classmethod - def _extra_to_text(cls, value, current_text): # pylint: disable=W0613 - return current_text - - @classmethod - def _unknown_exception_class(cls): - return ValueError diff --git a/server/venv/Lib/site-packages/dns/exception.py b/server/venv/Lib/site-packages/dns/exception.py deleted file mode 100644 index 6982373..0000000 --- a/server/venv/Lib/site-packages/dns/exception.py +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Common DNS Exceptions. - -Dnspython modules may also define their own exceptions, which will -always be subclasses of ``DNSException``. -""" - - -from typing import Optional, Set - - -class DNSException(Exception): - """Abstract base class shared by all dnspython exceptions. - - It supports two basic modes of operation: - - a) Old/compatible mode is used if ``__init__`` was called with - empty *kwargs*. In compatible mode all *args* are passed - to the standard Python Exception class as before and all *args* are - printed by the standard ``__str__`` implementation. Class variable - ``msg`` (or doc string if ``msg`` is ``None``) is returned from ``str()`` - if *args* is empty. - - b) New/parametrized mode is used if ``__init__`` was called with - non-empty *kwargs*. - In the new mode *args* must be empty and all kwargs must match - those set in class variable ``supp_kwargs``. All kwargs are stored inside - ``self.kwargs`` and used in a new ``__str__`` implementation to construct - a formatted message based on the ``fmt`` class variable, a ``string``. - - In the simplest case it is enough to override the ``supp_kwargs`` - and ``fmt`` class variables to get nice parametrized messages. 
- """ - - msg: Optional[str] = None # non-parametrized message - supp_kwargs: Set[str] = set() # accepted parameters for _fmt_kwargs (sanity check) - fmt: Optional[str] = None # message parametrized with results from _fmt_kwargs - - def __init__(self, *args, **kwargs): - self._check_params(*args, **kwargs) - if kwargs: - # This call to a virtual method from __init__ is ok in our usage - self.kwargs = self._check_kwargs(**kwargs) # lgtm[py/init-calls-subclass] - self.msg = str(self) - else: - self.kwargs = dict() # defined but empty for old mode exceptions - if self.msg is None: - # doc string is better implicit message than empty string - self.msg = self.__doc__ - if args: - super().__init__(*args) - else: - super().__init__(self.msg) - - def _check_params(self, *args, **kwargs): - """Old exceptions supported only args and not kwargs. - - For sanity we do not allow to mix old and new behavior.""" - if args or kwargs: - assert bool(args) != bool( - kwargs - ), "keyword arguments are mutually exclusive with positional args" - - def _check_kwargs(self, **kwargs): - if kwargs: - assert ( - set(kwargs.keys()) == self.supp_kwargs - ), "following set of keyword args is required: %s" % (self.supp_kwargs) - return kwargs - - def _fmt_kwargs(self, **kwargs): - """Format kwargs before printing them. - - Resulting dictionary has to have keys necessary for str.format call - on fmt class variable. - """ - fmtargs = {} - for kw, data in kwargs.items(): - if isinstance(data, (list, set)): - # convert list of to list of str() - fmtargs[kw] = list(map(str, data)) - if len(fmtargs[kw]) == 1: - # remove list brackets [] from single-item lists - fmtargs[kw] = fmtargs[kw].pop() - else: - fmtargs[kw] = data - return fmtargs - - def __str__(self): - if self.kwargs and self.fmt: - # provide custom message constructed from keyword arguments - fmtargs = self._fmt_kwargs(**self.kwargs) - return self.fmt.format(**fmtargs) - else: - # print *args directly in the same way as old DNSException - return super().__str__() - - -class FormError(DNSException): - """DNS message is malformed.""" - - -class SyntaxError(DNSException): - """Text input is malformed.""" - - -class UnexpectedEnd(SyntaxError): - """Text input ended unexpectedly.""" - - -class TooBig(DNSException): - """The DNS message is too big.""" - - -class Timeout(DNSException): - """The DNS operation timed out.""" - - supp_kwargs = {"timeout"} - fmt = "The DNS operation timed out after {timeout:.3f} seconds" - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - -class UnsupportedAlgorithm(DNSException): - """The DNSSEC algorithm is not supported.""" - - -class AlgorithmKeyMismatch(UnsupportedAlgorithm): - """The DNSSEC algorithm is not supported for the given key type.""" - - -class ValidationFailure(DNSException): - """The DNSSEC signature is invalid.""" - - -class DeniedByPolicy(DNSException): - """Denied by DNSSEC policy.""" - - -class ExceptionWrapper: - def __init__(self, exception_class): - self.exception_class = exception_class - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is not None and not isinstance(exc_val, self.exception_class): - raise self.exception_class(str(exc_val)) from exc_val - return False diff --git a/server/venv/Lib/site-packages/dns/flags.py b/server/venv/Lib/site-packages/dns/flags.py deleted file mode 100644 index 4c60be1..0000000 --- 
a/server/venv/Lib/site-packages/dns/flags.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Message Flags.""" - -import enum -from typing import Any - -# Standard DNS flags - - -class Flag(enum.IntFlag): - #: Query Response - QR = 0x8000 - #: Authoritative Answer - AA = 0x0400 - #: Truncated Response - TC = 0x0200 - #: Recursion Desired - RD = 0x0100 - #: Recursion Available - RA = 0x0080 - #: Authentic Data - AD = 0x0020 - #: Checking Disabled - CD = 0x0010 - - -# EDNS flags - - -class EDNSFlag(enum.IntFlag): - #: DNSSEC answer OK - DO = 0x8000 - - -def _from_text(text: str, enum_class: Any) -> int: - flags = 0 - tokens = text.split() - for t in tokens: - flags |= enum_class[t.upper()] - return flags - - -def _to_text(flags: int, enum_class: Any) -> str: - text_flags = [] - for k, v in enum_class.__members__.items(): - if flags & v != 0: - text_flags.append(k) - return " ".join(text_flags) - - -def from_text(text: str) -> int: - """Convert a space-separated list of flag text values into a flags - value. - - Returns an ``int`` - """ - - return _from_text(text, Flag) - - -def to_text(flags: int) -> str: - """Convert a flags value into a space-separated list of flag text - values. - - Returns a ``str``. - """ - - return _to_text(flags, Flag) - - -def edns_from_text(text: str) -> int: - """Convert a space-separated list of EDNS flag text values into a EDNS - flags value. - - Returns an ``int`` - """ - - return _from_text(text, EDNSFlag) - - -def edns_to_text(flags: int) -> str: - """Convert an EDNS flags value into a space-separated list of EDNS flag - text values. - - Returns a ``str``. - """ - - return _to_text(flags, EDNSFlag) - - -### BEGIN generated Flag constants - -QR = Flag.QR -AA = Flag.AA -TC = Flag.TC -RD = Flag.RD -RA = Flag.RA -AD = Flag.AD -CD = Flag.CD - -### END generated Flag constants - -### BEGIN generated EDNSFlag constants - -DO = EDNSFlag.DO - -### END generated EDNSFlag constants diff --git a/server/venv/Lib/site-packages/dns/grange.py b/server/venv/Lib/site-packages/dns/grange.py deleted file mode 100644 index 3a52278..0000000 --- a/server/venv/Lib/site-packages/dns/grange.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2012-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS GENERATE range conversion.""" - -from typing import Tuple - -import dns - - -def from_text(text: str) -> Tuple[int, int, int]: - """Convert the text form of a range in a ``$GENERATE`` statement to an - integer. - - *text*, a ``str``, the textual range in ``$GENERATE`` form. - - Returns a tuple of three ``int`` values ``(start, stop, step)``. - """ - - start = -1 - stop = -1 - step = 1 - cur = "" - state = 0 - # state 0 1 2 - # x - y / z - - if text and text[0] == "-": - raise dns.exception.SyntaxError("Start cannot be a negative number") - - for c in text: - if c == "-" and state == 0: - start = int(cur) - cur = "" - state = 1 - elif c == "/": - stop = int(cur) - cur = "" - state = 2 - elif c.isdigit(): - cur += c - else: - raise dns.exception.SyntaxError("Could not parse %s" % (c)) - - if state == 0: - raise dns.exception.SyntaxError("no stop value specified") - elif state == 1: - stop = int(cur) - else: - assert state == 2 - step = int(cur) - - assert step >= 1 - assert start >= 0 - if start > stop: - raise dns.exception.SyntaxError("start must be <= stop") - - return (start, stop, step) diff --git a/server/venv/Lib/site-packages/dns/immutable.py b/server/venv/Lib/site-packages/dns/immutable.py deleted file mode 100644 index cab8d6f..0000000 --- a/server/venv/Lib/site-packages/dns/immutable.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import collections.abc -from typing import Any - -from dns._immutable_ctx import immutable - - -@immutable -class Dict(collections.abc.Mapping): # lgtm[py/missing-equals] - def __init__(self, dictionary: Any, no_copy: bool = False): - """Make an immutable dictionary from the specified dictionary. - - If *no_copy* is `True`, then *dictionary* will be wrapped instead - of copied. Only set this if you are sure there will be no external - references to the dictionary. - """ - if no_copy and isinstance(dictionary, dict): - self._odict = dictionary - else: - self._odict = dict(dictionary) - self._hash = None - - def __getitem__(self, key): - return self._odict.__getitem__(key) - - def __hash__(self): # pylint: disable=invalid-hash-returned - if self._hash is None: - h = 0 - for key in sorted(self._odict.keys()): - h ^= hash(key) - object.__setattr__(self, "_hash", h) - # this does return an int, but pylint doesn't figure that out - return self._hash - - def __len__(self): - return len(self._odict) - - def __iter__(self): - return iter(self._odict) - - -def constify(o: Any) -> Any: - """ - Convert mutable types to immutable types. 
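    Example (editor's illustrative sketch):

        constify([1, [2, 3], bytearray(b"x")])
        # -> (1, (2, 3), b'x'): lists become tuples, bytearrays become
        # bytes, and dicts become immutable Dict instances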
- """ - if isinstance(o, bytearray): - return bytes(o) - if isinstance(o, tuple): - try: - hash(o) - return o - except Exception: - return tuple(constify(elt) for elt in o) - if isinstance(o, list): - return tuple(constify(elt) for elt in o) - if isinstance(o, dict): - cdict = dict() - for k, v in o.items(): - cdict[k] = constify(v) - return Dict(cdict, True) - return o diff --git a/server/venv/Lib/site-packages/dns/inet.py b/server/venv/Lib/site-packages/dns/inet.py deleted file mode 100644 index 02e925c..0000000 --- a/server/venv/Lib/site-packages/dns/inet.py +++ /dev/null @@ -1,180 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Generic Internet address helper functions.""" - -import socket -from typing import Any, Optional, Tuple - -import dns.ipv4 -import dns.ipv6 - -# We assume that AF_INET and AF_INET6 are always defined. We keep -# these here for the benefit of any old code (unlikely though that -# is!). -AF_INET = socket.AF_INET -AF_INET6 = socket.AF_INET6 - - -def inet_pton(family: int, text: str) -> bytes: - """Convert the textual form of a network address into its binary form. - - *family* is an ``int``, the address family. - - *text* is a ``str``, the textual address. - - Raises ``NotImplementedError`` if the address family specified is not - implemented. - - Returns a ``bytes``. - """ - - if family == AF_INET: - return dns.ipv4.inet_aton(text) - elif family == AF_INET6: - return dns.ipv6.inet_aton(text, True) - else: - raise NotImplementedError - - -def inet_ntop(family: int, address: bytes) -> str: - """Convert the binary form of a network address into its textual form. - - *family* is an ``int``, the address family. - - *address* is a ``bytes``, the network address in binary form. - - Raises ``NotImplementedError`` if the address family specified is not - implemented. - - Returns a ``str``. - """ - - if family == AF_INET: - return dns.ipv4.inet_ntoa(address) - elif family == AF_INET6: - return dns.ipv6.inet_ntoa(address) - else: - raise NotImplementedError - - -def af_for_address(text: str) -> int: - """Determine the address family of a textual-form network address. - - *text*, a ``str``, the textual address. - - Raises ``ValueError`` if the address family cannot be determined - from the input. - - Returns an ``int``. - """ - - try: - dns.ipv4.inet_aton(text) - return AF_INET - except Exception: - try: - dns.ipv6.inet_aton(text, True) - return AF_INET6 - except Exception: - raise ValueError - - -def is_multicast(text: str) -> bool: - """Is the textual-form network address a multicast address? - - *text*, a ``str``, the textual address. - - Raises ``ValueError`` if the address family cannot be determined - from the input. - - Returns a ``bool``. 
- """ - - try: - first = dns.ipv4.inet_aton(text)[0] - return first >= 224 and first <= 239 - except Exception: - try: - first = dns.ipv6.inet_aton(text, True)[0] - return first == 255 - except Exception: - raise ValueError - - -def is_address(text: str) -> bool: - """Is the specified string an IPv4 or IPv6 address? - - *text*, a ``str``, the textual address. - - Returns a ``bool``. - """ - - try: - dns.ipv4.inet_aton(text) - return True - except Exception: - try: - dns.ipv6.inet_aton(text, True) - return True - except Exception: - return False - - -def low_level_address_tuple( - high_tuple: Tuple[str, int], af: Optional[int] = None -) -> Any: - """Given a "high-level" address tuple, i.e. - an (address, port) return the appropriate "low-level" address tuple - suitable for use in socket calls. - - If an *af* other than ``None`` is provided, it is assumed the - address in the high-level tuple is valid and has that af. If af - is ``None``, then af_for_address will be called. - """ - address, port = high_tuple - if af is None: - af = af_for_address(address) - if af == AF_INET: - return (address, port) - elif af == AF_INET6: - i = address.find("%") - if i < 0: - # no scope, shortcut! - return (address, port, 0, 0) - # try to avoid getaddrinfo() - addrpart = address[:i] - scope = address[i + 1 :] - if scope.isdigit(): - return (addrpart, port, 0, int(scope)) - try: - return (addrpart, port, 0, socket.if_nametoindex(scope)) - except AttributeError: # pragma: no cover (we can't really test this) - ai_flags = socket.AI_NUMERICHOST - ((*_, tup), *_) = socket.getaddrinfo(address, port, flags=ai_flags) - return tup - else: - raise NotImplementedError(f"unknown address family {af}") - - -def any_for_af(af): - """Return the 'any' address for the specified address family.""" - if af == socket.AF_INET: - return "0.0.0.0" - elif af == socket.AF_INET6: - return "::" - raise NotImplementedError(f"unknown address family {af}") diff --git a/server/venv/Lib/site-packages/dns/ipv4.py b/server/venv/Lib/site-packages/dns/ipv4.py deleted file mode 100644 index f549150..0000000 --- a/server/venv/Lib/site-packages/dns/ipv4.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""IPv4 helper functions.""" - -import struct -from typing import Union - -import dns.exception - - -def inet_ntoa(address: bytes) -> str: - """Convert an IPv4 address in binary form to text form. - - *address*, a ``bytes``, the IPv4 address in binary form. - - Returns a ``str``. 
- """ - - if len(address) != 4: - raise dns.exception.SyntaxError - return "%u.%u.%u.%u" % (address[0], address[1], address[2], address[3]) - - -def inet_aton(text: Union[str, bytes]) -> bytes: - """Convert an IPv4 address in text form to binary form. - - *text*, a ``str`` or ``bytes``, the IPv4 address in textual form. - - Returns a ``bytes``. - """ - - if not isinstance(text, bytes): - btext = text.encode() - else: - btext = text - parts = btext.split(b".") - if len(parts) != 4: - raise dns.exception.SyntaxError - for part in parts: - if not part.isdigit(): - raise dns.exception.SyntaxError - if len(part) > 1 and part[0] == ord("0"): - # No leading zeros - raise dns.exception.SyntaxError - try: - b = [int(part) for part in parts] - return struct.pack("BBBB", *b) - except Exception: - raise dns.exception.SyntaxError diff --git a/server/venv/Lib/site-packages/dns/ipv6.py b/server/venv/Lib/site-packages/dns/ipv6.py deleted file mode 100644 index 0cc3d86..0000000 --- a/server/venv/Lib/site-packages/dns/ipv6.py +++ /dev/null @@ -1,208 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""IPv6 helper functions.""" - -import binascii -import re -from typing import List, Union - -import dns.exception -import dns.ipv4 - -_leading_zero = re.compile(r"0+([0-9a-f]+)") - - -def inet_ntoa(address: bytes) -> str: - """Convert an IPv6 address in binary form to text form. - - *address*, a ``bytes``, the IPv6 address in binary form. - - Raises ``ValueError`` if the address isn't 16 bytes long. - Returns a ``str``. - """ - - if len(address) != 16: - raise ValueError("IPv6 addresses are 16 bytes long") - hex = binascii.hexlify(address) - chunks = [] - i = 0 - l = len(hex) - while i < l: - chunk = hex[i : i + 4].decode() - # strip leading zeros. 
we do this with an re instead of - # with lstrip() because lstrip() didn't support chars until - # python 2.2.2 - m = _leading_zero.match(chunk) - if m is not None: - chunk = m.group(1) - chunks.append(chunk) - i += 4 - # - # Compress the longest subsequence of 0-value chunks to :: - # - best_start = 0 - best_len = 0 - start = -1 - last_was_zero = False - for i in range(8): - if chunks[i] != "0": - if last_was_zero: - end = i - current_len = end - start - if current_len > best_len: - best_start = start - best_len = current_len - last_was_zero = False - elif not last_was_zero: - start = i - last_was_zero = True - if last_was_zero: - end = 8 - current_len = end - start - if current_len > best_len: - best_start = start - best_len = current_len - if best_len > 1: - if best_start == 0 and (best_len == 6 or best_len == 5 and chunks[5] == "ffff"): - # We have an embedded IPv4 address - if best_len == 6: - prefix = "::" - else: - prefix = "::ffff:" - thex = prefix + dns.ipv4.inet_ntoa(address[12:]) - else: - thex = ( - ":".join(chunks[:best_start]) - + "::" - + ":".join(chunks[best_start + best_len :]) - ) - else: - thex = ":".join(chunks) - return thex - - -_v4_ending = re.compile(rb"(.*):(\d+\.\d+\.\d+\.\d+)$") -_colon_colon_start = re.compile(rb"::.*") -_colon_colon_end = re.compile(rb".*::$") - - -def inet_aton(text: Union[str, bytes], ignore_scope: bool = False) -> bytes: - """Convert an IPv6 address in text form to binary form. - - *text*, a ``str``, the IPv6 address in textual form. - - *ignore_scope*, a ``bool``. If ``True``, a scope will be ignored. - If ``False``, the default, it is an error for a scope to be present. - - Returns a ``bytes``. - """ - - # - # Our aim here is not something fast; we just want something that works. - # - if not isinstance(text, bytes): - btext = text.encode() - else: - btext = text - - if ignore_scope: - parts = btext.split(b"%") - l = len(parts) - if l == 2: - btext = parts[0] - elif l > 2: - raise dns.exception.SyntaxError - - if btext == b"": - raise dns.exception.SyntaxError - elif btext.endswith(b":") and not btext.endswith(b"::"): - raise dns.exception.SyntaxError - elif btext.startswith(b":") and not btext.startswith(b"::"): - raise dns.exception.SyntaxError - elif btext == b"::": - btext = b"0::" - # - # Get rid of the icky dot-quad syntax if we have it. - # - m = _v4_ending.match(btext) - if m is not None: - b = dns.ipv4.inet_aton(m.group(2)) - btext = ( - "{}:{:02x}{:02x}:{:02x}{:02x}".format( - m.group(1).decode(), b[0], b[1], b[2], b[3] - ) - ).encode() - # - # Try to turn '::' into ':'; if no match try to - # turn '::' into ':' - # - m = _colon_colon_start.match(btext) - if m is not None: - btext = btext[1:] - else: - m = _colon_colon_end.match(btext) - if m is not None: - btext = btext[:-1] - # - # Now canonicalize into 8 chunks of 4 hex digits each - # - chunks = btext.split(b":") - l = len(chunks) - if l > 8: - raise dns.exception.SyntaxError - seen_empty = False - canonical: List[bytes] = [] - for c in chunks: - if c == b"": - if seen_empty: - raise dns.exception.SyntaxError - seen_empty = True - for _ in range(0, 8 - l + 1): - canonical.append(b"0000") - else: - lc = len(c) - if lc > 4: - raise dns.exception.SyntaxError - if lc != 4: - c = (b"0" * (4 - lc)) + c - canonical.append(c) - if l < 8 and not seen_empty: - raise dns.exception.SyntaxError - btext = b"".join(canonical) - - # - # Finally we can go to binary. 
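
The net effect of this canonicalization is that every legal IPv6 spelling converges on the same 16 bytes, and ``inet_ntoa()`` then picks the compressed form; a small sketch using documentation addresses:

.. code-block:: python

    import dns.ipv6

    # Shorthand and fully spelled-out forms produce the same 16 bytes...
    b = dns.ipv6.inet_aton("2001:db8:0:0:0:0:0:1")
    assert b == dns.ipv6.inet_aton("2001:db8::1")

    # ...and the longest zero run is compressed to "::" on the way back out.
    assert dns.ipv6.inet_ntoa(b) == "2001:db8::1"

    # IPv4-mapped addresses round-trip in the mixed notation.
    mapped = dns.ipv6.inet_aton("::ffff:192.0.2.1")
    assert dns.ipv6.inet_ntoa(mapped) == "::ffff:192.0.2.1"
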
- # - try: - return binascii.unhexlify(btext) - except (binascii.Error, TypeError): - raise dns.exception.SyntaxError - - -_mapped_prefix = b"\x00" * 10 + b"\xff\xff" - - -def is_mapped(address: bytes) -> bool: - """Is the specified address a mapped IPv4 address? - - *address*, a ``bytes`` is an IPv6 address in binary form. - - Returns a ``bool``. - """ - - return address.startswith(_mapped_prefix) diff --git a/server/venv/Lib/site-packages/dns/message.py b/server/venv/Lib/site-packages/dns/message.py deleted file mode 100644 index daae636..0000000 --- a/server/venv/Lib/site-packages/dns/message.py +++ /dev/null @@ -1,1829 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Messages""" - -import contextlib -import io -import time -from typing import Any, Dict, List, Optional, Tuple, Union - -import dns.edns -import dns.entropy -import dns.enum -import dns.exception -import dns.flags -import dns.name -import dns.opcode -import dns.rcode -import dns.rdata -import dns.rdataclass -import dns.rdatatype -import dns.rdtypes.ANY.OPT -import dns.rdtypes.ANY.TSIG -import dns.renderer -import dns.rrset -import dns.tsig -import dns.ttl -import dns.wire - - -class ShortHeader(dns.exception.FormError): - """The DNS packet passed to from_wire() is too short.""" - - -class TrailingJunk(dns.exception.FormError): - """The DNS packet passed to from_wire() has extra junk at the end of it.""" - - -class UnknownHeaderField(dns.exception.DNSException): - """The header field name was not recognized when converting from text - into a message.""" - - -class BadEDNS(dns.exception.FormError): - """An OPT record occurred somewhere other than - the additional data section.""" - - -class BadTSIG(dns.exception.FormError): - """A TSIG record occurred somewhere other than the end of - the additional data section.""" - - -class UnknownTSIGKey(dns.exception.DNSException): - """A TSIG with an unknown key was received.""" - - -class Truncated(dns.exception.DNSException): - """The truncated flag is set.""" - - supp_kwargs = {"message"} - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def message(self): - """As much of the message as could be processed. - - Returns a ``dns.message.Message``. 
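
``Truncated`` carries the partially parsed message, which is mainly useful for callers that want to fall back to TCP. A sketch (``parse`` and its ``wire`` argument are hypothetical):

.. code-block:: python

    import dns.message

    def parse(wire: bytes) -> dns.message.Message:
        # Surface truncation so the caller can retry the same question
        # over TCP instead of trusting a clipped UDP reply.
        try:
            return dns.message.from_wire(wire, raise_on_truncation=True)
        except dns.message.Truncated as exc:
            partial = exc.message()  # as much of the reply as was parseable
            print("truncated reply, id", partial.id)
            raise
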
- """ - return self.kwargs["message"] - - -class NotQueryResponse(dns.exception.DNSException): - """Message is not a response to a query.""" - - -class ChainTooLong(dns.exception.DNSException): - """The CNAME chain is too long.""" - - -class AnswerForNXDOMAIN(dns.exception.DNSException): - """The rcode is NXDOMAIN but an answer was found.""" - - -class NoPreviousName(dns.exception.SyntaxError): - """No previous name was known.""" - - -class MessageSection(dns.enum.IntEnum): - """Message sections""" - - QUESTION = 0 - ANSWER = 1 - AUTHORITY = 2 - ADDITIONAL = 3 - - @classmethod - def _maximum(cls): - return 3 - - -class MessageError: - def __init__(self, exception: Exception, offset: int): - self.exception = exception - self.offset = offset - - -DEFAULT_EDNS_PAYLOAD = 1232 -MAX_CHAIN = 16 - -IndexKeyType = Tuple[ - int, - dns.name.Name, - dns.rdataclass.RdataClass, - dns.rdatatype.RdataType, - Optional[dns.rdatatype.RdataType], - Optional[dns.rdataclass.RdataClass], -] -IndexType = Dict[IndexKeyType, dns.rrset.RRset] -SectionType = Union[int, str, List[dns.rrset.RRset]] - - -class Message: - """A DNS message.""" - - _section_enum = MessageSection - - def __init__(self, id: Optional[int] = None): - if id is None: - self.id = dns.entropy.random_16() - else: - self.id = id - self.flags = 0 - self.sections: List[List[dns.rrset.RRset]] = [[], [], [], []] - self.opt: Optional[dns.rrset.RRset] = None - self.request_payload = 0 - self.pad = 0 - self.keyring: Any = None - self.tsig: Optional[dns.rrset.RRset] = None - self.request_mac = b"" - self.xfr = False - self.origin: Optional[dns.name.Name] = None - self.tsig_ctx: Optional[Any] = None - self.index: IndexType = {} - self.errors: List[MessageError] = [] - self.time = 0.0 - - @property - def question(self) -> List[dns.rrset.RRset]: - """The question section.""" - return self.sections[0] - - @question.setter - def question(self, v): - self.sections[0] = v - - @property - def answer(self) -> List[dns.rrset.RRset]: - """The answer section.""" - return self.sections[1] - - @answer.setter - def answer(self, v): - self.sections[1] = v - - @property - def authority(self) -> List[dns.rrset.RRset]: - """The authority section.""" - return self.sections[2] - - @authority.setter - def authority(self, v): - self.sections[2] = v - - @property - def additional(self) -> List[dns.rrset.RRset]: - """The additional data section.""" - return self.sections[3] - - @additional.setter - def additional(self, v): - self.sections[3] = v - - def __repr__(self): - return "" - - def __str__(self): - return self.to_text() - - def to_text( - self, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - **kw: Dict[str, Any], - ) -> str: - """Convert the message to text. - - The *origin*, *relativize*, and any other keyword - arguments are passed to the RRset ``to_wire()`` method. - - Returns a ``str``. 
- """ - - s = io.StringIO() - s.write("id %d\n" % self.id) - s.write("opcode %s\n" % dns.opcode.to_text(self.opcode())) - s.write("rcode %s\n" % dns.rcode.to_text(self.rcode())) - s.write("flags %s\n" % dns.flags.to_text(self.flags)) - if self.edns >= 0: - s.write("edns %s\n" % self.edns) - if self.ednsflags != 0: - s.write("eflags %s\n" % dns.flags.edns_to_text(self.ednsflags)) - s.write("payload %d\n" % self.payload) - for opt in self.options: - s.write("option %s\n" % opt.to_text()) - for name, which in self._section_enum.__members__.items(): - s.write(f";{name}\n") - for rrset in self.section_from_number(which): - s.write(rrset.to_text(origin, relativize, **kw)) - s.write("\n") - # - # We strip off the final \n so the caller can print the result without - # doing weird things to get around eccentricities in Python print - # formatting - # - return s.getvalue()[:-1] - - def __eq__(self, other): - """Two messages are equal if they have the same content in the - header, question, answer, and authority sections. - - Returns a ``bool``. - """ - - if not isinstance(other, Message): - return False - if self.id != other.id: - return False - if self.flags != other.flags: - return False - for i, section in enumerate(self.sections): - other_section = other.sections[i] - for n in section: - if n not in other_section: - return False - for n in other_section: - if n not in section: - return False - return True - - def __ne__(self, other): - return not self.__eq__(other) - - def is_response(self, other: "Message") -> bool: - """Is *other*, also a ``dns.message.Message``, a response to this - message? - - Returns a ``bool``. - """ - - if ( - other.flags & dns.flags.QR == 0 - or self.id != other.id - or dns.opcode.from_flags(self.flags) != dns.opcode.from_flags(other.flags) - ): - return False - if other.rcode() in { - dns.rcode.FORMERR, - dns.rcode.SERVFAIL, - dns.rcode.NOTIMP, - dns.rcode.REFUSED, - }: - # We don't check the question section in these cases if - # the other question section is empty, even though they - # still really ought to have a question section. - if len(other.question) == 0: - return True - if dns.opcode.is_update(self.flags): - # This is assuming the "sender doesn't include anything - # from the update", but we don't care to check the other - # case, which is that all the sections are returned and - # identical. - return True - for n in self.question: - if n not in other.question: - return False - for n in other.question: - if n not in self.question: - return False - return True - - def section_number(self, section: List[dns.rrset.RRset]) -> int: - """Return the "section number" of the specified section for use - in indexing. - - *section* is one of the section attributes of this message. - - Raises ``ValueError`` if the section isn't known. - - Returns an ``int``. - """ - - for i, our_section in enumerate(self.sections): - if section is our_section: - return self._section_enum(i) - raise ValueError("unknown section") - - def section_from_number(self, number: int) -> List[dns.rrset.RRset]: - """Return the section list associated with the specified section - number. - - *number* is a section number `int` or the text form of a section - name. - - Raises ``ValueError`` if the section isn't known. - - Returns a ``list``. 
- """ - - section = self._section_enum.make(number) - return self.sections[section] - - def find_rrset( - self, - section: SectionType, - name: dns.name.Name, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - deleting: Optional[dns.rdataclass.RdataClass] = None, - create: bool = False, - force_unique: bool = False, - idna_codec: Optional[dns.name.IDNACodec] = None, - ) -> dns.rrset.RRset: - """Find the RRset with the given attributes in the specified section. - - *section*, an ``int`` section number, a ``str`` section name, or one of - the section attributes of this message. This specifies the - the section of the message to search. For example:: - - my_message.find_rrset(my_message.answer, name, rdclass, rdtype) - my_message.find_rrset(dns.message.ANSWER, name, rdclass, rdtype) - my_message.find_rrset("ANSWER", name, rdclass, rdtype) - - *name*, a ``dns.name.Name`` or ``str``, the name of the RRset. - - *rdclass*, an ``int`` or ``str``, the class of the RRset. - - *rdtype*, an ``int`` or ``str``, the type of the RRset. - - *covers*, an ``int`` or ``str``, the covers value of the RRset. - The default is ``dns.rdatatype.NONE``. - - *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the - RRset. The default is ``None``. - - *create*, a ``bool``. If ``True``, create the RRset if it is not found. - The created RRset is appended to *section*. - - *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, - create a new RRset regardless of whether a matching RRset exists - already. The default is ``False``. This is useful when creating - DDNS Update messages, as order matters for them. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - Raises ``KeyError`` if the RRset was not found and create was - ``False``. - - Returns a ``dns.rrset.RRset object``. 
- """ - - if isinstance(section, int): - section_number = section - section = self.section_from_number(section_number) - elif isinstance(section, str): - section_number = MessageSection.from_text(section) - section = self.section_from_number(section_number) - else: - section_number = self.section_number(section) - if isinstance(name, str): - name = dns.name.from_text(name, idna_codec=idna_codec) - rdtype = dns.rdatatype.RdataType.make(rdtype) - rdclass = dns.rdataclass.RdataClass.make(rdclass) - covers = dns.rdatatype.RdataType.make(covers) - if deleting is not None: - deleting = dns.rdataclass.RdataClass.make(deleting) - key = (section_number, name, rdclass, rdtype, covers, deleting) - if not force_unique: - if self.index is not None: - rrset = self.index.get(key) - if rrset is not None: - return rrset - else: - for rrset in section: - if rrset.full_match(name, rdclass, rdtype, covers, deleting): - return rrset - if not create: - raise KeyError - rrset = dns.rrset.RRset(name, rdclass, rdtype, covers, deleting) - section.append(rrset) - if self.index is not None: - self.index[key] = rrset - return rrset - - def get_rrset( - self, - section: SectionType, - name: dns.name.Name, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - deleting: Optional[dns.rdataclass.RdataClass] = None, - create: bool = False, - force_unique: bool = False, - idna_codec: Optional[dns.name.IDNACodec] = None, - ) -> Optional[dns.rrset.RRset]: - """Get the RRset with the given attributes in the specified section. - - If the RRset is not found, None is returned. - - *section*, an ``int`` section number, a ``str`` section name, or one of - the section attributes of this message. This specifies the - the section of the message to search. For example:: - - my_message.get_rrset(my_message.answer, name, rdclass, rdtype) - my_message.get_rrset(dns.message.ANSWER, name, rdclass, rdtype) - my_message.get_rrset("ANSWER", name, rdclass, rdtype) - - *name*, a ``dns.name.Name`` or ``str``, the name of the RRset. - - *rdclass*, an ``int`` or ``str``, the class of the RRset. - - *rdtype*, an ``int`` or ``str``, the type of the RRset. - - *covers*, an ``int`` or ``str``, the covers value of the RRset. - The default is ``dns.rdatatype.NONE``. - - *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the - RRset. The default is ``None``. - - *create*, a ``bool``. If ``True``, create the RRset if it is not found. - The created RRset is appended to *section*. - - *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, - create a new RRset regardless of whether a matching RRset exists - already. The default is ``False``. This is useful when creating - DDNS Update messages, as order matters for them. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - Returns a ``dns.rrset.RRset object`` or ``None``. - """ - - try: - rrset = self.find_rrset( - section, - name, - rdclass, - rdtype, - covers, - deleting, - create, - force_unique, - idna_codec, - ) - except KeyError: - rrset = None - return rrset - - def _compute_opt_reserve(self) -> int: - """Compute the size required for the OPT RR, padding excluded""" - if not self.opt: - return 0 - # 1 byte for the root name, 10 for the standard RR fields - size = 11 - # This would be more efficient if options had a size() method, but we won't - # worry about that for now. 
We also don't worry if there is an existing padding - # option, as it is unlikely and probably harmless, as the worst case is that we - # may add another, and this seems to be legal. - for option in self.opt[0].options: - wire = option.to_wire() - # We add 4 here to account for the option type and length - size += len(wire) + 4 - if self.pad: - # Padding will be added, so again add the option type and length. - size += 4 - return size - - def _compute_tsig_reserve(self) -> int: - """Compute the size required for the TSIG RR""" - # This would be more efficient if TSIGs had a size method, but we won't - # worry about for now. Also, we can't really cope with the potential - # compressibility of the TSIG owner name, so we estimate with the uncompressed - # size. We will disable compression when TSIG and padding are both is active - # so that the padding comes out right. - if not self.tsig: - return 0 - f = io.BytesIO() - self.tsig.to_wire(f) - return len(f.getvalue()) - - def to_wire( - self, - origin: Optional[dns.name.Name] = None, - max_size: int = 0, - multi: bool = False, - tsig_ctx: Optional[Any] = None, - **kw: Dict[str, Any], - ) -> bytes: - """Return a string containing the message in DNS compressed wire - format. - - Additional keyword arguments are passed to the RRset ``to_wire()`` - method. - - *origin*, a ``dns.name.Name`` or ``None``, the origin to be appended - to any relative names. If ``None``, and the message has an origin - attribute that is not ``None``, then it will be used. - - *max_size*, an ``int``, the maximum size of the wire format - output; default is 0, which means "the message's request - payload, if nonzero, or 65535". - - *multi*, a ``bool``, should be set to ``True`` if this message is - part of a multiple message sequence. - - *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the - ongoing TSIG context, used when signing zone transfers. - - Raises ``dns.exception.TooBig`` if *max_size* was exceeded. - - Returns a ``bytes``. 
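
A round-trip sketch: ``from_wire()`` (defined later in this module) reverses ``to_wire()``, and message equality covers the header id, flags, and all four sections:

.. code-block:: python

    import dns.message

    query = dns.message.make_query("example.com.", "AAAA")
    wire = query.to_wire()          # compressed DNS wire format, bytes

    parsed = dns.message.from_wire(wire)
    assert parsed.id == query.id
    assert parsed == query
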
- """ - - if origin is None and self.origin is not None: - origin = self.origin - if max_size == 0: - if self.request_payload != 0: - max_size = self.request_payload - else: - max_size = 65535 - if max_size < 512: - max_size = 512 - elif max_size > 65535: - max_size = 65535 - r = dns.renderer.Renderer(self.id, self.flags, max_size, origin) - opt_reserve = self._compute_opt_reserve() - r.reserve(opt_reserve) - tsig_reserve = self._compute_tsig_reserve() - r.reserve(tsig_reserve) - for rrset in self.question: - r.add_question(rrset.name, rrset.rdtype, rrset.rdclass) - for rrset in self.answer: - r.add_rrset(dns.renderer.ANSWER, rrset, **kw) - for rrset in self.authority: - r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw) - for rrset in self.additional: - r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw) - r.release_reserved() - if self.opt is not None: - r.add_opt(self.opt, self.pad, opt_reserve, tsig_reserve) - r.write_header() - if self.tsig is not None: - (new_tsig, ctx) = dns.tsig.sign( - r.get_wire(), - self.keyring, - self.tsig[0], - int(time.time()), - self.request_mac, - tsig_ctx, - multi, - ) - self.tsig.clear() - self.tsig.add(new_tsig) - r.add_rrset(dns.renderer.ADDITIONAL, self.tsig) - r.write_header() - if multi: - self.tsig_ctx = ctx - return r.get_wire() - - @staticmethod - def _make_tsig( - keyname, algorithm, time_signed, fudge, mac, original_id, error, other - ): - tsig = dns.rdtypes.ANY.TSIG.TSIG( - dns.rdataclass.ANY, - dns.rdatatype.TSIG, - algorithm, - time_signed, - fudge, - mac, - original_id, - error, - other, - ) - return dns.rrset.from_rdata(keyname, 0, tsig) - - def use_tsig( - self, - keyring: Any, - keyname: Optional[Union[dns.name.Name, str]] = None, - fudge: int = 300, - original_id: Optional[int] = None, - tsig_error: int = 0, - other_data: bytes = b"", - algorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, - ) -> None: - """When sending, a TSIG signature using the specified key - should be added. - - *key*, a ``dns.tsig.Key`` is the key to use. If a key is specified, - the *keyring* and *algorithm* fields are not used. - - *keyring*, a ``dict``, ``callable`` or ``dns.tsig.Key``, is either - the TSIG keyring or key to use. - - The format of a keyring dict is a mapping from TSIG key name, as - ``dns.name.Name`` to ``dns.tsig.Key`` or a TSIG secret, a ``bytes``. - If a ``dict`` *keyring* is specified but a *keyname* is not, the key - used will be the first key in the *keyring*. Note that the order of - keys in a dictionary is not defined, so applications should supply a - keyname when a ``dict`` keyring is used, unless they know the keyring - contains only one key. If a ``callable`` keyring is specified, the - callable will be called with the message and the keyname, and is - expected to return a key. - - *keyname*, a ``dns.name.Name``, ``str`` or ``None``, the name of - this TSIG key to use; defaults to ``None``. If *keyring* is a - ``dict``, the key must be defined in it. If *keyring* is a - ``dns.tsig.Key``, this is ignored. - - *fudge*, an ``int``, the TSIG time fudge. - - *original_id*, an ``int``, the TSIG original id. If ``None``, - the message's id is used. - - *tsig_error*, an ``int``, the TSIG error code. - - *other_data*, a ``bytes``, the TSIG other data. - - *algorithm*, a ``dns.name.Name`` or ``str``, the TSIG algorithm to use. This is - only used if *keyring* is a ``dict``, and the key entry is a ``bytes``. 
- """ - - if isinstance(keyring, dns.tsig.Key): - key = keyring - keyname = key.name - elif callable(keyring): - key = keyring(self, keyname) - else: - if isinstance(keyname, str): - keyname = dns.name.from_text(keyname) - if keyname is None: - keyname = next(iter(keyring)) - key = keyring[keyname] - if isinstance(key, bytes): - key = dns.tsig.Key(keyname, key, algorithm) - self.keyring = key - if original_id is None: - original_id = self.id - self.tsig = self._make_tsig( - keyname, - self.keyring.algorithm, - 0, - fudge, - b"\x00" * dns.tsig.mac_sizes[self.keyring.algorithm], - original_id, - tsig_error, - other_data, - ) - - @property - def keyname(self) -> Optional[dns.name.Name]: - if self.tsig: - return self.tsig.name - else: - return None - - @property - def keyalgorithm(self) -> Optional[dns.name.Name]: - if self.tsig: - return self.tsig[0].algorithm - else: - return None - - @property - def mac(self) -> Optional[bytes]: - if self.tsig: - return self.tsig[0].mac - else: - return None - - @property - def tsig_error(self) -> Optional[int]: - if self.tsig: - return self.tsig[0].error - else: - return None - - @property - def had_tsig(self) -> bool: - return bool(self.tsig) - - @staticmethod - def _make_opt(flags=0, payload=DEFAULT_EDNS_PAYLOAD, options=None): - opt = dns.rdtypes.ANY.OPT.OPT(payload, dns.rdatatype.OPT, options or ()) - return dns.rrset.from_rdata(dns.name.root, int(flags), opt) - - def use_edns( - self, - edns: Optional[Union[int, bool]] = 0, - ednsflags: int = 0, - payload: int = DEFAULT_EDNS_PAYLOAD, - request_payload: Optional[int] = None, - options: Optional[List[dns.edns.Option]] = None, - pad: int = 0, - ) -> None: - """Configure EDNS behavior. - - *edns*, an ``int``, is the EDNS level to use. Specifying ``None``, ``False``, - or ``-1`` means "do not use EDNS", and in this case the other parameters are - ignored. Specifying ``True`` is equivalent to specifying 0, i.e. "use EDNS0". - - *ednsflags*, an ``int``, the EDNS flag values. - - *payload*, an ``int``, is the EDNS sender's payload field, which is the maximum - size of UDP datagram the sender can handle. I.e. how big a response to this - message can be. - - *request_payload*, an ``int``, is the EDNS payload size to use when sending this - message. If not specified, defaults to the value of *payload*. - - *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS options. - - *pad*, a non-negative ``int``. If 0, the default, do not pad; otherwise add - padding bytes to make the message size a multiple of *pad*. Note that if - padding is non-zero, an EDNS PADDING option will always be added to the - message. 
- """ - - if edns is None or edns is False: - edns = -1 - elif edns is True: - edns = 0 - if edns < 0: - self.opt = None - self.request_payload = 0 - else: - # make sure the EDNS version in ednsflags agrees with edns - ednsflags &= 0xFF00FFFF - ednsflags |= edns << 16 - if options is None: - options = [] - self.opt = self._make_opt(ednsflags, payload, options) - if request_payload is None: - request_payload = payload - self.request_payload = request_payload - self.pad = pad - - @property - def edns(self) -> int: - if self.opt: - return (self.ednsflags & 0xFF0000) >> 16 - else: - return -1 - - @property - def ednsflags(self) -> int: - if self.opt: - return self.opt.ttl - else: - return 0 - - @ednsflags.setter - def ednsflags(self, v): - if self.opt: - self.opt.ttl = v - elif v: - self.opt = self._make_opt(v) - - @property - def payload(self) -> int: - if self.opt: - return self.opt[0].payload - else: - return 0 - - @property - def options(self) -> Tuple: - if self.opt: - return self.opt[0].options - else: - return () - - def want_dnssec(self, wanted: bool = True) -> None: - """Enable or disable 'DNSSEC desired' flag in requests. - - *wanted*, a ``bool``. If ``True``, then DNSSEC data is - desired in the response, EDNS is enabled if required, and then - the DO bit is set. If ``False``, the DO bit is cleared if - EDNS is enabled. - """ - - if wanted: - self.ednsflags |= dns.flags.DO - elif self.opt: - self.ednsflags &= ~dns.flags.DO - - def rcode(self) -> dns.rcode.Rcode: - """Return the rcode. - - Returns a ``dns.rcode.Rcode``. - """ - return dns.rcode.from_flags(int(self.flags), int(self.ednsflags)) - - def set_rcode(self, rcode: dns.rcode.Rcode) -> None: - """Set the rcode. - - *rcode*, a ``dns.rcode.Rcode``, is the rcode to set. - """ - (value, evalue) = dns.rcode.to_flags(rcode) - self.flags &= 0xFFF0 - self.flags |= value - self.ednsflags &= 0x00FFFFFF - self.ednsflags |= evalue - - def opcode(self) -> dns.opcode.Opcode: - """Return the opcode. - - Returns a ``dns.opcode.Opcode``. - """ - return dns.opcode.from_flags(int(self.flags)) - - def set_opcode(self, opcode: dns.opcode.Opcode) -> None: - """Set the opcode. - - *opcode*, a ``dns.opcode.Opcode``, is the opcode to set. - """ - self.flags &= 0x87FF - self.flags |= dns.opcode.to_flags(opcode) - - def _get_one_rr_per_rrset(self, value): - # What the caller picked is fine. - return value - - # pylint: disable=unused-argument - - def _parse_rr_header(self, section, name, rdclass, rdtype): - return (rdclass, rdtype, None, False) - - # pylint: enable=unused-argument - - def _parse_special_rr_header(self, section, count, position, name, rdclass, rdtype): - if rdtype == dns.rdatatype.OPT: - if ( - section != MessageSection.ADDITIONAL - or self.opt - or name != dns.name.root - ): - raise BadEDNS - elif rdtype == dns.rdatatype.TSIG: - if ( - section != MessageSection.ADDITIONAL - or rdclass != dns.rdatatype.ANY - or position != count - 1 - ): - raise BadTSIG - return (rdclass, rdtype, None, False) - - -class ChainingResult: - """The result of a call to dns.message.QueryMessage.resolve_chaining(). - - The ``answer`` attribute is the answer RRSet, or ``None`` if it doesn't - exist. - - The ``canonical_name`` attribute is the canonical name after all - chaining has been applied (this is the same name as ``rrset.name`` in cases - where rrset is not ``None``). - - The ``minimum_ttl`` attribute is the minimum TTL, i.e. the TTL to - use if caching the data. 
It is the smallest of all the CNAME TTLs - and either the answer TTL if it exists or the SOA TTL and SOA - minimum values for negative answers. - - The ``cnames`` attribute is a list of all the CNAME RRSets followed to - get to the canonical name. - """ - - def __init__( - self, - canonical_name: dns.name.Name, - answer: Optional[dns.rrset.RRset], - minimum_ttl: int, - cnames: List[dns.rrset.RRset], - ): - self.canonical_name = canonical_name - self.answer = answer - self.minimum_ttl = minimum_ttl - self.cnames = cnames - - -class QueryMessage(Message): - def resolve_chaining(self) -> ChainingResult: - """Follow the CNAME chain in the response to determine the answer - RRset. - - Raises ``dns.message.NotQueryResponse`` if the message is not - a response. - - Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. - - Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN - but an answer was found. - - Raises ``dns.exception.FormError`` if the question count is not 1. - - Returns a ChainingResult object. - """ - if self.flags & dns.flags.QR == 0: - raise NotQueryResponse - if len(self.question) != 1: - raise dns.exception.FormError - question = self.question[0] - qname = question.name - min_ttl = dns.ttl.MAX_TTL - answer = None - count = 0 - cnames = [] - while count < MAX_CHAIN: - try: - answer = self.find_rrset( - self.answer, qname, question.rdclass, question.rdtype - ) - min_ttl = min(min_ttl, answer.ttl) - break - except KeyError: - if question.rdtype != dns.rdatatype.CNAME: - try: - crrset = self.find_rrset( - self.answer, qname, question.rdclass, dns.rdatatype.CNAME - ) - cnames.append(crrset) - min_ttl = min(min_ttl, crrset.ttl) - for rd in crrset: - qname = rd.target - break - count += 1 - continue - except KeyError: - # Exit the chaining loop - break - else: - # Exit the chaining loop - break - if count >= MAX_CHAIN: - raise ChainTooLong - if self.rcode() == dns.rcode.NXDOMAIN and answer is not None: - raise AnswerForNXDOMAIN - if answer is None: - # Further minimize the TTL with NCACHE. - auname = qname - while True: - # Look for an SOA RR whose owner name is a superdomain - # of qname. - try: - srrset = self.find_rrset( - self.authority, auname, question.rdclass, dns.rdatatype.SOA - ) - min_ttl = min(min_ttl, srrset.ttl, srrset[0].minimum) - break - except KeyError: - try: - auname = auname.parent() - except dns.name.NoParent: - break - return ChainingResult(qname, answer, min_ttl, cnames) - - def canonical_name(self) -> dns.name.Name: - """Return the canonical name of the first name in the question - section. - - Raises ``dns.message.NotQueryResponse`` if the message is not - a response. - - Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. - - Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN - but an answer was found. - - Raises ``dns.exception.FormError`` if the question count is not 1. - """ - return self.resolve_chaining().canonical_name - - -def _maybe_import_update(): - # We avoid circular imports by doing this here. 
We do it in another - # function as doing it in _message_factory_from_opcode() makes "dns" - # a local symbol, and the first line fails :) - - # pylint: disable=redefined-outer-name,import-outside-toplevel,unused-import - import dns.update # noqa: F401 - - -def _message_factory_from_opcode(opcode): - if opcode == dns.opcode.QUERY: - return QueryMessage - elif opcode == dns.opcode.UPDATE: - _maybe_import_update() - return dns.update.UpdateMessage - else: - return Message - - -class _WireReader: - - """Wire format reader. - - parser: the binary parser - message: The message object being built - initialize_message: Callback to set message parsing options - question_only: Are we only reading the question? - one_rr_per_rrset: Put each RR into its own RRset? - keyring: TSIG keyring - ignore_trailing: Ignore trailing junk at end of request? - multi: Is this message part of a multi-message sequence? - DNS dynamic updates. - continue_on_error: try to extract as much information as possible from - the message, accumulating MessageErrors in the *errors* attribute instead of - raising them. - """ - - def __init__( - self, - wire, - initialize_message, - question_only=False, - one_rr_per_rrset=False, - ignore_trailing=False, - keyring=None, - multi=False, - continue_on_error=False, - ): - self.parser = dns.wire.Parser(wire) - self.message = None - self.initialize_message = initialize_message - self.question_only = question_only - self.one_rr_per_rrset = one_rr_per_rrset - self.ignore_trailing = ignore_trailing - self.keyring = keyring - self.multi = multi - self.continue_on_error = continue_on_error - self.errors = [] - - def _get_question(self, section_number, qcount): - """Read the next *qcount* records from the wire data and add them to - the question section. - """ - assert self.message is not None - section = self.message.sections[section_number] - for _ in range(qcount): - qname = self.parser.get_name(self.message.origin) - (rdtype, rdclass) = self.parser.get_struct("!HH") - (rdclass, rdtype, _, _) = self.message._parse_rr_header( - section_number, qname, rdclass, rdtype - ) - self.message.find_rrset( - section, qname, rdclass, rdtype, create=True, force_unique=True - ) - - def _add_error(self, e): - self.errors.append(MessageError(e, self.parser.current)) - - def _get_section(self, section_number, count): - """Read the next I{count} records from the wire data and add them to - the specified section. 
- - section_number: the section of the message to which to add records - count: the number of records to read - """ - assert self.message is not None - section = self.message.sections[section_number] - force_unique = self.one_rr_per_rrset - for i in range(count): - rr_start = self.parser.current - absolute_name = self.parser.get_name() - if self.message.origin is not None: - name = absolute_name.relativize(self.message.origin) - else: - name = absolute_name - (rdtype, rdclass, ttl, rdlen) = self.parser.get_struct("!HHIH") - if rdtype in (dns.rdatatype.OPT, dns.rdatatype.TSIG): - ( - rdclass, - rdtype, - deleting, - empty, - ) = self.message._parse_special_rr_header( - section_number, count, i, name, rdclass, rdtype - ) - else: - (rdclass, rdtype, deleting, empty) = self.message._parse_rr_header( - section_number, name, rdclass, rdtype - ) - rdata_start = self.parser.current - try: - if empty: - if rdlen > 0: - raise dns.exception.FormError - rd = None - covers = dns.rdatatype.NONE - else: - with self.parser.restrict_to(rdlen): - rd = dns.rdata.from_wire_parser( - rdclass, rdtype, self.parser, self.message.origin - ) - covers = rd.covers() - if self.message.xfr and rdtype == dns.rdatatype.SOA: - force_unique = True - if rdtype == dns.rdatatype.OPT: - self.message.opt = dns.rrset.from_rdata(name, ttl, rd) - elif rdtype == dns.rdatatype.TSIG: - if self.keyring is None: - raise UnknownTSIGKey("got signed message without keyring") - if isinstance(self.keyring, dict): - key = self.keyring.get(absolute_name) - if isinstance(key, bytes): - key = dns.tsig.Key(absolute_name, key, rd.algorithm) - elif callable(self.keyring): - key = self.keyring(self.message, absolute_name) - else: - key = self.keyring - if key is None: - raise UnknownTSIGKey("key '%s' unknown" % name) - self.message.keyring = key - self.message.tsig_ctx = dns.tsig.validate( - self.parser.wire, - key, - absolute_name, - rd, - int(time.time()), - self.message.request_mac, - rr_start, - self.message.tsig_ctx, - self.multi, - ) - self.message.tsig = dns.rrset.from_rdata(absolute_name, 0, rd) - else: - rrset = self.message.find_rrset( - section, - name, - rdclass, - rdtype, - covers, - deleting, - True, - force_unique, - ) - if rd is not None: - if ttl > 0x7FFFFFFF: - ttl = 0 - rrset.add(rd, ttl) - except Exception as e: - if self.continue_on_error: - self._add_error(e) - self.parser.seek(rdata_start + rdlen) - else: - raise - - def read(self): - """Read a wire format DNS message and build a dns.message.Message - object.""" - - if self.parser.remaining() < 12: - raise ShortHeader - (id, flags, qcount, ancount, aucount, adcount) = self.parser.get_struct( - "!HHHHHH" - ) - factory = _message_factory_from_opcode(dns.opcode.from_flags(flags)) - self.message = factory(id=id) - self.message.flags = dns.flags.Flag(flags) - self.initialize_message(self.message) - self.one_rr_per_rrset = self.message._get_one_rr_per_rrset( - self.one_rr_per_rrset - ) - try: - self._get_question(MessageSection.QUESTION, qcount) - if self.question_only: - return self.message - self._get_section(MessageSection.ANSWER, ancount) - self._get_section(MessageSection.AUTHORITY, aucount) - self._get_section(MessageSection.ADDITIONAL, adcount) - if not self.ignore_trailing and self.parser.remaining() != 0: - raise TrailingJunk - if self.multi and self.message.tsig_ctx and not self.message.had_tsig: - self.message.tsig_ctx.update(self.parser.wire) - except Exception as e: - if self.continue_on_error: - self._add_error(e) - else: - raise - return self.message - - -def 
from_wire( - wire: bytes, - keyring: Optional[Any] = None, - request_mac: Optional[bytes] = b"", - xfr: bool = False, - origin: Optional[dns.name.Name] = None, - tsig_ctx: Optional[Union[dns.tsig.HMACTSig, dns.tsig.GSSTSig]] = None, - multi: bool = False, - question_only: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - raise_on_truncation: bool = False, - continue_on_error: bool = False, -) -> Message: - """Convert a DNS wire format message into a message object. - - *keyring*, a ``dns.tsig.Key`` or ``dict``, the key or keyring to use if the message - is signed. - - *request_mac*, a ``bytes`` or ``None``. If the message is a response to a - TSIG-signed request, *request_mac* should be set to the MAC of that request. - - *xfr*, a ``bool``, should be set to ``True`` if this message is part of a zone - transfer. - - *origin*, a ``dns.name.Name`` or ``None``. If the message is part of a zone - transfer, *origin* should be the origin name of the zone. If not ``None``, names - will be relativized to the origin. - - *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the ongoing TSIG - context, used when validating zone transfers. - - *multi*, a ``bool``, should be set to ``True`` if this message is part of a multiple - message sequence. - - *question_only*, a ``bool``. If ``True``, read only up to the end of the question - section. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the - message. - - *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if the TC bit is - set. - - *continue_on_error*, a ``bool``. If ``True``, try to continue parsing even if - errors occur. Erroneous rdata will be ignored. Errors will be accumulated as a - list of MessageError objects in the message's ``errors`` attribute. This option is - recommended only for DNS analysis tools, or for use in a server as part of an error - handling path. The default is ``False``. - - Raises ``dns.message.ShortHeader`` if the message is less than 12 octets long. - - Raises ``dns.message.TrailingJunk`` if there were octets in the message past the end - of the proper DNS message, and *ignore_trailing* is ``False``. - - Raises ``dns.message.BadEDNS`` if an OPT record was in the wrong section, or - occurred more than once. - - Raises ``dns.message.BadTSIG`` if a TSIG record was not the last record of the - additional data section. - - Raises ``dns.message.Truncated`` if the TC flag is set and *raise_on_truncation* is - ``True``. - - Returns a ``dns.message.Message``. - """ - - # We permit None for request_mac solely for backwards compatibility - if request_mac is None: - request_mac = b"" - - def initialize_message(message): - message.request_mac = request_mac - message.xfr = xfr - message.origin = origin - message.tsig_ctx = tsig_ctx - - reader = _WireReader( - wire, - initialize_message, - question_only, - one_rr_per_rrset, - ignore_trailing, - keyring, - multi, - continue_on_error, - ) - try: - m = reader.read() - except dns.exception.FormError: - if ( - reader.message - and (reader.message.flags & dns.flags.TC) - and raise_on_truncation - ): - raise Truncated(message=reader.message) - else: - raise - # Reading a truncated message might not have any errors, so we - # have to do this check here too. 
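
A lenient-parsing sketch for the *continue_on_error* mode documented above (``parse_leniently`` and its input are hypothetical):

.. code-block:: python

    import dns.message

    def parse_leniently(wire: bytes) -> dns.message.Message:
        # Per-record failures are collected instead of raised.
        msg = dns.message.from_wire(wire, continue_on_error=True)
        for err in msg.errors:        # dns.message.MessageError objects
            print(f"parse error at offset {err.offset}: {err.exception}")
        return msg
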
- if m.flags & dns.flags.TC and raise_on_truncation: - raise Truncated(message=m) - if continue_on_error: - m.errors = reader.errors - - return m - - -class _TextReader: - - """Text format reader. - - tok: the tokenizer. - message: The message object being built. - DNS dynamic updates. - last_name: The most recently read name when building a message object. - one_rr_per_rrset: Put each RR into its own RRset? - origin: The origin for relative names - relativize: relativize names? - relativize_to: the origin to relativize to. - """ - - def __init__( - self, - text, - idna_codec, - one_rr_per_rrset=False, - origin=None, - relativize=True, - relativize_to=None, - ): - self.message = None - self.tok = dns.tokenizer.Tokenizer(text, idna_codec=idna_codec) - self.last_name = None - self.one_rr_per_rrset = one_rr_per_rrset - self.origin = origin - self.relativize = relativize - self.relativize_to = relativize_to - self.id = None - self.edns = -1 - self.ednsflags = 0 - self.payload = DEFAULT_EDNS_PAYLOAD - self.rcode = None - self.opcode = dns.opcode.QUERY - self.flags = 0 - - def _header_line(self, _): - """Process one line from the text format header section.""" - - token = self.tok.get() - what = token.value - if what == "id": - self.id = self.tok.get_int() - elif what == "flags": - while True: - token = self.tok.get() - if not token.is_identifier(): - self.tok.unget(token) - break - self.flags = self.flags | dns.flags.from_text(token.value) - elif what == "edns": - self.edns = self.tok.get_int() - self.ednsflags = self.ednsflags | (self.edns << 16) - elif what == "eflags": - if self.edns < 0: - self.edns = 0 - while True: - token = self.tok.get() - if not token.is_identifier(): - self.tok.unget(token) - break - self.ednsflags = self.ednsflags | dns.flags.edns_from_text(token.value) - elif what == "payload": - self.payload = self.tok.get_int() - if self.edns < 0: - self.edns = 0 - elif what == "opcode": - text = self.tok.get_string() - self.opcode = dns.opcode.from_text(text) - self.flags = self.flags | dns.opcode.to_flags(self.opcode) - elif what == "rcode": - text = self.tok.get_string() - self.rcode = dns.rcode.from_text(text) - else: - raise UnknownHeaderField - self.tok.get_eol() - - def _question_line(self, section_number): - """Process one line from the text format question section.""" - - section = self.message.sections[section_number] - token = self.tok.get(want_leading=True) - if not token.is_whitespace(): - self.last_name = self.tok.as_name( - token, self.message.origin, self.relativize, self.relativize_to - ) - name = self.last_name - if name is None: - raise NoPreviousName - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - # Class - try: - rdclass = dns.rdataclass.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - rdclass = dns.rdataclass.IN - # Type - rdtype = dns.rdatatype.from_text(token.value) - (rdclass, rdtype, _, _) = self.message._parse_rr_header( - section_number, name, rdclass, rdtype - ) - self.message.find_rrset( - section, name, rdclass, rdtype, create=True, force_unique=True - ) - self.tok.get_eol() - - def _rr_line(self, section_number): - """Process one line from the text format answer, authority, or - additional data sections. 
- """ - - section = self.message.sections[section_number] - # Name - token = self.tok.get(want_leading=True) - if not token.is_whitespace(): - self.last_name = self.tok.as_name( - token, self.message.origin, self.relativize, self.relativize_to - ) - name = self.last_name - if name is None: - raise NoPreviousName - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - # TTL - try: - ttl = int(token.value, 0) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - ttl = 0 - # Class - try: - rdclass = dns.rdataclass.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - rdclass = dns.rdataclass.IN - # Type - rdtype = dns.rdatatype.from_text(token.value) - (rdclass, rdtype, deleting, empty) = self.message._parse_rr_header( - section_number, name, rdclass, rdtype - ) - token = self.tok.get() - if empty and not token.is_eol_or_eof(): - raise dns.exception.SyntaxError - if not empty and token.is_eol_or_eof(): - raise dns.exception.UnexpectedEnd - if not token.is_eol_or_eof(): - self.tok.unget(token) - rd = dns.rdata.from_text( - rdclass, - rdtype, - self.tok, - self.message.origin, - self.relativize, - self.relativize_to, - ) - covers = rd.covers() - else: - rd = None - covers = dns.rdatatype.NONE - rrset = self.message.find_rrset( - section, - name, - rdclass, - rdtype, - covers, - deleting, - True, - self.one_rr_per_rrset, - ) - if rd is not None: - rrset.add(rd, ttl) - - def _make_message(self): - factory = _message_factory_from_opcode(self.opcode) - message = factory(id=self.id) - message.flags = self.flags - if self.edns >= 0: - message.use_edns(self.edns, self.ednsflags, self.payload) - if self.rcode: - message.set_rcode(self.rcode) - if self.origin: - message.origin = self.origin - return message - - def read(self): - """Read a text format DNS message and build a dns.message.Message - object.""" - - line_method = self._header_line - section_number = None - while 1: - token = self.tok.get(True, True) - if token.is_eol_or_eof(): - break - if token.is_comment(): - u = token.value.upper() - if u == "HEADER": - line_method = self._header_line - - if self.message: - message = self.message - else: - # If we don't have a message, create one with the current - # opcode, so that we know which section names to parse. - message = self._make_message() - try: - section_number = message._section_enum.from_text(u) - # We found a section name. If we don't have a message, - # use the one we just created. - if not self.message: - self.message = message - self.one_rr_per_rrset = message._get_one_rr_per_rrset( - self.one_rr_per_rrset - ) - if section_number == MessageSection.QUESTION: - line_method = self._question_line - else: - line_method = self._rr_line - except Exception: - # It's just a comment. - pass - self.tok.get_eol() - continue - self.tok.unget(token) - line_method(section_number) - if not self.message: - self.message = self._make_message() - return self.message - - -def from_text( - text: str, - idna_codec: Optional[dns.name.IDNACodec] = None, - one_rr_per_rrset: bool = False, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, -) -> Message: - """Convert the text format message into a message object. 
- - The reader stops after reading the first blank line in the input to - facilitate reading multiple messages from a single file with - ``dns.message.from_file()``. - - *text*, a ``str``, the text format message. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put - into its own rrset. The default is ``False``. - - *origin*, a ``dns.name.Name`` (or ``None``), the - origin to use for relative names. - - *relativize*, a ``bool``. If true, name will be relativized. - - *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use - when relativizing names. If not set, the *origin* value will be used. - - Raises ``dns.message.UnknownHeaderField`` if a header is unknown. - - Raises ``dns.exception.SyntaxError`` if the text is badly formed. - - Returns a ``dns.message.Message object`` - """ - - # 'text' can also be a file, but we don't publish that fact - # since it's an implementation detail. The official file - # interface is from_file(). - - reader = _TextReader( - text, idna_codec, one_rr_per_rrset, origin, relativize, relativize_to - ) - return reader.read() - - -def from_file( - f: Any, - idna_codec: Optional[dns.name.IDNACodec] = None, - one_rr_per_rrset: bool = False, -) -> Message: - """Read the next text format message from the specified file. - - Message blocks are separated by a single blank line. - - *f*, a ``file`` or ``str``. If *f* is text, it is treated as the - pathname of a file to open. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put - into its own rrset. The default is ``False``. - - Raises ``dns.message.UnknownHeaderField`` if a header is unknown. - - Raises ``dns.exception.SyntaxError`` if the text is badly formed. - - Returns a ``dns.message.Message object`` - """ - - if isinstance(f, str): - cm: contextlib.AbstractContextManager = open(f) - else: - cm = contextlib.nullcontext(f) - with cm as f: - return from_text(f, idna_codec, one_rr_per_rrset) - assert False # for mypy lgtm[py/unreachable-statement] - - -def make_query( - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - use_edns: Optional[Union[int, bool]] = None, - want_dnssec: bool = False, - ednsflags: Optional[int] = None, - payload: Optional[int] = None, - request_payload: Optional[int] = None, - options: Optional[List[dns.edns.Option]] = None, - idna_codec: Optional[dns.name.IDNACodec] = None, - id: Optional[int] = None, - flags: int = dns.flags.RD, - pad: int = 0, -) -> QueryMessage: - """Make a query message. - - The query name, type, and class may all be specified either - as objects of the appropriate type, or as strings. - - The query will have a randomly chosen query id, and its DNS flags - will be set to dns.flags.RD. - - qname, a ``dns.name.Name`` or ``str``, the query name. - - *rdtype*, an ``int`` or ``str``, the desired rdata type. - - *rdclass*, an ``int`` or ``str``, the desired rdata class; the default - is class IN. - - *use_edns*, an ``int``, ``bool`` or ``None``. The EDNS level to use; the - default is ``None``. If ``None``, EDNS will be enabled only if other - parameters (*ednsflags*, *payload*, *request_payload*, or *options*) are - set. 
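
A sketch of the implicit-EDNS rule just described:

.. code-block:: python

    import dns.message

    # Supplying any EDNS-related argument implicitly selects EDNS0.
    q = dns.message.make_query("example.com.", "MX", payload=1232)
    assert q.edns == 0
    assert q.payload == 1232
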
- See the description of dns.message.Message.use_edns() for the possible - values for use_edns and their meanings. - - *want_dnssec*, a ``bool``. If ``True``, DNSSEC data is desired. - - *ednsflags*, an ``int``, the EDNS flag values. - - *payload*, an ``int``, is the EDNS sender's payload field, which is the - maximum size of UDP datagram the sender can handle. I.e. how big - a response to this message can be. - - *request_payload*, an ``int``, is the EDNS payload size to use when - sending this message. If not specified, defaults to the value of - *payload*. - - *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS - options. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - *id*, an ``int`` or ``None``, the desired query id. The default is - ``None``, which generates a random query id. - - *flags*, an ``int``, the desired query flags. The default is - ``dns.flags.RD``. - - *pad*, a non-negative ``int``. If 0, the default, do not pad; otherwise add - padding bytes to make the message size a multiple of *pad*. Note that if - padding is non-zero, an EDNS PADDING option will always be added to the - message. - - Returns a ``dns.message.QueryMessage`` - """ - - if isinstance(qname, str): - qname = dns.name.from_text(qname, idna_codec=idna_codec) - rdtype = dns.rdatatype.RdataType.make(rdtype) - rdclass = dns.rdataclass.RdataClass.make(rdclass) - m = QueryMessage(id=id) - m.flags = dns.flags.Flag(flags) - m.find_rrset(m.question, qname, rdclass, rdtype, create=True, force_unique=True) - # only pass keywords on to use_edns if they have been set to a - # non-None value. Setting a field will turn EDNS on if it hasn't - # been configured. - kwargs: Dict[str, Any] = {} - if ednsflags is not None: - kwargs["ednsflags"] = ednsflags - if payload is not None: - kwargs["payload"] = payload - if request_payload is not None: - kwargs["request_payload"] = request_payload - if options is not None: - kwargs["options"] = options - if kwargs and use_edns is None: - use_edns = 0 - kwargs["edns"] = use_edns - kwargs["pad"] = pad - m.use_edns(**kwargs) - m.want_dnssec(want_dnssec) - return m - - -def make_response( - query: Message, - recursion_available: bool = False, - our_payload: int = 8192, - fudge: int = 300, - tsig_error: int = 0, -) -> Message: - """Make a message which is a response for the specified query. - The message returned is really a response skeleton; it has all - of the infrastructure required of a response, but none of the - content. - - The response's question section is a shallow copy of the query's - question section, so the query's question RRsets should not be - changed. - - *query*, a ``dns.message.Message``, the query to respond to. - - *recursion_available*, a ``bool``, should RA be set in the response? - - *our_payload*, an ``int``, the payload size to advertise in EDNS - responses. - - *fudge*, an ``int``, the TSIG time fudge. - - *tsig_error*, an ``int``, the TSIG error. - - Returns a ``dns.message.Message`` object whose specific class is - appropriate for the query. For example, if query is a - ``dns.update.UpdateMessage``, response will be too. 
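
A sketch pairing ``make_query()`` with ``make_response()``; the skeleton mirrors the query's id, question, and RD flag:

.. code-block:: python

    import dns.flags
    import dns.message
    import dns.rcode

    query = dns.message.make_query("example.com.", "A")
    resp = dns.message.make_response(query, recursion_available=True)

    assert resp.id == query.id
    assert resp.flags & dns.flags.QR
    assert query.is_response(resp)
    resp.set_rcode(dns.rcode.NXDOMAIN)   # then fill in sections as needed
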
- """ - - if query.flags & dns.flags.QR: - raise dns.exception.FormError("specified query message is not a query") - factory = _message_factory_from_opcode(query.opcode()) - response = factory(id=query.id) - response.flags = dns.flags.QR | (query.flags & dns.flags.RD) - if recursion_available: - response.flags |= dns.flags.RA - response.set_opcode(query.opcode()) - response.question = list(query.question) - if query.edns >= 0: - response.use_edns(0, 0, our_payload, query.payload) - if query.had_tsig: - response.use_tsig( - query.keyring, - query.keyname, - fudge, - None, - tsig_error, - b"", - query.keyalgorithm, - ) - response.request_mac = query.mac - return response - - -### BEGIN generated MessageSection constants - -QUESTION = MessageSection.QUESTION -ANSWER = MessageSection.ANSWER -AUTHORITY = MessageSection.AUTHORITY -ADDITIONAL = MessageSection.ADDITIONAL - -### END generated MessageSection constants diff --git a/server/venv/Lib/site-packages/dns/name.py b/server/venv/Lib/site-packages/dns/name.py deleted file mode 100644 index f452bfe..0000000 --- a/server/venv/Lib/site-packages/dns/name.py +++ /dev/null @@ -1,1084 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Names. -""" - -import copy -import encodings.idna # type: ignore -import struct -from typing import Any, Dict, Iterable, Optional, Tuple, Union - -try: - import idna # type: ignore - - have_idna_2008 = True -except ImportError: # pragma: no cover - have_idna_2008 = False - -import dns.enum -import dns.exception -import dns.immutable -import dns.wire - -CompressType = Dict["Name", int] - - -class NameRelation(dns.enum.IntEnum): - """Name relation result from fullcompare().""" - - # This is an IntEnum for backwards compatibility in case anyone - # has hardwired the constants. - - #: The compared names have no relationship to each other. - NONE = 0 - #: the first name is a superdomain of the second. - SUPERDOMAIN = 1 - #: The first name is a subdomain of the second. - SUBDOMAIN = 2 - #: The compared names are equal. - EQUAL = 3 - #: The compared names have a common ancestor. 
- COMMONANCESTOR = 4 - - @classmethod - def _maximum(cls): - return cls.COMMONANCESTOR - - @classmethod - def _short_name(cls): - return cls.__name__ - - -# Backwards compatibility -NAMERELN_NONE = NameRelation.NONE -NAMERELN_SUPERDOMAIN = NameRelation.SUPERDOMAIN -NAMERELN_SUBDOMAIN = NameRelation.SUBDOMAIN -NAMERELN_EQUAL = NameRelation.EQUAL -NAMERELN_COMMONANCESTOR = NameRelation.COMMONANCESTOR - - -class EmptyLabel(dns.exception.SyntaxError): - """A DNS label is empty.""" - - -class BadEscape(dns.exception.SyntaxError): - """An escaped code in a text format of DNS name is invalid.""" - - -class BadPointer(dns.exception.FormError): - """A DNS compression pointer points forward instead of backward.""" - - -class BadLabelType(dns.exception.FormError): - """The label type in DNS name wire format is unknown.""" - - -class NeedAbsoluteNameOrOrigin(dns.exception.DNSException): - """An attempt was made to convert a non-absolute name to - wire when there was also a non-absolute (or missing) origin.""" - - -class NameTooLong(dns.exception.FormError): - """A DNS name is > 255 octets long.""" - - -class LabelTooLong(dns.exception.SyntaxError): - """A DNS label is > 63 octets long.""" - - -class AbsoluteConcatenation(dns.exception.DNSException): - """An attempt was made to append anything other than the - empty name to an absolute DNS name.""" - - -class NoParent(dns.exception.DNSException): - """An attempt was made to get the parent of the root name - or the empty name.""" - - -class NoIDNA2008(dns.exception.DNSException): - """IDNA 2008 processing was requested but the idna module is not - available.""" - - -class IDNAException(dns.exception.DNSException): - """IDNA processing raised an exception.""" - - supp_kwargs = {"idna_exception"} - fmt = "IDNA processing exception: {idna_exception}" - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - -_escaped = b'"().;\\@$' -_escaped_text = '"().;\\@$' - - -def _escapify(label: Union[bytes, str]) -> str: - """Escape the characters in label which need it. - @returns: the escaped string - @rtype: string""" - if isinstance(label, bytes): - # Ordinary DNS label mode. Escape special characters and values - # < 0x20 or > 0x7f. - text = "" - for c in label: - if c in _escaped: - text += "\\" + chr(c) - elif c > 0x20 and c < 0x7F: - text += chr(c) - else: - text += "\\%03d" % c - return text - - # Unicode label mode. Escape only special characters and values < 0x20 - text = "" - for uc in label: - if uc in _escaped_text: - text += "\\" + uc - elif uc <= "\x20": - text += "\\%03d" % ord(uc) - else: - text += uc - return text - - -class IDNACodec: - """Abstract base class for IDNA encoder/decoders.""" - - def __init__(self): - pass - - def is_idna(self, label: bytes) -> bool: - return label.lower().startswith(b"xn--") - - def encode(self, label: str) -> bytes: - raise NotImplementedError # pragma: no cover - - def decode(self, label: bytes) -> str: - # We do not apply any IDNA policy on decode. - if self.is_idna(label): - try: - slabel = label[4:].decode("punycode") - return _escapify(slabel) - except Exception as e: - raise IDNAException(idna_exception=e) - else: - return _escapify(label) - - -class IDNA2003Codec(IDNACodec): - """IDNA 2003 encoder/decoder.""" - - def __init__(self, strict_decode: bool = False): - """Initialize the IDNA 2003 encoder/decoder. - - *strict_decode* is a ``bool``. 
If `True`, then IDNA2003 checking - is done when decoding. This can cause failures if the name - was encoded with IDNA2008. The default is `False`. - """ - - super().__init__() - self.strict_decode = strict_decode - - def encode(self, label: str) -> bytes: - """Encode *label*.""" - - if label == "": - return b"" - try: - return encodings.idna.ToASCII(label) - except UnicodeError: - raise LabelTooLong - - def decode(self, label: bytes) -> str: - """Decode *label*.""" - if not self.strict_decode: - return super().decode(label) - if label == b"": - return "" - try: - return _escapify(encodings.idna.ToUnicode(label)) - except Exception as e: - raise IDNAException(idna_exception=e) - - -class IDNA2008Codec(IDNACodec): - """IDNA 2008 encoder/decoder.""" - - def __init__( - self, - uts_46: bool = False, - transitional: bool = False, - allow_pure_ascii: bool = False, - strict_decode: bool = False, - ): - """Initialize the IDNA 2008 encoder/decoder. - - *uts_46* is a ``bool``. If True, apply Unicode IDNA - compatibility processing as described in Unicode Technical - Standard #46 (https://unicode.org/reports/tr46/). - If False, do not apply the mapping. The default is False. - - *transitional* is a ``bool``: If True, use the - "transitional" mode described in Unicode Technical Standard - #46. The default is False. - - *allow_pure_ascii* is a ``bool``. If True, then a label which - consists of only ASCII characters is allowed. This is less - strict than regular IDNA 2008, but is also necessary for mixed - names, e.g. a name with starting with "_sip._tcp." and ending - in an IDN suffix which would otherwise be disallowed. The - default is False. - - *strict_decode* is a ``bool``: If True, then IDNA2008 checking - is done when decoding. This can cause failures if the name - was encoded with IDNA2003. The default is False. - """ - super().__init__() - self.uts_46 = uts_46 - self.transitional = transitional - self.allow_pure_ascii = allow_pure_ascii - self.strict_decode = strict_decode - - def encode(self, label: str) -> bytes: - if label == "": - return b"" - if self.allow_pure_ascii and is_all_ascii(label): - encoded = label.encode("ascii") - if len(encoded) > 63: - raise LabelTooLong - return encoded - if not have_idna_2008: - raise NoIDNA2008 - try: - if self.uts_46: - label = idna.uts46_remap(label, False, self.transitional) - return idna.alabel(label) - except idna.IDNAError as e: - if e.args[0] == "Label too long": - raise LabelTooLong - else: - raise IDNAException(idna_exception=e) - - def decode(self, label: bytes) -> str: - if not self.strict_decode: - return super().decode(label) - if label == b"": - return "" - if not have_idna_2008: - raise NoIDNA2008 - try: - ulabel = idna.ulabel(label) - if self.uts_46: - ulabel = idna.uts46_remap(ulabel, False, self.transitional) - return _escapify(ulabel) - except (idna.IDNAError, UnicodeError) as e: - raise IDNAException(idna_exception=e) - - -IDNA_2003_Practical = IDNA2003Codec(False) -IDNA_2003_Strict = IDNA2003Codec(True) -IDNA_2003 = IDNA_2003_Practical -IDNA_2008_Practical = IDNA2008Codec(True, False, True, False) -IDNA_2008_UTS_46 = IDNA2008Codec(True, False, False, False) -IDNA_2008_Strict = IDNA2008Codec(False, False, False, True) -IDNA_2008_Transitional = IDNA2008Codec(True, True, False, False) -IDNA_2008 = IDNA_2008_Practical - - -def _validate_labels(labels: Tuple[bytes, ...]) -> None: - """Check for empty labels in the middle of a label sequence, - labels that are too long, and for too many labels. 
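The module-level instances just defined (IDNA_2003, IDNA_2008, and the stricter or transitional variants) are the usual way to pick a policy. A small illustrative sketch of encoding and decoding one label; IDNA 2008 requires the third-party idna package, which is why the snippet checks have_idna_2008:

import dns.name

label = "münchen"
print(dns.name.IDNA_2003.encode(label))  # b'xn--mnchen-3ya'
if dns.name.have_idna_2008:
    # The same ACE form for this label; the codecs differ on edge cases.
    print(dns.name.IDNA_2008.encode(label))
print(dns.name.IDNA_2003.decode(b"xn--mnchen-3ya"))  # 'münchen'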
- - Raises ``dns.name.NameTooLong`` if the name as a whole is too long. - - Raises ``dns.name.EmptyLabel`` if a label is empty (i.e. the root - label) and appears in a position other than the end of the label - sequence - - """ - - l = len(labels) - total = 0 - i = -1 - j = 0 - for label in labels: - ll = len(label) - total += ll + 1 - if ll > 63: - raise LabelTooLong - if i < 0 and label == b"": - i = j - j += 1 - if total > 255: - raise NameTooLong - if i >= 0 and i != l - 1: - raise EmptyLabel - - -def _maybe_convert_to_binary(label: Union[bytes, str]) -> bytes: - """If label is ``str``, convert it to ``bytes``. If it is already - ``bytes`` just return it. - - """ - - if isinstance(label, bytes): - return label - if isinstance(label, str): - return label.encode() - raise ValueError # pragma: no cover - - -@dns.immutable.immutable -class Name: - - """A DNS name. - - The dns.name.Name class represents a DNS name as a tuple of - labels. Each label is a ``bytes`` in DNS wire format. Instances - of the class are immutable. - """ - - __slots__ = ["labels"] - - def __init__(self, labels: Iterable[Union[bytes, str]]): - """*labels* is any iterable whose values are ``str`` or ``bytes``.""" - - blabels = [_maybe_convert_to_binary(x) for x in labels] - self.labels = tuple(blabels) - _validate_labels(self.labels) - - def __copy__(self): - return Name(self.labels) - - def __deepcopy__(self, memo): - return Name(copy.deepcopy(self.labels, memo)) - - def __getstate__(self): - # Names can be pickled - return {"labels": self.labels} - - def __setstate__(self, state): - super().__setattr__("labels", state["labels"]) - _validate_labels(self.labels) - - def is_absolute(self) -> bool: - """Is the most significant label of this name the root label? - - Returns a ``bool``. - """ - - return len(self.labels) > 0 and self.labels[-1] == b"" - - def is_wild(self) -> bool: - """Is this name wild? (I.e. Is the least significant label '*'?) - - Returns a ``bool``. - """ - - return len(self.labels) > 0 and self.labels[0] == b"*" - - def __hash__(self) -> int: - """Return a case-insensitive hash of the name. - - Returns an ``int``. - """ - - h = 0 - for label in self.labels: - for c in label.lower(): - h += (h << 3) + c - return h - - def fullcompare(self, other: "Name") -> Tuple[NameRelation, int, int]: - """Compare two names, returning a 3-tuple - ``(relation, order, nlabels)``. - - *relation* describes the relation ship between the names, - and is one of: ``dns.name.NameRelation.NONE``, - ``dns.name.NameRelation.SUPERDOMAIN``, ``dns.name.NameRelation.SUBDOMAIN``, - ``dns.name.NameRelation.EQUAL``, or ``dns.name.NameRelation.COMMONANCESTOR``. - - *order* is < 0 if *self* < *other*, > 0 if *self* > *other*, and == - 0 if *self* == *other*. A relative name is always less than an - absolute name. If both names have the same relativity, then - the DNSSEC order relation is used to order them. - - *nlabels* is the number of significant labels that the two names - have in common. - - Here are some examples. Names ending in "." are absolute names, - those not ending in "." are relative names. - - ============= ============= =========== ===== ======= - self other relation order nlabels - ============= ============= =========== ===== ======= - www.example. www.example. equal 0 3 - www.example. example. subdomain > 0 2 - example. www.example. superdomain < 0 2 - example1.com. example2.com. common anc. < 0 2 - example1 example2. none < 0 0 - example1. 
example2 none > 0 0
-        ============= ============= =========== ===== =======
-        """
-
-        sabs = self.is_absolute()
-        oabs = other.is_absolute()
-        if sabs != oabs:
-            if sabs:
-                return (NameRelation.NONE, 1, 0)
-            else:
-                return (NameRelation.NONE, -1, 0)
-        l1 = len(self.labels)
-        l2 = len(other.labels)
-        ldiff = l1 - l2
-        if ldiff < 0:
-            l = l1
-        else:
-            l = l2
-
-        order = 0
-        nlabels = 0
-        namereln = NameRelation.NONE
-        while l > 0:
-            l -= 1
-            l1 -= 1
-            l2 -= 1
-            label1 = self.labels[l1].lower()
-            label2 = other.labels[l2].lower()
-            if label1 < label2:
-                order = -1
-                if nlabels > 0:
-                    namereln = NameRelation.COMMONANCESTOR
-                return (namereln, order, nlabels)
-            elif label1 > label2:
-                order = 1
-                if nlabels > 0:
-                    namereln = NameRelation.COMMONANCESTOR
-                return (namereln, order, nlabels)
-            nlabels += 1
-        order = ldiff
-        if ldiff < 0:
-            namereln = NameRelation.SUPERDOMAIN
-        elif ldiff > 0:
-            namereln = NameRelation.SUBDOMAIN
-        else:
-            namereln = NameRelation.EQUAL
-        return (namereln, order, nlabels)
-
-    def is_subdomain(self, other: "Name") -> bool:
-        """Is self a subdomain of other?
-
-        Note that the notion of subdomain includes equality, e.g.
-        "dnspython.org" is a subdomain of itself.
-
-        Returns a ``bool``.
-        """
-
-        (nr, _, _) = self.fullcompare(other)
-        if nr == NameRelation.SUBDOMAIN or nr == NameRelation.EQUAL:
-            return True
-        return False
-
-    def is_superdomain(self, other: "Name") -> bool:
-        """Is self a superdomain of other?
-
-        Note that the notion of superdomain includes equality, e.g.
-        "dnspython.org" is a superdomain of itself.
-
-        Returns a ``bool``.
-        """
-
-        (nr, _, _) = self.fullcompare(other)
-        if nr == NameRelation.SUPERDOMAIN or nr == NameRelation.EQUAL:
-            return True
-        return False
-
-    def canonicalize(self) -> "Name":
-        """Return a name which is equal to the current name, but is in
-        DNSSEC canonical form.
-        """
-
-        return Name([x.lower() for x in self.labels])
-
-    def __eq__(self, other):
-        if isinstance(other, Name):
-            return self.fullcompare(other)[1] == 0
-        else:
-            return False
-
-    def __ne__(self, other):
-        if isinstance(other, Name):
-            return self.fullcompare(other)[1] != 0
-        else:
-            return True
-
-    def __lt__(self, other):
-        if isinstance(other, Name):
-            return self.fullcompare(other)[1] < 0
-        else:
-            return NotImplemented
-
-    def __le__(self, other):
-        if isinstance(other, Name):
-            return self.fullcompare(other)[1] <= 0
-        else:
-            return NotImplemented
-
-    def __ge__(self, other):
-        if isinstance(other, Name):
-            return self.fullcompare(other)[1] >= 0
-        else:
-            return NotImplemented
-
-    def __gt__(self, other):
-        if isinstance(other, Name):
-            return self.fullcompare(other)[1] > 0
-        else:
-            return NotImplemented
-
-    def __repr__(self):
-        return "<DNS name " + self.__str__() + ">"
-
-    def __str__(self):
-        return self.to_text(False)
-
-    def to_text(self, omit_final_dot: bool = False) -> str:
-        """Convert name to DNS text format.
-
-        *omit_final_dot* is a ``bool``. If True, don't emit the final
-        dot (denoting the root label) for absolute names. The default
-        is False.
-
-        Returns a ``str``.
-        """
-
-        if len(self.labels) == 0:
-            return "@"
-        if len(self.labels) == 1 and self.labels[0] == b"":
-            return "."
-        if omit_final_dot and self.is_absolute():
-            l = self.labels[:-1]
-        else:
-            l = self.labels
-        s = ".".join(map(_escapify, l))
-        return s
-
-    def to_unicode(
-        self, omit_final_dot: bool = False, idna_codec: Optional[IDNACodec] = None
-    ) -> str:
-        """Convert name to Unicode text format.
-
-        IDN ACE labels are converted to Unicode.
-
-        *omit_final_dot* is a ``bool``.
If True, don't emit the final - dot (denoting the root label) for absolute names. The default - is False. - *idna_codec* specifies the IDNA encoder/decoder. If None, the - dns.name.IDNA_2003_Practical encoder/decoder is used. - The IDNA_2003_Practical decoder does - not impose any policy, it just decodes punycode, so if you - don't want checking for compliance, you can use this decoder - for IDNA2008 as well. - - Returns a ``str``. - """ - - if len(self.labels) == 0: - return "@" - if len(self.labels) == 1 and self.labels[0] == b"": - return "." - if omit_final_dot and self.is_absolute(): - l = self.labels[:-1] - else: - l = self.labels - if idna_codec is None: - idna_codec = IDNA_2003_Practical - return ".".join([idna_codec.decode(x) for x in l]) - - def to_digestable(self, origin: Optional["Name"] = None) -> bytes: - """Convert name to a format suitable for digesting in hashes. - - The name is canonicalized and converted to uncompressed wire - format. All names in wire format are absolute. If the name - is a relative name, then an origin must be supplied. - - *origin* is a ``dns.name.Name`` or ``None``. If the name is - relative and origin is not ``None``, then origin will be appended - to the name. - - Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is - relative and no origin was provided. - - Returns a ``bytes``. - """ - - digest = self.to_wire(origin=origin, canonicalize=True) - assert digest is not None - return digest - - def to_wire( - self, - file: Optional[Any] = None, - compress: Optional[CompressType] = None, - origin: Optional["Name"] = None, - canonicalize: bool = False, - ) -> Optional[bytes]: - """Convert name to wire format, possibly compressing it. - - *file* is the file where the name is emitted (typically an - io.BytesIO file). If ``None`` (the default), a ``bytes`` - containing the wire name will be returned. - - *compress*, a ``dict``, is the compression table to use. If - ``None`` (the default), names will not be compressed. Note that - the compression code assumes that compression offset 0 is the - start of *file*, and thus compression will not be correct - if this is not the case. - - *origin* is a ``dns.name.Name`` or ``None``. If the name is - relative and origin is not ``None``, then *origin* will be appended - to it. - - *canonicalize*, a ``bool``, indicates whether the name should - be canonicalized; that is, converted to a format suitable for - digesting in hashes. - - Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is - relative and no origin was provided. - - Returns a ``bytes`` or ``None``. 
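A short sketch of the text-output methods covered above (to_text, to_unicode, and to_digestable); illustrative only, with an example IDN name:

import dns.name

name = dns.name.from_text("www.xn--mnchen-3ya.de.")
print(name.to_text())                     # 'www.xn--mnchen-3ya.de.'
print(name.to_text(omit_final_dot=True))  # 'www.xn--mnchen-3ya.de'
print(name.to_unicode())                  # 'www.münchen.de.'
# Canonical, uncompressed wire form, suitable for hashing.
print(name.to_digestable().hex())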
- """ - - if file is None: - out = bytearray() - for label in self.labels: - out.append(len(label)) - if canonicalize: - out += label.lower() - else: - out += label - if not self.is_absolute(): - if origin is None or not origin.is_absolute(): - raise NeedAbsoluteNameOrOrigin - for label in origin.labels: - out.append(len(label)) - if canonicalize: - out += label.lower() - else: - out += label - return bytes(out) - - labels: Iterable[bytes] - if not self.is_absolute(): - if origin is None or not origin.is_absolute(): - raise NeedAbsoluteNameOrOrigin - labels = list(self.labels) - labels.extend(list(origin.labels)) - else: - labels = self.labels - i = 0 - for label in labels: - n = Name(labels[i:]) - i += 1 - if compress is not None: - pos = compress.get(n) - else: - pos = None - if pos is not None: - value = 0xC000 + pos - s = struct.pack("!H", value) - file.write(s) - break - else: - if compress is not None and len(n) > 1: - pos = file.tell() - if pos <= 0x3FFF: - compress[n] = pos - l = len(label) - file.write(struct.pack("!B", l)) - if l > 0: - if canonicalize: - file.write(label.lower()) - else: - file.write(label) - return None - - def __len__(self) -> int: - """The length of the name (in labels). - - Returns an ``int``. - """ - - return len(self.labels) - - def __getitem__(self, index): - return self.labels[index] - - def __add__(self, other): - return self.concatenate(other) - - def __sub__(self, other): - return self.relativize(other) - - def split(self, depth: int) -> Tuple["Name", "Name"]: - """Split a name into a prefix and suffix names at the specified depth. - - *depth* is an ``int`` specifying the number of labels in the suffix - - Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the - name. - - Returns the tuple ``(prefix, suffix)``. - """ - - l = len(self.labels) - if depth == 0: - return (self, dns.name.empty) - elif depth == l: - return (dns.name.empty, self) - elif depth < 0 or depth > l: - raise ValueError("depth must be >= 0 and <= the length of the name") - return (Name(self[:-depth]), Name(self[-depth:])) - - def concatenate(self, other: "Name") -> "Name": - """Return a new name which is the concatenation of self and other. - - Raises ``dns.name.AbsoluteConcatenation`` if the name is - absolute and *other* is not the empty name. - - Returns a ``dns.name.Name``. - """ - - if self.is_absolute() and len(other) > 0: - raise AbsoluteConcatenation - labels = list(self.labels) - labels.extend(list(other.labels)) - return Name(labels) - - def relativize(self, origin: "Name") -> "Name": - """If the name is a subdomain of *origin*, return a new name which is - the name relative to origin. Otherwise return the name. - - For example, relativizing ``www.dnspython.org.`` to origin - ``dnspython.org.`` returns the name ``www``. Relativizing ``example.`` - to origin ``dnspython.org.`` returns ``example.``. - - Returns a ``dns.name.Name``. - """ - - if origin is not None and self.is_subdomain(origin): - return Name(self[: -len(origin)]) - else: - return self - - def derelativize(self, origin: "Name") -> "Name": - """If the name is a relative name, return a new name which is the - concatenation of the name and origin. Otherwise return the name. - - For example, derelativizing ``www`` to origin ``dnspython.org.`` - returns the name ``www.dnspython.org.``. Derelativizing ``example.`` - to origin ``dnspython.org.`` returns ``example.``. - - Returns a ``dns.name.Name``. 
- """ - - if not self.is_absolute(): - return self.concatenate(origin) - else: - return self - - def choose_relativity( - self, origin: Optional["Name"] = None, relativize: bool = True - ) -> "Name": - """Return a name with the relativity desired by the caller. - - If *origin* is ``None``, then the name is returned. - Otherwise, if *relativize* is ``True`` the name is - relativized, and if *relativize* is ``False`` the name is - derelativized. - - Returns a ``dns.name.Name``. - """ - - if origin: - if relativize: - return self.relativize(origin) - else: - return self.derelativize(origin) - else: - return self - - def parent(self) -> "Name": - """Return the parent of the name. - - For example, the parent of ``www.dnspython.org.`` is ``dnspython.org``. - - Raises ``dns.name.NoParent`` if the name is either the root name or the - empty name, and thus has no parent. - - Returns a ``dns.name.Name``. - """ - - if self == root or self == empty: - raise NoParent - return Name(self.labels[1:]) - - -#: The root name, '.' -root = Name([b""]) - -#: The empty name. -empty = Name([]) - - -def from_unicode( - text: str, origin: Optional[Name] = root, idna_codec: Optional[IDNACodec] = None -) -> Name: - """Convert unicode text into a Name object. - - Labels are encoded in IDN ACE form according to rules specified by - the IDNA codec. - - *text*, a ``str``, is the text to convert into a name. - - *origin*, a ``dns.name.Name``, specifies the origin to - append to non-absolute names. The default is the root name. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder - is used. - - Returns a ``dns.name.Name``. - """ - - if not isinstance(text, str): - raise ValueError("input to from_unicode() must be a unicode string") - if not (origin is None or isinstance(origin, Name)): - raise ValueError("origin must be a Name or None") - labels = [] - label = "" - escaping = False - edigits = 0 - total = 0 - if idna_codec is None: - idna_codec = IDNA_2003 - if text == "@": - text = "" - if text: - if text in [".", "\u3002", "\uff0e", "\uff61"]: - return Name([b""]) # no Unicode "u" on this constant! - for c in text: - if escaping: - if edigits == 0: - if c.isdigit(): - total = int(c) - edigits += 1 - else: - label += c - escaping = False - else: - if not c.isdigit(): - raise BadEscape - total *= 10 - total += int(c) - edigits += 1 - if edigits == 3: - escaping = False - label += chr(total) - elif c in [".", "\u3002", "\uff0e", "\uff61"]: - if len(label) == 0: - raise EmptyLabel - labels.append(idna_codec.encode(label)) - label = "" - elif c == "\\": - escaping = True - edigits = 0 - total = 0 - else: - label += c - if escaping: - raise BadEscape - if len(label) > 0: - labels.append(idna_codec.encode(label)) - else: - labels.append(b"") - - if (len(labels) == 0 or labels[-1] != b"") and origin is not None: - labels.extend(list(origin.labels)) - return Name(labels) - - -def is_all_ascii(text: str) -> bool: - for c in text: - if ord(c) > 0x7F: - return False - return True - - -def from_text( - text: Union[bytes, str], - origin: Optional[Name] = root, - idna_codec: Optional[IDNACodec] = None, -) -> Name: - """Convert text into a Name object. - - *text*, a ``bytes`` or ``str``, is the text to convert into a name. - - *origin*, a ``dns.name.Name``, specifies the origin to - append to non-absolute names. The default is the root name. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder. 
If ``None``, the default IDNA 2003 encoder/decoder - is used. - - Returns a ``dns.name.Name``. - """ - - if isinstance(text, str): - if not is_all_ascii(text): - # Some codepoint in the input text is > 127, so IDNA applies. - return from_unicode(text, origin, idna_codec) - # The input is all ASCII, so treat this like an ordinary non-IDNA - # domain name. Note that "all ASCII" is about the input text, - # not the codepoints in the domain name. E.g. if text has value - # - # r'\150\151\152\153\154\155\156\157\158\159' - # - # then it's still "all ASCII" even though the domain name has - # codepoints > 127. - text = text.encode("ascii") - if not isinstance(text, bytes): - raise ValueError("input to from_text() must be a string") - if not (origin is None or isinstance(origin, Name)): - raise ValueError("origin must be a Name or None") - labels = [] - label = b"" - escaping = False - edigits = 0 - total = 0 - if text == b"@": - text = b"" - if text: - if text == b".": - return Name([b""]) - for c in text: - byte_ = struct.pack("!B", c) - if escaping: - if edigits == 0: - if byte_.isdigit(): - total = int(byte_) - edigits += 1 - else: - label += byte_ - escaping = False - else: - if not byte_.isdigit(): - raise BadEscape - total *= 10 - total += int(byte_) - edigits += 1 - if edigits == 3: - escaping = False - label += struct.pack("!B", total) - elif byte_ == b".": - if len(label) == 0: - raise EmptyLabel - labels.append(label) - label = b"" - elif byte_ == b"\\": - escaping = True - edigits = 0 - total = 0 - else: - label += byte_ - if escaping: - raise BadEscape - if len(label) > 0: - labels.append(label) - else: - labels.append(b"") - if (len(labels) == 0 or labels[-1] != b"") and origin is not None: - labels.extend(list(origin.labels)) - return Name(labels) - - -# we need 'dns.wire.Parser' quoted as dns.name and dns.wire depend on each other. - - -def from_wire_parser(parser: "dns.wire.Parser") -> Name: - """Convert possibly compressed wire format into a Name. - - *parser* is a dns.wire.Parser. - - Raises ``dns.name.BadPointer`` if a compression pointer did not - point backwards in the message. - - Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. - - Returns a ``dns.name.Name`` - """ - - labels = [] - biggest_pointer = parser.current - with parser.restore_furthest(): - count = parser.get_uint8() - while count != 0: - if count < 64: - labels.append(parser.get_bytes(count)) - elif count >= 192: - current = (count & 0x3F) * 256 + parser.get_uint8() - if current >= biggest_pointer: - raise BadPointer - biggest_pointer = current - parser.seek(current) - else: - raise BadLabelType - count = parser.get_uint8() - labels.append(b"") - return Name(labels) - - -def from_wire(message: bytes, current: int) -> Tuple[Name, int]: - """Convert possibly compressed wire format into a Name. - - *message* is a ``bytes`` containing an entire DNS message in DNS - wire form. - - *current*, an ``int``, is the offset of the beginning of the name - from the start of the message - - Raises ``dns.name.BadPointer`` if a compression pointer did not - point backwards in the message. - - Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. - - Returns a ``(dns.name.Name, int)`` tuple consisting of the name - that was read and the number of bytes of the wire format message - which were consumed reading it. 
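The escape handling in the parser above, and the wire round trip through from_wire(), can be checked directly. A minimal sketch (the name a\046b.example. is just an example with an escaped dot inside a label):

import dns.name

n = dns.name.from_text(r"a\046b.example.")  # \046 is the decimal escape for '.'
assert n.labels[0] == b"a.b"                # the dot stays inside one label
assert n.to_text() == r"a\.b.example."      # to_text() re-escapes it

wire = n.to_wire()
m, consumed = dns.name.from_wire(wire, 0)
assert m == n and consumed == len(wire)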
- """ - - if not isinstance(message, bytes): - raise ValueError("input to from_wire() must be a byte string") - parser = dns.wire.Parser(message, current) - name = from_wire_parser(parser) - return (name, parser.current - current) diff --git a/server/venv/Lib/site-packages/dns/namedict.py b/server/venv/Lib/site-packages/dns/namedict.py deleted file mode 100644 index ca8b197..0000000 --- a/server/venv/Lib/site-packages/dns/namedict.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# Copyright (C) 2016 Coresec Systems AB -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND CORESEC SYSTEMS AB DISCLAIMS ALL -# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL CORESEC -# SYSTEMS AB BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION -# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS name dictionary""" - -# pylint seems to be confused about this one! -from collections.abc import MutableMapping # pylint: disable=no-name-in-module - -import dns.name - - -class NameDict(MutableMapping): - """A dictionary whose keys are dns.name.Name objects. - - In addition to being like a regular Python dictionary, this - dictionary can also get the deepest match for a given key. 
- """ - - __slots__ = ["max_depth", "max_depth_items", "__store"] - - def __init__(self, *args, **kwargs): - super().__init__() - self.__store = dict() - #: the maximum depth of the keys that have ever been added - self.max_depth = 0 - #: the number of items of maximum depth - self.max_depth_items = 0 - self.update(dict(*args, **kwargs)) - - def __update_max_depth(self, key): - if len(key) == self.max_depth: - self.max_depth_items = self.max_depth_items + 1 - elif len(key) > self.max_depth: - self.max_depth = len(key) - self.max_depth_items = 1 - - def __getitem__(self, key): - return self.__store[key] - - def __setitem__(self, key, value): - if not isinstance(key, dns.name.Name): - raise ValueError("NameDict key must be a name") - self.__store[key] = value - self.__update_max_depth(key) - - def __delitem__(self, key): - self.__store.pop(key) - if len(key) == self.max_depth: - self.max_depth_items = self.max_depth_items - 1 - if self.max_depth_items == 0: - self.max_depth = 0 - for k in self.__store: - self.__update_max_depth(k) - - def __iter__(self): - return iter(self.__store) - - def __len__(self): - return len(self.__store) - - def has_key(self, key): - return key in self.__store - - def get_deepest_match(self, name): - """Find the deepest match to *name* in the dictionary. - - The deepest match is the longest name in the dictionary which is - a superdomain of *name*. Note that *superdomain* includes matching - *name* itself. - - *name*, a ``dns.name.Name``, the name to find. - - Returns a ``(key, value)`` where *key* is the deepest - ``dns.name.Name``, and *value* is the value associated with *key*. - """ - - depth = len(name) - if depth > self.max_depth: - depth = self.max_depth - for i in range(-depth, 0): - n = dns.name.Name(name[i:]) - if n in self: - return (n, self[n]) - v = self[dns.name.empty] - return (dns.name.empty, v) diff --git a/server/venv/Lib/site-packages/dns/nameserver.py b/server/venv/Lib/site-packages/dns/nameserver.py deleted file mode 100644 index 5910139..0000000 --- a/server/venv/Lib/site-packages/dns/nameserver.py +++ /dev/null @@ -1,329 +0,0 @@ -from typing import Optional, Union -from urllib.parse import urlparse - -import dns.asyncbackend -import dns.asyncquery -import dns.inet -import dns.message -import dns.query - - -class Nameserver: - def __init__(self): - pass - - def __str__(self): - raise NotImplementedError - - def kind(self) -> str: - raise NotImplementedError - - def is_always_max_size(self) -> bool: - raise NotImplementedError - - def answer_nameserver(self) -> str: - raise NotImplementedError - - def answer_port(self) -> int: - raise NotImplementedError - - def query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - raise NotImplementedError - - async def async_query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - backend: dns.asyncbackend.Backend, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - raise NotImplementedError - - -class AddressAndPortNameserver(Nameserver): - def __init__(self, address: str, port: int): - super().__init__() - self.address = address - self.port = port - - def kind(self) -> str: - raise NotImplementedError - - def is_always_max_size(self) -> bool: - return False - - def __str__(self): - ns_kind = self.kind() - return 
f"{ns_kind}:{self.address}@{self.port}" - - def answer_nameserver(self) -> str: - return self.address - - def answer_port(self) -> int: - return self.port - - -class Do53Nameserver(AddressAndPortNameserver): - def __init__(self, address: str, port: int = 53): - super().__init__(address, port) - - def kind(self): - return "Do53" - - def query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - if max_size: - response = dns.query.tcp( - request, - self.address, - timeout=timeout, - port=self.port, - source=source, - source_port=source_port, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - else: - response = dns.query.udp( - request, - self.address, - timeout=timeout, - port=self.port, - source=source, - source_port=source_port, - raise_on_truncation=True, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - return response - - async def async_query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - backend: dns.asyncbackend.Backend, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - if max_size: - response = await dns.asyncquery.tcp( - request, - self.address, - timeout=timeout, - port=self.port, - source=source, - source_port=source_port, - backend=backend, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - else: - response = await dns.asyncquery.udp( - request, - self.address, - timeout=timeout, - port=self.port, - source=source, - source_port=source_port, - raise_on_truncation=True, - backend=backend, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - return response - - -class DoHNameserver(Nameserver): - def __init__(self, url: str, bootstrap_address: Optional[str] = None): - super().__init__() - self.url = url - self.bootstrap_address = bootstrap_address - - def kind(self): - return "DoH" - - def is_always_max_size(self) -> bool: - return True - - def __str__(self): - return self.url - - def answer_nameserver(self) -> str: - return self.url - - def answer_port(self) -> int: - port = urlparse(self.url).port - if port is None: - port = 443 - return port - - def query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return dns.query.https( - request, - self.url, - timeout=timeout, - bootstrap_address=self.bootstrap_address, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - - async def async_query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - backend: dns.asyncbackend.Backend, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return await dns.asyncquery.https( - request, - self.url, - timeout=timeout, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - - -class DoTNameserver(AddressAndPortNameserver): - def __init__(self, address: str, port: int = 853, hostname: Optional[str] = None): - super().__init__(address, port) - self.hostname = hostname - - def kind(self): - return "DoT" - - def query( - self, - request: 
dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return dns.query.tls( - request, - self.address, - port=self.port, - timeout=timeout, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - server_hostname=self.hostname, - ) - - async def async_query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - backend: dns.asyncbackend.Backend, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return await dns.asyncquery.tls( - request, - self.address, - port=self.port, - timeout=timeout, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - server_hostname=self.hostname, - ) - - -class DoQNameserver(AddressAndPortNameserver): - def __init__( - self, - address: str, - port: int = 853, - verify: Union[bool, str] = True, - server_hostname: Optional[str] = None, - ): - super().__init__(address, port) - self.verify = verify - self.server_hostname = server_hostname - - def kind(self): - return "DoQ" - - def query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return dns.query.quic( - request, - self.address, - port=self.port, - timeout=timeout, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - verify=self.verify, - server_hostname=self.server_hostname, - ) - - async def async_query( - self, - request: dns.message.QueryMessage, - timeout: float, - source: Optional[str], - source_port: int, - max_size: bool, - backend: dns.asyncbackend.Backend, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - ) -> dns.message.Message: - return await dns.asyncquery.quic( - request, - self.address, - port=self.port, - timeout=timeout, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - verify=self.verify, - server_hostname=self.server_hostname, - ) diff --git a/server/venv/Lib/site-packages/dns/node.py b/server/venv/Lib/site-packages/dns/node.py deleted file mode 100644 index c670243..0000000 --- a/server/venv/Lib/site-packages/dns/node.py +++ /dev/null @@ -1,360 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS nodes. 
A node is a set of rdatasets."""
-
-import enum
-import io
-from typing import Any, Dict, Optional
-
-import dns.immutable
-import dns.name
-import dns.rdataclass
-import dns.rdataset
-import dns.rdatatype
-import dns.renderer
-import dns.rrset
-
-_cname_types = {
-    dns.rdatatype.CNAME,
-}
-
-# "neutral" types can coexist with a CNAME and thus are not "other data"
-_neutral_types = {
-    dns.rdatatype.NSEC,  # RFC 4035 section 2.5
-    dns.rdatatype.NSEC3,  # This is not likely to happen, but not impossible!
-    dns.rdatatype.KEY,  # RFC 4035 section 2.5, RFC 3007
-}
-
-
-def _matches_type_or_its_signature(rdtypes, rdtype, covers):
-    return rdtype in rdtypes or (rdtype == dns.rdatatype.RRSIG and covers in rdtypes)
-
-
-@enum.unique
-class NodeKind(enum.Enum):
-    """Rdatasets in nodes"""
-
-    REGULAR = 0  # a.k.a "other data"
-    NEUTRAL = 1
-    CNAME = 2
-
-    @classmethod
-    def classify(
-        cls, rdtype: dns.rdatatype.RdataType, covers: dns.rdatatype.RdataType
-    ) -> "NodeKind":
-        if _matches_type_or_its_signature(_cname_types, rdtype, covers):
-            return NodeKind.CNAME
-        elif _matches_type_or_its_signature(_neutral_types, rdtype, covers):
-            return NodeKind.NEUTRAL
-        else:
-            return NodeKind.REGULAR
-
-    @classmethod
-    def classify_rdataset(cls, rdataset: dns.rdataset.Rdataset) -> "NodeKind":
-        return cls.classify(rdataset.rdtype, rdataset.covers)
-
-
-class Node:
-
-    """A Node is a set of rdatasets.
-
-    A node is either a CNAME node or an "other data" node. A CNAME
-    node contains only CNAME, KEY, NSEC, and NSEC3 rdatasets along with their
-    covering RRSIG rdatasets. An "other data" node contains any
-    rdataset other than a CNAME or RRSIG(CNAME) rdataset. When
-    changes are made to a node, the CNAME or "other data" state is
-    always consistent with the update, i.e. the most recent change
-    wins. For example, if you have a node which contains a CNAME
-    rdataset, and then add an MX rdataset to it, then the CNAME
-    rdataset will be deleted. Likewise if you have a node containing
-    an MX rdataset and add a CNAME rdataset, the MX rdataset will be
-    deleted.
-    """
-
-    __slots__ = ["rdatasets"]
-
-    def __init__(self):
-        # the set of rdatasets, represented as a list.
-        self.rdatasets = []
-
-    def to_text(self, name: dns.name.Name, **kw: Dict[str, Any]) -> str:
-        """Convert a node to text format.
-
-        Each rdataset at the node is printed. Any keyword arguments
-        to this method are passed on to the rdataset's to_text() method.
-
-        *name*, a ``dns.name.Name``, the owner name of the
-        rdatasets.
-
-        Returns a ``str``.
-
-        """
-
-        s = io.StringIO()
-        for rds in self.rdatasets:
-            if len(rds) > 0:
-                s.write(rds.to_text(name, **kw))  # type: ignore[arg-type]
-                s.write("\n")
-        return s.getvalue()[:-1]
-
-    def __repr__(self):
-        return "<DNS node " + str(id(self)) + ">"
-
-    def __eq__(self, other):
-        #
-        # This is inefficient. Good thing we don't need to do it much.
-        #
-        for rd in self.rdatasets:
-            if rd not in other.rdatasets:
-                return False
-        for rd in other.rdatasets:
-            if rd not in self.rdatasets:
-                return False
-        return True
-
-    def __ne__(self, other):
-        return not self.__eq__(other)
-
-    def __len__(self):
-        return len(self.rdatasets)
-
-    def __iter__(self):
-        return iter(self.rdatasets)
-
-    def _append_rdataset(self, rdataset):
-        """Append rdataset to the node with special handling for CNAME and
-        other data conditions.
-
-        Specifically, if the rdataset being appended has ``NodeKind.CNAME``,
-        then all rdatasets other than KEY, NSEC, NSEC3, and their covering
-        RRSIGs are deleted.
If the rdataset being appended has - ``NodeKind.REGULAR`` then CNAME and RRSIG(CNAME) are deleted. - """ - # Make having just one rdataset at the node fast. - if len(self.rdatasets) > 0: - kind = NodeKind.classify_rdataset(rdataset) - if kind == NodeKind.CNAME: - self.rdatasets = [ - rds - for rds in self.rdatasets - if NodeKind.classify_rdataset(rds) != NodeKind.REGULAR - ] - elif kind == NodeKind.REGULAR: - self.rdatasets = [ - rds - for rds in self.rdatasets - if NodeKind.classify_rdataset(rds) != NodeKind.CNAME - ] - # Otherwise the rdataset is NodeKind.NEUTRAL and we do not need to - # edit self.rdatasets. - self.rdatasets.append(rdataset) - - def find_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> dns.rdataset.Rdataset: - """Find an rdataset matching the specified properties in the - current node. - - *rdclass*, a ``dns.rdataclass.RdataClass``, the class of the rdataset. - - *rdtype*, a ``dns.rdatatype.RdataType``, the type of the rdataset. - - *covers*, a ``dns.rdatatype.RdataType``, the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). - This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - - *create*, a ``bool``. If True, create the rdataset if it is not found. - - Raises ``KeyError`` if an rdataset of the desired type and class does - not exist and *create* is not ``True``. - - Returns a ``dns.rdataset.Rdataset``. - """ - - for rds in self.rdatasets: - if rds.match(rdclass, rdtype, covers): - return rds - if not create: - raise KeyError - rds = dns.rdataset.Rdataset(rdclass, rdtype, covers) - self._append_rdataset(rds) - return rds - - def get_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> Optional[dns.rdataset.Rdataset]: - """Get an rdataset matching the specified properties in the - current node. - - None is returned if an rdataset of the specified type and - class does not exist and *create* is not ``True``. - - *rdclass*, an ``int``, the class of the rdataset. - - *rdtype*, an ``int``, the type of the rdataset. - - *covers*, an ``int``, the covered type. Usually this value is - dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or - dns.rdatatype.RRSIG, then the covers value will be the rdata - type the SIG/RRSIG covers. The library treats the SIG and RRSIG - types as if they were a family of - types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much - easier to work with than if RRSIGs covering different rdata - types were aggregated into a single RRSIG rdataset. - - *create*, a ``bool``. If True, create the rdataset if it is not found. - - Returns a ``dns.rdataset.Rdataset`` or ``None``. 
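A minimal sketch of the find/get pair documented above; the address 198.51.100.1 is a documentation value, not anything from the diff:

import dns.node
import dns.rdata
import dns.rdataclass
import dns.rdatatype

node = dns.node.Node()
# create=True makes the rdataset on first use instead of raising KeyError.
rds = node.find_rdataset(dns.rdataclass.IN, dns.rdatatype.A, create=True)
rds.add(dns.rdata.from_text("IN", "A", "198.51.100.1"), 300)
# get_rdataset() returns None rather than raising when nothing matches.
assert node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.MX) is None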
- """ - - try: - rds = self.find_rdataset(rdclass, rdtype, covers, create) - except KeyError: - rds = None - return rds - - def delete_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - ) -> None: - """Delete the rdataset matching the specified properties in the - current node. - - If a matching rdataset does not exist, it is not an error. - - *rdclass*, an ``int``, the class of the rdataset. - - *rdtype*, an ``int``, the type of the rdataset. - - *covers*, an ``int``, the covered type. - """ - - rds = self.get_rdataset(rdclass, rdtype, covers) - if rds is not None: - self.rdatasets.remove(rds) - - def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: - """Replace an rdataset. - - It is not an error if there is no rdataset matching *replacement*. - - Ownership of the *replacement* object is transferred to the node; - in other words, this method does not store a copy of *replacement* - at the node, it stores *replacement* itself. - - *replacement*, a ``dns.rdataset.Rdataset``. - - Raises ``ValueError`` if *replacement* is not a - ``dns.rdataset.Rdataset``. - """ - - if not isinstance(replacement, dns.rdataset.Rdataset): - raise ValueError("replacement is not an rdataset") - if isinstance(replacement, dns.rrset.RRset): - # RRsets are not good replacements as the match() method - # is not compatible. - replacement = replacement.to_rdataset() - self.delete_rdataset( - replacement.rdclass, replacement.rdtype, replacement.covers - ) - self._append_rdataset(replacement) - - def classify(self) -> NodeKind: - """Classify a node. - - A node which contains a CNAME or RRSIG(CNAME) is a - ``NodeKind.CNAME`` node. - - A node which contains only "neutral" types, i.e. types allowed to - co-exist with a CNAME, is a ``NodeKind.NEUTRAL`` node. The neutral - types are NSEC, NSEC3, KEY, and their associated RRSIGS. An empty node - is also considered neutral. - - A node which contains some rdataset which is not a CNAME, RRSIG(CNAME), - or a neutral type is a a ``NodeKind.REGULAR`` node. Regular nodes are - also commonly referred to as "other data". 
- """ - for rdataset in self.rdatasets: - kind = NodeKind.classify(rdataset.rdtype, rdataset.covers) - if kind != NodeKind.NEUTRAL: - return kind - return NodeKind.NEUTRAL - - def is_immutable(self) -> bool: - return False - - -@dns.immutable.immutable -class ImmutableNode(Node): - def __init__(self, node): - super().__init__() - self.rdatasets = tuple( - [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets] - ) - - def find_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> dns.rdataset.Rdataset: - if create: - raise TypeError("immutable") - return super().find_rdataset(rdclass, rdtype, covers, False) - - def get_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> Optional[dns.rdataset.Rdataset]: - if create: - raise TypeError("immutable") - return super().get_rdataset(rdclass, rdtype, covers, False) - - def delete_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - ) -> None: - raise TypeError("immutable") - - def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: - raise TypeError("immutable") - - def is_immutable(self) -> bool: - return True diff --git a/server/venv/Lib/site-packages/dns/opcode.py b/server/venv/Lib/site-packages/dns/opcode.py deleted file mode 100644 index 78b43d2..0000000 --- a/server/venv/Lib/site-packages/dns/opcode.py +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Opcodes.""" - -import dns.enum -import dns.exception - - -class Opcode(dns.enum.IntEnum): - #: Query - QUERY = 0 - #: Inverse Query (historical) - IQUERY = 1 - #: Server Status (unspecified and unimplemented anywhere) - STATUS = 2 - #: Notify - NOTIFY = 4 - #: Dynamic Update - UPDATE = 5 - - @classmethod - def _maximum(cls): - return 15 - - @classmethod - def _unknown_exception_class(cls): - return UnknownOpcode - - -class UnknownOpcode(dns.exception.DNSException): - """An DNS opcode is unknown.""" - - -def from_text(text: str) -> Opcode: - """Convert text into an opcode. - - *text*, a ``str``, the textual opcode - - Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. - - Returns an ``int``. - """ - - return Opcode.from_text(text) - - -def from_flags(flags: int) -> Opcode: - """Extract an opcode from DNS message flags. - - *flags*, an ``int``, the DNS flags. - - Returns an ``int``. 
- """ - - return Opcode((flags & 0x7800) >> 11) - - -def to_flags(value: Opcode) -> int: - """Convert an opcode to a value suitable for ORing into DNS message - flags. - - *value*, an ``int``, the DNS opcode value. - - Returns an ``int``. - """ - - return (value << 11) & 0x7800 - - -def to_text(value: Opcode) -> str: - """Convert an opcode to text. - - *value*, an ``int`` the opcode value, - - Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. - - Returns a ``str``. - """ - - return Opcode.to_text(value) - - -def is_update(flags: int) -> bool: - """Is the opcode in flags UPDATE? - - *flags*, an ``int``, the DNS message flags. - - Returns a ``bool``. - """ - - return from_flags(flags) == Opcode.UPDATE - - -### BEGIN generated Opcode constants - -QUERY = Opcode.QUERY -IQUERY = Opcode.IQUERY -STATUS = Opcode.STATUS -NOTIFY = Opcode.NOTIFY -UPDATE = Opcode.UPDATE - -### END generated Opcode constants diff --git a/server/venv/Lib/site-packages/dns/py.typed b/server/venv/Lib/site-packages/dns/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/dns/query.py b/server/venv/Lib/site-packages/dns/query.py deleted file mode 100644 index 0d71125..0000000 --- a/server/venv/Lib/site-packages/dns/query.py +++ /dev/null @@ -1,1519 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Talk to a DNS server.""" - -import base64 -import contextlib -import enum -import errno -import os -import selectors -import socket -import struct -import time -from typing import Any, Dict, Optional, Tuple, Union - -import dns.exception -import dns.inet -import dns.message -import dns.name -import dns.quic -import dns.rcode -import dns.rdataclass -import dns.rdatatype -import dns.serial -import dns.transaction -import dns.tsig -import dns.xfr - - -def _remaining(expiration): - if expiration is None: - return None - timeout = expiration - time.time() - if timeout <= 0.0: - raise dns.exception.Timeout - return timeout - - -def _expiration_for_this_attempt(timeout, expiration): - if expiration is None: - return None - return min(time.time() + timeout, expiration) - - -_have_httpx = False -_have_http2 = False -try: - import httpcore - import httpcore._backends.sync - import httpx - - _CoreNetworkBackend = httpcore.NetworkBackend - _CoreSyncStream = httpcore._backends.sync.SyncStream - - _have_httpx = True - try: - # See if http2 support is available. 
- with httpx.Client(http2=True): - _have_http2 = True - except Exception: - pass - - class _NetworkBackend(_CoreNetworkBackend): - def __init__(self, resolver, local_port, bootstrap_address, family): - super().__init__() - self._local_port = local_port - self._resolver = resolver - self._bootstrap_address = bootstrap_address - self._family = family - - def connect_tcp( - self, host, port, timeout, local_address, socket_options=None - ): # pylint: disable=signature-differs - addresses = [] - _, expiration = _compute_times(timeout) - if dns.inet.is_address(host): - addresses.append(host) - elif self._bootstrap_address is not None: - addresses.append(self._bootstrap_address) - else: - timeout = _remaining(expiration) - family = self._family - if local_address: - family = dns.inet.af_for_address(local_address) - answers = self._resolver.resolve_name( - host, family=family, lifetime=timeout - ) - addresses = answers.addresses() - for address in addresses: - af = dns.inet.af_for_address(address) - if local_address is not None or self._local_port != 0: - source = dns.inet.low_level_address_tuple( - (local_address, self._local_port), af - ) - else: - source = None - sock = _make_socket(af, socket.SOCK_STREAM, source) - attempt_expiration = _expiration_for_this_attempt(2.0, expiration) - try: - _connect( - sock, - dns.inet.low_level_address_tuple((address, port), af), - attempt_expiration, - ) - return _CoreSyncStream(sock) - except Exception: - pass - raise httpcore.ConnectError - - def connect_unix_socket( - self, path, timeout, socket_options=None - ): # pylint: disable=signature-differs - raise NotImplementedError - - class _HTTPTransport(httpx.HTTPTransport): - def __init__( - self, - *args, - local_port=0, - bootstrap_address=None, - resolver=None, - family=socket.AF_UNSPEC, - **kwargs, - ): - if resolver is None: - # pylint: disable=import-outside-toplevel,redefined-outer-name - import dns.resolver - - resolver = dns.resolver.Resolver() - super().__init__(*args, **kwargs) - self._pool._network_backend = _NetworkBackend( - resolver, local_port, bootstrap_address, family - ) - -except ImportError: # pragma: no cover - - class _HTTPTransport: # type: ignore - def connect_tcp(self, host, port, timeout, local_address): - raise NotImplementedError - - -have_doh = _have_httpx - -try: - import ssl -except ImportError: # pragma: no cover - - class ssl: # type: ignore - class WantReadException(Exception): - pass - - class WantWriteException(Exception): - pass - - class SSLContext: - pass - - class SSLSocket: - pass - - @classmethod - def create_default_context(cls, *args, **kwargs): - raise Exception("no ssl support") # pylint: disable=broad-exception-raised - - -# Function used to create a socket. Can be overridden if needed in special -# situations. 
-socket_factory = socket.socket - - -class UnexpectedSource(dns.exception.DNSException): - """A DNS query response came from an unexpected address or port.""" - - -class BadResponse(dns.exception.FormError): - """A DNS query response does not respond to the question asked.""" - - -class NoDOH(dns.exception.DNSException): - """DNS over HTTPS (DOH) was requested but the httpx module is not - available.""" - - -class NoDOQ(dns.exception.DNSException): - """DNS over QUIC (DOQ) was requested but the aioquic module is not - available.""" - - -# for backwards compatibility -TransferError = dns.xfr.TransferError - - -def _compute_times(timeout): - now = time.time() - if timeout is None: - return (now, None) - else: - return (now, now + timeout) - - -def _wait_for(fd, readable, writable, _, expiration): - # Use the selected selector class to wait for any of the specified - # events. An "expiration" absolute time is converted into a relative - # timeout. - # - # The unused parameter is 'error', which is always set when - # selecting for read or write, and we have no error-only selects. - - if readable and isinstance(fd, ssl.SSLSocket) and fd.pending() > 0: - return True - sel = _selector_class() - events = 0 - if readable: - events |= selectors.EVENT_READ - if writable: - events |= selectors.EVENT_WRITE - if events: - sel.register(fd, events) - if expiration is None: - timeout = None - else: - timeout = expiration - time.time() - if timeout <= 0.0: - raise dns.exception.Timeout - if not sel.select(timeout): - raise dns.exception.Timeout - - -def _set_selector_class(selector_class): - # Internal API. Do not use. - - global _selector_class - - _selector_class = selector_class - - -if hasattr(selectors, "PollSelector"): - # Prefer poll() on platforms that support it because it has no - # limits on the maximum value of a file descriptor (plus it will - # be more efficient for high values). - # - # We ignore typing here as we can't say _selector_class is Any - # on python < 3.8 due to a bug. - _selector_class = selectors.PollSelector # type: ignore -else: - _selector_class = selectors.SelectSelector # type: ignore - - -def _wait_for_readable(s, expiration): - _wait_for(s, True, False, True, expiration) - - -def _wait_for_writable(s, expiration): - _wait_for(s, False, True, True, expiration) - - -def _addresses_equal(af, a1, a2): - # Convert the first value of the tuple, which is a textual format - # address into binary form, so that we are not confused by different - # textual representations of the same address - try: - n1 = dns.inet.inet_pton(af, a1[0]) - n2 = dns.inet.inet_pton(af, a2[0]) - except dns.exception.SyntaxError: - return False - return n1 == n2 and a1[1:] == a2[1:] - - -def _matches_destination(af, from_address, destination, ignore_unexpected): - # Check that from_address is appropriate for a response to a query - # sent to destination. - if not destination: - return True - if _addresses_equal(af, from_address, destination) or ( - dns.inet.is_multicast(destination[0]) and from_address[1:] == destination[1:] - ): - return True - elif ignore_unexpected: - return False - raise UnexpectedSource( - f"got a response from {from_address} instead of " f"{destination}" - ) - - -def _destination_and_source( - where, port, source, source_port, where_must_be_address=True -): - # Apply defaults and compute destination and source tuples - # suitable for use in connect(), sendto(), or bind(). 
-    af = None
-    destination = None
-    try:
-        af = dns.inet.af_for_address(where)
-        destination = where
-    except Exception:
-        if where_must_be_address:
-            raise
-        # URLs are ok so eat the exception
-    if source:
-        saf = dns.inet.af_for_address(source)
-        if af:
-            # We know the destination af, so source had better agree!
-            if saf != af:
-                raise ValueError(
-                    "different address families for source and destination"
-                )
-        else:
-            # We didn't know the destination af, but we know the source,
-            # so that's our af.
-            af = saf
-    if source_port and not source:
-        # Caller has specified a source_port but not an address, so we
-        # need to return a source, and we need to use the appropriate
-        # wildcard address as the address.
-        try:
-            source = dns.inet.any_for_af(af)
-        except Exception:
-            # we catch this and raise ValueError for backwards compatibility
-            raise ValueError("source_port specified but address family is unknown")
-    # Convert high-level (address, port) tuples into low-level address
-    # tuples.
-    if destination:
-        destination = dns.inet.low_level_address_tuple((destination, port), af)
-    if source:
-        source = dns.inet.low_level_address_tuple((source, source_port), af)
-    return (af, destination, source)
-
-
-def _make_socket(af, type, source, ssl_context=None, server_hostname=None):
-    s = socket_factory(af, type)
-    try:
-        s.setblocking(False)
-        if source is not None:
-            s.bind(source)
-        if ssl_context:
-            # LGTM gets a false positive here, as our default context is OK
-            return ssl_context.wrap_socket(
-                s,
-                do_handshake_on_connect=False,  # lgtm[py/insecure-protocol]
-                server_hostname=server_hostname,
-            )
-        else:
-            return s
-    except Exception:
-        s.close()
-        raise
-
-
-def https(
-    q: dns.message.Message,
-    where: str,
-    timeout: Optional[float] = None,
-    port: int = 443,
-    source: Optional[str] = None,
-    source_port: int = 0,
-    one_rr_per_rrset: bool = False,
-    ignore_trailing: bool = False,
-    session: Optional[Any] = None,
-    path: str = "/dns-query",
-    post: bool = True,
-    bootstrap_address: Optional[str] = None,
-    verify: Union[bool, str] = True,
-    resolver: Optional["dns.resolver.Resolver"] = None,
-    family: Optional[int] = socket.AF_UNSPEC,
-) -> dns.message.Message:
-    """Return the response obtained after sending a query via DNS-over-HTTPS.
-
-    *q*, a ``dns.message.Message``, the query to send.
-
-    *where*, a ``str``, the nameserver IP address or the full URL. If an IP address is
-    given, the URL will be constructed using the following schema:
-    https://<IP-address>:<port><path>.
-
-    *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query
-    times out. If ``None``, the default, wait forever.
-
-    *port*, an ``int``, the port to send the query to. The default is 443.
-
-    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source
-    address. The default is the wildcard address.
-
-    *source_port*, an ``int``, the port from which to send the message. The default is
-    0.
-
-    *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset.
-
-    *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the
-    received message.
-
-    *session*, an ``httpx.Client``. If provided, the client session to use to send the
-    queries.
-
-    *path*, a ``str``. If *where* is an IP address, then *path* will be used to
-    construct the URL to send the DNS query to.
-
-    *post*, a ``bool``. If ``True``, the default, POST method will be used.
-
-    *bootstrap_address*, a ``str``, the IP address to use to bypass resolution.
-
-    *verify*, a ``bool`` or ``str``. If ``True``, then TLS certificate verification
-    of the server is done using the default CA bundle; if ``False``, then no
-    verification is done; if a ``str``, then it specifies the path to a certificate
-    file or directory which will be used for verification.
-
-    *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use for
-    resolution of hostnames in URLs. If not specified, a new resolver with a default
-    configuration will be used; note this is *not* the default resolver as that resolver
-    might have been configured to use DoH causing a chicken-and-egg problem. This
-    parameter only has an effect if the HTTP library is httpx.
-
-    *family*, an ``int``, the address family. If socket.AF_UNSPEC (the default), both A
-    and AAAA records will be retrieved.
-
-    Returns a ``dns.message.Message``.
-    """
-
-    if not have_doh:
-        raise NoDOH  # pragma: no cover
-    if session and not isinstance(session, httpx.Client):
-        raise ValueError("session parameter must be an httpx.Client")
-
-    wire = q.to_wire()
-    (af, _, the_source) = _destination_and_source(
-        where, port, source, source_port, False
-    )
-    transport = None
-    headers = {"accept": "application/dns-message"}
-    if af is not None and dns.inet.is_address(where):
-        if af == socket.AF_INET:
-            url = "https://{}:{}{}".format(where, port, path)
-        elif af == socket.AF_INET6:
-            url = "https://[{}]:{}{}".format(where, port, path)
-    else:
-        url = where
-
-    # set source port and source address
-
-    if the_source is None:
-        local_address = None
-        local_port = 0
-    else:
-        local_address = the_source[0]
-        local_port = the_source[1]
-    transport = _HTTPTransport(
-        local_address=local_address,
-        http1=True,
-        http2=_have_http2,
-        verify=verify,
-        local_port=local_port,
-        bootstrap_address=bootstrap_address,
-        resolver=resolver,
-        family=family,
-    )
-
-    if session:
-        cm: contextlib.AbstractContextManager = contextlib.nullcontext(session)
-    else:
-        cm = httpx.Client(
-            http1=True, http2=_have_http2, verify=verify, transport=transport
-        )
-    with cm as session:
-        # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH
-        # GET and POST examples
-        if post:
-            headers.update(
-                {
-                    "content-type": "application/dns-message",
-                    "content-length": str(len(wire)),
-                }
-            )
-            response = session.post(url, headers=headers, content=wire, timeout=timeout)
-        else:
-            wire = base64.urlsafe_b64encode(wire).rstrip(b"=")
-            twire = wire.decode()  # httpx does a repr() if we give it bytes
-            response = session.get(
-                url, headers=headers, timeout=timeout, params={"dns": twire}
-            )
-
-    # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH
-    # status codes
-    if response.status_code < 200 or response.status_code > 299:
-        raise ValueError(
-            "{} responded with status code {}"
-            "\nResponse body: {}".format(where, response.status_code, response.content)
-        )
-    r = dns.message.from_wire(
-        response.content,
-        keyring=q.keyring,
-        request_mac=q.request_mac,
-        one_rr_per_rrset=one_rr_per_rrset,
-        ignore_trailing=ignore_trailing,
-    )
-    r.time = response.elapsed.total_seconds()
-    if not q.is_response(r):
-        raise BadResponse
-    return r
-
-
-def _udp_recv(sock, max_size, expiration):
-    """Reads a datagram from the socket.
-    A Timeout exception will be raised if the operation is not completed
-    by the expiration time.
-    """
-    while True:
-        try:
-            return sock.recvfrom(max_size)
-        except BlockingIOError:
-            _wait_for_readable(sock, expiration)
-
-
-def _udp_send(sock, data, destination, expiration):
-    """Sends the specified datagram to destination over the socket.
-    A Timeout exception will be raised if the operation is not completed
-    by the expiration time.
-    """
-    while True:
-        try:
-            if destination:
-                return sock.sendto(data, destination)
-            else:
-                return sock.send(data)
-        except BlockingIOError:  # pragma: no cover
-            _wait_for_writable(sock, expiration)
-
-
-def send_udp(
-    sock: Any,
-    what: Union[dns.message.Message, bytes],
-    destination: Any,
-    expiration: Optional[float] = None,
-) -> Tuple[int, float]:
-    """Send a DNS message to the specified UDP socket.
-
-    *sock*, a ``socket``.
-
-    *what*, a ``bytes`` or ``dns.message.Message``, the message to send.
-
-    *destination*, a destination tuple appropriate for the address family
-    of the socket, specifying where to send the query.
-
-    *expiration*, a ``float`` or ``None``, the absolute time at which
-    a timeout exception should be raised. If ``None``, no timeout will
-    occur.
-
-    Returns an ``(int, float)`` tuple of bytes sent and the sent time.
-    """
-
-    if isinstance(what, dns.message.Message):
-        what = what.to_wire()
-    sent_time = time.time()
-    n = _udp_send(sock, what, destination, expiration)
-    return (n, sent_time)
-
-
-def receive_udp(
-    sock: Any,
-    destination: Optional[Any] = None,
-    expiration: Optional[float] = None,
-    ignore_unexpected: bool = False,
-    one_rr_per_rrset: bool = False,
-    keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None,
-    request_mac: Optional[bytes] = b"",
-    ignore_trailing: bool = False,
-    raise_on_truncation: bool = False,
-) -> Any:
-    """Read a DNS message from a UDP socket.
-
-    *sock*, a ``socket``.
-
-    *destination*, a destination tuple appropriate for the address family
-    of the socket, specifying where the message is expected to arrive from.
-    When receiving a response, this would be where the associated query was
-    sent.
-
-    *expiration*, a ``float`` or ``None``, the absolute time at which
-    a timeout exception should be raised. If ``None``, no timeout will
-    occur.
-
-    *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from
-    unexpected sources.
-
-    *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
-    RRset.
-
-    *keyring*, a ``dict``, the keyring to use for TSIG.
-
-    *request_mac*, a ``bytes`` or ``None``, the MAC of the request (for TSIG).
-
-    *ignore_trailing*, a ``bool``. If ``True``, ignore trailing
-    junk at end of the received message.
-
-    *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if
-    the TC bit is set.
-
-    Raises if the message is malformed, if network errors occur, or if
-    there is a timeout.
-
-    If *destination* is not ``None``, returns a ``(dns.message.Message, float)``
-    tuple of the received message and the received time.
-
-    If *destination* is ``None``, returns a
-    ``(dns.message.Message, float, tuple)``
-    tuple of the received message, the received time, and the address where
-    the message arrived from.
- """ - - wire = b"" - while True: - (wire, from_address) = _udp_recv(sock, 65535, expiration) - if _matches_destination( - sock.family, from_address, destination, ignore_unexpected - ): - break - received_time = time.time() - r = dns.message.from_wire( - wire, - keyring=keyring, - request_mac=request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - raise_on_truncation=raise_on_truncation, - ) - if destination: - return (r, received_time) - else: - return (r, received_time, from_address) - - -def udp( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 53, - source: Optional[str] = None, - source_port: int = 0, - ignore_unexpected: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - raise_on_truncation: bool = False, - sock: Optional[Any] = None, -) -> dns.message.Message: - """Return the response obtained after sending a query via UDP. - - *q*, a ``dns.message.Message``, the query to send - - *where*, a ``str`` containing an IPv4 or IPv6 address, where - to send the message. - - *timeout*, a ``float`` or ``None``, the number of seconds to wait before the - query times out. If ``None``, the default, wait forever. - - *port*, an ``int``, the port send the message to. The default is 53. - - *source*, a ``str`` containing an IPv4 or IPv6 address, specifying - the source address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. - The default is 0. - - *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from - unexpected sources. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if - the TC bit is set. - - *sock*, a ``socket.socket``, or ``None``, the socket to use for the - query. If ``None``, the default, a socket is created. Note that - if a socket is provided, it must be a nonblocking datagram socket, - and the *source* and *source_port* are ignored. - - Returns a ``dns.message.Message``. - """ - - wire = q.to_wire() - (af, destination, source) = _destination_and_source( - where, port, source, source_port - ) - (begin_time, expiration) = _compute_times(timeout) - if sock: - cm: contextlib.AbstractContextManager = contextlib.nullcontext(sock) - else: - cm = _make_socket(af, socket.SOCK_DGRAM, source) - with cm as s: - send_udp(s, wire, destination, expiration) - (r, received_time) = receive_udp( - s, - destination, - expiration, - ignore_unexpected, - one_rr_per_rrset, - q.keyring, - q.mac, - ignore_trailing, - raise_on_truncation, - ) - r.time = received_time - begin_time - if not q.is_response(r): - raise BadResponse - return r - assert ( - False # help mypy figure out we can't get here lgtm[py/unreachable-statement] - ) - - -def udp_with_fallback( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 53, - source: Optional[str] = None, - source_port: int = 0, - ignore_unexpected: bool = False, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - udp_sock: Optional[Any] = None, - tcp_sock: Optional[Any] = None, -) -> Tuple[dns.message.Message, bool]: - """Return the response to the query, trying UDP first and falling back - to TCP if UDP results in a truncated response. 
-
-    *q*, a ``dns.message.Message``, the query to send.
-
-    *where*, a ``str`` containing an IPv4 or IPv6 address, where
-    to send the message.
-
-    *timeout*, a ``float`` or ``None``, the number of seconds to wait before the
-    query times out. If ``None``, the default, wait forever.
-
-    *port*, an ``int``, the port to send the message to. The default is 53.
-
-    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying
-    the source address. The default is the wildcard address.
-
-    *source_port*, an ``int``, the port from which to send the message.
-    The default is 0.
-
-    *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from
-    unexpected sources.
-
-    *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
-    RRset.
-
-    *ignore_trailing*, a ``bool``. If ``True``, ignore trailing
-    junk at end of the received message.
-
-    *udp_sock*, a ``socket.socket``, or ``None``, the socket to use for the
-    UDP query. If ``None``, the default, a socket is created. Note that
-    if a socket is provided, it must be a nonblocking datagram socket,
-    and the *source* and *source_port* are ignored for the UDP query.
-
-    *tcp_sock*, a ``socket.socket``, or ``None``, the connected socket to use for the
-    TCP query. If ``None``, the default, a socket is created. Note that
-    if a socket is provided, it must be a nonblocking connected stream
-    socket, and *where*, *source* and *source_port* are ignored for the TCP
-    query.
-
-    Returns a (``dns.message.Message``, tcp) tuple where tcp is ``True``
-    if and only if TCP was used.
-    """
-    try:
-        response = udp(
-            q,
-            where,
-            timeout,
-            port,
-            source,
-            source_port,
-            ignore_unexpected,
-            one_rr_per_rrset,
-            ignore_trailing,
-            True,
-            udp_sock,
-        )
-        return (response, False)
-    except dns.message.Truncated:
-        response = tcp(
-            q,
-            where,
-            timeout,
-            port,
-            source,
-            source_port,
-            one_rr_per_rrset,
-            ignore_trailing,
-            tcp_sock,
-        )
-        return (response, True)
-
-
-def _net_read(sock, count, expiration):
-    """Read the specified number of bytes from sock. Keep trying until we
-    either get the desired amount, or we hit EOF.
-    A Timeout exception will be raised if the operation is not completed
-    by the expiration time.
-    """
-    s = b""
-    while count > 0:
-        try:
-            n = sock.recv(count)
-            if n == b"":
-                raise EOFError
-            count -= len(n)
-            s += n
-        except (BlockingIOError, ssl.SSLWantReadError):
-            _wait_for_readable(sock, expiration)
-        except ssl.SSLWantWriteError:  # pragma: no cover
-            _wait_for_writable(sock, expiration)
-    return s
-
-
-def _net_write(sock, data, expiration):
-    """Write the specified data to the socket.
-    A Timeout exception will be raised if the operation is not completed
-    by the expiration time.
-    """
-    current = 0
-    l = len(data)
-    while current < l:
-        try:
-            current += sock.send(data[current:])
-        except (BlockingIOError, ssl.SSLWantWriteError):
-            _wait_for_writable(sock, expiration)
-        except ssl.SSLWantReadError:  # pragma: no cover
-            _wait_for_readable(sock, expiration)
-
-
-def send_tcp(
-    sock: Any,
-    what: Union[dns.message.Message, bytes],
-    expiration: Optional[float] = None,
-) -> Tuple[int, float]:
-    """Send a DNS message to the specified TCP socket.
-
-    *sock*, a ``socket``.
-
-    *what*, a ``bytes`` or ``dns.message.Message``, the message to send.
-
-    *expiration*, a ``float`` or ``None``, the absolute time at which
-    a timeout exception should be raised. If ``None``, no timeout will
-    occur.
-
-    Returns an ``(int, float)`` tuple of bytes sent and the sent time.
- """ - - if isinstance(what, dns.message.Message): - wire = what.to_wire() - else: - wire = what - l = len(wire) - # copying the wire into tcpmsg is inefficient, but lets us - # avoid writev() or doing a short write that would get pushed - # onto the net - tcpmsg = struct.pack("!H", l) + wire - sent_time = time.time() - _net_write(sock, tcpmsg, expiration) - return (len(tcpmsg), sent_time) - - -def receive_tcp( - sock: Any, - expiration: Optional[float] = None, - one_rr_per_rrset: bool = False, - keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, - request_mac: Optional[bytes] = b"", - ignore_trailing: bool = False, -) -> Tuple[dns.message.Message, float]: - """Read a DNS message from a TCP socket. - - *sock*, a ``socket``. - - *expiration*, a ``float`` or ``None``, the absolute time at which - a timeout exception should be raised. If ``None``, no timeout will - occur. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *keyring*, a ``dict``, the keyring to use for TSIG. - - *request_mac*, a ``bytes`` or ``None``, the MAC of the request (for TSIG). - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - Raises if the message is malformed, if network errors occur, of if - there is a timeout. - - Returns a ``(dns.message.Message, float)`` tuple of the received message - and the received time. - """ - - ldata = _net_read(sock, 2, expiration) - (l,) = struct.unpack("!H", ldata) - wire = _net_read(sock, l, expiration) - received_time = time.time() - r = dns.message.from_wire( - wire, - keyring=keyring, - request_mac=request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - return (r, received_time) - - -def _connect(s, address, expiration): - err = s.connect_ex(address) - if err == 0: - return - if err in (errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY): - _wait_for_writable(s, expiration) - err = s.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) - if err != 0: - raise OSError(err, os.strerror(err)) - - -def tcp( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 53, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - sock: Optional[Any] = None, -) -> dns.message.Message: - """Return the response obtained after sending a query via TCP. - - *q*, a ``dns.message.Message``, the query to send - - *where*, a ``str`` containing an IPv4 or IPv6 address, where - to send the message. - - *timeout*, a ``float`` or ``None``, the number of seconds to wait before the - query times out. If ``None``, the default, wait forever. - - *port*, an ``int``, the port send the message to. The default is 53. - - *source*, a ``str`` containing an IPv4 or IPv6 address, specifying - the source address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. - The default is 0. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - *sock*, a ``socket.socket``, or ``None``, the connected socket to use for the - query. If ``None``, the default, a socket is created. Note that - if a socket is provided, it must be a nonblocking connected stream - socket, and *where*, *port*, *source* and *source_port* are ignored. - - Returns a ``dns.message.Message``. 
- """ - - wire = q.to_wire() - (begin_time, expiration) = _compute_times(timeout) - if sock: - cm: contextlib.AbstractContextManager = contextlib.nullcontext(sock) - else: - (af, destination, source) = _destination_and_source( - where, port, source, source_port - ) - cm = _make_socket(af, socket.SOCK_STREAM, source) - with cm as s: - if not sock: - _connect(s, destination, expiration) - send_tcp(s, wire, expiration) - (r, received_time) = receive_tcp( - s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing - ) - r.time = received_time - begin_time - if not q.is_response(r): - raise BadResponse - return r - assert ( - False # help mypy figure out we can't get here lgtm[py/unreachable-statement] - ) - - -def _tls_handshake(s, expiration): - while True: - try: - s.do_handshake() - return - except ssl.SSLWantReadError: - _wait_for_readable(s, expiration) - except ssl.SSLWantWriteError: # pragma: no cover - _wait_for_writable(s, expiration) - - -def tls( - q: dns.message.Message, - where: str, - timeout: Optional[float] = None, - port: int = 853, - source: Optional[str] = None, - source_port: int = 0, - one_rr_per_rrset: bool = False, - ignore_trailing: bool = False, - sock: Optional[ssl.SSLSocket] = None, - ssl_context: Optional[ssl.SSLContext] = None, - server_hostname: Optional[str] = None, -) -> dns.message.Message: - """Return the response obtained after sending a query via TLS. - - *q*, a ``dns.message.Message``, the query to send - - *where*, a ``str`` containing an IPv4 or IPv6 address, where - to send the message. - - *timeout*, a ``float`` or ``None``, the number of seconds to wait before the - query times out. If ``None``, the default, wait forever. - - *port*, an ``int``, the port send the message to. The default is 853. - - *source*, a ``str`` containing an IPv4 or IPv6 address, specifying - the source address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. - The default is 0. - - *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own - RRset. - - *ignore_trailing*, a ``bool``. If ``True``, ignore trailing - junk at end of the received message. - - *sock*, an ``ssl.SSLSocket``, or ``None``, the socket to use for - the query. If ``None``, the default, a socket is created. Note - that if a socket is provided, it must be a nonblocking connected - SSL stream socket, and *where*, *port*, *source*, *source_port*, - and *ssl_context* are ignored. - - *ssl_context*, an ``ssl.SSLContext``, the context to use when establishing - a TLS connection. If ``None``, the default, creates one with the default - configuration. - - *server_hostname*, a ``str`` containing the server's hostname. The - default is ``None``, which means that no hostname is known, and if an - SSL context is created, hostname checking will be disabled. - - Returns a ``dns.message.Message``. - - """ - - if sock: - # - # If a socket was provided, there's no special TLS handling needed. 
-        #
-        return tcp(
-            q,
-            where,
-            timeout,
-            port,
-            source,
-            source_port,
-            one_rr_per_rrset,
-            ignore_trailing,
-            sock,
-        )
-
-    wire = q.to_wire()
-    (begin_time, expiration) = _compute_times(timeout)
-    (af, destination, source) = _destination_and_source(
-        where, port, source, source_port
-    )
-    if ssl_context is None and not sock:
-        ssl_context = ssl.create_default_context()
-        ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2
-        if server_hostname is None:
-            ssl_context.check_hostname = False
-
-    with _make_socket(
-        af,
-        socket.SOCK_STREAM,
-        source,
-        ssl_context=ssl_context,
-        server_hostname=server_hostname,
-    ) as s:
-        _connect(s, destination, expiration)
-        _tls_handshake(s, expiration)
-        send_tcp(s, wire, expiration)
-        (r, received_time) = receive_tcp(
-            s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing
-        )
-        r.time = received_time - begin_time
-        if not q.is_response(r):
-            raise BadResponse
-        return r
-    assert (
-        False  # help mypy figure out we can't get here lgtm[py/unreachable-statement]
-    )
-
-
-def quic(
-    q: dns.message.Message,
-    where: str,
-    timeout: Optional[float] = None,
-    port: int = 853,
-    source: Optional[str] = None,
-    source_port: int = 0,
-    one_rr_per_rrset: bool = False,
-    ignore_trailing: bool = False,
-    connection: Optional[dns.quic.SyncQuicConnection] = None,
-    verify: Union[bool, str] = True,
-    server_hostname: Optional[str] = None,
-) -> dns.message.Message:
-    """Return the response obtained after sending a query via DNS-over-QUIC.
-
-    *q*, a ``dns.message.Message``, the query to send.
-
-    *where*, a ``str``, the nameserver IP address.
-
-    *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query
-    times out. If ``None``, the default, wait forever.
-
-    *port*, an ``int``, the port to send the query to. The default is 853.
-
-    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source
-    address. The default is the wildcard address.
-
-    *source_port*, an ``int``, the port from which to send the message. The default is
-    0.
-
-    *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset.
-
-    *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the
-    received message.
-
-    *connection*, a ``dns.quic.SyncQuicConnection``. If provided, the
-    connection to use to send the query.
-
-    *verify*, a ``bool`` or ``str``. If ``True``, then TLS certificate verification
-    of the server is done using the default CA bundle; if ``False``, then no
-    verification is done; if a ``str``, then it specifies the path to a certificate
-    file or directory which will be used for verification.
-
-    *server_hostname*, a ``str`` containing the server's hostname. The
-    default is ``None``, which means that no hostname is known, and if an
-    SSL context is created, hostname checking will be disabled.
-
-    Returns a ``dns.message.Message``.
- """ - - if not dns.quic.have_quic: - raise NoDOQ("DNS-over-QUIC is not available.") # pragma: no cover - - q.id = 0 - wire = q.to_wire() - the_connection: dns.quic.SyncQuicConnection - the_manager: dns.quic.SyncQuicManager - if connection: - manager: contextlib.AbstractContextManager = contextlib.nullcontext(None) - the_connection = connection - else: - manager = dns.quic.SyncQuicManager( - verify_mode=verify, server_name=server_hostname - ) - the_manager = manager # for type checking happiness - - with manager: - if not connection: - the_connection = the_manager.connect(where, port, source, source_port) - (start, expiration) = _compute_times(timeout) - with the_connection.make_stream(timeout) as stream: - stream.send(wire, True) - wire = stream.receive(_remaining(expiration)) - finish = time.time() - r = dns.message.from_wire( - wire, - keyring=q.keyring, - request_mac=q.request_mac, - one_rr_per_rrset=one_rr_per_rrset, - ignore_trailing=ignore_trailing, - ) - r.time = max(finish - start, 0.0) - if not q.is_response(r): - raise BadResponse - return r - - -def xfr( - where: str, - zone: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.AXFR, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - timeout: Optional[float] = None, - port: int = 53, - keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, - keyname: Optional[Union[dns.name.Name, str]] = None, - relativize: bool = True, - lifetime: Optional[float] = None, - source: Optional[str] = None, - source_port: int = 0, - serial: int = 0, - use_udp: bool = False, - keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, -) -> Any: - """Return a generator for the responses to a zone transfer. - - *where*, a ``str`` containing an IPv4 or IPv6 address, where - to send the message. - - *zone*, a ``dns.name.Name`` or ``str``, the name of the zone to transfer. - - *rdtype*, an ``int`` or ``str``, the type of zone transfer. The - default is ``dns.rdatatype.AXFR``. ``dns.rdatatype.IXFR`` can be - used to do an incremental transfer instead. - - *rdclass*, an ``int`` or ``str``, the class of the zone transfer. - The default is ``dns.rdataclass.IN``. - - *timeout*, a ``float``, the number of seconds to wait for each - response message. If None, the default, wait forever. - - *port*, an ``int``, the port send the message to. The default is 53. - - *keyring*, a ``dict``, the keyring to use for TSIG. - - *keyname*, a ``dns.name.Name`` or ``str``, the name of the TSIG - key to use. - - *relativize*, a ``bool``. If ``True``, all names in the zone will be - relativized to the zone origin. It is essential that the - relativize setting matches the one specified to - ``dns.zone.from_xfr()`` if using this generator to make a zone. - - *lifetime*, a ``float``, the total number of seconds to spend - doing the transfer. If ``None``, the default, then there is no - limit on the time the transfer may take. - - *source*, a ``str`` containing an IPv4 or IPv6 address, specifying - the source address. The default is the wildcard address. - - *source_port*, an ``int``, the port from which to send the message. - The default is 0. - - *serial*, an ``int``, the SOA serial number to use as the base for - an IXFR diff sequence (only meaningful if *rdtype* is - ``dns.rdatatype.IXFR``). - - *use_udp*, a ``bool``. If ``True``, use UDP (only meaningful for IXFR). - - *keyalgorithm*, a ``dns.name.Name`` or ``str``, the TSIG algorithm to use. - - Raises on errors, and so does the generator. 
- - Returns a generator of ``dns.message.Message`` objects. - """ - - if isinstance(zone, str): - zone = dns.name.from_text(zone) - rdtype = dns.rdatatype.RdataType.make(rdtype) - q = dns.message.make_query(zone, rdtype, rdclass) - if rdtype == dns.rdatatype.IXFR: - rrset = dns.rrset.from_text(zone, 0, "IN", "SOA", ". . %u 0 0 0 0" % serial) - q.authority.append(rrset) - if keyring is not None: - q.use_tsig(keyring, keyname, algorithm=keyalgorithm) - wire = q.to_wire() - (af, destination, source) = _destination_and_source( - where, port, source, source_port - ) - if use_udp and rdtype != dns.rdatatype.IXFR: - raise ValueError("cannot do a UDP AXFR") - sock_type = socket.SOCK_DGRAM if use_udp else socket.SOCK_STREAM - with _make_socket(af, sock_type, source) as s: - (_, expiration) = _compute_times(lifetime) - _connect(s, destination, expiration) - l = len(wire) - if use_udp: - _udp_send(s, wire, None, expiration) - else: - tcpmsg = struct.pack("!H", l) + wire - _net_write(s, tcpmsg, expiration) - done = False - delete_mode = True - expecting_SOA = False - soa_rrset = None - if relativize: - origin = zone - oname = dns.name.empty - else: - origin = None - oname = zone - tsig_ctx = None - while not done: - (_, mexpiration) = _compute_times(timeout) - if mexpiration is None or ( - expiration is not None and mexpiration > expiration - ): - mexpiration = expiration - if use_udp: - (wire, _) = _udp_recv(s, 65535, mexpiration) - else: - ldata = _net_read(s, 2, mexpiration) - (l,) = struct.unpack("!H", ldata) - wire = _net_read(s, l, mexpiration) - is_ixfr = rdtype == dns.rdatatype.IXFR - r = dns.message.from_wire( - wire, - keyring=q.keyring, - request_mac=q.mac, - xfr=True, - origin=origin, - tsig_ctx=tsig_ctx, - multi=True, - one_rr_per_rrset=is_ixfr, - ) - rcode = r.rcode() - if rcode != dns.rcode.NOERROR: - raise TransferError(rcode) - tsig_ctx = r.tsig_ctx - answer_index = 0 - if soa_rrset is None: - if not r.answer or r.answer[0].name != oname: - raise dns.exception.FormError("No answer or RRset not for qname") - rrset = r.answer[0] - if rrset.rdtype != dns.rdatatype.SOA: - raise dns.exception.FormError("first RRset is not an SOA") - answer_index = 1 - soa_rrset = rrset.copy() - if rdtype == dns.rdatatype.IXFR: - if dns.serial.Serial(soa_rrset[0].serial) <= serial: - # - # We're already up-to-date. - # - done = True - else: - expecting_SOA = True - # - # Process SOAs in the answer section (other than the initial - # SOA in the first message). - # - for rrset in r.answer[answer_index:]: - if done: - raise dns.exception.FormError("answers after final SOA") - if rrset.rdtype == dns.rdatatype.SOA and rrset.name == oname: - if expecting_SOA: - if rrset[0].serial != serial: - raise dns.exception.FormError("IXFR base serial mismatch") - expecting_SOA = False - elif rdtype == dns.rdatatype.IXFR: - delete_mode = not delete_mode - # - # If this SOA RRset is equal to the first we saw then we're - # finished. If this is an IXFR we also check that we're - # seeing the record in the expected part of the response. - # - if rrset == soa_rrset and ( - rdtype == dns.rdatatype.AXFR - or (rdtype == dns.rdatatype.IXFR and delete_mode) - ): - done = True - elif expecting_SOA: - # - # We made an IXFR request and are expecting another - # SOA RR, but saw something else, so this must be an - # AXFR response. 
-                #
-                rdtype = dns.rdatatype.AXFR
-                expecting_SOA = False
-        if done and q.keyring and not r.had_tsig:
-            raise dns.exception.FormError("missing TSIG")
-        yield r
-
-
-class UDPMode(enum.IntEnum):
-    """How should UDP be used in an IXFR from :py:func:`inbound_xfr()`?
-
-    NEVER means "never use UDP; always use TCP"
-    TRY_FIRST means "try to use UDP but fall back to TCP if needed"
-    ONLY means "raise ``dns.xfr.UseTCP`` if trying UDP does not succeed"
-    """
-
-    NEVER = 0
-    TRY_FIRST = 1
-    ONLY = 2
-
-
-def inbound_xfr(
-    where: str,
-    txn_manager: dns.transaction.TransactionManager,
-    query: Optional[dns.message.Message] = None,
-    port: int = 53,
-    timeout: Optional[float] = None,
-    lifetime: Optional[float] = None,
-    source: Optional[str] = None,
-    source_port: int = 0,
-    udp_mode: UDPMode = UDPMode.NEVER,
-) -> None:
-    """Conduct an inbound transfer and apply it via a transaction from the
-    txn_manager.
-
-    *where*, a ``str`` containing an IPv4 or IPv6 address, where
-    to send the message.
-
-    *txn_manager*, a ``dns.transaction.TransactionManager``, the txn_manager
-    for this transfer (typically a ``dns.zone.Zone``).
-
-    *query*, the query to send. If not supplied, a default query is
-    constructed using information from the *txn_manager*.
-
-    *port*, an ``int``, the port to send the message to. The default is 53.
-
-    *timeout*, a ``float``, the number of seconds to wait for each
-    response message. If None, the default, wait forever.
-
-    *lifetime*, a ``float``, the total number of seconds to spend
-    doing the transfer. If ``None``, the default, then there is no
-    limit on the time the transfer may take.
-
-    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying
-    the source address. The default is the wildcard address.
-
-    *source_port*, an ``int``, the port from which to send the message.
-    The default is 0.
-
-    *udp_mode*, a ``dns.query.UDPMode``, determines how UDP is used
-    for IXFRs. The default is ``dns.query.UDPMode.NEVER``, i.e. only use
-    TCP. Other possibilities are ``dns.query.UDPMode.TRY_FIRST``, which
-    means "try UDP but fall back to TCP if needed", and
-    ``dns.query.UDPMode.ONLY``, which means "try UDP and raise
-    ``dns.xfr.UseTCP`` if it does not succeed".
-
-    Raises on errors.
- """ - if query is None: - (query, serial) = dns.xfr.make_query(txn_manager) - else: - serial = dns.xfr.extract_serial_from_query(query) - rdtype = query.question[0].rdtype - is_ixfr = rdtype == dns.rdatatype.IXFR - origin = txn_manager.from_wire_origin() - wire = query.to_wire() - (af, destination, source) = _destination_and_source( - where, port, source, source_port - ) - (_, expiration) = _compute_times(lifetime) - retry = True - while retry: - retry = False - if is_ixfr and udp_mode != UDPMode.NEVER: - sock_type = socket.SOCK_DGRAM - is_udp = True - else: - sock_type = socket.SOCK_STREAM - is_udp = False - with _make_socket(af, sock_type, source) as s: - _connect(s, destination, expiration) - if is_udp: - _udp_send(s, wire, None, expiration) - else: - tcpmsg = struct.pack("!H", len(wire)) + wire - _net_write(s, tcpmsg, expiration) - with dns.xfr.Inbound(txn_manager, rdtype, serial, is_udp) as inbound: - done = False - tsig_ctx = None - while not done: - (_, mexpiration) = _compute_times(timeout) - if mexpiration is None or ( - expiration is not None and mexpiration > expiration - ): - mexpiration = expiration - if is_udp: - (rwire, _) = _udp_recv(s, 65535, mexpiration) - else: - ldata = _net_read(s, 2, mexpiration) - (l,) = struct.unpack("!H", ldata) - rwire = _net_read(s, l, mexpiration) - r = dns.message.from_wire( - rwire, - keyring=query.keyring, - request_mac=query.mac, - xfr=True, - origin=origin, - tsig_ctx=tsig_ctx, - multi=(not is_udp), - one_rr_per_rrset=is_ixfr, - ) - try: - done = inbound.process_message(r) - except dns.xfr.UseTCP: - assert is_udp # should not happen if we used TCP! - if udp_mode == UDPMode.ONLY: - raise - done = True - retry = True - udp_mode = UDPMode.NEVER - continue - tsig_ctx = r.tsig_ctx - if not retry and query.keyring and not r.had_tsig: - raise dns.exception.FormError("missing TSIG") diff --git a/server/venv/Lib/site-packages/dns/quic/__init__.py b/server/venv/Lib/site-packages/dns/quic/__init__.py deleted file mode 100644 index 69813f9..0000000 --- a/server/venv/Lib/site-packages/dns/quic/__init__.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -try: - import aioquic.quic.configuration # type: ignore - - import dns.asyncbackend - from dns._asyncbackend import NullContext - from dns.quic._asyncio import ( - AsyncioQuicConnection, - AsyncioQuicManager, - AsyncioQuicStream, - ) - from dns.quic._common import AsyncQuicConnection, AsyncQuicManager - from dns.quic._sync import SyncQuicConnection, SyncQuicManager, SyncQuicStream - - have_quic = True - - def null_factory( - *args, # pylint: disable=unused-argument - **kwargs # pylint: disable=unused-argument - ): - return NullContext(None) - - def _asyncio_manager_factory( - context, *args, **kwargs # pylint: disable=unused-argument - ): - return AsyncioQuicManager(*args, **kwargs) - - # We have a context factory and a manager factory as for trio we need to have - # a nursery. 
- - _async_factories = {"asyncio": (null_factory, _asyncio_manager_factory)} - - try: - import trio - - from dns.quic._trio import ( # pylint: disable=ungrouped-imports - TrioQuicConnection, - TrioQuicManager, - TrioQuicStream, - ) - - def _trio_context_factory(): - return trio.open_nursery() - - def _trio_manager_factory(context, *args, **kwargs): - return TrioQuicManager(context, *args, **kwargs) - - _async_factories["trio"] = (_trio_context_factory, _trio_manager_factory) - except ImportError: - pass - - def factories_for_backend(backend=None): - if backend is None: - backend = dns.asyncbackend.get_default_backend() - return _async_factories[backend.name()] - -except ImportError: - have_quic = False - - from typing import Any - - class AsyncQuicStream: # type: ignore - pass - - class AsyncQuicConnection: # type: ignore - async def make_stream(self) -> Any: - raise NotImplementedError - - class SyncQuicStream: # type: ignore - pass - - class SyncQuicConnection: # type: ignore - def make_stream(self) -> Any: - raise NotImplementedError diff --git a/server/venv/Lib/site-packages/dns/quic/_asyncio.py b/server/venv/Lib/site-packages/dns/quic/_asyncio.py deleted file mode 100644 index e1c5233..0000000 --- a/server/venv/Lib/site-packages/dns/quic/_asyncio.py +++ /dev/null @@ -1,223 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import asyncio -import socket -import ssl -import struct -import time - -import aioquic.quic.configuration # type: ignore -import aioquic.quic.connection # type: ignore -import aioquic.quic.events # type: ignore - -import dns.asyncbackend -import dns.exception -import dns.inet -from dns.quic._common import ( - QUIC_MAX_DATAGRAM, - AsyncQuicConnection, - AsyncQuicManager, - BaseQuicStream, - UnexpectedEOF, -) - - -class AsyncioQuicStream(BaseQuicStream): - def __init__(self, connection, stream_id): - super().__init__(connection, stream_id) - self._wake_up = asyncio.Condition() - - async def _wait_for_wake_up(self): - async with self._wake_up: - await self._wake_up.wait() - - async def wait_for(self, amount, expiration): - while True: - timeout = self._timeout_from_expiration(expiration) - if self._buffer.have(amount): - return - self._expecting = amount - try: - await asyncio.wait_for(self._wait_for_wake_up(), timeout) - except TimeoutError: - raise dns.exception.Timeout - self._expecting = 0 - - async def receive(self, timeout=None): - expiration = self._expiration_from_timeout(timeout) - await self.wait_for(2, expiration) - (size,) = struct.unpack("!H", self._buffer.get(2)) - await self.wait_for(size, expiration) - return self._buffer.get(size) - - async def send(self, datagram, is_end=False): - data = self._encapsulate(datagram) - await self._connection.write(self._stream_id, data, is_end) - - async def _add_input(self, data, is_end): - if self._common_add_input(data, is_end): - async with self._wake_up: - self._wake_up.notify() - - async def close(self): - self._close() - - # Streams are async context managers - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - await self.close() - async with self._wake_up: - self._wake_up.notify() - return False - - -class AsyncioQuicConnection(AsyncQuicConnection): - def __init__(self, connection, address, port, source, source_port, manager=None): - super().__init__(connection, address, port, source, source_port, manager) - self._socket = None - self._handshake_complete = asyncio.Event() - self._socket_created = asyncio.Event() - 
self._wake_timer = asyncio.Condition() - self._receiver_task = None - self._sender_task = None - - async def _receiver(self): - try: - af = dns.inet.af_for_address(self._address) - backend = dns.asyncbackend.get_backend("asyncio") - # Note that peer is a low-level address tuple, but make_socket() wants - # a high-level address tuple, so we convert. - self._socket = await backend.make_socket( - af, socket.SOCK_DGRAM, 0, self._source, (self._peer[0], self._peer[1]) - ) - self._socket_created.set() - async with self._socket: - while not self._done: - (datagram, address) = await self._socket.recvfrom( - QUIC_MAX_DATAGRAM, None - ) - if address[0] != self._peer[0] or address[1] != self._peer[1]: - continue - self._connection.receive_datagram( - datagram, self._peer[0], time.time() - ) - # Wake up the timer in case the sender is sleeping, as there may be - # stuff to send now. - async with self._wake_timer: - self._wake_timer.notify_all() - except Exception: - pass - finally: - self._done = True - async with self._wake_timer: - self._wake_timer.notify_all() - self._handshake_complete.set() - - async def _wait_for_wake_timer(self): - async with self._wake_timer: - await self._wake_timer.wait() - - async def _sender(self): - await self._socket_created.wait() - while not self._done: - datagrams = self._connection.datagrams_to_send(time.time()) - for datagram, address in datagrams: - assert address == self._peer[0] - await self._socket.sendto(datagram, self._peer, None) - (expiration, interval) = self._get_timer_values() - try: - await asyncio.wait_for(self._wait_for_wake_timer(), interval) - except Exception: - pass - self._handle_timer(expiration) - await self._handle_events() - - async def _handle_events(self): - count = 0 - while True: - event = self._connection.next_event() - if event is None: - return - if isinstance(event, aioquic.quic.events.StreamDataReceived): - stream = self._streams.get(event.stream_id) - if stream: - await stream._add_input(event.data, event.end_stream) - elif isinstance(event, aioquic.quic.events.HandshakeCompleted): - self._handshake_complete.set() - elif isinstance( - event, aioquic.quic.events.ConnectionTerminated - ) or isinstance(event, aioquic.quic.events.StreamReset): - self._done = True - self._receiver_task.cancel() - count += 1 - if count > 10: - # yield - count = 0 - await asyncio.sleep(0) - - async def write(self, stream, data, is_end=False): - self._connection.send_stream_data(stream, data, is_end) - async with self._wake_timer: - self._wake_timer.notify_all() - - def run(self): - if self._closed: - return - self._receiver_task = asyncio.Task(self._receiver()) - self._sender_task = asyncio.Task(self._sender()) - - async def make_stream(self, timeout=None): - try: - await asyncio.wait_for(self._handshake_complete.wait(), timeout) - except TimeoutError: - raise dns.exception.Timeout - if self._done: - raise UnexpectedEOF - stream_id = self._connection.get_next_available_stream_id(False) - stream = AsyncioQuicStream(self, stream_id) - self._streams[stream_id] = stream - return stream - - async def close(self): - if not self._closed: - self._manager.closed(self._peer[0], self._peer[1]) - self._closed = True - self._connection.close() - # sender might be blocked on this, so set it - self._socket_created.set() - await self._socket.close() - async with self._wake_timer: - self._wake_timer.notify_all() - try: - await self._receiver_task - except asyncio.CancelledError: - pass - try: - await self._sender_task - except asyncio.CancelledError: - pass - - -class 
AsyncioQuicManager(AsyncQuicManager): - def __init__(self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None): - super().__init__(conf, verify_mode, AsyncioQuicConnection, server_name) - - def connect(self, address, port=853, source=None, source_port=0): - (connection, start) = self._connect(address, port, source, source_port) - if start: - connection.run() - return connection - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - # Copy the iterator into a list as exiting things will mutate the connections - # table. - connections = list(self._connections.values()) - for connection in connections: - await connection.close() - return False diff --git a/server/venv/Lib/site-packages/dns/quic/_common.py b/server/venv/Lib/site-packages/dns/quic/_common.py deleted file mode 100644 index 38ec103..0000000 --- a/server/venv/Lib/site-packages/dns/quic/_common.py +++ /dev/null @@ -1,180 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import socket -import struct -import time -from typing import Any, Optional - -import aioquic.quic.configuration # type: ignore -import aioquic.quic.connection # type: ignore - -import dns.inet - -QUIC_MAX_DATAGRAM = 2048 - - -class UnexpectedEOF(Exception): - pass - - -class Buffer: - def __init__(self): - self._buffer = b"" - self._seen_end = False - - def put(self, data, is_end): - if self._seen_end: - return - self._buffer += data - if is_end: - self._seen_end = True - - def have(self, amount): - if len(self._buffer) >= amount: - return True - if self._seen_end: - raise UnexpectedEOF - return False - - def seen_end(self): - return self._seen_end - - def get(self, amount): - assert self.have(amount) - data = self._buffer[:amount] - self._buffer = self._buffer[amount:] - return data - - -class BaseQuicStream: - def __init__(self, connection, stream_id): - self._connection = connection - self._stream_id = stream_id - self._buffer = Buffer() - self._expecting = 0 - - def id(self): - return self._stream_id - - def _expiration_from_timeout(self, timeout): - if timeout is not None: - expiration = time.time() + timeout - else: - expiration = None - return expiration - - def _timeout_from_expiration(self, expiration): - if expiration is not None: - timeout = max(expiration - time.time(), 0.0) - else: - timeout = None - return timeout - - # Subclass must implement receive() as sync / async and which returns a message - # or raises UnexpectedEOF. - - def _encapsulate(self, datagram): - l = len(datagram) - return struct.pack("!H", l) + datagram - - def _common_add_input(self, data, is_end): - self._buffer.put(data, is_end) - return self._expecting > 0 and self._buffer.have(self._expecting) - - def _close(self): - self._connection.close_stream(self._stream_id) - self._buffer.put(b"", True) # send EOF in case we haven't seen it. 
- - -class BaseQuicConnection: - def __init__( - self, connection, address, port, source=None, source_port=0, manager=None - ): - self._done = False - self._connection = connection - self._address = address - self._port = port - self._closed = False - self._manager = manager - self._streams = {} - self._af = dns.inet.af_for_address(address) - self._peer = dns.inet.low_level_address_tuple((address, port)) - if source is None and source_port != 0: - if self._af == socket.AF_INET: - source = "0.0.0.0" - elif self._af == socket.AF_INET6: - source = "::" - else: - raise NotImplementedError - if source: - self._source = (source, source_port) - else: - self._source = None - - def close_stream(self, stream_id): - del self._streams[stream_id] - - def _get_timer_values(self, closed_is_special=True): - now = time.time() - expiration = self._connection.get_timer() - if expiration is None: - expiration = now + 3600 # arbitrary "big" value - interval = max(expiration - now, 0) - if self._closed and closed_is_special: - # lower sleep interval to avoid a race in the closing process - # which can lead to higher latency closing due to sleeping when - # we have events. - interval = min(interval, 0.05) - return (expiration, interval) - - def _handle_timer(self, expiration): - now = time.time() - if expiration <= now: - self._connection.handle_timer(now) - - -class AsyncQuicConnection(BaseQuicConnection): - async def make_stream(self, timeout: Optional[float] = None) -> Any: - pass - - -class BaseQuicManager: - def __init__(self, conf, verify_mode, connection_factory, server_name=None): - self._connections = {} - self._connection_factory = connection_factory - if conf is None: - verify_path = None - if isinstance(verify_mode, str): - verify_path = verify_mode - verify_mode = True - conf = aioquic.quic.configuration.QuicConfiguration( - alpn_protocols=["doq", "doq-i03"], - verify_mode=verify_mode, - server_name=server_name, - ) - if verify_path is not None: - conf.load_verify_locations(verify_path) - self._conf = conf - - def _connect(self, address, port=853, source=None, source_port=0): - connection = self._connections.get((address, port)) - if connection is not None: - return (connection, False) - qconn = aioquic.quic.connection.QuicConnection(configuration=self._conf) - qconn.connect(address, time.time()) - connection = self._connection_factory( - qconn, address, port, source, source_port, self - ) - self._connections[(address, port)] = connection - return (connection, True) - - def closed(self, address, port): - try: - del self._connections[(address, port)] - except KeyError: - pass - - -class AsyncQuicManager(BaseQuicManager): - def connect(self, address, port=853, source=None, source_port=0): - raise NotImplementedError diff --git a/server/venv/Lib/site-packages/dns/quic/_sync.py b/server/venv/Lib/site-packages/dns/quic/_sync.py deleted file mode 100644 index e944784..0000000 --- a/server/venv/Lib/site-packages/dns/quic/_sync.py +++ /dev/null @@ -1,226 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import selectors -import socket -import ssl -import struct -import threading -import time - -import aioquic.quic.configuration # type: ignore -import aioquic.quic.connection # type: ignore -import aioquic.quic.events # type: ignore - -import dns.exception -import dns.inet -from dns.quic._common import ( - QUIC_MAX_DATAGRAM, - BaseQuicConnection, - BaseQuicManager, - BaseQuicStream, - UnexpectedEOF, -) - -# Avoid circularity with dns.query -if hasattr(selectors, 
"PollSelector"): - _selector_class = selectors.PollSelector # type: ignore -else: - _selector_class = selectors.SelectSelector # type: ignore - - -class SyncQuicStream(BaseQuicStream): - def __init__(self, connection, stream_id): - super().__init__(connection, stream_id) - self._wake_up = threading.Condition() - self._lock = threading.Lock() - - def wait_for(self, amount, expiration): - while True: - timeout = self._timeout_from_expiration(expiration) - with self._lock: - if self._buffer.have(amount): - return - self._expecting = amount - with self._wake_up: - if not self._wake_up.wait(timeout): - raise dns.exception.Timeout - self._expecting = 0 - - def receive(self, timeout=None): - expiration = self._expiration_from_timeout(timeout) - self.wait_for(2, expiration) - with self._lock: - (size,) = struct.unpack("!H", self._buffer.get(2)) - self.wait_for(size, expiration) - with self._lock: - return self._buffer.get(size) - - def send(self, datagram, is_end=False): - data = self._encapsulate(datagram) - self._connection.write(self._stream_id, data, is_end) - - def _add_input(self, data, is_end): - if self._common_add_input(data, is_end): - with self._wake_up: - self._wake_up.notify() - - def close(self): - with self._lock: - self._close() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close() - with self._wake_up: - self._wake_up.notify() - return False - - -class SyncQuicConnection(BaseQuicConnection): - def __init__(self, connection, address, port, source, source_port, manager): - super().__init__(connection, address, port, source, source_port, manager) - self._socket = socket.socket(self._af, socket.SOCK_DGRAM, 0) - self._socket.connect(self._peer) - (self._send_wakeup, self._receive_wakeup) = socket.socketpair() - self._receive_wakeup.setblocking(False) - self._socket.setblocking(False) - if self._source is not None: - try: - self._socket.bind( - dns.inet.low_level_address_tuple(self._source, self._af) - ) - except Exception: - self._socket.close() - raise - self._handshake_complete = threading.Event() - self._worker_thread = None - self._lock = threading.Lock() - - def _read(self): - count = 0 - while count < 10: - count += 1 - try: - datagram = self._socket.recv(QUIC_MAX_DATAGRAM) - except BlockingIOError: - return - with self._lock: - self._connection.receive_datagram(datagram, self._peer[0], time.time()) - - def _drain_wakeup(self): - while True: - try: - self._receive_wakeup.recv(32) - except BlockingIOError: - return - - def _worker(self): - try: - sel = _selector_class() - sel.register(self._socket, selectors.EVENT_READ, self._read) - sel.register(self._receive_wakeup, selectors.EVENT_READ, self._drain_wakeup) - while not self._done: - (expiration, interval) = self._get_timer_values(False) - items = sel.select(interval) - for key, _ in items: - key.data() - with self._lock: - self._handle_timer(expiration) - datagrams = self._connection.datagrams_to_send(time.time()) - for datagram, _ in datagrams: - try: - self._socket.send(datagram) - except BlockingIOError: - # we let QUIC handle any lossage - pass - self._handle_events() - finally: - with self._lock: - self._done = True - # Ensure anyone waiting for this gets woken up. 
- self._handshake_complete.set() - - def _handle_events(self): - while True: - with self._lock: - event = self._connection.next_event() - if event is None: - return - if isinstance(event, aioquic.quic.events.StreamDataReceived): - with self._lock: - stream = self._streams.get(event.stream_id) - if stream: - stream._add_input(event.data, event.end_stream) - elif isinstance(event, aioquic.quic.events.HandshakeCompleted): - self._handshake_complete.set() - elif isinstance( - event, aioquic.quic.events.ConnectionTerminated - ) or isinstance(event, aioquic.quic.events.StreamReset): - with self._lock: - self._done = True - - def write(self, stream, data, is_end=False): - with self._lock: - self._connection.send_stream_data(stream, data, is_end) - self._send_wakeup.send(b"\x01") - - def run(self): - if self._closed: - return - self._worker_thread = threading.Thread(target=self._worker) - self._worker_thread.start() - - def make_stream(self, timeout=None): - if not self._handshake_complete.wait(timeout): - raise dns.exception.Timeout - with self._lock: - if self._done: - raise UnexpectedEOF - stream_id = self._connection.get_next_available_stream_id(False) - stream = SyncQuicStream(self, stream_id) - self._streams[stream_id] = stream - return stream - - def close_stream(self, stream_id): - with self._lock: - super().close_stream(stream_id) - - def close(self): - with self._lock: - if self._closed: - return - self._manager.closed(self._peer[0], self._peer[1]) - self._closed = True - self._connection.close() - self._send_wakeup.send(b"\x01") - self._worker_thread.join() - - -class SyncQuicManager(BaseQuicManager): - def __init__(self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None): - super().__init__(conf, verify_mode, SyncQuicConnection, server_name) - self._lock = threading.Lock() - - def connect(self, address, port=853, source=None, source_port=0): - with self._lock: - (connection, start) = self._connect(address, port, source, source_port) - if start: - connection.run() - return connection - - def closed(self, address, port): - with self._lock: - super().closed(address, port) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - # Copy the iterator into a list as exiting things will mutate the connections - # table. 
- connections = list(self._connections.values()) - for connection in connections: - connection.close() - return False diff --git a/server/venv/Lib/site-packages/dns/quic/_trio.py b/server/venv/Lib/site-packages/dns/quic/_trio.py deleted file mode 100644 index ee07e4f..0000000 --- a/server/venv/Lib/site-packages/dns/quic/_trio.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import socket -import ssl -import struct -import time - -import aioquic.quic.configuration # type: ignore -import aioquic.quic.connection # type: ignore -import aioquic.quic.events # type: ignore -import trio - -import dns.exception -import dns.inet -from dns._asyncbackend import NullContext -from dns.quic._common import ( - QUIC_MAX_DATAGRAM, - AsyncQuicConnection, - AsyncQuicManager, - BaseQuicStream, - UnexpectedEOF, -) - - -class TrioQuicStream(BaseQuicStream): - def __init__(self, connection, stream_id): - super().__init__(connection, stream_id) - self._wake_up = trio.Condition() - - async def wait_for(self, amount): - while True: - if self._buffer.have(amount): - return - self._expecting = amount - async with self._wake_up: - await self._wake_up.wait() - self._expecting = 0 - - async def receive(self, timeout=None): - if timeout is None: - context = NullContext(None) - else: - context = trio.move_on_after(timeout) - with context: - await self.wait_for(2) - (size,) = struct.unpack("!H", self._buffer.get(2)) - await self.wait_for(size) - return self._buffer.get(size) - raise dns.exception.Timeout - - async def send(self, datagram, is_end=False): - data = self._encapsulate(datagram) - await self._connection.write(self._stream_id, data, is_end) - - async def _add_input(self, data, is_end): - if self._common_add_input(data, is_end): - async with self._wake_up: - self._wake_up.notify() - - async def close(self): - self._close() - - # Streams are async context managers - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - await self.close() - async with self._wake_up: - self._wake_up.notify() - return False - - -class TrioQuicConnection(AsyncQuicConnection): - def __init__(self, connection, address, port, source, source_port, manager=None): - super().__init__(connection, address, port, source, source_port, manager) - self._socket = trio.socket.socket(self._af, socket.SOCK_DGRAM, 0) - if self._source: - trio.socket.bind(dns.inet.low_level_address_tuple(self._source, self._af)) - self._handshake_complete = trio.Event() - self._run_done = trio.Event() - self._worker_scope = None - - async def _worker(self): - try: - await self._socket.connect(self._peer) - while not self._done: - (expiration, interval) = self._get_timer_values(False) - with trio.CancelScope( - deadline=trio.current_time() + interval - ) as self._worker_scope: - datagram = await self._socket.recv(QUIC_MAX_DATAGRAM) - self._connection.receive_datagram( - datagram, self._peer[0], time.time() - ) - self._worker_scope = None - self._handle_timer(expiration) - datagrams = self._connection.datagrams_to_send(time.time()) - for datagram, _ in datagrams: - await self._socket.send(datagram) - await self._handle_events() - finally: - self._done = True - self._handshake_complete.set() - - async def _handle_events(self): - count = 0 - while True: - event = self._connection.next_event() - if event is None: - return - if isinstance(event, aioquic.quic.events.StreamDataReceived): - stream = self._streams.get(event.stream_id) - if stream: - await 
stream._add_input(event.data, event.end_stream) - elif isinstance(event, aioquic.quic.events.HandshakeCompleted): - self._handshake_complete.set() - elif isinstance( - event, aioquic.quic.events.ConnectionTerminated - ) or isinstance(event, aioquic.quic.events.StreamReset): - self._done = True - self._socket.close() - count += 1 - if count > 10: - # yield - count = 0 - await trio.sleep(0) - - async def write(self, stream, data, is_end=False): - self._connection.send_stream_data(stream, data, is_end) - if self._worker_scope is not None: - self._worker_scope.cancel() - - async def run(self): - if self._closed: - return - async with trio.open_nursery() as nursery: - nursery.start_soon(self._worker) - self._run_done.set() - - async def make_stream(self, timeout=None): - if timeout is None: - context = NullContext(None) - else: - context = trio.move_on_after(timeout) - with context: - await self._handshake_complete.wait() - if self._done: - raise UnexpectedEOF - stream_id = self._connection.get_next_available_stream_id(False) - stream = TrioQuicStream(self, stream_id) - self._streams[stream_id] = stream - return stream - raise dns.exception.Timeout - - async def close(self): - if not self._closed: - self._manager.closed(self._peer[0], self._peer[1]) - self._closed = True - self._connection.close() - if self._worker_scope is not None: - self._worker_scope.cancel() - await self._run_done.wait() - - -class TrioQuicManager(AsyncQuicManager): - def __init__( - self, nursery, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None - ): - super().__init__(conf, verify_mode, TrioQuicConnection, server_name) - self._nursery = nursery - - def connect(self, address, port=853, source=None, source_port=0): - (connection, start) = self._connect(address, port, source, source_port) - if start: - self._nursery.start_soon(connection.run) - return connection - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - # Copy the iterator into a list as exiting things will mutate the connections - # table. - connections = list(self._connections.values()) - for connection in connections: - await connection.close() - return False diff --git a/server/venv/Lib/site-packages/dns/rcode.py b/server/venv/Lib/site-packages/dns/rcode.py deleted file mode 100644 index 8e6386f..0000000 --- a/server/venv/Lib/site-packages/dns/rcode.py +++ /dev/null @@ -1,168 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
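All three backends (sync, asyncio, trio) share the same shape: a manager pools one connection per ``(address, port)``, ``connect()`` returns it (starting its worker on first use), and each stream carries one length-prefixed query/response exchange. A usage sketch with the synchronous backend; it assumes ``aioquic`` is installed, that ``dns.quic`` re-exports ``SyncQuicManager``, and that the resolver address and timeout are purely illustrative:

.. code-block:: python

    import dns.message
    import dns.quic

    query = dns.message.make_query("example.com.", "A")
    with dns.quic.SyncQuicManager() as manager:
        # Connections are pooled per (address, port); port 853 is the default.
        connection = manager.connect("94.140.14.14")
        with connection.make_stream(timeout=5) as stream:
            stream.send(query.to_wire(), is_end=True)
            wire = stream.receive(timeout=5)
    print(dns.message.from_wire(wire).to_text())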
- -"""DNS Result Codes.""" - -from typing import Tuple - -import dns.enum -import dns.exception - - -class Rcode(dns.enum.IntEnum): - #: No error - NOERROR = 0 - #: Format error - FORMERR = 1 - #: Server failure - SERVFAIL = 2 - #: Name does not exist ("Name Error" in RFC 1025 terminology). - NXDOMAIN = 3 - #: Not implemented - NOTIMP = 4 - #: Refused - REFUSED = 5 - #: Name exists. - YXDOMAIN = 6 - #: RRset exists. - YXRRSET = 7 - #: RRset does not exist. - NXRRSET = 8 - #: Not authoritative. - NOTAUTH = 9 - #: Name not in zone. - NOTZONE = 10 - #: DSO-TYPE Not Implemented - DSOTYPENI = 11 - #: Bad EDNS version. - BADVERS = 16 - #: TSIG Signature Failure - BADSIG = 16 - #: Key not recognized. - BADKEY = 17 - #: Signature out of time window. - BADTIME = 18 - #: Bad TKEY Mode. - BADMODE = 19 - #: Duplicate key name. - BADNAME = 20 - #: Algorithm not supported. - BADALG = 21 - #: Bad Truncation - BADTRUNC = 22 - #: Bad/missing Server Cookie - BADCOOKIE = 23 - - @classmethod - def _maximum(cls): - return 4095 - - @classmethod - def _unknown_exception_class(cls): - return UnknownRcode - - -class UnknownRcode(dns.exception.DNSException): - """A DNS rcode is unknown.""" - - -def from_text(text: str) -> Rcode: - """Convert text into an rcode. - - *text*, a ``str``, the textual rcode or an integer in textual form. - - Raises ``dns.rcode.UnknownRcode`` if the rcode mnemonic is unknown. - - Returns a ``dns.rcode.Rcode``. - """ - - return Rcode.from_text(text) - - -def from_flags(flags: int, ednsflags: int) -> Rcode: - """Return the rcode value encoded by flags and ednsflags. - - *flags*, an ``int``, the DNS flags field. - - *ednsflags*, an ``int``, the EDNS flags field. - - Raises ``ValueError`` if rcode is < 0 or > 4095 - - Returns a ``dns.rcode.Rcode``. - """ - - value = (flags & 0x000F) | ((ednsflags >> 20) & 0xFF0) - return Rcode.make(value) - - -def to_flags(value: Rcode) -> Tuple[int, int]: - """Return a (flags, ednsflags) tuple which encodes the rcode. - - *value*, a ``dns.rcode.Rcode``, the rcode. - - Raises ``ValueError`` if rcode is < 0 or > 4095. - - Returns an ``(int, int)`` tuple. - """ - - if value < 0 or value > 4095: - raise ValueError("rcode must be >= 0 and <= 4095") - v = value & 0xF - ev = (value & 0xFF0) << 20 - return (v, ev) - - -def to_text(value: Rcode, tsig: bool = False) -> str: - """Convert rcode into text. - - *value*, a ``dns.rcode.Rcode``, the rcode. - - Raises ``ValueError`` if rcode is < 0 or > 4095. - - Returns a ``str``. - """ - - if tsig and value == Rcode.BADVERS: - return "BADSIG" - return Rcode.to_text(value) - - -### BEGIN generated Rcode constants - -NOERROR = Rcode.NOERROR -FORMERR = Rcode.FORMERR -SERVFAIL = Rcode.SERVFAIL -NXDOMAIN = Rcode.NXDOMAIN -NOTIMP = Rcode.NOTIMP -REFUSED = Rcode.REFUSED -YXDOMAIN = Rcode.YXDOMAIN -YXRRSET = Rcode.YXRRSET -NXRRSET = Rcode.NXRRSET -NOTAUTH = Rcode.NOTAUTH -NOTZONE = Rcode.NOTZONE -DSOTYPENI = Rcode.DSOTYPENI -BADVERS = Rcode.BADVERS -BADSIG = Rcode.BADSIG -BADKEY = Rcode.BADKEY -BADTIME = Rcode.BADTIME -BADMODE = Rcode.BADMODE -BADNAME = Rcode.BADNAME -BADALG = Rcode.BADALG -BADTRUNC = Rcode.BADTRUNC -BADCOOKIE = Rcode.BADCOOKIE - -### END generated Rcode constants diff --git a/server/venv/Lib/site-packages/dns/rdata.py b/server/venv/Lib/site-packages/dns/rdata.py deleted file mode 100644 index 0d262e8..0000000 --- a/server/venv/Lib/site-packages/dns/rdata.py +++ /dev/null @@ -1,889 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS rdata.""" - -import base64 -import binascii -import inspect -import io -import itertools -import random -from importlib import import_module -from typing import Any, Dict, Optional, Tuple, Union - -import dns.exception -import dns.immutable -import dns.ipv4 -import dns.ipv6 -import dns.name -import dns.rdataclass -import dns.rdatatype -import dns.tokenizer -import dns.ttl -import dns.wire - -_chunksize = 32 - -# We currently allow comparisons for rdata with relative names for backwards -# compatibility, but in the future we will not, as these kinds of comparisons -# can lead to subtle bugs if code is not carefully written. -# -# This switch allows the future behavior to be turned on so code can be -# tested with it. -_allow_relative_comparisons = True - - -class NoRelativeRdataOrdering(dns.exception.DNSException): - """An attempt was made to do an ordered comparison of one or more - rdata with relative names. The only reliable way of sorting rdata - is to use non-relativized rdata. - - """ - - -def _wordbreak(data, chunksize=_chunksize, separator=b" "): - """Break a binary string into chunks of chunksize characters separated by - a space. - """ - - if not chunksize: - return data.decode() - return separator.join( - [data[i : i + chunksize] for i in range(0, len(data), chunksize)] - ).decode() - - -# pylint: disable=unused-argument - - -def _hexify(data, chunksize=_chunksize, separator=b" ", **kw): - """Convert a binary string into its hex encoding, broken up into chunks - of chunksize characters separated by a separator. - """ - - return _wordbreak(binascii.hexlify(data), chunksize, separator) - - -def _base64ify(data, chunksize=_chunksize, separator=b" ", **kw): - """Convert a binary string into its base64 encoding, broken up into chunks - of chunksize characters separated by a separator. - """ - - return _wordbreak(base64.b64encode(data), chunksize, separator) - - -# pylint: enable=unused-argument - -__escaped = b'"\\' - - -def _escapify(qstring): - """Escape the characters in a quoted string which need it.""" - - if isinstance(qstring, str): - qstring = qstring.encode() - if not isinstance(qstring, bytearray): - qstring = bytearray(qstring) - - text = "" - for c in qstring: - if c in __escaped: - text += "\\" + chr(c) - elif c >= 0x20 and c < 0x7F: - text += chr(c) - else: - text += "\\%03d" % c - return text - - -def _truncate_bitmap(what): - """Determine the index of greatest byte that isn't all zeros, and - return the bitmap that contains all the bytes less than that index. - """ - - for i in range(len(what) - 1, -1, -1): - if what[i] != 0: - return what[0 : i + 1] - return what[0:1] - - -# So we don't have to edit all the rdata classes... 
-_constify = dns.immutable.constify - - -@dns.immutable.immutable -class Rdata: - """Base class for all DNS rdata types.""" - - __slots__ = ["rdclass", "rdtype", "rdcomment"] - - def __init__(self, rdclass, rdtype): - """Initialize an rdata. - - *rdclass*, an ``int`` is the rdataclass of the Rdata. - - *rdtype*, an ``int`` is the rdatatype of the Rdata. - """ - - self.rdclass = self._as_rdataclass(rdclass) - self.rdtype = self._as_rdatatype(rdtype) - self.rdcomment = None - - def _get_all_slots(self): - return itertools.chain.from_iterable( - getattr(cls, "__slots__", []) for cls in self.__class__.__mro__ - ) - - def __getstate__(self): - # We used to try to do a tuple of all slots here, but it - # doesn't work as self._all_slots isn't available at - # __setstate__() time. Before that we tried to store a tuple - # of __slots__, but that didn't work as it didn't store the - # slots defined by ancestors. This older way didn't fail - # outright, but ended up with partially broken objects, e.g. - # if you unpickled an A RR it wouldn't have rdclass and rdtype - # attributes, and would compare badly. - state = {} - for slot in self._get_all_slots(): - state[slot] = getattr(self, slot) - return state - - def __setstate__(self, state): - for slot, val in state.items(): - object.__setattr__(self, slot, val) - if not hasattr(self, "rdcomment"): - # Pickled rdata from 2.0.x might not have a rdcomment, so add - # it if needed. - object.__setattr__(self, "rdcomment", None) - - def covers(self) -> dns.rdatatype.RdataType: - """Return the type a Rdata covers. - - DNS SIG/RRSIG rdatas apply to a specific type; this type is - returned by the covers() function. If the rdata type is not - SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when - creating rdatasets, allowing the rdataset to contain only RRSIGs - of a particular type, e.g. RRSIG(NS). - - Returns a ``dns.rdatatype.RdataType``. - """ - - return dns.rdatatype.NONE - - def extended_rdatatype(self) -> int: - """Return a 32-bit type value, the least significant 16 bits of - which are the ordinary DNS type, and the upper 16 bits of which are - the "covered" type, if any. - - Returns an ``int``. - """ - - return self.covers() << 16 | self.rdtype - - def to_text( - self, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - **kw: Dict[str, Any] - ) -> str: - """Convert an rdata to text format. - - Returns a ``str``. - """ - - raise NotImplementedError # pragma: no cover - - def _to_wire( - self, - file: Optional[Any], - compress: Optional[dns.name.CompressType] = None, - origin: Optional[dns.name.Name] = None, - canonicalize: bool = False, - ) -> bytes: - raise NotImplementedError # pragma: no cover - - def to_wire( - self, - file: Optional[Any] = None, - compress: Optional[dns.name.CompressType] = None, - origin: Optional[dns.name.Name] = None, - canonicalize: bool = False, - ) -> bytes: - """Convert an rdata to wire format. - - Returns a ``bytes`` or ``None``. - """ - - if file: - return self._to_wire(file, compress, origin, canonicalize) - else: - f = io.BytesIO() - self._to_wire(f, compress, origin, canonicalize) - return f.getvalue() - - def to_generic( - self, origin: Optional[dns.name.Name] = None - ) -> "dns.rdata.GenericRdata": - """Creates a dns.rdata.GenericRdata equivalent of this rdata. - - Returns a ``dns.rdata.GenericRdata``. 
- """ - return dns.rdata.GenericRdata( - self.rdclass, self.rdtype, self.to_wire(origin=origin) - ) - - def to_digestable(self, origin: Optional[dns.name.Name] = None) -> bytes: - """Convert rdata to a format suitable for digesting in hashes. This - is also the DNSSEC canonical form. - - Returns a ``bytes``. - """ - - return self.to_wire(origin=origin, canonicalize=True) - - def __repr__(self): - covers = self.covers() - if covers == dns.rdatatype.NONE: - ctext = "" - else: - ctext = "(" + dns.rdatatype.to_text(covers) + ")" - return ( - "" - ) - - def __str__(self): - return self.to_text() - - def _cmp(self, other): - """Compare an rdata with another rdata of the same rdtype and - rdclass. - - For rdata with only absolute names: - Return < 0 if self < other in the DNSSEC ordering, 0 if self - == other, and > 0 if self > other. - For rdata with at least one relative names: - The rdata sorts before any rdata with only absolute names. - When compared with another relative rdata, all names are - made absolute as if they were relative to the root, as the - proper origin is not available. While this creates a stable - ordering, it is NOT guaranteed to be the DNSSEC ordering. - In the future, all ordering comparisons for rdata with - relative names will be disallowed. - """ - try: - our = self.to_digestable() - our_relative = False - except dns.name.NeedAbsoluteNameOrOrigin: - if _allow_relative_comparisons: - our = self.to_digestable(dns.name.root) - our_relative = True - try: - their = other.to_digestable() - their_relative = False - except dns.name.NeedAbsoluteNameOrOrigin: - if _allow_relative_comparisons: - their = other.to_digestable(dns.name.root) - their_relative = True - if _allow_relative_comparisons: - if our_relative != their_relative: - # For the purpose of comparison, all rdata with at least one - # relative name is less than an rdata with only absolute names. 
- if our_relative: - return -1 - else: - return 1 - elif our_relative or their_relative: - raise NoRelativeRdataOrdering - if our == their: - return 0 - elif our > their: - return 1 - else: - return -1 - - def __eq__(self, other): - if not isinstance(other, Rdata): - return False - if self.rdclass != other.rdclass or self.rdtype != other.rdtype: - return False - our_relative = False - their_relative = False - try: - our = self.to_digestable() - except dns.name.NeedAbsoluteNameOrOrigin: - our = self.to_digestable(dns.name.root) - our_relative = True - try: - their = other.to_digestable() - except dns.name.NeedAbsoluteNameOrOrigin: - their = other.to_digestable(dns.name.root) - their_relative = True - if our_relative != their_relative: - return False - return our == their - - def __ne__(self, other): - if not isinstance(other, Rdata): - return True - if self.rdclass != other.rdclass or self.rdtype != other.rdtype: - return True - return not self.__eq__(other) - - def __lt__(self, other): - if ( - not isinstance(other, Rdata) - or self.rdclass != other.rdclass - or self.rdtype != other.rdtype - ): - return NotImplemented - return self._cmp(other) < 0 - - def __le__(self, other): - if ( - not isinstance(other, Rdata) - or self.rdclass != other.rdclass - or self.rdtype != other.rdtype - ): - return NotImplemented - return self._cmp(other) <= 0 - - def __ge__(self, other): - if ( - not isinstance(other, Rdata) - or self.rdclass != other.rdclass - or self.rdtype != other.rdtype - ): - return NotImplemented - return self._cmp(other) >= 0 - - def __gt__(self, other): - if ( - not isinstance(other, Rdata) - or self.rdclass != other.rdclass - or self.rdtype != other.rdtype - ): - return NotImplemented - return self._cmp(other) > 0 - - def __hash__(self): - return hash(self.to_digestable(dns.name.root)) - - @classmethod - def from_text( - cls, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - tok: dns.tokenizer.Tokenizer, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, - ) -> "Rdata": - raise NotImplementedError # pragma: no cover - - @classmethod - def from_wire_parser( - cls, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - parser: dns.wire.Parser, - origin: Optional[dns.name.Name] = None, - ) -> "Rdata": - raise NotImplementedError # pragma: no cover - - def replace(self, **kwargs: Any) -> "Rdata": - """ - Create a new Rdata instance based on the instance replace was - invoked on. It is possible to pass different parameters to - override the corresponding properties of the base Rdata. - - Any field specific to the Rdata type can be replaced, but the - *rdtype* and *rdclass* fields cannot. - - Returns an instance of the same Rdata subclass as *self*. - """ - - # Get the constructor parameters. - parameters = inspect.signature(self.__init__).parameters # type: ignore - - # Ensure that all of the arguments correspond to valid fields. - # Don't allow rdclass or rdtype to be changed, though. - for key in kwargs: - if key == "rdcomment": - continue - if key not in parameters: - raise AttributeError( - "'{}' object has no attribute '{}'".format( - self.__class__.__name__, key - ) - ) - if key in ("rdclass", "rdtype"): - raise AttributeError( - "Cannot overwrite '{}' attribute '{}'".format( - self.__class__.__name__, key - ) - ) - - # Construct the parameter list. For each field, use the value in - # kwargs if present, and the current value otherwise. 
- args = (kwargs.get(key, getattr(self, key)) for key in parameters) - - # Create, validate, and return the new object. - rd = self.__class__(*args) - # The comment is not set in the constructor, so give it special - # handling. - rdcomment = kwargs.get("rdcomment", self.rdcomment) - if rdcomment is not None: - object.__setattr__(rd, "rdcomment", rdcomment) - return rd - - # Type checking and conversion helpers. These are class methods as - # they don't touch object state and may be useful to others. - - @classmethod - def _as_rdataclass(cls, value): - return dns.rdataclass.RdataClass.make(value) - - @classmethod - def _as_rdatatype(cls, value): - return dns.rdatatype.RdataType.make(value) - - @classmethod - def _as_bytes( - cls, - value: Any, - encode: bool = False, - max_length: Optional[int] = None, - empty_ok: bool = True, - ) -> bytes: - if encode and isinstance(value, str): - bvalue = value.encode() - elif isinstance(value, bytearray): - bvalue = bytes(value) - elif isinstance(value, bytes): - bvalue = value - else: - raise ValueError("not bytes") - if max_length is not None and len(bvalue) > max_length: - raise ValueError("too long") - if not empty_ok and len(bvalue) == 0: - raise ValueError("empty bytes not allowed") - return bvalue - - @classmethod - def _as_name(cls, value): - # Note that proper name conversion (e.g. with origin and IDNA - # awareness) is expected to be done via from_text. This is just - # a simple thing for people invoking the constructor directly. - if isinstance(value, str): - return dns.name.from_text(value) - elif not isinstance(value, dns.name.Name): - raise ValueError("not a name") - return value - - @classmethod - def _as_uint8(cls, value): - if not isinstance(value, int): - raise ValueError("not an integer") - if value < 0 or value > 255: - raise ValueError("not a uint8") - return value - - @classmethod - def _as_uint16(cls, value): - if not isinstance(value, int): - raise ValueError("not an integer") - if value < 0 or value > 65535: - raise ValueError("not a uint16") - return value - - @classmethod - def _as_uint32(cls, value): - if not isinstance(value, int): - raise ValueError("not an integer") - if value < 0 or value > 4294967295: - raise ValueError("not a uint32") - return value - - @classmethod - def _as_uint48(cls, value): - if not isinstance(value, int): - raise ValueError("not an integer") - if value < 0 or value > 281474976710655: - raise ValueError("not a uint48") - return value - - @classmethod - def _as_int(cls, value, low=None, high=None): - if not isinstance(value, int): - raise ValueError("not an integer") - if low is not None and value < low: - raise ValueError("value too small") - if high is not None and value > high: - raise ValueError("value too large") - return value - - @classmethod - def _as_ipv4_address(cls, value): - if isinstance(value, str): - # call to check validity - dns.ipv4.inet_aton(value) - return value - elif isinstance(value, bytes): - return dns.ipv4.inet_ntoa(value) - else: - raise ValueError("not an IPv4 address") - - @classmethod - def _as_ipv6_address(cls, value): - if isinstance(value, str): - # call to check validity - dns.ipv6.inet_aton(value) - return value - elif isinstance(value, bytes): - return dns.ipv6.inet_ntoa(value) - else: - raise ValueError("not an IPv6 address") - - @classmethod - def _as_bool(cls, value): - if isinstance(value, bool): - return value - else: - raise ValueError("not a boolean") - - @classmethod - def _as_ttl(cls, value): - if isinstance(value, int): - return cls._as_int(value, 0, 
dns.ttl.MAX_TTL) - elif isinstance(value, str): - return dns.ttl.from_text(value) - else: - raise ValueError("not a TTL") - - @classmethod - def _as_tuple(cls, value, as_value): - try: - # For user convenience, if value is a singleton of the list - # element type, wrap it in a tuple. - return (as_value(value),) - except Exception: - # Otherwise, check each element of the iterable *value* - # against *as_value*. - return tuple(as_value(v) for v in value) - - # Processing order - - @classmethod - def _processing_order(cls, iterable): - items = list(iterable) - random.shuffle(items) - return items - - -@dns.immutable.immutable -class GenericRdata(Rdata): - - """Generic Rdata Class - - This class is used for rdata types for which we have no better - implementation. It implements the DNS "unknown RRs" scheme. - """ - - __slots__ = ["data"] - - def __init__(self, rdclass, rdtype, data): - super().__init__(rdclass, rdtype) - self.data = data - - def to_text( - self, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - **kw: Dict[str, Any] - ) -> str: - return r"\# %d " % len(self.data) + _hexify(self.data, **kw) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - token = tok.get() - if not token.is_identifier() or token.value != r"\#": - raise dns.exception.SyntaxError(r"generic rdata does not start with \#") - length = tok.get_int() - hex = tok.concatenate_remaining_identifiers(True).encode() - data = binascii.unhexlify(hex) - if len(data) != length: - raise dns.exception.SyntaxError("generic rdata hex data has wrong length") - return cls(rdclass, rdtype, data) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(self.data) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - return cls(rdclass, rdtype, parser.get_remaining()) - - -_rdata_classes: Dict[ - Tuple[dns.rdataclass.RdataClass, dns.rdatatype.RdataType], Any -] = {} -_module_prefix = "dns.rdtypes" - - -def get_rdata_class(rdclass, rdtype): - cls = _rdata_classes.get((rdclass, rdtype)) - if not cls: - cls = _rdata_classes.get((dns.rdatatype.ANY, rdtype)) - if not cls: - rdclass_text = dns.rdataclass.to_text(rdclass) - rdtype_text = dns.rdatatype.to_text(rdtype) - rdtype_text = rdtype_text.replace("-", "_") - try: - mod = import_module( - ".".join([_module_prefix, rdclass_text, rdtype_text]) - ) - cls = getattr(mod, rdtype_text) - _rdata_classes[(rdclass, rdtype)] = cls - except ImportError: - try: - mod = import_module(".".join([_module_prefix, "ANY", rdtype_text])) - cls = getattr(mod, rdtype_text) - _rdata_classes[(dns.rdataclass.ANY, rdtype)] = cls - _rdata_classes[(rdclass, rdtype)] = cls - except ImportError: - pass - if not cls: - cls = GenericRdata - _rdata_classes[(rdclass, rdtype)] = cls - return cls - - -def from_text( - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - tok: Union[dns.tokenizer.Tokenizer, str], - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, - idna_codec: Optional[dns.name.IDNACodec] = None, -) -> Rdata: - """Build an rdata object from text format. - - This function attempts to dynamically load a class which - implements the specified rdata class and type. If there is no - class-and-type-specific implementation, the GenericRdata class - is used. 
- - Once a class is chosen, its from_text() class method is called - with the parameters to this function. - - If *tok* is a ``str``, then a tokenizer is created and the string - is used as its input. - - *rdclass*, a ``dns.rdataclass.RdataClass`` or ``str``, the rdataclass. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdatatype. - - *tok*, a ``dns.tokenizer.Tokenizer`` or a ``str``. - - *origin*, a ``dns.name.Name`` (or ``None``), the - origin to use for relative names. - - *relativize*, a ``bool``. If true, name will be relativized. - - *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use - when relativizing names. If not set, the *origin* value will be used. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder to use if a tokenizer needs to be created. If - ``None``, the default IDNA 2003 encoder/decoder is used. If a - tokenizer is not created, then the codec associated with the tokenizer - is the one that is used. - - Returns an instance of the chosen Rdata subclass. - - """ - if isinstance(tok, str): - tok = dns.tokenizer.Tokenizer(tok, idna_codec=idna_codec) - rdclass = dns.rdataclass.RdataClass.make(rdclass) - rdtype = dns.rdatatype.RdataType.make(rdtype) - cls = get_rdata_class(rdclass, rdtype) - with dns.exception.ExceptionWrapper(dns.exception.SyntaxError): - rdata = None - if cls != GenericRdata: - # peek at first token - token = tok.get() - tok.unget(token) - if token.is_identifier() and token.value == r"\#": - # - # Known type using the generic syntax. Extract the - # wire form from the generic syntax, and then run - # from_wire on it. - # - grdata = GenericRdata.from_text( - rdclass, rdtype, tok, origin, relativize, relativize_to - ) - rdata = from_wire( - rdclass, rdtype, grdata.data, 0, len(grdata.data), origin - ) - # - # If this comparison isn't equal, then there must have been - # compressed names in the wire format, which is an error, - # there being no reasonable context to decompress with. - # - rwire = rdata.to_wire() - if rwire != grdata.data: - raise dns.exception.SyntaxError( - "compressed data in " - "generic syntax form " - "of known rdatatype" - ) - if rdata is None: - rdata = cls.from_text( - rdclass, rdtype, tok, origin, relativize, relativize_to - ) - token = tok.get_eol_as_token() - if token.comment is not None: - object.__setattr__(rdata, "rdcomment", token.comment) - return rdata - - -def from_wire_parser( - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - parser: dns.wire.Parser, - origin: Optional[dns.name.Name] = None, -) -> Rdata: - """Build an rdata object from wire format - - This function attempts to dynamically load a class which - implements the specified rdata class and type. If there is no - class-and-type-specific implementation, the GenericRdata class - is used. - - Once a class is chosen, its from_wire() class method is called - with the parameters to this function. - - *rdclass*, a ``dns.rdataclass.RdataClass`` or ``str``, the rdataclass. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdatatype. - - *parser*, a ``dns.wire.Parser``, the parser, which should be - restricted to the rdata length. - - *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, - then names will be relativized to this origin. - - Returns an instance of the chosen Rdata subclass. 
- """ - - rdclass = dns.rdataclass.RdataClass.make(rdclass) - rdtype = dns.rdatatype.RdataType.make(rdtype) - cls = get_rdata_class(rdclass, rdtype) - with dns.exception.ExceptionWrapper(dns.exception.FormError): - return cls.from_wire_parser(rdclass, rdtype, parser, origin) - - -def from_wire( - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - wire: bytes, - current: int, - rdlen: int, - origin: Optional[dns.name.Name] = None, -) -> Rdata: - """Build an rdata object from wire format - - This function attempts to dynamically load a class which - implements the specified rdata class and type. If there is no - class-and-type-specific implementation, the GenericRdata class - is used. - - Once a class is chosen, its from_wire() class method is called - with the parameters to this function. - - *rdclass*, an ``int``, the rdataclass. - - *rdtype*, an ``int``, the rdatatype. - - *wire*, a ``bytes``, the wire-format message. - - *current*, an ``int``, the offset in wire of the beginning of - the rdata. - - *rdlen*, an ``int``, the length of the wire-format rdata - - *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, - then names will be relativized to this origin. - - Returns an instance of the chosen Rdata subclass. - """ - parser = dns.wire.Parser(wire, current) - with parser.restrict_to(rdlen): - return from_wire_parser(rdclass, rdtype, parser, origin) - - -class RdatatypeExists(dns.exception.DNSException): - """DNS rdatatype already exists.""" - - supp_kwargs = {"rdclass", "rdtype"} - fmt = ( - "The rdata type with class {rdclass:d} and rdtype {rdtype:d} " - + "already exists." - ) - - -def register_type( - implementation: Any, - rdtype: int, - rdtype_text: str, - is_singleton: bool = False, - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, -) -> None: - """Dynamically register a module to handle an rdatatype. - - *implementation*, a module implementing the type in the usual dnspython - way. - - *rdtype*, an ``int``, the rdatatype to register. - - *rdtype_text*, a ``str``, the textual form of the rdatatype. - - *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. - RRsets of the type can have only one member.) - - *rdclass*, the rdataclass of the type, or ``dns.rdataclass.ANY`` if - it applies to all classes. - """ - - rdtype = dns.rdatatype.RdataType.make(rdtype) - existing_cls = get_rdata_class(rdclass, rdtype) - if existing_cls != GenericRdata or dns.rdatatype.is_metatype(rdtype): - raise RdatatypeExists(rdclass=rdclass, rdtype=rdtype) - _rdata_classes[(rdclass, rdtype)] = getattr( - implementation, rdtype_text.replace("-", "_") - ) - dns.rdatatype.register_type(rdtype, rdtype_text, is_singleton) diff --git a/server/venv/Lib/site-packages/dns/rdataclass.py b/server/venv/Lib/site-packages/dns/rdataclass.py deleted file mode 100644 index 89b85a7..0000000 --- a/server/venv/Lib/site-packages/dns/rdataclass.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Rdata Classes.""" - -import dns.enum -import dns.exception - - -class RdataClass(dns.enum.IntEnum): - """DNS Rdata Class""" - - RESERVED0 = 0 - IN = 1 - INTERNET = IN - CH = 3 - CHAOS = CH - HS = 4 - HESIOD = HS - NONE = 254 - ANY = 255 - - @classmethod - def _maximum(cls): - return 65535 - - @classmethod - def _short_name(cls): - return "class" - - @classmethod - def _prefix(cls): - return "CLASS" - - @classmethod - def _unknown_exception_class(cls): - return UnknownRdataclass - - -_metaclasses = {RdataClass.NONE, RdataClass.ANY} - - -class UnknownRdataclass(dns.exception.DNSException): - """A DNS class is unknown.""" - - -def from_text(text: str) -> RdataClass: - """Convert text into a DNS rdata class value. - - The input text can be a defined DNS RR class mnemonic or - instance of the DNS generic class syntax. - - For example, "IN" and "CLASS1" will both result in a value of 1. - - Raises ``dns.rdatatype.UnknownRdataclass`` if the class is unknown. - - Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. - - Returns a ``dns.rdataclass.RdataClass``. - """ - - return RdataClass.from_text(text) - - -def to_text(value: RdataClass) -> str: - """Convert a DNS rdata class value to text. - - If the value has a known mnemonic, it will be used, otherwise the - DNS generic class syntax will be used. - - Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. - - Returns a ``str``. - """ - - return RdataClass.to_text(value) - - -def is_metaclass(rdclass: RdataClass) -> bool: - """True if the specified class is a metaclass. - - The currently defined metaclasses are ANY and NONE. - - *rdclass* is a ``dns.rdataclass.RdataClass``. - """ - - if rdclass in _metaclasses: - return True - return False - - -### BEGIN generated RdataClass constants - -RESERVED0 = RdataClass.RESERVED0 -IN = RdataClass.IN -INTERNET = RdataClass.INTERNET -CH = RdataClass.CH -CHAOS = RdataClass.CHAOS -HS = RdataClass.HS -HESIOD = RdataClass.HESIOD -NONE = RdataClass.NONE -ANY = RdataClass.ANY - -### END generated RdataClass constants diff --git a/server/venv/Lib/site-packages/dns/rdataset.py b/server/venv/Lib/site-packages/dns/rdataset.py deleted file mode 100644 index 31124af..0000000 --- a/server/venv/Lib/site-packages/dns/rdataset.py +++ /dev/null @@ -1,526 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
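``RdataClass`` accepts both the mnemonics and the generic ``CLASSnnn`` syntax described in the docstrings above, and values without a mnemonic render back in generic form. A quick illustration:

.. code-block:: python

    import dns.rdataclass

    # Mnemonic and generic forms map to the same value.
    assert dns.rdataclass.from_text("IN") == dns.rdataclass.IN
    assert dns.rdataclass.from_text("CLASS1") == dns.rdataclass.IN

    # Values without a mnemonic come back in generic syntax.
    assert dns.rdataclass.to_text(dns.rdataclass.RdataClass.make(42)) == "CLASS42"

    # ANY and NONE are the two metaclasses.
    assert dns.rdataclass.is_metaclass(dns.rdataclass.ANY)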
- -"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)""" - -import io -import random -import struct -from typing import Any, Collection, Dict, List, Optional, Union, cast - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdataclass -import dns.rdatatype -import dns.set -import dns.ttl - -# define SimpleSet here for backwards compatibility -SimpleSet = dns.set.Set - - -class DifferingCovers(dns.exception.DNSException): - """An attempt was made to add a DNS SIG/RRSIG whose covered type - is not the same as that of the other rdatas in the rdataset.""" - - -class IncompatibleTypes(dns.exception.DNSException): - """An attempt was made to add DNS RR data of an incompatible type.""" - - -class Rdataset(dns.set.Set): - - """A DNS rdataset.""" - - __slots__ = ["rdclass", "rdtype", "covers", "ttl"] - - def __init__( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - ttl: int = 0, - ): - """Create a new rdataset of the specified class and type. - - *rdclass*, a ``dns.rdataclass.RdataClass``, the rdataclass. - - *rdtype*, an ``dns.rdatatype.RdataType``, the rdatatype. - - *covers*, an ``dns.rdatatype.RdataType``, the covered rdatatype. - - *ttl*, an ``int``, the TTL. - """ - - super().__init__() - self.rdclass = rdclass - self.rdtype: dns.rdatatype.RdataType = rdtype - self.covers: dns.rdatatype.RdataType = covers - self.ttl = ttl - - def _clone(self): - obj = super()._clone() - obj.rdclass = self.rdclass - obj.rdtype = self.rdtype - obj.covers = self.covers - obj.ttl = self.ttl - return obj - - def update_ttl(self, ttl: int) -> None: - """Perform TTL minimization. - - Set the TTL of the rdataset to be the lesser of the set's current - TTL or the specified TTL. If the set contains no rdatas, set the TTL - to the specified TTL. - - *ttl*, an ``int`` or ``str``. - """ - ttl = dns.ttl.make(ttl) - if len(self) == 0: - self.ttl = ttl - elif ttl < self.ttl: - self.ttl = ttl - - def add( # pylint: disable=arguments-differ,arguments-renamed - self, rd: dns.rdata.Rdata, ttl: Optional[int] = None - ) -> None: - """Add the specified rdata to the rdataset. - - If the optional *ttl* parameter is supplied, then - ``self.update_ttl(ttl)`` will be called prior to adding the rdata. - - *rd*, a ``dns.rdata.Rdata``, the rdata - - *ttl*, an ``int``, the TTL. - - Raises ``dns.rdataset.IncompatibleTypes`` if the type and class - do not match the type and class of the rdataset. - - Raises ``dns.rdataset.DifferingCovers`` if the type is a signature - type and the covered type does not match that of the rdataset. - """ - - # - # If we're adding a signature, do some special handling to - # check that the signature covers the same type as the - # other rdatas in this rdataset. If this is the first rdata - # in the set, initialize the covers field. 
- # - if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype: - raise IncompatibleTypes - if ttl is not None: - self.update_ttl(ttl) - if self.rdtype == dns.rdatatype.RRSIG or self.rdtype == dns.rdatatype.SIG: - covers = rd.covers() - if len(self) == 0 and self.covers == dns.rdatatype.NONE: - self.covers = covers - elif self.covers != covers: - raise DifferingCovers - if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0: - self.clear() - super().add(rd) - - def union_update(self, other): - self.update_ttl(other.ttl) - super().union_update(other) - - def intersection_update(self, other): - self.update_ttl(other.ttl) - super().intersection_update(other) - - def update(self, other): - """Add all rdatas in other to self. - - *other*, a ``dns.rdataset.Rdataset``, the rdataset from which - to update. - """ - - self.update_ttl(other.ttl) - super().update(other) - - def _rdata_repr(self): - def maybe_truncate(s): - if len(s) > 100: - return s[:100] + "..." - return s - - return "[%s]" % ", ".join("<%s>" % maybe_truncate(str(rr)) for rr in self) - - def __repr__(self): - if self.covers == 0: - ctext = "" - else: - ctext = "(" + dns.rdatatype.to_text(self.covers) + ")" - return ( - "<DNS " - + dns.rdataclass.to_text(self.rdclass) - + " " - + dns.rdatatype.to_text(self.rdtype) - + ctext - + " rdataset: " - + self._rdata_repr() - + ">" - ) - - def __str__(self): - return self.to_text() - - def __eq__(self, other): - if not isinstance(other, Rdataset): - return False - if ( - self.rdclass != other.rdclass - or self.rdtype != other.rdtype - or self.covers != other.covers - ): - return False - return super().__eq__(other) - - def __ne__(self, other): - return not self.__eq__(other) - - def to_text( - self, - name: Optional[dns.name.Name] = None, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - override_rdclass: Optional[dns.rdataclass.RdataClass] = None, - want_comments: bool = False, - **kw: Dict[str, Any], - ) -> str: - """Convert the rdataset into DNS zone file format. - - See ``dns.name.Name.choose_relativity`` for more information - on how *origin* and *relativize* determine the way names - are emitted. - - Any additional keyword arguments are passed on to the rdata - ``to_text()`` method. - - *name*, a ``dns.name.Name``. If name is not ``None``, emit RRs with - *name* as the owner name. - - *origin*, a ``dns.name.Name`` or ``None``, the origin for relative - names. - - *relativize*, a ``bool``. If ``True``, names will be relativized - to *origin*. - - *override_rdclass*, a ``dns.rdataclass.RdataClass`` or ``None``. - If not ``None``, use this class instead of the Rdataset's class. - - *want_comments*, a ``bool``. If ``True``, emit comments for rdata - which have them. The default is ``False``. - """ - - if name is not None: - name = name.choose_relativity(origin, relativize) - ntext = str(name) - pad = " " - else: - ntext = "" - pad = "" - s = io.StringIO() - if override_rdclass is not None: - rdclass = override_rdclass - else: - rdclass = self.rdclass - if len(self) == 0: - # - # Empty rdatasets are used for the question section, and in - # some dynamic updates, so we don't need to print out the TTL - # (which is meaningless anyway).
- # - s.write( - "{}{}{} {}\n".format( - ntext, - pad, - dns.rdataclass.to_text(rdclass), - dns.rdatatype.to_text(self.rdtype), - ) - ) - else: - for rd in self: - extra = "" - if want_comments: - if rd.rdcomment: - extra = f" ;{rd.rdcomment}" - s.write( - "%s%s%d %s %s %s%s\n" - % ( - ntext, - pad, - self.ttl, - dns.rdataclass.to_text(rdclass), - dns.rdatatype.to_text(self.rdtype), - rd.to_text(origin=origin, relativize=relativize, **kw), - extra, - ) - ) - # - # We strip off the final \n for the caller's convenience in printing - # - return s.getvalue()[:-1] - - def to_wire( - self, - name: dns.name.Name, - file: Any, - compress: Optional[dns.name.CompressType] = None, - origin: Optional[dns.name.Name] = None, - override_rdclass: Optional[dns.rdataclass.RdataClass] = None, - want_shuffle: bool = True, - ) -> int: - """Convert the rdataset to wire format. - - *name*, a ``dns.name.Name`` is the owner name to use. - - *file* is the file where the name is emitted (typically a - BytesIO file). - - *compress*, a ``dict``, is the compression table to use. If - ``None`` (the default), names will not be compressed. - - *origin* is a ``dns.name.Name`` or ``None``. If the name is - relative and origin is not ``None``, then *origin* will be appended - to it. - - *override_rdclass*, an ``int``, is used as the class instead of the - class of the rdataset. This is useful when rendering rdatasets - associated with dynamic updates. - - *want_shuffle*, a ``bool``. If ``True``, then the order of the - Rdatas within the Rdataset will be shuffled before rendering. - - Returns an ``int``, the number of records emitted. - """ - - if override_rdclass is not None: - rdclass = override_rdclass - want_shuffle = False - else: - rdclass = self.rdclass - file.seek(0, io.SEEK_END) - if len(self) == 0: - name.to_wire(file, compress, origin) - stuff = struct.pack("!HHIH", self.rdtype, rdclass, 0, 0) - file.write(stuff) - return 1 - else: - l: Union[Rdataset, List[dns.rdata.Rdata]] - if want_shuffle: - l = list(self) - random.shuffle(l) - else: - l = self - for rd in l: - name.to_wire(file, compress, origin) - stuff = struct.pack("!HHIH", self.rdtype, rdclass, self.ttl, 0) - file.write(stuff) - start = file.tell() - rd.to_wire(file, compress, origin) - end = file.tell() - assert end - start < 65536 - file.seek(start - 2) - stuff = struct.pack("!H", end - start) - file.write(stuff) - file.seek(0, io.SEEK_END) - return len(self) - - def match( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType, - ) -> bool: - """Returns ``True`` if this rdataset matches the specified class, - type, and covers. - """ - if self.rdclass == rdclass and self.rdtype == rdtype and self.covers == covers: - return True - return False - - def processing_order(self) -> List[dns.rdata.Rdata]: - """Return rdatas in a valid processing order according to the type's - specification. For example, MX records are in preference order from - lowest to highest preferences, with items of the same preference - shuffled. - - For types that do not define a processing order, the rdatas are - simply shuffled. 
- """ - if len(self) == 0: - return [] - else: - return self[0]._processing_order(iter(self)) - - -@dns.immutable.immutable -class ImmutableRdataset(Rdataset): # lgtm[py/missing-equals] - - """An immutable DNS rdataset.""" - - _clone_class = Rdataset - - def __init__(self, rdataset: Rdataset): - """Create an immutable rdataset from the specified rdataset.""" - - super().__init__( - rdataset.rdclass, rdataset.rdtype, rdataset.covers, rdataset.ttl - ) - self.items = dns.immutable.Dict(rdataset.items) - - def update_ttl(self, ttl): - raise TypeError("immutable") - - def add(self, rd, ttl=None): - raise TypeError("immutable") - - def union_update(self, other): - raise TypeError("immutable") - - def intersection_update(self, other): - raise TypeError("immutable") - - def update(self, other): - raise TypeError("immutable") - - def __delitem__(self, i): - raise TypeError("immutable") - - # lgtm complains about these not raising ArithmeticError, but there is - # precedent for overrides of these methods in other classes to raise - # TypeError, and it seems like the better exception. - - def __ior__(self, other): # lgtm[py/unexpected-raise-in-special-method] - raise TypeError("immutable") - - def __iand__(self, other): # lgtm[py/unexpected-raise-in-special-method] - raise TypeError("immutable") - - def __iadd__(self, other): # lgtm[py/unexpected-raise-in-special-method] - raise TypeError("immutable") - - def __isub__(self, other): # lgtm[py/unexpected-raise-in-special-method] - raise TypeError("immutable") - - def clear(self): - raise TypeError("immutable") - - def __copy__(self): - return ImmutableRdataset(super().copy()) - - def copy(self): - return ImmutableRdataset(super().copy()) - - def union(self, other): - return ImmutableRdataset(super().union(other)) - - def intersection(self, other): - return ImmutableRdataset(super().intersection(other)) - - def difference(self, other): - return ImmutableRdataset(super().difference(other)) - - def symmetric_difference(self, other): - return ImmutableRdataset(super().symmetric_difference(other)) - - -def from_text_list( - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - ttl: int, - text_rdatas: Collection[str], - idna_codec: Optional[dns.name.IDNACodec] = None, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, -) -> Rdataset: - """Create an rdataset with the specified class, type, and TTL, and with - the specified list of rdatas in text format. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder to use; if ``None``, the default IDNA 2003 - encoder/decoder is used. - - *origin*, a ``dns.name.Name`` (or ``None``), the - origin to use for relative names. - - *relativize*, a ``bool``. If true, name will be relativized. - - *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use - when relativizing names. If not set, the *origin* value will be used. - - Returns a ``dns.rdataset.Rdataset`` object. 
- """ - - rdclass = dns.rdataclass.RdataClass.make(rdclass) - rdtype = dns.rdatatype.RdataType.make(rdtype) - r = Rdataset(rdclass, rdtype) - r.update_ttl(ttl) - for t in text_rdatas: - rd = dns.rdata.from_text( - r.rdclass, r.rdtype, t, origin, relativize, relativize_to, idna_codec - ) - r.add(rd) - return r - - -def from_text( - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - ttl: int, - *text_rdatas: Any, -) -> Rdataset: - """Create an rdataset with the specified class, type, and TTL, and with - the specified rdatas in text format. - - Returns a ``dns.rdataset.Rdataset`` object. - """ - - return from_text_list(rdclass, rdtype, ttl, cast(Collection[str], text_rdatas)) - - -def from_rdata_list(ttl: int, rdatas: Collection[dns.rdata.Rdata]) -> Rdataset: - """Create an rdataset with the specified TTL, and with - the specified list of rdata objects. - - Returns a ``dns.rdataset.Rdataset`` object. - """ - - if len(rdatas) == 0: - raise ValueError("rdata list must not be empty") - r = None - for rd in rdatas: - if r is None: - r = Rdataset(rd.rdclass, rd.rdtype) - r.update_ttl(ttl) - r.add(rd) - assert r is not None - return r - - -def from_rdata(ttl: int, *rdatas: Any) -> Rdataset: - """Create an rdataset with the specified TTL, and with - the specified rdata objects. - - Returns a ``dns.rdataset.Rdataset`` object. - """ - - return from_rdata_list(ttl, cast(Collection[dns.rdata.Rdata], rdatas)) diff --git a/server/venv/Lib/site-packages/dns/rdatatype.py b/server/venv/Lib/site-packages/dns/rdatatype.py deleted file mode 100644 index e6c5818..0000000 --- a/server/venv/Lib/site-packages/dns/rdatatype.py +++ /dev/null @@ -1,332 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
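For reference, a minimal sketch of how the rdataset helpers deleted above are used. It assumes dnspython is importable; the addresses and TTL are illustrative:

import dns.rdataclass
import dns.rdataset
import dns.rdatatype

# from_text() forwards to from_text_list(); class and type may be given
# as strings thanks to RdataClass.make() / RdataType.make().
rrs = dns.rdataset.from_text("IN", "A", 300, "10.0.0.1", "10.0.0.2")
assert rrs.rdclass == dns.rdataclass.IN
assert rrs.rdtype == dns.rdatatype.A
assert rrs.ttl == 300

# ImmutableRdataset wraps the rdataset and rejects every mutator
# (add, update_ttl, clear, ...) with TypeError.
frozen = dns.rdataset.ImmutableRdataset(rrs)
try:
    frozen.add(rrs[0])
except TypeError:
    pass  # expected: the rdataset is immutable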
- -"""DNS Rdata Types.""" - -from typing import Dict - -import dns.enum -import dns.exception - - -class RdataType(dns.enum.IntEnum): - """DNS Rdata Type""" - - TYPE0 = 0 - NONE = 0 - A = 1 - NS = 2 - MD = 3 - MF = 4 - CNAME = 5 - SOA = 6 - MB = 7 - MG = 8 - MR = 9 - NULL = 10 - WKS = 11 - PTR = 12 - HINFO = 13 - MINFO = 14 - MX = 15 - TXT = 16 - RP = 17 - AFSDB = 18 - X25 = 19 - ISDN = 20 - RT = 21 - NSAP = 22 - NSAP_PTR = 23 - SIG = 24 - KEY = 25 - PX = 26 - GPOS = 27 - AAAA = 28 - LOC = 29 - NXT = 30 - SRV = 33 - NAPTR = 35 - KX = 36 - CERT = 37 - A6 = 38 - DNAME = 39 - OPT = 41 - APL = 42 - DS = 43 - SSHFP = 44 - IPSECKEY = 45 - RRSIG = 46 - NSEC = 47 - DNSKEY = 48 - DHCID = 49 - NSEC3 = 50 - NSEC3PARAM = 51 - TLSA = 52 - SMIMEA = 53 - HIP = 55 - NINFO = 56 - CDS = 59 - CDNSKEY = 60 - OPENPGPKEY = 61 - CSYNC = 62 - ZONEMD = 63 - SVCB = 64 - HTTPS = 65 - SPF = 99 - UNSPEC = 103 - NID = 104 - L32 = 105 - L64 = 106 - LP = 107 - EUI48 = 108 - EUI64 = 109 - TKEY = 249 - TSIG = 250 - IXFR = 251 - AXFR = 252 - MAILB = 253 - MAILA = 254 - ANY = 255 - URI = 256 - CAA = 257 - AVC = 258 - AMTRELAY = 260 - TA = 32768 - DLV = 32769 - - @classmethod - def _maximum(cls): - return 65535 - - @classmethod - def _short_name(cls): - return "type" - - @classmethod - def _prefix(cls): - return "TYPE" - - @classmethod - def _extra_from_text(cls, text): - if text.find("-") >= 0: - try: - return cls[text.replace("-", "_")] - except KeyError: - pass - return _registered_by_text.get(text) - - @classmethod - def _extra_to_text(cls, value, current_text): - if current_text is None: - return _registered_by_value.get(value) - if current_text.find("_") >= 0: - return current_text.replace("_", "-") - return current_text - - @classmethod - def _unknown_exception_class(cls): - return UnknownRdatatype - - -_registered_by_text: Dict[str, RdataType] = {} -_registered_by_value: Dict[RdataType, str] = {} - -_metatypes = {RdataType.OPT} - -_singletons = { - RdataType.SOA, - RdataType.NXT, - RdataType.DNAME, - RdataType.NSEC, - RdataType.CNAME, -} - - -class UnknownRdatatype(dns.exception.DNSException): - """DNS resource record type is unknown.""" - - -def from_text(text: str) -> RdataType: - """Convert text into a DNS rdata type value. - - The input text can be a defined DNS RR type mnemonic or - instance of the DNS generic type syntax. - - For example, "NS" and "TYPE2" will both result in a value of 2. - - Raises ``dns.rdatatype.UnknownRdatatype`` if the type is unknown. - - Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. - - Returns a ``dns.rdatatype.RdataType``. - """ - - return RdataType.from_text(text) - - -def to_text(value: RdataType) -> str: - """Convert a DNS rdata type value to text. - - If the value has a known mnemonic, it will be used, otherwise the - DNS generic type syntax will be used. - - Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. - - Returns a ``str``. - """ - - return RdataType.to_text(value) - - -def is_metatype(rdtype: RdataType) -> bool: - """True if the specified type is a metatype. - - *rdtype* is a ``dns.rdatatype.RdataType``. - - The currently defined metatypes are TKEY, TSIG, IXFR, AXFR, MAILA, - MAILB, ANY, and OPT. - - Returns a ``bool``. - """ - - return (256 > rdtype >= 128) or rdtype in _metatypes - - -def is_singleton(rdtype: RdataType) -> bool: - """Is the specified type a singleton type? - - Singleton types can only have a single rdata in an rdataset, or a single - RR in an RRset. 
- - The currently defined singleton types are CNAME, DNAME, NSEC, NXT, and - SOA. - - *rdtype* is an ``int``. - - Returns a ``bool``. - """ - - if rdtype in _singletons: - return True - return False - - -# pylint: disable=redefined-outer-name -def register_type( - rdtype: RdataType, rdtype_text: str, is_singleton: bool = False -) -> None: - """Dynamically register an rdatatype. - - *rdtype*, a ``dns.rdatatype.RdataType``, the rdatatype to register. - - *rdtype_text*, a ``str``, the textual form of the rdatatype. - - *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. - RRsets of the type can have only one member.) - """ - - _registered_by_text[rdtype_text] = rdtype - _registered_by_value[rdtype] = rdtype_text - if is_singleton: - _singletons.add(rdtype) - - -### BEGIN generated RdataType constants - -TYPE0 = RdataType.TYPE0 -NONE = RdataType.NONE -A = RdataType.A -NS = RdataType.NS -MD = RdataType.MD -MF = RdataType.MF -CNAME = RdataType.CNAME -SOA = RdataType.SOA -MB = RdataType.MB -MG = RdataType.MG -MR = RdataType.MR -NULL = RdataType.NULL -WKS = RdataType.WKS -PTR = RdataType.PTR -HINFO = RdataType.HINFO -MINFO = RdataType.MINFO -MX = RdataType.MX -TXT = RdataType.TXT -RP = RdataType.RP -AFSDB = RdataType.AFSDB -X25 = RdataType.X25 -ISDN = RdataType.ISDN -RT = RdataType.RT -NSAP = RdataType.NSAP -NSAP_PTR = RdataType.NSAP_PTR -SIG = RdataType.SIG -KEY = RdataType.KEY -PX = RdataType.PX -GPOS = RdataType.GPOS -AAAA = RdataType.AAAA -LOC = RdataType.LOC -NXT = RdataType.NXT -SRV = RdataType.SRV -NAPTR = RdataType.NAPTR -KX = RdataType.KX -CERT = RdataType.CERT -A6 = RdataType.A6 -DNAME = RdataType.DNAME -OPT = RdataType.OPT -APL = RdataType.APL -DS = RdataType.DS -SSHFP = RdataType.SSHFP -IPSECKEY = RdataType.IPSECKEY -RRSIG = RdataType.RRSIG -NSEC = RdataType.NSEC -DNSKEY = RdataType.DNSKEY -DHCID = RdataType.DHCID -NSEC3 = RdataType.NSEC3 -NSEC3PARAM = RdataType.NSEC3PARAM -TLSA = RdataType.TLSA -SMIMEA = RdataType.SMIMEA -HIP = RdataType.HIP -NINFO = RdataType.NINFO -CDS = RdataType.CDS -CDNSKEY = RdataType.CDNSKEY -OPENPGPKEY = RdataType.OPENPGPKEY -CSYNC = RdataType.CSYNC -ZONEMD = RdataType.ZONEMD -SVCB = RdataType.SVCB -HTTPS = RdataType.HTTPS -SPF = RdataType.SPF -UNSPEC = RdataType.UNSPEC -NID = RdataType.NID -L32 = RdataType.L32 -L64 = RdataType.L64 -LP = RdataType.LP -EUI48 = RdataType.EUI48 -EUI64 = RdataType.EUI64 -TKEY = RdataType.TKEY -TSIG = RdataType.TSIG -IXFR = RdataType.IXFR -AXFR = RdataType.AXFR -MAILB = RdataType.MAILB -MAILA = RdataType.MAILA -ANY = RdataType.ANY -URI = RdataType.URI -CAA = RdataType.CAA -AVC = RdataType.AVC -AMTRELAY = RdataType.AMTRELAY -TA = RdataType.TA -DLV = RdataType.DLV - -### END generated RdataType constants diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/AFSDB.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/AFSDB.py deleted file mode 100644 index 3d287f6..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/AFSDB.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.mxbase - - -@dns.immutable.immutable -class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX): - - """AFSDB record""" - - # Use the property mechanism to make "subtype" an alias for the - # "preference" attribute, and "hostname" an alias for the "exchange" - # attribute. - # - # This lets us inherit the UncompressedMX implementation but lets - # the caller use appropriate attribute names for the rdata type. - # - # We probably lose some performance vs. a cut-and-paste - # implementation, but this way we don't copy code, and that's - # good. - - @property - def subtype(self): - "the AFSDB subtype" - return self.preference - - @property - def hostname(self): - "the AFSDB hostname" - return self.exchange diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/AMTRELAY.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/AMTRELAY.py deleted file mode 100644 index dfe7abc..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/AMTRELAY.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
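The dns/rdatatype.py module removed above is the registry that the per-type rdtypes files in this diff key off. A small sketch of its text/value conversions and runtime registration; the MYTYPE name and the private-use value 65280 are made up for illustration:

import dns.rdatatype

# Mnemonic and generic TYPEnnn syntax both resolve to the same value.
assert dns.rdatatype.from_text("NS") == dns.rdatatype.NS == 2
assert dns.rdatatype.from_text("TYPE2") == 2

# Values with no mnemonic render in the generic syntax.
assert dns.rdatatype.to_text(dns.rdatatype.RdataType.make(4321)) == "TYPE4321"

# Predicates defined in the module.
assert dns.rdatatype.is_singleton(dns.rdatatype.SOA)
assert dns.rdatatype.is_metatype(dns.rdatatype.OPT)

# register_type() installs a new mnemonic in both lookup tables.
dns.rdatatype.register_type(dns.rdatatype.RdataType.make(65280), "MYTYPE")
assert dns.rdatatype.from_text("MYTYPE") == 65280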
- -import struct - -import dns.exception -import dns.immutable -import dns.rdtypes.util - - -class Relay(dns.rdtypes.util.Gateway): - name = "AMTRELAY relay" - - @property - def relay(self): - return self.gateway - - -@dns.immutable.immutable -class AMTRELAY(dns.rdata.Rdata): - - """AMTRELAY record""" - - # see: RFC 8777 - - __slots__ = ["precedence", "discovery_optional", "relay_type", "relay"] - - def __init__( - self, rdclass, rdtype, precedence, discovery_optional, relay_type, relay - ): - super().__init__(rdclass, rdtype) - relay = Relay(relay_type, relay) - self.precedence = self._as_uint8(precedence) - self.discovery_optional = self._as_bool(discovery_optional) - self.relay_type = relay.type - self.relay = relay.relay - - def to_text(self, origin=None, relativize=True, **kw): - relay = Relay(self.relay_type, self.relay).to_text(origin, relativize) - return "%d %d %d %s" % ( - self.precedence, - self.discovery_optional, - self.relay_type, - relay, - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - precedence = tok.get_uint8() - discovery_optional = tok.get_uint8() - if discovery_optional > 1: - raise dns.exception.SyntaxError("expecting 0 or 1") - discovery_optional = bool(discovery_optional) - relay_type = tok.get_uint8() - if relay_type > 0x7F: - raise dns.exception.SyntaxError("expecting an integer <= 127") - relay = Relay.from_text(relay_type, tok, origin, relativize, relativize_to) - return cls( - rdclass, rdtype, precedence, discovery_optional, relay_type, relay.relay - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - relay_type = self.relay_type | (self.discovery_optional << 7) - header = struct.pack("!BB", self.precedence, relay_type) - file.write(header) - Relay(self.relay_type, self.relay).to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (precedence, relay_type) = parser.get_struct("!BB") - discovery_optional = bool(relay_type >> 7) - relay_type &= 0x7F - relay = Relay.from_wire_parser(relay_type, parser, origin) - return cls( - rdclass, rdtype, precedence, discovery_optional, relay_type, relay.relay - ) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/AVC.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/AVC.py deleted file mode 100644 index 766d5e2..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/AVC.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2016 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
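The AMTRELAY implementation above folds the discovery_optional bit into the top bit of the relay-type octet on the wire. A standalone sketch of that packing using only struct, mirroring _to_wire() and from_wire_parser(); the values are illustrative:

import struct

precedence, discovery_optional, relay_type = 10, True, 3

# Pack: precedence octet, then the D bit in bit 7 of the relay-type octet.
header = struct.pack("!BB", precedence, relay_type | (discovery_optional << 7))

# Unpack, as from_wire_parser() does.
p, rt = struct.unpack("!BB", header)
assert p == 10
assert bool(rt >> 7)      # discovery_optional
assert rt & 0x7F == 3     # relay_type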
- -import dns.immutable -import dns.rdtypes.txtbase - - -@dns.immutable.immutable -class AVC(dns.rdtypes.txtbase.TXTBase): - - """AVC record""" - - # See: IANA dns parameters for AVC diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CAA.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/CAA.py deleted file mode 100644 index 8afb538..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CAA.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class CAA(dns.rdata.Rdata): - - """CAA (Certification Authority Authorization) record""" - - # see: RFC 6844 - - __slots__ = ["flags", "tag", "value"] - - def __init__(self, rdclass, rdtype, flags, tag, value): - super().__init__(rdclass, rdtype) - self.flags = self._as_uint8(flags) - self.tag = self._as_bytes(tag, True, 255) - if not tag.isalnum(): - raise ValueError("tag is not alphanumeric") - self.value = self._as_bytes(value) - - def to_text(self, origin=None, relativize=True, **kw): - return '%u %s "%s"' % ( - self.flags, - dns.rdata._escapify(self.tag), - dns.rdata._escapify(self.value), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - flags = tok.get_uint8() - tag = tok.get_string().encode() - value = tok.get_string().encode() - return cls(rdclass, rdtype, flags, tag, value) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!B", self.flags)) - l = len(self.tag) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.tag) - file.write(self.value) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - flags = parser.get_uint8() - tag = parser.get_counted_bytes() - value = parser.get_remaining() - return cls(rdclass, rdtype, flags, tag, value) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CDNSKEY.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/CDNSKEY.py deleted file mode 100644 index 38b8a8d..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CDNSKEY.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.dnskeybase # lgtm[py/import-and-import-from] - -# pylint: disable=unused-import -from dns.rdtypes.dnskeybase import ( # noqa: F401 lgtm[py/unused-import] - REVOKE, - SEP, - ZONE, -) - -# pylint: enable=unused-import - - -@dns.immutable.immutable -class CDNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): - - """CDNSKEY record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CDS.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/CDS.py deleted file mode 100644 index 2ff42d9..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CDS.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.dsbase - - -@dns.immutable.immutable -class CDS(dns.rdtypes.dsbase.DSBase): - - """CDS record""" - - _digest_length_by_type = { - **dns.rdtypes.dsbase.DSBase._digest_length_by_type, - 0: 1, # delete, RFC 8078 Sec. 4 (including Errata ID 5049) - } diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CERT.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/CERT.py deleted file mode 100644 index 30fe863..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CERT.py +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
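A quick sketch of round-tripping the CAA rdata deleted above through dnspython's generic text parser; the issuer name is illustrative:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

caa = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.CAA, '0 issue "letsencrypt.org"'
)
assert caa.flags == 0
assert caa.tag == b"issue"
assert caa.value == b"letsencrypt.org"
assert caa.to_text() == '0 issue "letsencrypt.org"'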
- -import base64 -import struct - -import dns.dnssectypes -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - -_ctype_by_value = { - 1: "PKIX", - 2: "SPKI", - 3: "PGP", - 4: "IPKIX", - 5: "ISPKI", - 6: "IPGP", - 7: "ACPKIX", - 8: "IACPKIX", - 253: "URI", - 254: "OID", -} - -_ctype_by_name = { - "PKIX": 1, - "SPKI": 2, - "PGP": 3, - "IPKIX": 4, - "ISPKI": 5, - "IPGP": 6, - "ACPKIX": 7, - "IACPKIX": 8, - "URI": 253, - "OID": 254, -} - - -def _ctype_from_text(what): - v = _ctype_by_name.get(what) - if v is not None: - return v - return int(what) - - -def _ctype_to_text(what): - v = _ctype_by_value.get(what) - if v is not None: - return v - return str(what) - - -@dns.immutable.immutable -class CERT(dns.rdata.Rdata): - - """CERT record""" - - # see RFC 4398 - - __slots__ = ["certificate_type", "key_tag", "algorithm", "certificate"] - - def __init__( - self, rdclass, rdtype, certificate_type, key_tag, algorithm, certificate - ): - super().__init__(rdclass, rdtype) - self.certificate_type = self._as_uint16(certificate_type) - self.key_tag = self._as_uint16(key_tag) - self.algorithm = self._as_uint8(algorithm) - self.certificate = self._as_bytes(certificate) - - def to_text(self, origin=None, relativize=True, **kw): - certificate_type = _ctype_to_text(self.certificate_type) - return "%s %d %s %s" % ( - certificate_type, - self.key_tag, - dns.dnssectypes.Algorithm.to_text(self.algorithm), - dns.rdata._base64ify(self.certificate, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - certificate_type = _ctype_from_text(tok.get_string()) - key_tag = tok.get_uint16() - algorithm = dns.dnssectypes.Algorithm.from_text(tok.get_string()) - b64 = tok.concatenate_remaining_identifiers().encode() - certificate = base64.b64decode(b64) - return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, certificate) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - prefix = struct.pack( - "!HHB", self.certificate_type, self.key_tag, self.algorithm - ) - file.write(prefix) - file.write(self.certificate) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (certificate_type, key_tag, algorithm) = parser.get_struct("!HHB") - certificate = parser.get_remaining() - return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, certificate) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CNAME.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/CNAME.py deleted file mode 100644 index 759adb9..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CNAME.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.nsbase - - -@dns.immutable.immutable -class CNAME(dns.rdtypes.nsbase.NSBase): - - """CNAME record - - Note: although CNAME is officially a singleton type, dnspython allows - non-singleton CNAME rdatasets because such sets have been commonly - used by BIND and other nameservers for load balancing.""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CSYNC.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/CSYNC.py deleted file mode 100644 index 315da9f..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/CSYNC.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011, 2016 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
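The CERT rdata above maps certificate-type mnemonics such as PKIX to their numeric values via _ctype_by_name. A sketch of parsing one from text; the certificate bytes are a placeholder, not a real certificate:

import base64

import dns.rdata
import dns.rdataclass
import dns.rdatatype

# Placeholder payload purely for illustration.
blob = base64.b64encode(b"not-a-real-certificate").decode()
cert = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.CERT, f"PKIX 12345 RSASHA256 {blob}"
)
assert cert.certificate_type == 1   # "PKIX" per _ctype_by_name
assert cert.key_tag == 12345
assert cert.algorithm == 8          # RSASHA256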
- -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdatatype -import dns.rdtypes.util - - -@dns.immutable.immutable -class Bitmap(dns.rdtypes.util.Bitmap): - type_name = "CSYNC" - - -@dns.immutable.immutable -class CSYNC(dns.rdata.Rdata): - - """CSYNC record""" - - __slots__ = ["serial", "flags", "windows"] - - def __init__(self, rdclass, rdtype, serial, flags, windows): - super().__init__(rdclass, rdtype) - self.serial = self._as_uint32(serial) - self.flags = self._as_uint16(flags) - if not isinstance(windows, Bitmap): - windows = Bitmap(windows) - self.windows = tuple(windows.windows) - - def to_text(self, origin=None, relativize=True, **kw): - text = Bitmap(self.windows).to_text() - return "%d %d%s" % (self.serial, self.flags, text) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - serial = tok.get_uint32() - flags = tok.get_uint16() - bitmap = Bitmap.from_text(tok) - return cls(rdclass, rdtype, serial, flags, bitmap) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!IH", self.serial, self.flags)) - Bitmap(self.windows).to_wire(file) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (serial, flags) = parser.get_struct("!IH") - bitmap = Bitmap.from_wire_parser(parser) - return cls(rdclass, rdtype, serial, flags, bitmap) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/DLV.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/DLV.py deleted file mode 100644 index 632e90f..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/DLV.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.dsbase - - -@dns.immutable.immutable -class DLV(dns.rdtypes.dsbase.DSBase): - - """DLV record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/DNAME.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/DNAME.py deleted file mode 100644 index 556bff5..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/DNAME.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.nsbase - - -@dns.immutable.immutable -class DNAME(dns.rdtypes.nsbase.UncompressedNS): - - """DNAME record""" - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.target.to_wire(file, None, origin, canonicalize) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/DNSKEY.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/DNSKEY.py deleted file mode 100644 index f1a6306..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/DNSKEY.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.dnskeybase # lgtm[py/import-and-import-from] - -# pylint: disable=unused-import -from dns.rdtypes.dnskeybase import ( # noqa: F401 lgtm[py/unused-import] - REVOKE, - SEP, - ZONE, -) - -# pylint: enable=unused-import - - -@dns.immutable.immutable -class DNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): - - """DNSKEY record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/DS.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/DS.py deleted file mode 100644 index 097ecfa..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/DS.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
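CDNSKEY and DNSKEY above both re-export the flag constants from dns.rdtypes.dnskeybase. A sketch of checking them on a parsed key, assuming the dnskeybase text format (flags, protocol, algorithm, base64 key); the key material is a placeholder, and 257 is simply ZONE | SEP, the conventional KSK flags value:

import base64

import dns.rdata
import dns.rdataclass
import dns.rdatatype
from dns.rdtypes.dnskeybase import SEP, ZONE

# Placeholder key bytes purely for illustration.
key_b64 = base64.b64encode(b"placeholder-key-material").decode()
dnskey = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.DNSKEY, f"257 3 8 {key_b64}"
)
assert dnskey.flags & ZONE
assert dnskey.flags & SEP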
- -import dns.immutable -import dns.rdtypes.dsbase - - -@dns.immutable.immutable -class DS(dns.rdtypes.dsbase.DSBase): - - """DS record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/EUI48.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/EUI48.py deleted file mode 100644 index 7e4e1ff..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/EUI48.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2015 Red Hat, Inc. -# Author: Petr Spacek -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.euibase - - -@dns.immutable.immutable -class EUI48(dns.rdtypes.euibase.EUIBase): - - """EUI48 record""" - - # see: rfc7043.txt - - byte_len = 6 # 0123456789ab (in hex) - text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/EUI64.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/EUI64.py deleted file mode 100644 index 68b5820..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/EUI64.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2015 Red Hat, Inc. -# Author: Petr Spacek -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.euibase - - -@dns.immutable.immutable -class EUI64(dns.rdtypes.euibase.EUIBase): - - """EUI64 record""" - - # see: rfc7043.txt - - byte_len = 8 # 0123456789abcdef (in hex) - text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab-cd-ef diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/GPOS.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/GPOS.py deleted file mode 100644 index 30aab32..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/GPOS.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -def _validate_float_string(what): - if len(what) == 0: - raise dns.exception.FormError - if what[0] == b"-"[0] or what[0] == b"+"[0]: - what = what[1:] - if what.isdigit(): - return - try: - (left, right) = what.split(b".") - except ValueError: - raise dns.exception.FormError - if left == b"" and right == b"": - raise dns.exception.FormError - if not left == b"" and not left.decode().isdigit(): - raise dns.exception.FormError - if not right == b"" and not right.decode().isdigit(): - raise dns.exception.FormError - - -@dns.immutable.immutable -class GPOS(dns.rdata.Rdata): - - """GPOS record""" - - # see: RFC 1712 - - __slots__ = ["latitude", "longitude", "altitude"] - - def __init__(self, rdclass, rdtype, latitude, longitude, altitude): - super().__init__(rdclass, rdtype) - if isinstance(latitude, float) or isinstance(latitude, int): - latitude = str(latitude) - if isinstance(longitude, float) or isinstance(longitude, int): - longitude = str(longitude) - if isinstance(altitude, float) or isinstance(altitude, int): - altitude = str(altitude) - latitude = self._as_bytes(latitude, True, 255) - longitude = self._as_bytes(longitude, True, 255) - altitude = self._as_bytes(altitude, True, 255) - _validate_float_string(latitude) - _validate_float_string(longitude) - _validate_float_string(altitude) - self.latitude = latitude - self.longitude = longitude - self.altitude = altitude - flat = self.float_latitude - if flat < -90.0 or flat > 90.0: - raise dns.exception.FormError("bad latitude") - flong = self.float_longitude - if flong < -180.0 or flong > 180.0: - raise dns.exception.FormError("bad longitude") - - def to_text(self, origin=None, relativize=True, **kw): - return "{} {} {}".format( - self.latitude.decode(), self.longitude.decode(), self.altitude.decode() - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - latitude = tok.get_string() - longitude = tok.get_string() - altitude = tok.get_string() - return cls(rdclass, rdtype, latitude, longitude, altitude) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.latitude) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.latitude) - l = len(self.longitude) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.longitude) - l = len(self.altitude) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.altitude) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - latitude = parser.get_counted_bytes() - longitude = parser.get_counted_bytes() - altitude = parser.get_counted_bytes() - return cls(rdclass, rdtype, latitude, 
longitude, altitude) - - @property - def float_latitude(self): - "latitude as a floating point value" - return float(self.latitude) - - @property - def float_longitude(self): - "longitude as a floating point value" - return float(self.longitude) - - @property - def float_altitude(self): - "altitude as a floating point value" - return float(self.altitude) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/HINFO.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/HINFO.py deleted file mode 100644 index 513c155..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/HINFO.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class HINFO(dns.rdata.Rdata): - - """HINFO record""" - - # see: RFC 1035 - - __slots__ = ["cpu", "os"] - - def __init__(self, rdclass, rdtype, cpu, os): - super().__init__(rdclass, rdtype) - self.cpu = self._as_bytes(cpu, True, 255) - self.os = self._as_bytes(os, True, 255) - - def to_text(self, origin=None, relativize=True, **kw): - return '"{}" "{}"'.format( - dns.rdata._escapify(self.cpu), dns.rdata._escapify(self.os) - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - cpu = tok.get_string(max_length=255) - os = tok.get_string(max_length=255) - return cls(rdclass, rdtype, cpu, os) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.cpu) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.cpu) - l = len(self.os) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.os) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - cpu = parser.get_counted_bytes() - os = parser.get_counted_bytes() - return cls(rdclass, rdtype, cpu, os) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/HIP.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/HIP.py deleted file mode 100644 index a20aa1e..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/HIP.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2010, 2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 -import binascii -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.rdatatype - - -@dns.immutable.immutable -class HIP(dns.rdata.Rdata): - - """HIP record""" - - # see: RFC 5205 - - __slots__ = ["hit", "algorithm", "key", "servers"] - - def __init__(self, rdclass, rdtype, hit, algorithm, key, servers): - super().__init__(rdclass, rdtype) - self.hit = self._as_bytes(hit, True, 255) - self.algorithm = self._as_uint8(algorithm) - self.key = self._as_bytes(key, True) - self.servers = self._as_tuple(servers, self._as_name) - - def to_text(self, origin=None, relativize=True, **kw): - hit = binascii.hexlify(self.hit).decode() - key = base64.b64encode(self.key).replace(b"\n", b"").decode() - text = "" - servers = [] - for server in self.servers: - servers.append(server.choose_relativity(origin, relativize)) - if len(servers) > 0: - text += " " + " ".join((x.to_unicode() for x in servers)) - return "%u %s %s%s" % (self.algorithm, hit, key, text) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_uint8() - hit = binascii.unhexlify(tok.get_string().encode()) - key = base64.b64decode(tok.get_string().encode()) - servers = [] - for token in tok.get_remaining(): - server = tok.as_name(token, origin, relativize, relativize_to) - servers.append(server) - return cls(rdclass, rdtype, hit, algorithm, key, servers) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - lh = len(self.hit) - lk = len(self.key) - file.write(struct.pack("!BBH", lh, self.algorithm, lk)) - file.write(self.hit) - file.write(self.key) - for server in self.servers: - server.to_wire(file, None, origin, False) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (lh, algorithm, lk) = parser.get_struct("!BBH") - hit = parser.get_bytes(lh) - key = parser.get_bytes(lk) - servers = [] - while parser.remaining() > 0: - server = parser.get_name(origin) - servers.append(server) - return cls(rdclass, rdtype, hit, algorithm, key, servers) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/ISDN.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/ISDN.py deleted file mode 100644 index 536a35d..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/ISDN.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class ISDN(dns.rdata.Rdata): - - """ISDN record""" - - # see: RFC 1183 - - __slots__ = ["address", "subaddress"] - - def __init__(self, rdclass, rdtype, address, subaddress): - super().__init__(rdclass, rdtype) - self.address = self._as_bytes(address, True, 255) - self.subaddress = self._as_bytes(subaddress, True, 255) - - def to_text(self, origin=None, relativize=True, **kw): - if self.subaddress: - return '"{}" "{}"'.format( - dns.rdata._escapify(self.address), dns.rdata._escapify(self.subaddress) - ) - else: - return '"%s"' % dns.rdata._escapify(self.address) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_string() - tokens = tok.get_remaining(max_tokens=1) - if len(tokens) >= 1: - subaddress = tokens[0].unescape().value - else: - subaddress = "" - return cls(rdclass, rdtype, address, subaddress) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.address) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.address) - l = len(self.subaddress) - if l > 0: - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.subaddress) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_counted_bytes() - if parser.remaining() > 0: - subaddress = parser.get_counted_bytes() - else: - subaddress = b"" - return cls(rdclass, rdtype, address, subaddress) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/L32.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/L32.py deleted file mode 100644 index 14be01f..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/L32.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import struct - -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class L32(dns.rdata.Rdata): - - """L32 record""" - - # see: rfc6742.txt - - __slots__ = ["preference", "locator32"] - - def __init__(self, rdclass, rdtype, preference, locator32): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - self.locator32 = self._as_ipv4_address(locator32) - - def to_text(self, origin=None, relativize=True, **kw): - return f"{self.preference} {self.locator32}" - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - nodeid = tok.get_identifier() - return cls(rdclass, rdtype, preference, nodeid) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!H", self.preference)) - file.write(dns.ipv4.inet_aton(self.locator32)) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - locator32 = parser.get_remaining() - return cls(rdclass, rdtype, preference, locator32) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/L64.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/L64.py deleted file mode 100644 index 
d083d40..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/L64.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import struct - -import dns.immutable -import dns.rdtypes.util - - -@dns.immutable.immutable -class L64(dns.rdata.Rdata): - - """L64 record""" - - # see: rfc6742.txt - - __slots__ = ["preference", "locator64"] - - def __init__(self, rdclass, rdtype, preference, locator64): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - if isinstance(locator64, bytes): - if len(locator64) != 8: - raise ValueError("invalid locator64") - self.locator64 = dns.rdata._hexify(locator64, 4, b":") - else: - dns.rdtypes.util.parse_formatted_hex(locator64, 4, 4, ":") - self.locator64 = locator64 - - def to_text(self, origin=None, relativize=True, **kw): - return f"{self.preference} {self.locator64}" - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - locator64 = tok.get_identifier() - return cls(rdclass, rdtype, preference, locator64) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!H", self.preference)) - file.write(dns.rdtypes.util.parse_formatted_hex(self.locator64, 4, 4, ":")) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - locator64 = parser.get_remaining() - return cls(rdclass, rdtype, preference, locator64) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/LOC.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/LOC.py deleted file mode 100644 index 783d54a..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/LOC.py +++ /dev/null @@ -1,355 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
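For the ILNP locator types above (L32 shown here; L64 is analogous but uses colon-grouped hex rather than a dotted quad), a minimal parse-and-format sketch with illustrative values:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

l32 = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.L32, "10 10.1.2.0")
assert l32.preference == 10
assert l32.locator32 == "10.1.2.0"   # stored as dotted-quad text
assert l32.to_text() == "10 10.1.2.0"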
- -import struct - -import dns.exception -import dns.immutable -import dns.rdata - -_pows = tuple(10**i for i in range(0, 11)) - -# default values are in centimeters -_default_size = 100.0 -_default_hprec = 1000000.0 -_default_vprec = 1000.0 - -# for use by from_wire() -_MAX_LATITUDE = 0x80000000 + 90 * 3600000 -_MIN_LATITUDE = 0x80000000 - 90 * 3600000 -_MAX_LONGITUDE = 0x80000000 + 180 * 3600000 -_MIN_LONGITUDE = 0x80000000 - 180 * 3600000 - - -def _exponent_of(what, desc): - if what == 0: - return 0 - exp = None - for i, pow in enumerate(_pows): - if what < pow: - exp = i - 1 - break - if exp is None or exp < 0: - raise dns.exception.SyntaxError("%s value out of bounds" % desc) - return exp - - -def _float_to_tuple(what): - if what < 0: - sign = -1 - what *= -1 - else: - sign = 1 - what = round(what * 3600000) - degrees = int(what // 3600000) - what -= degrees * 3600000 - minutes = int(what // 60000) - what -= minutes * 60000 - seconds = int(what // 1000) - what -= int(seconds * 1000) - what = int(what) - return (degrees, minutes, seconds, what, sign) - - -def _tuple_to_float(what): - value = float(what[0]) - value += float(what[1]) / 60.0 - value += float(what[2]) / 3600.0 - value += float(what[3]) / 3600000.0 - return float(what[4]) * value - - -def _encode_size(what, desc): - what = int(what) - exponent = _exponent_of(what, desc) & 0xF - base = what // pow(10, exponent) & 0xF - return base * 16 + exponent - - -def _decode_size(what, desc): - exponent = what & 0x0F - if exponent > 9: - raise dns.exception.FormError("bad %s exponent" % desc) - base = (what & 0xF0) >> 4 - if base > 9: - raise dns.exception.FormError("bad %s base" % desc) - return base * pow(10, exponent) - - -def _check_coordinate_list(value, low, high): - if value[0] < low or value[0] > high: - raise ValueError(f"not in range [{low}, {high}]") - if value[1] < 0 or value[1] > 59: - raise ValueError("bad minutes value") - if value[2] < 0 or value[2] > 59: - raise ValueError("bad seconds value") - if value[3] < 0 or value[3] > 999: - raise ValueError("bad milliseconds value") - if value[4] != 1 and value[4] != -1: - raise ValueError("bad hemisphere value") - - -@dns.immutable.immutable -class LOC(dns.rdata.Rdata): - - """LOC record""" - - # see: RFC 1876 - - __slots__ = [ - "latitude", - "longitude", - "altitude", - "size", - "horizontal_precision", - "vertical_precision", - ] - - def __init__( - self, - rdclass, - rdtype, - latitude, - longitude, - altitude, - size=_default_size, - hprec=_default_hprec, - vprec=_default_vprec, - ): - """Initialize a LOC record instance. - - The parameters I{latitude} and I{longitude} may be either a 4-tuple - of integers specifying (degrees, minutes, seconds, milliseconds), - or they may be floating point values specifying the number of - degrees. The other parameters are floats. 
Size, horizontal precision, - and vertical precision are specified in centimeters.""" - - super().__init__(rdclass, rdtype) - if isinstance(latitude, int): - latitude = float(latitude) - if isinstance(latitude, float): - latitude = _float_to_tuple(latitude) - _check_coordinate_list(latitude, -90, 90) - self.latitude = tuple(latitude) - if isinstance(longitude, int): - longitude = float(longitude) - if isinstance(longitude, float): - longitude = _float_to_tuple(longitude) - _check_coordinate_list(longitude, -180, 180) - self.longitude = tuple(longitude) - self.altitude = float(altitude) - self.size = float(size) - self.horizontal_precision = float(hprec) - self.vertical_precision = float(vprec) - - def to_text(self, origin=None, relativize=True, **kw): - if self.latitude[4] > 0: - lat_hemisphere = "N" - else: - lat_hemisphere = "S" - if self.longitude[4] > 0: - long_hemisphere = "E" - else: - long_hemisphere = "W" - text = "%d %d %d.%03d %s %d %d %d.%03d %s %0.2fm" % ( - self.latitude[0], - self.latitude[1], - self.latitude[2], - self.latitude[3], - lat_hemisphere, - self.longitude[0], - self.longitude[1], - self.longitude[2], - self.longitude[3], - long_hemisphere, - self.altitude / 100.0, - ) - - # do not print default values - if ( - self.size != _default_size - or self.horizontal_precision != _default_hprec - or self.vertical_precision != _default_vprec - ): - text += " {:0.2f}m {:0.2f}m {:0.2f}m".format( - self.size / 100.0, - self.horizontal_precision / 100.0, - self.vertical_precision / 100.0, - ) - return text - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - latitude = [0, 0, 0, 0, 1] - longitude = [0, 0, 0, 0, 1] - size = _default_size - hprec = _default_hprec - vprec = _default_vprec - - latitude[0] = tok.get_int() - t = tok.get_string() - if t.isdigit(): - latitude[1] = int(t) - t = tok.get_string() - if "." in t: - (seconds, milliseconds) = t.split(".") - if not seconds.isdigit(): - raise dns.exception.SyntaxError("bad latitude seconds value") - latitude[2] = int(seconds) - l = len(milliseconds) - if l == 0 or l > 3 or not milliseconds.isdigit(): - raise dns.exception.SyntaxError("bad latitude milliseconds value") - if l == 1: - m = 100 - elif l == 2: - m = 10 - else: - m = 1 - latitude[3] = m * int(milliseconds) - t = tok.get_string() - elif t.isdigit(): - latitude[2] = int(t) - t = tok.get_string() - if t == "S": - latitude[4] = -1 - elif t != "N": - raise dns.exception.SyntaxError("bad latitude hemisphere value") - - longitude[0] = tok.get_int() - t = tok.get_string() - if t.isdigit(): - longitude[1] = int(t) - t = tok.get_string() - if "." 
in t: - (seconds, milliseconds) = t.split(".") - if not seconds.isdigit(): - raise dns.exception.SyntaxError("bad longitude seconds value") - longitude[2] = int(seconds) - l = len(milliseconds) - if l == 0 or l > 3 or not milliseconds.isdigit(): - raise dns.exception.SyntaxError("bad longitude milliseconds value") - if l == 1: - m = 100 - elif l == 2: - m = 10 - else: - m = 1 - longitude[3] = m * int(milliseconds) - t = tok.get_string() - elif t.isdigit(): - longitude[2] = int(t) - t = tok.get_string() - if t == "W": - longitude[4] = -1 - elif t != "E": - raise dns.exception.SyntaxError("bad longitude hemisphere value") - - t = tok.get_string() - if t[-1] == "m": - t = t[0:-1] - altitude = float(t) * 100.0 # m -> cm - - tokens = tok.get_remaining(max_tokens=3) - if len(tokens) >= 1: - value = tokens[0].unescape().value - if value[-1] == "m": - value = value[0:-1] - size = float(value) * 100.0 # m -> cm - if len(tokens) >= 2: - value = tokens[1].unescape().value - if value[-1] == "m": - value = value[0:-1] - hprec = float(value) * 100.0 # m -> cm - if len(tokens) >= 3: - value = tokens[2].unescape().value - if value[-1] == "m": - value = value[0:-1] - vprec = float(value) * 100.0 # m -> cm - - # Try encoding these now so we raise if they are bad - _encode_size(size, "size") - _encode_size(hprec, "horizontal precision") - _encode_size(vprec, "vertical precision") - - return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - milliseconds = ( - self.latitude[0] * 3600000 - + self.latitude[1] * 60000 - + self.latitude[2] * 1000 - + self.latitude[3] - ) * self.latitude[4] - latitude = 0x80000000 + milliseconds - milliseconds = ( - self.longitude[0] * 3600000 - + self.longitude[1] * 60000 - + self.longitude[2] * 1000 - + self.longitude[3] - ) * self.longitude[4] - longitude = 0x80000000 + milliseconds - altitude = int(self.altitude) + 10000000 - size = _encode_size(self.size, "size") - hprec = _encode_size(self.horizontal_precision, "horizontal precision") - vprec = _encode_size(self.vertical_precision, "vertical precision") - wire = struct.pack( - "!BBBBIII", 0, size, hprec, vprec, latitude, longitude, altitude - ) - file.write(wire) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - ( - version, - size, - hprec, - vprec, - latitude, - longitude, - altitude, - ) = parser.get_struct("!BBBBIII") - if version != 0: - raise dns.exception.FormError("LOC version not zero") - if latitude < _MIN_LATITUDE or latitude > _MAX_LATITUDE: - raise dns.exception.FormError("bad latitude") - if latitude > 0x80000000: - latitude = (latitude - 0x80000000) / 3600000 - else: - latitude = -1 * (0x80000000 - latitude) / 3600000 - if longitude < _MIN_LONGITUDE or longitude > _MAX_LONGITUDE: - raise dns.exception.FormError("bad longitude") - if longitude > 0x80000000: - longitude = (longitude - 0x80000000) / 3600000 - else: - longitude = -1 * (0x80000000 - longitude) / 3600000 - altitude = float(altitude) - 10000000.0 - size = _decode_size(size, "size") - hprec = _decode_size(hprec, "horizontal precision") - vprec = _decode_size(vprec, "vertical precision") - return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) - - @property - def float_latitude(self): - "latitude as a floating point value" - return _tuple_to_float(self.latitude) - - @property - def float_longitude(self): - "longitude as a floating point value" - return _tuple_to_float(self.longitude) diff --git 
a/server/venv/Lib/site-packages/dns/rdtypes/ANY/LP.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/LP.py deleted file mode 100644 index 8a7c512..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/LP.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import struct - -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class LP(dns.rdata.Rdata): - - """LP record""" - - # see: rfc6742.txt - - __slots__ = ["preference", "fqdn"] - - def __init__(self, rdclass, rdtype, preference, fqdn): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - self.fqdn = self._as_name(fqdn) - - def to_text(self, origin=None, relativize=True, **kw): - fqdn = self.fqdn.choose_relativity(origin, relativize) - return "%d %s" % (self.preference, fqdn) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - fqdn = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, preference, fqdn) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!H", self.preference)) - self.fqdn.to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - fqdn = parser.get_name(origin) - return cls(rdclass, rdtype, preference, fqdn) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/MX.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/MX.py deleted file mode 100644 index 1f9df21..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/MX.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
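
For reference, the LOC implementation deleted above is normally exercised through dnspython's generic rdata factory rather than by instantiating the class directly. A minimal sketch, assuming the dnspython package is importable and using illustrative coordinates:

    import dns.rdata
    import dns.rdataclass
    import dns.rdatatype

    # from_text() tokenizes the presentation form and dispatches to the
    # LOC.from_text() classmethod shown above.
    loc = dns.rdata.from_text(
        dns.rdataclass.IN, dns.rdatatype.LOC,
        "60 10 20.000 N 24 56 15.000 E 25.00m")
    print(loc.float_latitude)  # ~60.1722, computed by _tuple_to_float()
    print(loc.to_text())       # default size/precision fields are omitted

Because size, horizontal precision, and vertical precision are stored in the 4-bit base / 4-bit exponent form of _encode_size()/_decode_size(), only values expressible as a single digit times a power of ten (in centimeters) survive a round trip.
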
- -import dns.immutable -import dns.rdtypes.mxbase - - -@dns.immutable.immutable -class MX(dns.rdtypes.mxbase.MXBase): - - """MX record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NID.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/NID.py deleted file mode 100644 index ad54aca..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NID.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import struct - -import dns.immutable -import dns.rdtypes.util - - -@dns.immutable.immutable -class NID(dns.rdata.Rdata): - - """NID record""" - - # see: rfc6742.txt - - __slots__ = ["preference", "nodeid"] - - def __init__(self, rdclass, rdtype, preference, nodeid): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - if isinstance(nodeid, bytes): - if len(nodeid) != 8: - raise ValueError("invalid nodeid") - self.nodeid = dns.rdata._hexify(nodeid, 4, b":") - else: - dns.rdtypes.util.parse_formatted_hex(nodeid, 4, 4, ":") - self.nodeid = nodeid - - def to_text(self, origin=None, relativize=True, **kw): - return f"{self.preference} {self.nodeid}" - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - nodeid = tok.get_identifier() - return cls(rdclass, rdtype, preference, nodeid) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!H", self.preference)) - file.write(dns.rdtypes.util.parse_formatted_hex(self.nodeid, 4, 4, ":")) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - nodeid = parser.get_remaining() - return cls(rdclass, rdtype, preference, nodeid) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NINFO.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/NINFO.py deleted file mode 100644 index 55bc561..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NINFO.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.txtbase - - -@dns.immutable.immutable -class NINFO(dns.rdtypes.txtbase.TXTBase): - - """NINFO record""" - - # see: draft-reid-dnsext-zs-01 diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NS.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/NS.py deleted file mode 100644 index fe453f0..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NS.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.nsbase - - -@dns.immutable.immutable -class NS(dns.rdtypes.nsbase.NSBase): - - """NS record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NSEC.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/NSEC.py deleted file mode 100644 index a2d98fa..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NSEC.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdatatype -import dns.rdtypes.util - - -@dns.immutable.immutable -class Bitmap(dns.rdtypes.util.Bitmap): - type_name = "NSEC" - - -@dns.immutable.immutable -class NSEC(dns.rdata.Rdata): - - """NSEC record""" - - __slots__ = ["next", "windows"] - - def __init__(self, rdclass, rdtype, next, windows): - super().__init__(rdclass, rdtype) - self.next = self._as_name(next) - if not isinstance(windows, Bitmap): - windows = Bitmap(windows) - self.windows = tuple(windows.windows) - - def to_text(self, origin=None, relativize=True, **kw): - next = self.next.choose_relativity(origin, relativize) - text = Bitmap(self.windows).to_text() - return "{}{}".format(next, text) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - next = tok.get_name(origin, relativize, relativize_to) - windows = Bitmap.from_text(tok) - return cls(rdclass, rdtype, next, windows) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - # Note that NSEC downcasing, originally mandated by RFC 4034 - # section 6.2 was removed by RFC 6840 section 5.1. 
- self.next.to_wire(file, None, origin, False) - Bitmap(self.windows).to_wire(file) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - next = parser.get_name(origin) - bitmap = Bitmap.from_wire_parser(parser) - return cls(rdclass, rdtype, next, bitmap) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NSEC3.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/NSEC3.py deleted file mode 100644 index d32fe16..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NSEC3.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 -import binascii -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.rdatatype -import dns.rdtypes.util - -b32_hex_to_normal = bytes.maketrans( - b"0123456789ABCDEFGHIJKLMNOPQRSTUV", b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567" -) -b32_normal_to_hex = bytes.maketrans( - b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", b"0123456789ABCDEFGHIJKLMNOPQRSTUV" -) - -# hash algorithm constants -SHA1 = 1 - -# flag constants -OPTOUT = 1 - - -@dns.immutable.immutable -class Bitmap(dns.rdtypes.util.Bitmap): - type_name = "NSEC3" - - -@dns.immutable.immutable -class NSEC3(dns.rdata.Rdata): - - """NSEC3 record""" - - __slots__ = ["algorithm", "flags", "iterations", "salt", "next", "windows"] - - def __init__( - self, rdclass, rdtype, algorithm, flags, iterations, salt, next, windows - ): - super().__init__(rdclass, rdtype) - self.algorithm = self._as_uint8(algorithm) - self.flags = self._as_uint8(flags) - self.iterations = self._as_uint16(iterations) - self.salt = self._as_bytes(salt, True, 255) - self.next = self._as_bytes(next, True, 255) - if not isinstance(windows, Bitmap): - windows = Bitmap(windows) - self.windows = tuple(windows.windows) - - def to_text(self, origin=None, relativize=True, **kw): - next = base64.b32encode(self.next).translate(b32_normal_to_hex).lower().decode() - next = next.rstrip("=") - if self.salt == b"": - salt = "-" - else: - salt = binascii.hexlify(self.salt).decode() - text = Bitmap(self.windows).to_text() - return "%u %u %u %s %s%s" % ( - self.algorithm, - self.flags, - self.iterations, - salt, - next, - text, - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_uint8() - flags = tok.get_uint8() - iterations = tok.get_uint16() - salt = tok.get_string() - if salt == "-": - salt = b"" - else: - salt = binascii.unhexlify(salt.encode("ascii")) - next = tok.get_string().encode("ascii").upper().translate(b32_hex_to_normal) - if next.endswith(b"="): - raise binascii.Error("Incorrect padding") - if len(next) % 8 != 0: - next += b"=" * (8 - len(next) % 8) - next = 
base64.b32decode(next) - bitmap = Bitmap.from_text(tok) - return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, bitmap) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.salt) - file.write(struct.pack("!BBHB", self.algorithm, self.flags, self.iterations, l)) - file.write(self.salt) - l = len(self.next) - file.write(struct.pack("!B", l)) - file.write(self.next) - Bitmap(self.windows).to_wire(file) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (algorithm, flags, iterations) = parser.get_struct("!BBH") - salt = parser.get_counted_bytes() - next = parser.get_counted_bytes() - bitmap = Bitmap.from_wire_parser(parser) - return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, bitmap) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py deleted file mode 100644 index 1a0c0e0..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
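
The NSEC3 class above keeps the next-hashed-owner name as raw bytes and applies the base32/base32hex translation tables only at the text boundary. A small round-trip sketch, assuming dnspython is importable (field values are illustrative, in presentation order: algorithm, flags, iterations, salt, next, type bitmap):

    import dns.rdata
    import dns.rdataclass
    import dns.rdatatype

    nsec3 = dns.rdata.from_text(
        dns.rdataclass.IN, dns.rdatatype.NSEC3,
        "1 1 12 aabbccdd 2vptu5timamqttgl4luu9kg21e0aor3s A RRSIG")
    print(nsec3.salt.hex())  # 'aabbccdd'
    print(nsec3.flags & 1)   # 1 -> the OPTOUT flag defined above is set
    print(nsec3.to_text())   # next hash re-emitted as lowercase base32hex
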
- -import binascii -import struct - -import dns.exception -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class NSEC3PARAM(dns.rdata.Rdata): - - """NSEC3PARAM record""" - - __slots__ = ["algorithm", "flags", "iterations", "salt"] - - def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt): - super().__init__(rdclass, rdtype) - self.algorithm = self._as_uint8(algorithm) - self.flags = self._as_uint8(flags) - self.iterations = self._as_uint16(iterations) - self.salt = self._as_bytes(salt, True, 255) - - def to_text(self, origin=None, relativize=True, **kw): - if self.salt == b"": - salt = "-" - else: - salt = binascii.hexlify(self.salt).decode() - return "%u %u %u %s" % (self.algorithm, self.flags, self.iterations, salt) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_uint8() - flags = tok.get_uint8() - iterations = tok.get_uint16() - salt = tok.get_string() - if salt == "-": - salt = "" - else: - salt = binascii.unhexlify(salt.encode()) - return cls(rdclass, rdtype, algorithm, flags, iterations, salt) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.salt) - file.write(struct.pack("!BBHB", self.algorithm, self.flags, self.iterations, l)) - file.write(self.salt) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (algorithm, flags, iterations) = parser.get_struct("!BBH") - salt = parser.get_counted_bytes() - return cls(rdclass, rdtype, algorithm, flags, iterations, salt) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py deleted file mode 100644 index e5e2572..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2016 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
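
NSEC3PARAM shares the algorithm/flags/iterations/salt prefix of the NSEC3 wire format, with "-" standing for the empty salt in presentation form (the from_text() branch above assigns a str, which _as_bytes(..., True, 255) encodes back to bytes). A minimal sketch under the same import assumption:

    import dns.rdata
    import dns.rdataclass
    import dns.rdatatype

    p = dns.rdata.from_text(
        dns.rdataclass.IN, dns.rdatatype.NSEC3PARAM, "1 0 0 -")
    print(p.salt)       # b'' -- "-" denotes an empty salt
    print(p.to_text())  # '1 0 0 -'
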
- -import base64 - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class OPENPGPKEY(dns.rdata.Rdata): - - """OPENPGPKEY record""" - - # see: RFC 7929 - - def __init__(self, rdclass, rdtype, key): - super().__init__(rdclass, rdtype) - self.key = self._as_bytes(key) - - def to_text(self, origin=None, relativize=True, **kw): - return dns.rdata._base64ify(self.key, chunksize=None, **kw) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - b64 = tok.concatenate_remaining_identifiers().encode() - key = base64.b64decode(b64) - return cls(rdclass, rdtype, key) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(self.key) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - key = parser.get_remaining() - return cls(rdclass, rdtype, key) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/OPT.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/OPT.py deleted file mode 100644 index d70e537..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/OPT.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.edns -import dns.exception -import dns.immutable -import dns.rdata - -# We don't implement from_text, and that's ok. -# pylint: disable=abstract-method - - -@dns.immutable.immutable -class OPT(dns.rdata.Rdata): - - """OPT record""" - - __slots__ = ["options"] - - def __init__(self, rdclass, rdtype, options): - """Initialize an OPT rdata. - - *rdclass*, an ``int`` is the rdataclass of the Rdata, - which is also the payload size. - - *rdtype*, an ``int`` is the rdatatype of the Rdata. 
- - *options*, a tuple of ``bytes`` - """ - - super().__init__(rdclass, rdtype) - - def as_option(option): - if not isinstance(option, dns.edns.Option): - raise ValueError("option is not a dns.edns.option") - return option - - self.options = self._as_tuple(options, as_option) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - for opt in self.options: - owire = opt.to_wire() - file.write(struct.pack("!HH", opt.otype, len(owire))) - file.write(owire) - - def to_text(self, origin=None, relativize=True, **kw): - return " ".join(opt.to_text() for opt in self.options) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - options = [] - while parser.remaining() > 0: - (otype, olen) = parser.get_struct("!HH") - with parser.restrict_to(olen): - opt = dns.edns.option_from_wire_parser(otype, parser) - options.append(opt) - return cls(rdclass, rdtype, options) - - @property - def payload(self): - "payload size" - return self.rdclass diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/PTR.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/PTR.py deleted file mode 100644 index 7fd5547..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/PTR.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.nsbase - - -@dns.immutable.immutable -class PTR(dns.rdtypes.nsbase.NSBase): - - """PTR record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/RP.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/RP.py deleted file mode 100644 index 9c64c6e..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/RP.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
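
As the pylint comment above notes, OPT deliberately has no from_text(): OPT pseudo-records are built by the message layer, and the advertised UDP payload size rides in the record's CLASS field, which is why the payload property simply returns self.rdclass. A sketch of how an OPT record typically comes into existence, assuming dnspython is importable:

    import dns.message

    # use_edns=0 attaches an OPT pseudo-record to the query; the payload
    # argument becomes the OPT rdata's rdclass on the wire (RFC 6891).
    q = dns.message.make_query("example.com.", "A",
                               use_edns=0, payload=1232)
    wire = q.to_wire()  # OPT is rendered when the message is serialized
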
- -import dns.exception -import dns.immutable -import dns.name -import dns.rdata - - -@dns.immutable.immutable -class RP(dns.rdata.Rdata): - - """RP record""" - - # see: RFC 1183 - - __slots__ = ["mbox", "txt"] - - def __init__(self, rdclass, rdtype, mbox, txt): - super().__init__(rdclass, rdtype) - self.mbox = self._as_name(mbox) - self.txt = self._as_name(txt) - - def to_text(self, origin=None, relativize=True, **kw): - mbox = self.mbox.choose_relativity(origin, relativize) - txt = self.txt.choose_relativity(origin, relativize) - return "{} {}".format(str(mbox), str(txt)) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - mbox = tok.get_name(origin, relativize, relativize_to) - txt = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, mbox, txt) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.mbox.to_wire(file, None, origin, canonicalize) - self.txt.to_wire(file, None, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - mbox = parser.get_name(origin) - txt = parser.get_name(origin) - return cls(rdclass, rdtype, mbox, txt) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/RRSIG.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/RRSIG.py deleted file mode 100644 index 1160502..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/RRSIG.py +++ /dev/null @@ -1,159 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
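
RP (RFC 1183) is simply a pair of domain names: a responsible-person mailbox encoded as a name (the first label is the local part) and a pointer to where an explanatory TXT record may live. A minimal sketch, assuming dnspython is importable and using illustrative names:

    import dns.rdata
    import dns.rdataclass
    import dns.rdatatype

    rp = dns.rdata.from_text(
        dns.rdataclass.IN, dns.rdatatype.RP,
        "hostmaster.example.com. ops-contact.example.com.")
    print(rp.mbox)  # mailbox name, i.e. hostmaster@example.com
    print(rp.txt)   # owner name to query for an explanatory TXT record
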
- -import base64 -import calendar -import struct -import time - -import dns.dnssectypes -import dns.exception -import dns.immutable -import dns.rdata -import dns.rdatatype - - -class BadSigTime(dns.exception.DNSException): - - """Time in DNS SIG or RRSIG resource record cannot be parsed.""" - - -def sigtime_to_posixtime(what): - if len(what) <= 10 and what.isdigit(): - return int(what) - if len(what) != 14: - raise BadSigTime - year = int(what[0:4]) - month = int(what[4:6]) - day = int(what[6:8]) - hour = int(what[8:10]) - minute = int(what[10:12]) - second = int(what[12:14]) - return calendar.timegm((year, month, day, hour, minute, second, 0, 0, 0)) - - -def posixtime_to_sigtime(what): - return time.strftime("%Y%m%d%H%M%S", time.gmtime(what)) - - -@dns.immutable.immutable -class RRSIG(dns.rdata.Rdata): - - """RRSIG record""" - - __slots__ = [ - "type_covered", - "algorithm", - "labels", - "original_ttl", - "expiration", - "inception", - "key_tag", - "signer", - "signature", - ] - - def __init__( - self, - rdclass, - rdtype, - type_covered, - algorithm, - labels, - original_ttl, - expiration, - inception, - key_tag, - signer, - signature, - ): - super().__init__(rdclass, rdtype) - self.type_covered = self._as_rdatatype(type_covered) - self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) - self.labels = self._as_uint8(labels) - self.original_ttl = self._as_ttl(original_ttl) - self.expiration = self._as_uint32(expiration) - self.inception = self._as_uint32(inception) - self.key_tag = self._as_uint16(key_tag) - self.signer = self._as_name(signer) - self.signature = self._as_bytes(signature) - - def covers(self): - return self.type_covered - - def to_text(self, origin=None, relativize=True, **kw): - return "%s %d %d %d %s %s %d %s %s" % ( - dns.rdatatype.to_text(self.type_covered), - self.algorithm, - self.labels, - self.original_ttl, - posixtime_to_sigtime(self.expiration), - posixtime_to_sigtime(self.inception), - self.key_tag, - self.signer.choose_relativity(origin, relativize), - dns.rdata._base64ify(self.signature, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - type_covered = dns.rdatatype.from_text(tok.get_string()) - algorithm = dns.dnssectypes.Algorithm.from_text(tok.get_string()) - labels = tok.get_int() - original_ttl = tok.get_ttl() - expiration = sigtime_to_posixtime(tok.get_string()) - inception = sigtime_to_posixtime(tok.get_string()) - key_tag = tok.get_int() - signer = tok.get_name(origin, relativize, relativize_to) - b64 = tok.concatenate_remaining_identifiers().encode() - signature = base64.b64decode(b64) - return cls( - rdclass, - rdtype, - type_covered, - algorithm, - labels, - original_ttl, - expiration, - inception, - key_tag, - signer, - signature, - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack( - "!HBBIIIH", - self.type_covered, - self.algorithm, - self.labels, - self.original_ttl, - self.expiration, - self.inception, - self.key_tag, - ) - file.write(header) - self.signer.to_wire(file, None, origin, canonicalize) - file.write(self.signature) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("!HBBIIIH") - signer = parser.get_name(origin) - signature = parser.get_remaining() - return cls(rdclass, rdtype, *header, signer, signature) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/RT.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/RT.py deleted file mode 
100644 index 950f2a0..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/RT.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.mxbase - - -@dns.immutable.immutable -class RT(dns.rdtypes.mxbase.UncompressedDowncasingMX): - - """RT record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/SMIMEA.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/SMIMEA.py deleted file mode 100644 index 55d87bf..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/SMIMEA.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import dns.immutable -import dns.rdtypes.tlsabase - - -@dns.immutable.immutable -class SMIMEA(dns.rdtypes.tlsabase.TLSABase): - """SMIMEA record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/SOA.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/SOA.py deleted file mode 100644 index bde55e1..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/SOA.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
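
The RRSIG helpers above accept a signature time either as a 14-digit YYYYMMDDHHMMSS string or as a plain POSIX epoch count of up to ten digits; sigtime_to_posixtime() distinguishes the two purely by length. A quick sketch, assuming dnspython is importable:

    from dns.rdtypes.ANY.RRSIG import (
        posixtime_to_sigtime,
        sigtime_to_posixtime,
    )

    t = sigtime_to_posixtime("20230101000000")  # UTC, via calendar.timegm
    print(t)                                    # 1672531200
    print(posixtime_to_sigtime(t))              # '20230101000000'
    print(sigtime_to_posixtime("1672531200"))   # epoch form passes through
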
- -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata - - -@dns.immutable.immutable -class SOA(dns.rdata.Rdata): - - """SOA record""" - - # see: RFC 1035 - - __slots__ = ["mname", "rname", "serial", "refresh", "retry", "expire", "minimum"] - - def __init__( - self, rdclass, rdtype, mname, rname, serial, refresh, retry, expire, minimum - ): - super().__init__(rdclass, rdtype) - self.mname = self._as_name(mname) - self.rname = self._as_name(rname) - self.serial = self._as_uint32(serial) - self.refresh = self._as_ttl(refresh) - self.retry = self._as_ttl(retry) - self.expire = self._as_ttl(expire) - self.minimum = self._as_ttl(minimum) - - def to_text(self, origin=None, relativize=True, **kw): - mname = self.mname.choose_relativity(origin, relativize) - rname = self.rname.choose_relativity(origin, relativize) - return "%s %s %d %d %d %d %d" % ( - mname, - rname, - self.serial, - self.refresh, - self.retry, - self.expire, - self.minimum, - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - mname = tok.get_name(origin, relativize, relativize_to) - rname = tok.get_name(origin, relativize, relativize_to) - serial = tok.get_uint32() - refresh = tok.get_ttl() - retry = tok.get_ttl() - expire = tok.get_ttl() - minimum = tok.get_ttl() - return cls( - rdclass, rdtype, mname, rname, serial, refresh, retry, expire, minimum - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.mname.to_wire(file, compress, origin, canonicalize) - self.rname.to_wire(file, compress, origin, canonicalize) - five_ints = struct.pack( - "!IIIII", self.serial, self.refresh, self.retry, self.expire, self.minimum - ) - file.write(five_ints) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - mname = parser.get_name(origin) - rname = parser.get_name(origin) - return cls(rdclass, rdtype, mname, rname, *parser.get_struct("!IIIII")) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/SPF.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/SPF.py deleted file mode 100644 index c403589..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/SPF.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
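
SOA serializes its five 32-bit counters with one struct.pack("!IIIII", ...) call, so the text and wire paths stay symmetric. A minimal sketch, assuming dnspython is importable (names and timers are illustrative):

    import dns.rdata
    import dns.rdataclass
    import dns.rdatatype

    soa = dns.rdata.from_text(
        dns.rdataclass.IN, dns.rdatatype.SOA,
        "ns1.example.com. hostmaster.example.com. "
        "2023010101 7200 3600 1209600 300")
    print(soa.serial)   # 2023010101
    print(soa.minimum)  # 300, the negative-caching TTL (RFC 2308)
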
- -import dns.immutable -import dns.rdtypes.txtbase - - -@dns.immutable.immutable -class SPF(dns.rdtypes.txtbase.TXTBase): - - """SPF record""" - - # see: RFC 4408 diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/SSHFP.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/SSHFP.py deleted file mode 100644 index 6780545..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/SSHFP.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii -import struct - -import dns.immutable -import dns.rdata -import dns.rdatatype - - -@dns.immutable.immutable -class SSHFP(dns.rdata.Rdata): - - """SSHFP record""" - - # See RFC 4255 - - __slots__ = ["algorithm", "fp_type", "fingerprint"] - - def __init__(self, rdclass, rdtype, algorithm, fp_type, fingerprint): - super().__init__(rdclass, rdtype) - self.algorithm = self._as_uint8(algorithm) - self.fp_type = self._as_uint8(fp_type) - self.fingerprint = self._as_bytes(fingerprint, True) - - def to_text(self, origin=None, relativize=True, **kw): - kw = kw.copy() - chunksize = kw.pop("chunksize", 128) - return "%d %d %s" % ( - self.algorithm, - self.fp_type, - dns.rdata._hexify(self.fingerprint, chunksize=chunksize, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_uint8() - fp_type = tok.get_uint8() - fingerprint = tok.concatenate_remaining_identifiers().encode() - fingerprint = binascii.unhexlify(fingerprint) - return cls(rdclass, rdtype, algorithm, fp_type, fingerprint) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!BB", self.algorithm, self.fp_type) - file.write(header) - file.write(self.fingerprint) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("BB") - fingerprint = parser.get_remaining() - return cls(rdclass, rdtype, header[0], header[1], fingerprint) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/TKEY.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/TKEY.py deleted file mode 100644 index d5f5fc4..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/TKEY.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. 
-# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 -import struct - -import dns.exception -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class TKEY(dns.rdata.Rdata): - - """TKEY Record""" - - __slots__ = [ - "algorithm", - "inception", - "expiration", - "mode", - "error", - "key", - "other", - ] - - def __init__( - self, - rdclass, - rdtype, - algorithm, - inception, - expiration, - mode, - error, - key, - other=b"", - ): - super().__init__(rdclass, rdtype) - self.algorithm = self._as_name(algorithm) - self.inception = self._as_uint32(inception) - self.expiration = self._as_uint32(expiration) - self.mode = self._as_uint16(mode) - self.error = self._as_uint16(error) - self.key = self._as_bytes(key) - self.other = self._as_bytes(other) - - def to_text(self, origin=None, relativize=True, **kw): - _algorithm = self.algorithm.choose_relativity(origin, relativize) - text = "%s %u %u %u %u %s" % ( - str(_algorithm), - self.inception, - self.expiration, - self.mode, - self.error, - dns.rdata._base64ify(self.key, 0), - ) - if len(self.other) > 0: - text += " %s" % (dns.rdata._base64ify(self.other, 0)) - - return text - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_name(relativize=False) - inception = tok.get_uint32() - expiration = tok.get_uint32() - mode = tok.get_uint16() - error = tok.get_uint16() - key_b64 = tok.get_string().encode() - key = base64.b64decode(key_b64) - other_b64 = tok.concatenate_remaining_identifiers(True).encode() - other = base64.b64decode(other_b64) - - return cls( - rdclass, rdtype, algorithm, inception, expiration, mode, error, key, other - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.algorithm.to_wire(file, compress, origin) - file.write( - struct.pack("!IIHH", self.inception, self.expiration, self.mode, self.error) - ) - file.write(struct.pack("!H", len(self.key))) - file.write(self.key) - file.write(struct.pack("!H", len(self.other))) - if len(self.other) > 0: - file.write(self.other) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - algorithm = parser.get_name(origin) - inception, expiration, mode, error = parser.get_struct("!IIHH") - key = parser.get_counted_bytes(2) - other = parser.get_counted_bytes(2) - - return cls( - rdclass, rdtype, algorithm, inception, expiration, mode, error, key, other - ) - - # Constants for the mode field - from RFC 2930: - # 2.5 The Mode Field - # - # The mode field specifies the general scheme for key agreement or - # the purpose of the TKEY DNS message. Servers and resolvers - # supporting this specification MUST implement the Diffie-Hellman key - # agreement mode and the key deletion mode for queries. All other - # modes are OPTIONAL. A server supporting TKEY that receives a TKEY - # request with a mode it does not support returns the BADMODE error. 
- # The following values of the Mode octet are defined, available, or - # reserved: - # - # Value Description - # ----- ----------- - # 0 - reserved, see section 7 - # 1 server assignment - # 2 Diffie-Hellman exchange - # 3 GSS-API negotiation - # 4 resolver assignment - # 5 key deletion - # 6-65534 - available, see section 7 - # 65535 - reserved, see section 7 - SERVER_ASSIGNMENT = 1 - DIFFIE_HELLMAN_EXCHANGE = 2 - GSSAPI_NEGOTIATION = 3 - RESOLVER_ASSIGNMENT = 4 - KEY_DELETION = 5 diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/TLSA.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/TLSA.py deleted file mode 100644 index c9ba199..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/TLSA.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import dns.immutable -import dns.rdtypes.tlsabase - - -@dns.immutable.immutable -class TLSA(dns.rdtypes.tlsabase.TLSABase): - - """TLSA record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/TSIG.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/TSIG.py deleted file mode 100644 index 1ae87eb..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/TSIG.py +++ /dev/null @@ -1,161 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import base64 -import struct - -import dns.exception -import dns.immutable -import dns.rcode -import dns.rdata - - -@dns.immutable.immutable -class TSIG(dns.rdata.Rdata): - - """TSIG record""" - - __slots__ = [ - "algorithm", - "time_signed", - "fudge", - "mac", - "original_id", - "error", - "other", - ] - - def __init__( - self, - rdclass, - rdtype, - algorithm, - time_signed, - fudge, - mac, - original_id, - error, - other, - ): - """Initialize a TSIG rdata. - - *rdclass*, an ``int`` is the rdataclass of the Rdata. - - *rdtype*, an ``int`` is the rdatatype of the Rdata. - - *algorithm*, a ``dns.name.Name``. - - *time_signed*, an ``int``. - - *fudge*, an ``int``.
- - *mac*, a ``bytes`` - - *original_id*, an ``int`` - - *error*, an ``int`` - - *other*, a ``bytes`` - """ - - super().__init__(rdclass, rdtype) - self.algorithm = self._as_name(algorithm) - self.time_signed = self._as_uint48(time_signed) - self.fudge = self._as_uint16(fudge) - self.mac = self._as_bytes(mac) - self.original_id = self._as_uint16(original_id) - self.error = dns.rcode.Rcode.make(error) - self.other = self._as_bytes(other) - - def to_text(self, origin=None, relativize=True, **kw): - algorithm = self.algorithm.choose_relativity(origin, relativize) - error = dns.rcode.to_text(self.error, True) - text = ( - f"{algorithm} {self.time_signed} {self.fudge} " - + f"{len(self.mac)} {dns.rdata._base64ify(self.mac, 0)} " - + f"{self.original_id} {error} {len(self.other)}" - ) - if self.other: - text += f" {dns.rdata._base64ify(self.other, 0)}" - return text - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - algorithm = tok.get_name(relativize=False) - time_signed = tok.get_uint48() - fudge = tok.get_uint16() - mac_len = tok.get_uint16() - mac = base64.b64decode(tok.get_string()) - if len(mac) != mac_len: - raise SyntaxError("invalid MAC") - original_id = tok.get_uint16() - error = dns.rcode.from_text(tok.get_string()) - other_len = tok.get_uint16() - if other_len > 0: - other = base64.b64decode(tok.get_string()) - if len(other) != other_len: - raise SyntaxError("invalid other data") - else: - other = b"" - return cls( - rdclass, - rdtype, - algorithm, - time_signed, - fudge, - mac, - original_id, - error, - other, - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.algorithm.to_wire(file, None, origin, False) - file.write( - struct.pack( - "!HIHH", - (self.time_signed >> 32) & 0xFFFF, - self.time_signed & 0xFFFFFFFF, - self.fudge, - len(self.mac), - ) - ) - file.write(self.mac) - file.write(struct.pack("!HHH", self.original_id, self.error, len(self.other))) - file.write(self.other) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - algorithm = parser.get_name() - time_signed = parser.get_uint48() - fudge = parser.get_uint16() - mac = parser.get_counted_bytes(2) - (original_id, error) = parser.get_struct("!HH") - other = parser.get_counted_bytes(2) - return cls( - rdclass, - rdtype, - algorithm, - time_signed, - fudge, - mac, - original_id, - error, - other, - ) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/TXT.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/TXT.py deleted file mode 100644 index f4e6193..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/TXT.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.txtbase - - -@dns.immutable.immutable -class TXT(dns.rdtypes.txtbase.TXTBase): - - """TXT record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/URI.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/URI.py deleted file mode 100644 index 7463e27..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/URI.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# Copyright (C) 2015 Red Hat, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdtypes.util - - -@dns.immutable.immutable -class URI(dns.rdata.Rdata): - - """URI record""" - - # see RFC 7553 - - __slots__ = ["priority", "weight", "target"] - - def __init__(self, rdclass, rdtype, priority, weight, target): - super().__init__(rdclass, rdtype) - self.priority = self._as_uint16(priority) - self.weight = self._as_uint16(weight) - self.target = self._as_bytes(target, True) - if len(self.target) == 0: - raise dns.exception.SyntaxError("URI target cannot be empty") - - def to_text(self, origin=None, relativize=True, **kw): - return '%d %d "%s"' % (self.priority, self.weight, self.target.decode()) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - priority = tok.get_uint16() - weight = tok.get_uint16() - target = tok.get().unescape() - if not (target.is_quoted_string() or target.is_identifier()): - raise dns.exception.SyntaxError("URI target must be a string") - return cls(rdclass, rdtype, priority, weight, target.value) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - two_ints = struct.pack("!HH", self.priority, self.weight) - file.write(two_ints) - file.write(self.target) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (priority, weight) = parser.get_struct("!HH") - target = parser.get_remaining() - if len(target) == 0: - raise dns.exception.FormError("URI target may not be empty") - return cls(rdclass, rdtype, priority, weight, target) - - def _processing_priority(self): - return self.priority - - def _processing_weight(self): - return self.weight - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.weighted_processing_order(iterable) diff --git 
a/server/venv/Lib/site-packages/dns/rdtypes/ANY/X25.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/X25.py deleted file mode 100644 index 06c1453..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/X25.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class X25(dns.rdata.Rdata): - - """X25 record""" - - # see RFC 1183 - - __slots__ = ["address"] - - def __init__(self, rdclass, rdtype, address): - super().__init__(rdclass, rdtype) - self.address = self._as_bytes(address, True, 255) - - def to_text(self, origin=None, relativize=True, **kw): - return '"%s"' % dns.rdata._escapify(self.address) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_string() - return cls(rdclass, rdtype, address) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - l = len(self.address) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(self.address) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_counted_bytes() - return cls(rdclass, rdtype, address) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/ZONEMD.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/ZONEMD.py deleted file mode 100644 index 3062843..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/ZONEMD.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import binascii -import struct - -import dns.immutable -import dns.rdata -import dns.rdatatype -import dns.zonetypes - - -@dns.immutable.immutable -class ZONEMD(dns.rdata.Rdata): - - """ZONEMD record""" - - # See RFC 8976 - - __slots__ = ["serial", "scheme", "hash_algorithm", "digest"] - - def __init__(self, rdclass, rdtype, serial, scheme, hash_algorithm, digest): - super().__init__(rdclass, rdtype) - self.serial = self._as_uint32(serial) - self.scheme = dns.zonetypes.DigestScheme.make(scheme) - self.hash_algorithm = dns.zonetypes.DigestHashAlgorithm.make(hash_algorithm) - self.digest = self._as_bytes(digest) - - if self.scheme == 0: # reserved, RFC 8976 Sec. 5.2 - raise ValueError("scheme 0 is reserved") - if self.hash_algorithm == 0: # reserved, RFC 8976 Sec. 
5.3 - raise ValueError("hash_algorithm 0 is reserved") - - hasher = dns.zonetypes._digest_hashers.get(self.hash_algorithm) - if hasher and hasher().digest_size != len(self.digest): - raise ValueError("digest length inconsistent with hash algorithm") - - def to_text(self, origin=None, relativize=True, **kw): - kw = kw.copy() - chunksize = kw.pop("chunksize", 128) - return "%d %d %d %s" % ( - self.serial, - self.scheme, - self.hash_algorithm, - dns.rdata._hexify(self.digest, chunksize=chunksize, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - serial = tok.get_uint32() - scheme = tok.get_uint8() - hash_algorithm = tok.get_uint8() - digest = tok.concatenate_remaining_identifiers().encode() - digest = binascii.unhexlify(digest) - return cls(rdclass, rdtype, serial, scheme, hash_algorithm, digest) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!IBB", self.serial, self.scheme, self.hash_algorithm) - file.write(header) - file.write(self.digest) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("!IBB") - digest = parser.get_remaining() - return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/ANY/__init__.py b/server/venv/Lib/site-packages/dns/rdtypes/ANY/__init__.py deleted file mode 100644 index 3824a0a..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/ANY/__init__.py +++ /dev/null @@ -1,68 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Class ANY (generic) rdata type classes.""" - -__all__ = [ - "AFSDB", - "AMTRELAY", - "AVC", - "CAA", - "CDNSKEY", - "CDS", - "CERT", - "CNAME", - "CSYNC", - "DLV", - "DNAME", - "DNSKEY", - "DS", - "EUI48", - "EUI64", - "GPOS", - "HINFO", - "HIP", - "ISDN", - "L32", - "L64", - "LOC", - "LP", - "MX", - "NID", - "NINFO", - "NS", - "NSEC", - "NSEC3", - "NSEC3PARAM", - "OPENPGPKEY", - "OPT", - "PTR", - "RP", - "RRSIG", - "RT", - "SMIMEA", - "SOA", - "SPF", - "SSHFP", - "TKEY", - "TLSA", - "TSIG", - "TXT", - "URI", - "X25", - "ZONEMD", -] diff --git a/server/venv/Lib/site-packages/dns/rdtypes/CH/A.py b/server/venv/Lib/site-packages/dns/rdtypes/CH/A.py deleted file mode 100644 index e457f38..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/CH/A.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.immutable -import dns.rdtypes.mxbase - - -@dns.immutable.immutable -class A(dns.rdata.Rdata): - - """A record for Chaosnet""" - - # domain: the domain of the address - # address: the 16-bit address - - __slots__ = ["domain", "address"] - - def __init__(self, rdclass, rdtype, domain, address): - super().__init__(rdclass, rdtype) - self.domain = self._as_name(domain) - self.address = self._as_uint16(address) - - def to_text(self, origin=None, relativize=True, **kw): - domain = self.domain.choose_relativity(origin, relativize) - return "%s %o" % (domain, self.address) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - domain = tok.get_name(origin, relativize, relativize_to) - address = tok.get_uint16(base=8) - return cls(rdclass, rdtype, domain, address) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.domain.to_wire(file, compress, origin, canonicalize) - pref = struct.pack("!H", self.address) - file.write(pref) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - domain = parser.get_name(origin) - address = parser.get_uint16() - return cls(rdclass, rdtype, domain, address) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/CH/__init__.py b/server/venv/Lib/site-packages/dns/rdtypes/CH/__init__.py deleted file mode 100644 index 0760c26..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/CH/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
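[Editorial aside] The Chaosnet A implementation deleted above differs from the familiar IN-class A record: it pairs a domain name with a 16-bit address whose presentation form is octal. A minimal sketch of exercising it through dnspython's generic `dns.rdata.from_text` dispatcher, assuming the public API of the library being removed; the name `ch-addr.net.` and the address `3600` are made-up illustration values:

```python
import dns.rdata
import dns.rdataclass
import dns.rdatatype

# Chaosnet A rdata: a domain name plus a 16-bit address written in octal.
# "3600" is octal text, i.e. 0o3600 == 1920 decimal (hypothetical values).
a = dns.rdata.from_text(dns.rdataclass.CH, dns.rdatatype.A, "ch-addr.net. 3600")
print(a.domain)      # ch-addr.net.
print(a.address)     # 1920 -- stored as a plain int
print(a.to_text())   # "ch-addr.net. 3600" -- rendered back via the %o format
```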
- -"""Class CH rdata type classes.""" - -__all__ = [ - "A", -] diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/A.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/A.py deleted file mode 100644 index 713d5ee..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/A.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.exception -import dns.immutable -import dns.ipv4 -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class A(dns.rdata.Rdata): - - """A record.""" - - __slots__ = ["address"] - - def __init__(self, rdclass, rdtype, address): - super().__init__(rdclass, rdtype) - self.address = self._as_ipv4_address(address) - - def to_text(self, origin=None, relativize=True, **kw): - return self.address - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_identifier() - return cls(rdclass, rdtype, address) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(dns.ipv4.inet_aton(self.address)) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_remaining() - return cls(rdclass, rdtype, address) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/AAAA.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/AAAA.py deleted file mode 100644 index f8237b4..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/AAAA.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import dns.exception -import dns.immutable -import dns.ipv6 -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class AAAA(dns.rdata.Rdata): - - """AAAA record.""" - - __slots__ = ["address"] - - def __init__(self, rdclass, rdtype, address): - super().__init__(rdclass, rdtype) - self.address = self._as_ipv6_address(address) - - def to_text(self, origin=None, relativize=True, **kw): - return self.address - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_identifier() - return cls(rdclass, rdtype, address) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(dns.ipv6.inet_aton(self.address)) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_remaining() - return cls(rdclass, rdtype, address) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/APL.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/APL.py deleted file mode 100644 index f1bb01d..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/APL.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii -import codecs -import struct - -import dns.exception -import dns.immutable -import dns.ipv4 -import dns.ipv6 -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class APLItem: - - """An APL list item.""" - - __slots__ = ["family", "negation", "address", "prefix"] - - def __init__(self, family, negation, address, prefix): - self.family = dns.rdata.Rdata._as_uint16(family) - self.negation = dns.rdata.Rdata._as_bool(negation) - if self.family == 1: - self.address = dns.rdata.Rdata._as_ipv4_address(address) - self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 32) - elif self.family == 2: - self.address = dns.rdata.Rdata._as_ipv6_address(address) - self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 128) - else: - self.address = dns.rdata.Rdata._as_bytes(address, max_length=127) - self.prefix = dns.rdata.Rdata._as_uint8(prefix) - - def __str__(self): - if self.negation: - return "!%d:%s/%s" % (self.family, self.address, self.prefix) - else: - return "%d:%s/%s" % (self.family, self.address, self.prefix) - - def to_wire(self, file): - if self.family == 1: - address = dns.ipv4.inet_aton(self.address) - elif self.family == 2: - address = dns.ipv6.inet_aton(self.address) - else: - address = binascii.unhexlify(self.address) - # - # Truncate least significant zero bytes. 
- # - last = 0 - for i in range(len(address) - 1, -1, -1): - if address[i] != 0: - last = i + 1 - break - address = address[0:last] - l = len(address) - assert l < 128 - if self.negation: - l |= 0x80 - header = struct.pack("!HBB", self.family, self.prefix, l) - file.write(header) - file.write(address) - - -@dns.immutable.immutable -class APL(dns.rdata.Rdata): - - """APL record.""" - - # see: RFC 3123 - - __slots__ = ["items"] - - def __init__(self, rdclass, rdtype, items): - super().__init__(rdclass, rdtype) - for item in items: - if not isinstance(item, APLItem): - raise ValueError("item not an APLItem") - self.items = tuple(items) - - def to_text(self, origin=None, relativize=True, **kw): - return " ".join(map(str, self.items)) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - items = [] - for token in tok.get_remaining(): - item = token.unescape().value - if item[0] == "!": - negation = True - item = item[1:] - else: - negation = False - (family, rest) = item.split(":", 1) - family = int(family) - (address, prefix) = rest.split("/", 1) - prefix = int(prefix) - item = APLItem(family, negation, address, prefix) - items.append(item) - - return cls(rdclass, rdtype, items) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - for item in self.items: - item.to_wire(file) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - items = [] - while parser.remaining() > 0: - header = parser.get_struct("!HBB") - afdlen = header[2] - if afdlen > 127: - negation = True - afdlen -= 128 - else: - negation = False - address = parser.get_bytes(afdlen) - l = len(address) - if header[0] == 1: - if l < 4: - address += b"\x00" * (4 - l) - elif header[0] == 2: - if l < 16: - address += b"\x00" * (16 - l) - else: - # - # This isn't really right according to the RFC, but it - # seems better than throwing an exception - # - address = codecs.encode(address, "hex_codec") - item = APLItem(header[0], negation, address, header[1]) - items.append(item) - return cls(rdclass, rdtype, items) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/DHCID.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/DHCID.py deleted file mode 100644 index 65f8589..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/DHCID.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
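[Editorial aside] The APL implementation above parses the RFC 3123 presentation syntax `[!]family:address/prefix` and, at wire time, truncates trailing zero bytes of each address as `APLItem.to_wire` shows. A minimal sketch with RFC 5737 documentation prefixes as illustrative values:

```python
import dns.rdata
import dns.rdataclass
import dns.rdatatype

# RFC 3123 presentation form: space-separated [!]family:address/prefix items.
apl = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.APL, "1:192.0.2.0/24 !1:192.0.2.128/25"
)
for item in apl.items:
    # family 1 = IPv4, family 2 = IPv6; negation comes from the leading '!'
    print(item.family, item.negation, item.address, item.prefix)
```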
- -import base64 - -import dns.exception -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class DHCID(dns.rdata.Rdata): - - """DHCID record""" - - # see: RFC 4701 - - __slots__ = ["data"] - - def __init__(self, rdclass, rdtype, data): - super().__init__(rdclass, rdtype) - self.data = self._as_bytes(data) - - def to_text(self, origin=None, relativize=True, **kw): - return dns.rdata._base64ify(self.data, **kw) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - b64 = tok.concatenate_remaining_identifiers().encode() - data = base64.b64decode(b64) - return cls(rdclass, rdtype, data) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(self.data) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - data = parser.get_remaining() - return cls(rdclass, rdtype, data) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/HTTPS.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/HTTPS.py deleted file mode 100644 index 15464cb..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/HTTPS.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import dns.immutable -import dns.rdtypes.svcbbase - - -@dns.immutable.immutable -class HTTPS(dns.rdtypes.svcbbase.SVCBBase): - """HTTPS record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/IPSECKEY.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/IPSECKEY.py deleted file mode 100644 index 8bb2bcb..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/IPSECKEY.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
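[Editorial aside] The HTTPS record deleted just above is a thin subclass of `dns.rdtypes.svcbbase.SVCBBase` (the shared base appears near the end of this diff), which carries a priority, a target name, and a parameter map keyed by `ParamKey`. A minimal sketch, assuming the SVCB/HTTPS presentation syntax that base class accepts; the parameter values are illustrative:

```python
import dns.rdata
import dns.rdataclass
import dns.rdatatype
from dns.rdtypes.svcbbase import ParamKey

https = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.HTTPS, '1 . alpn="h2,h3" port=443'
)
print(https.priority)                    # 1 (service mode; 0 would be alias mode)
print(https.target)                      # . (the root name)
print(https.params[ParamKey.ALPN].ids)   # (b'h2', b'h3')
print(https.params[ParamKey.PORT].port)  # 443
```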
- -import base64 -import struct - -import dns.exception -import dns.immutable -import dns.rdtypes.util - - -class Gateway(dns.rdtypes.util.Gateway): - name = "IPSECKEY gateway" - - -@dns.immutable.immutable -class IPSECKEY(dns.rdata.Rdata): - - """IPSECKEY record""" - - # see: RFC 4025 - - __slots__ = ["precedence", "gateway_type", "algorithm", "gateway", "key"] - - def __init__( - self, rdclass, rdtype, precedence, gateway_type, algorithm, gateway, key - ): - super().__init__(rdclass, rdtype) - gateway = Gateway(gateway_type, gateway) - self.precedence = self._as_uint8(precedence) - self.gateway_type = gateway.type - self.algorithm = self._as_uint8(algorithm) - self.gateway = gateway.gateway - self.key = self._as_bytes(key) - - def to_text(self, origin=None, relativize=True, **kw): - gateway = Gateway(self.gateway_type, self.gateway).to_text(origin, relativize) - return "%d %d %d %s %s" % ( - self.precedence, - self.gateway_type, - self.algorithm, - gateway, - dns.rdata._base64ify(self.key, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - precedence = tok.get_uint8() - gateway_type = tok.get_uint8() - algorithm = tok.get_uint8() - gateway = Gateway.from_text( - gateway_type, tok, origin, relativize, relativize_to - ) - b64 = tok.concatenate_remaining_identifiers().encode() - key = base64.b64decode(b64) - return cls( - rdclass, rdtype, precedence, gateway_type, algorithm, gateway.gateway, key - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!BBB", self.precedence, self.gateway_type, self.algorithm) - file.write(header) - Gateway(self.gateway_type, self.gateway).to_wire( - file, compress, origin, canonicalize - ) - file.write(self.key) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("!BBB") - gateway_type = header[1] - gateway = Gateway.from_wire_parser(gateway_type, parser, origin) - key = parser.get_remaining() - return cls( - rdclass, rdtype, header[0], gateway_type, header[2], gateway.gateway, key - ) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/KX.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/KX.py deleted file mode 100644 index a03d1d5..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/KX.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
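[Editorial aside] In the IPSECKEY implementation above, the `gateway_type` field selects how the gateway is interpreted (none, IPv4, IPv6, or a domain name) via the `Gateway` helper from `dns.rdtypes.util`. A minimal sketch, with field values borrowed from RFC 4025's examples:

```python
import dns.rdata
import dns.rdataclass
import dns.rdatatype

# Example values taken from RFC 4025: precedence 10, gateway type 1 (IPv4),
# algorithm 2 (DSA), an IPv4 gateway, then the base64-encoded public key.
ipseckey = dns.rdata.from_text(
    dns.rdataclass.IN,
    dns.rdatatype.IPSECKEY,
    "10 1 2 192.0.2.38 AQNRU3mG7TVTO2BkR47usntb102uFJtugbo6BSGvgqt4AQ==",
)
print(ipseckey.precedence, ipseckey.gateway_type, ipseckey.algorithm)  # 10 1 2
print(ipseckey.gateway)   # '192.0.2.38' -- a name, IPv4, or IPv6 per gateway_type
```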
- -import dns.immutable -import dns.rdtypes.mxbase - - -@dns.immutable.immutable -class KX(dns.rdtypes.mxbase.UncompressedDowncasingMX): - - """KX record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/NAPTR.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/NAPTR.py deleted file mode 100644 index 1f1f5a1..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/NAPTR.py +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdtypes.util - - -def _write_string(file, s): - l = len(s) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(s) - - -@dns.immutable.immutable -class NAPTR(dns.rdata.Rdata): - - """NAPTR record""" - - # see: RFC 3403 - - __slots__ = ["order", "preference", "flags", "service", "regexp", "replacement"] - - def __init__( - self, rdclass, rdtype, order, preference, flags, service, regexp, replacement - ): - super().__init__(rdclass, rdtype) - self.flags = self._as_bytes(flags, True, 255) - self.service = self._as_bytes(service, True, 255) - self.regexp = self._as_bytes(regexp, True, 255) - self.order = self._as_uint16(order) - self.preference = self._as_uint16(preference) - self.replacement = self._as_name(replacement) - - def to_text(self, origin=None, relativize=True, **kw): - replacement = self.replacement.choose_relativity(origin, relativize) - return '%d %d "%s" "%s" "%s" %s' % ( - self.order, - self.preference, - dns.rdata._escapify(self.flags), - dns.rdata._escapify(self.service), - dns.rdata._escapify(self.regexp), - replacement, - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - order = tok.get_uint16() - preference = tok.get_uint16() - flags = tok.get_string() - service = tok.get_string() - regexp = tok.get_string() - replacement = tok.get_name(origin, relativize, relativize_to) - return cls( - rdclass, rdtype, order, preference, flags, service, regexp, replacement - ) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - two_ints = struct.pack("!HH", self.order, self.preference) - file.write(two_ints) - _write_string(file, self.flags) - _write_string(file, self.service) - _write_string(file, self.regexp) - self.replacement.to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (order, preference) = parser.get_struct("!HH") - strings = [] - for _ in range(3): - s = parser.get_counted_bytes() - strings.append(s) - replacement = parser.get_name(origin) - return cls( - rdclass, - rdtype, - order, - preference, - 
strings[0], - strings[1], - strings[2], - replacement, - ) - - def _processing_priority(self): - return (self.order, self.preference) - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/NSAP.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/NSAP.py deleted file mode 100644 index be8581e..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/NSAP.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class NSAP(dns.rdata.Rdata): - - """NSAP record.""" - - # see: RFC 1706 - - __slots__ = ["address"] - - def __init__(self, rdclass, rdtype, address): - super().__init__(rdclass, rdtype) - self.address = self._as_bytes(address) - - def to_text(self, origin=None, relativize=True, **kw): - return "0x%s" % binascii.hexlify(self.address).decode() - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_string() - if address[0:2] != "0x": - raise dns.exception.SyntaxError("string does not start with 0x") - address = address[2:].replace(".", "") - if len(address) % 2 != 0: - raise dns.exception.SyntaxError("hexstring has odd length") - address = binascii.unhexlify(address.encode()) - return cls(rdclass, rdtype, address) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(self.address) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_remaining() - return cls(rdclass, rdtype, address) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/NSAP_PTR.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/NSAP_PTR.py deleted file mode 100644 index 0a18fdc..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/NSAP_PTR.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import dns.immutable -import dns.rdtypes.nsbase - - -@dns.immutable.immutable -class NSAP_PTR(dns.rdtypes.nsbase.UncompressedNS): - - """NSAP-PTR record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/PX.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/PX.py deleted file mode 100644 index 5c0aa81..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/PX.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdtypes.util - - -@dns.immutable.immutable -class PX(dns.rdata.Rdata): - - """PX record.""" - - # see: RFC 2163 - - __slots__ = ["preference", "map822", "mapx400"] - - def __init__(self, rdclass, rdtype, preference, map822, mapx400): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - self.map822 = self._as_name(map822) - self.mapx400 = self._as_name(mapx400) - - def to_text(self, origin=None, relativize=True, **kw): - map822 = self.map822.choose_relativity(origin, relativize) - mapx400 = self.mapx400.choose_relativity(origin, relativize) - return "%d %s %s" % (self.preference, map822, mapx400) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - map822 = tok.get_name(origin, relativize, relativize_to) - mapx400 = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, preference, map822, mapx400) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - pref = struct.pack("!H", self.preference) - file.write(pref) - self.map822.to_wire(file, None, origin, canonicalize) - self.mapx400.to_wire(file, None, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - map822 = parser.get_name(origin) - mapx400 = parser.get_name(origin) - return cls(rdclass, rdtype, preference, map822, mapx400) - - def _processing_priority(self): - return self.preference - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/SRV.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/SRV.py deleted file mode 100644 index 84c5400..0000000 --- 
a/server/venv/Lib/site-packages/dns/rdtypes/IN/SRV.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdtypes.util - - -@dns.immutable.immutable -class SRV(dns.rdata.Rdata): - - """SRV record""" - - # see: RFC 2782 - - __slots__ = ["priority", "weight", "port", "target"] - - def __init__(self, rdclass, rdtype, priority, weight, port, target): - super().__init__(rdclass, rdtype) - self.priority = self._as_uint16(priority) - self.weight = self._as_uint16(weight) - self.port = self._as_uint16(port) - self.target = self._as_name(target) - - def to_text(self, origin=None, relativize=True, **kw): - target = self.target.choose_relativity(origin, relativize) - return "%d %d %d %s" % (self.priority, self.weight, self.port, target) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - priority = tok.get_uint16() - weight = tok.get_uint16() - port = tok.get_uint16() - target = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, priority, weight, port, target) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - three_ints = struct.pack("!HHH", self.priority, self.weight, self.port) - file.write(three_ints) - self.target.to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - (priority, weight, port) = parser.get_struct("!HHH") - target = parser.get_name(origin) - return cls(rdclass, rdtype, priority, weight, port, target) - - def _processing_priority(self): - return self.priority - - def _processing_weight(self): - return self.weight - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.weighted_processing_order(iterable) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/SVCB.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/SVCB.py deleted file mode 100644 index ff3e932..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/SVCB.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import dns.immutable -import dns.rdtypes.svcbbase - - -@dns.immutable.immutable -class SVCB(dns.rdtypes.svcbbase.SVCBBase): - """SVCB record""" diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/WKS.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/WKS.py deleted file mode 100644 index 26d287a..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/WKS.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# 
Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import socket -import struct - -import dns.immutable -import dns.ipv4 -import dns.rdata - -try: - _proto_tcp = socket.getprotobyname("tcp") - _proto_udp = socket.getprotobyname("udp") -except OSError: - # Fall back to defaults in case /etc/protocols is unavailable. - _proto_tcp = 6 - _proto_udp = 17 - - -@dns.immutable.immutable -class WKS(dns.rdata.Rdata): - - """WKS record""" - - # see: RFC 1035 - - __slots__ = ["address", "protocol", "bitmap"] - - def __init__(self, rdclass, rdtype, address, protocol, bitmap): - super().__init__(rdclass, rdtype) - self.address = self._as_ipv4_address(address) - self.protocol = self._as_uint8(protocol) - self.bitmap = self._as_bytes(bitmap) - - def to_text(self, origin=None, relativize=True, **kw): - bits = [] - for i, byte in enumerate(self.bitmap): - for j in range(0, 8): - if byte & (0x80 >> j): - bits.append(str(i * 8 + j)) - text = " ".join(bits) - return "%s %d %s" % (self.address, self.protocol, text) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - address = tok.get_string() - protocol = tok.get_string() - if protocol.isdigit(): - protocol = int(protocol) - else: - protocol = socket.getprotobyname(protocol) - bitmap = bytearray() - for token in tok.get_remaining(): - value = token.unescape().value - if value.isdigit(): - serv = int(value) - else: - if protocol != _proto_udp and protocol != _proto_tcp: - raise NotImplementedError("protocol must be TCP or UDP") - if protocol == _proto_udp: - protocol_text = "udp" - else: - protocol_text = "tcp" - serv = socket.getservbyname(value, protocol_text) - i = serv // 8 - l = len(bitmap) - if l < i + 1: - for _ in range(l, i + 1): - bitmap.append(0) - bitmap[i] = bitmap[i] | (0x80 >> (serv % 8)) - bitmap = dns.rdata._truncate_bitmap(bitmap) - return cls(rdclass, rdtype, address, protocol, bitmap) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(dns.ipv4.inet_aton(self.address)) - protocol = struct.pack("!B", self.protocol) - file.write(protocol) - file.write(self.bitmap) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - address = parser.get_bytes(4) - protocol = parser.get_uint8() - bitmap = parser.get_remaining() - return cls(rdclass, rdtype, address, protocol, bitmap) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/IN/__init__.py b/server/venv/Lib/site-packages/dns/rdtypes/IN/__init__.py deleted file mode 100644 index dcec4dd..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/IN/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Class IN rdata type classes.""" - -__all__ = [ - "A", - "AAAA", - "APL", - "DHCID", - "HTTPS", - "IPSECKEY", - "KX", - "NAPTR", - "NSAP", - "NSAP_PTR", - "PX", - "SRV", - "SVCB", - "WKS", -] diff --git a/server/venv/Lib/site-packages/dns/rdtypes/__init__.py b/server/venv/Lib/site-packages/dns/rdtypes/__init__.py deleted file mode 100644 index 3997f84..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS rdata type classes""" - -__all__ = [ - "ANY", - "IN", - "CH", - "dnskeybase", - "dsbase", - "euibase", - "mxbase", - "nsbase", - "svcbbase", - "tlsabase", - "txtbase", - "util", -] diff --git a/server/venv/Lib/site-packages/dns/rdtypes/dnskeybase.py b/server/venv/Lib/site-packages/dns/rdtypes/dnskeybase.py deleted file mode 100644 index 3bfcf86..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/dnskeybase.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
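[Editorial aside] Among the IN-class types registered in the `__init__.py` above, SRV (deleted earlier in this diff) and URI are the two that route server selection through `dns.rdtypes.util.weighted_processing_order` rather than plain priority order. A minimal sketch of the RFC 2782 field layout, with an illustrative SIP target:

```python
import dns.rdata
import dns.rdataclass
import dns.rdatatype

# RFC 2782 presentation order: priority weight port target.
srv = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.SRV, "10 60 5060 sip.example.com."
)
print(srv.priority, srv.weight, srv.port)   # 10 60 5060
print(srv.target)                           # sip.example.com.
```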
- -import base64 -import enum -import struct - -import dns.dnssectypes -import dns.exception -import dns.immutable -import dns.rdata - -# wildcard import -__all__ = ["SEP", "REVOKE", "ZONE"] # noqa: F822 - - -class Flag(enum.IntFlag): - SEP = 0x0001 - REVOKE = 0x0080 - ZONE = 0x0100 - - -@dns.immutable.immutable -class DNSKEYBase(dns.rdata.Rdata): - - """Base class for rdata that is like a DNSKEY record""" - - __slots__ = ["flags", "protocol", "algorithm", "key"] - - def __init__(self, rdclass, rdtype, flags, protocol, algorithm, key): - super().__init__(rdclass, rdtype) - self.flags = Flag(self._as_uint16(flags)) - self.protocol = self._as_uint8(protocol) - self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) - self.key = self._as_bytes(key) - - def to_text(self, origin=None, relativize=True, **kw): - return "%d %d %d %s" % ( - self.flags, - self.protocol, - self.algorithm, - dns.rdata._base64ify(self.key, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - flags = tok.get_uint16() - protocol = tok.get_uint8() - algorithm = tok.get_string() - b64 = tok.concatenate_remaining_identifiers().encode() - key = base64.b64decode(b64) - return cls(rdclass, rdtype, flags, protocol, algorithm, key) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!HBB", self.flags, self.protocol, self.algorithm) - file.write(header) - file.write(self.key) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("!HBB") - key = parser.get_remaining() - return cls(rdclass, rdtype, header[0], header[1], header[2], key) - - -### BEGIN generated Flag constants - -SEP = Flag.SEP -REVOKE = Flag.REVOKE -ZONE = Flag.ZONE - -### END generated Flag constants diff --git a/server/venv/Lib/site-packages/dns/rdtypes/dsbase.py b/server/venv/Lib/site-packages/dns/rdtypes/dsbase.py deleted file mode 100644 index 1ad0b7a..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/dsbase.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2010, 2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii -import struct - -import dns.dnssectypes -import dns.immutable -import dns.rdata -import dns.rdatatype - - -@dns.immutable.immutable -class DSBase(dns.rdata.Rdata): - - """Base class for rdata that is like a DS record""" - - __slots__ = ["key_tag", "algorithm", "digest_type", "digest"] - - # Digest types registry: - # https://www.iana.org/assignments/ds-rr-types/ds-rr-types.xhtml - _digest_length_by_type = { - 1: 20, # SHA-1, RFC 3658 Sec. 2.4 - 2: 32, # SHA-256, RFC 4509 Sec. 2.2 - 3: 32, # GOST R 34.11-94, RFC 5933 Sec. 4 in conjunction with RFC 4490 Sec. 
2.1 - 4: 48, # SHA-384, RFC 6605 Sec. 2 - } - - def __init__(self, rdclass, rdtype, key_tag, algorithm, digest_type, digest): - super().__init__(rdclass, rdtype) - self.key_tag = self._as_uint16(key_tag) - self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) - self.digest_type = dns.dnssectypes.DSDigest.make(self._as_uint8(digest_type)) - self.digest = self._as_bytes(digest) - try: - if len(self.digest) != self._digest_length_by_type[self.digest_type]: - raise ValueError("digest length inconsistent with digest type") - except KeyError: - if self.digest_type == 0: # reserved, RFC 3658 Sec. 2.4 - raise ValueError("digest type 0 is reserved") - - def to_text(self, origin=None, relativize=True, **kw): - kw = kw.copy() - chunksize = kw.pop("chunksize", 128) - return "%d %d %d %s" % ( - self.key_tag, - self.algorithm, - self.digest_type, - dns.rdata._hexify(self.digest, chunksize=chunksize, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - key_tag = tok.get_uint16() - algorithm = tok.get_string() - digest_type = tok.get_uint8() - digest = tok.concatenate_remaining_identifiers().encode() - digest = binascii.unhexlify(digest) - return cls(rdclass, rdtype, key_tag, algorithm, digest_type, digest) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!HBB", self.key_tag, self.algorithm, self.digest_type) - file.write(header) - file.write(self.digest) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("!HBB") - digest = parser.get_remaining() - return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/euibase.py b/server/venv/Lib/site-packages/dns/rdtypes/euibase.py deleted file mode 100644 index 4c4068b..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/euibase.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright (C) 2015 Red Hat, Inc. -# Author: Petr Spacek -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
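[Editorial aside] The `Flag` IntFlag in `dnskeybase` above composes the familiar DNSKEY flag values, and `DSBase` validates digest length against its per-type registry (SHA-1 digests must be exactly 20 bytes, and so on). A minimal sketch, with the DS field values borrowed from RFC 4034's examples:

```python
import dns.rdata
import dns.rdataclass
import dns.rdatatype
from dns.rdtypes.dnskeybase import Flag

# ZONE | SEP is the conventional 257 flags value seen on key-signing keys.
assert int(Flag.ZONE | Flag.SEP) == 257

# DS example values from RFC 4034: key tag 60485, algorithm 5 (RSASHA1),
# digest type 1 (SHA-1), so exactly 20 digest bytes are required.
ds = dns.rdata.from_text(
    dns.rdataclass.IN,
    dns.rdatatype.DS,
    "60485 5 1 2BB183AF5F22588179A53B0A98631FAD1A292118",
)
assert len(ds.digest) == 20
```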
- -import binascii - -import dns.immutable -import dns.rdata - - -@dns.immutable.immutable -class EUIBase(dns.rdata.Rdata): - - """EUIxx record""" - - # see: rfc7043.txt - - __slots__ = ["eui"] - # define these in subclasses - # byte_len = 6 # 0123456789ab (in hex) - # text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab - - def __init__(self, rdclass, rdtype, eui): - super().__init__(rdclass, rdtype) - self.eui = self._as_bytes(eui) - if len(self.eui) != self.byte_len: - raise dns.exception.FormError( - "EUI%s rdata has to have %s bytes" % (self.byte_len * 8, self.byte_len) - ) - - def to_text(self, origin=None, relativize=True, **kw): - return dns.rdata._hexify(self.eui, chunksize=2, separator=b"-", **kw) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - text = tok.get_string() - if len(text) != cls.text_len: - raise dns.exception.SyntaxError( - "Input text must have %s characters" % cls.text_len - ) - for i in range(2, cls.byte_len * 3 - 1, 3): - if text[i] != "-": - raise dns.exception.SyntaxError("Dash expected at position %s" % i) - text = text.replace("-", "") - try: - data = binascii.unhexlify(text.encode()) - except (ValueError, TypeError) as ex: - raise dns.exception.SyntaxError("Hex decoding error: %s" % str(ex)) - return cls(rdclass, rdtype, data) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(self.eui) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - eui = parser.get_bytes(cls.byte_len) - return cls(rdclass, rdtype, eui) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/mxbase.py b/server/venv/Lib/site-packages/dns/rdtypes/mxbase.py deleted file mode 100644 index a6bae07..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/mxbase.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
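[Editorial aside] `EUIBase` above leaves `byte_len` and `text_len` to its subclasses, so EUI48 is six bytes rendered as dash-separated hex pairs per RFC 7043 (EUI64 is the same pattern with eight). A minimal sketch with an illustrative address:

```python
import dns.rdata
import dns.rdataclass
import dns.rdatatype

eui = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.EUI48, "01-23-45-67-89-ab")
assert eui.eui == bytes.fromhex("0123456789ab")   # six raw bytes on the wire
assert eui.to_text() == "01-23-45-67-89-ab"       # re-hexified with '-' separators
```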
- -"""MX-like base classes.""" - -import struct - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata -import dns.rdtypes.util - - -@dns.immutable.immutable -class MXBase(dns.rdata.Rdata): - - """Base class for rdata that is like an MX record.""" - - __slots__ = ["preference", "exchange"] - - def __init__(self, rdclass, rdtype, preference, exchange): - super().__init__(rdclass, rdtype) - self.preference = self._as_uint16(preference) - self.exchange = self._as_name(exchange) - - def to_text(self, origin=None, relativize=True, **kw): - exchange = self.exchange.choose_relativity(origin, relativize) - return "%d %s" % (self.preference, exchange) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - preference = tok.get_uint16() - exchange = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, preference, exchange) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - pref = struct.pack("!H", self.preference) - file.write(pref) - self.exchange.to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - preference = parser.get_uint16() - exchange = parser.get_name(origin) - return cls(rdclass, rdtype, preference, exchange) - - def _processing_priority(self): - return self.preference - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.priority_processing_order(iterable) - - -@dns.immutable.immutable -class UncompressedMX(MXBase): - - """Base class for rdata that is like an MX record, but whose name - is not compressed when converted to DNS wire format, and whose - digestable form is not downcased.""" - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - super()._to_wire(file, None, origin, False) - - -@dns.immutable.immutable -class UncompressedDowncasingMX(MXBase): - - """Base class for rdata that is like an MX record, but whose name - is not compressed when convert to DNS wire format.""" - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - super()._to_wire(file, None, origin, canonicalize) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/nsbase.py b/server/venv/Lib/site-packages/dns/rdtypes/nsbase.py deleted file mode 100644 index 56d9423..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/nsbase.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""NS-like base classes.""" - -import dns.exception -import dns.immutable -import dns.name -import dns.rdata - - -@dns.immutable.immutable -class NSBase(dns.rdata.Rdata): - - """Base class for rdata that is like an NS record.""" - - __slots__ = ["target"] - - def __init__(self, rdclass, rdtype, target): - super().__init__(rdclass, rdtype) - self.target = self._as_name(target) - - def to_text(self, origin=None, relativize=True, **kw): - target = self.target.choose_relativity(origin, relativize) - return str(target) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - target = tok.get_name(origin, relativize, relativize_to) - return cls(rdclass, rdtype, target) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.target.to_wire(file, compress, origin, canonicalize) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - target = parser.get_name(origin) - return cls(rdclass, rdtype, target) - - -@dns.immutable.immutable -class UncompressedNS(NSBase): - - """Base class for rdata that is like an NS record, but whose name - is not compressed when convert to DNS wire format, and whose - digestable form is not downcased.""" - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - self.target.to_wire(file, None, origin, False) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/svcbbase.py b/server/venv/Lib/site-packages/dns/rdtypes/svcbbase.py deleted file mode 100644 index ba5b53d..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/svcbbase.py +++ /dev/null @@ -1,563 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import base64 -import enum -import io -import struct - -import dns.enum -import dns.exception -import dns.immutable -import dns.ipv4 -import dns.ipv6 -import dns.name -import dns.rdata -import dns.rdtypes.util -import dns.tokenizer -import dns.wire - -# Until there is an RFC, this module is experimental and may be changed in -# incompatible ways. - - -class UnknownParamKey(dns.exception.DNSException): - """Unknown SVCB ParamKey""" - - -class ParamKey(dns.enum.IntEnum): - """SVCB ParamKey""" - - MANDATORY = 0 - ALPN = 1 - NO_DEFAULT_ALPN = 2 - PORT = 3 - IPV4HINT = 4 - ECH = 5 - IPV6HINT = 6 - DOHPATH = 7 - - @classmethod - def _maximum(cls): - return 65535 - - @classmethod - def _short_name(cls): - return "SVCBParamKey" - - @classmethod - def _prefix(cls): - return "KEY" - - @classmethod - def _unknown_exception_class(cls): - return UnknownParamKey - - -class Emptiness(enum.IntEnum): - NEVER = 0 - ALWAYS = 1 - ALLOWED = 2 - - -def _validate_key(key): - force_generic = False - if isinstance(key, bytes): - # We decode to latin-1 so we get 0-255 as valid and do NOT interpret - # UTF-8 sequences - key = key.decode("latin-1") - if isinstance(key, str): - if key.lower().startswith("key"): - force_generic = True - if key[3:].startswith("0") and len(key) != 4: - # key has leading zeros - raise ValueError("leading zeros in key") - key = key.replace("-", "_") - return (ParamKey.make(key), force_generic) - - -def key_to_text(key): - return ParamKey.to_text(key).replace("_", "-").lower() - - -# Like rdata escapify, but escapes ',' too. 
- -_escaped = b'",\\' - - -def _escapify(qstring): - text = "" - for c in qstring: - if c in _escaped: - text += "\\" + chr(c) - elif c >= 0x20 and c < 0x7F: - text += chr(c) - else: - text += "\\%03d" % c - return text - - -def _unescape(value): - if value == "": - return value - unescaped = b"" - l = len(value) - i = 0 - while i < l: - c = value[i] - i += 1 - if c == "\\": - if i >= l: # pragma: no cover (can't happen via tokenizer get()) - raise dns.exception.UnexpectedEnd - c = value[i] - i += 1 - if c.isdigit(): - if i >= l: - raise dns.exception.UnexpectedEnd - c2 = value[i] - i += 1 - if i >= l: - raise dns.exception.UnexpectedEnd - c3 = value[i] - i += 1 - if not (c2.isdigit() and c3.isdigit()): - raise dns.exception.SyntaxError - codepoint = int(c) * 100 + int(c2) * 10 + int(c3) - if codepoint > 255: - raise dns.exception.SyntaxError - unescaped += b"%c" % (codepoint) - continue - unescaped += c.encode() - return unescaped - - -def _split(value): - l = len(value) - i = 0 - items = [] - unescaped = b"" - while i < l: - c = value[i] - i += 1 - if c == ord("\\"): - if i >= l: # pragma: no cover (can't happen via tokenizer get()) - raise dns.exception.UnexpectedEnd - c = value[i] - i += 1 - unescaped += b"%c" % (c) - elif c == ord(","): - items.append(unescaped) - unescaped = b"" - else: - unescaped += b"%c" % (c) - items.append(unescaped) - return items - - -@dns.immutable.immutable -class Param: - """Abstract base class for SVCB parameters""" - - @classmethod - def emptiness(cls): - return Emptiness.NEVER - - -@dns.immutable.immutable -class GenericParam(Param): - """Generic SVCB parameter""" - - def __init__(self, value): - self.value = dns.rdata.Rdata._as_bytes(value, True) - - @classmethod - def emptiness(cls): - return Emptiness.ALLOWED - - @classmethod - def from_value(cls, value): - if value is None or len(value) == 0: - return None - else: - return cls(_unescape(value)) - - def to_text(self): - return '"' + dns.rdata._escapify(self.value) + '"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - value = parser.get_bytes(parser.remaining()) - if len(value) == 0: - return None - else: - return cls(value) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - file.write(self.value) - - -@dns.immutable.immutable -class MandatoryParam(Param): - def __init__(self, keys): - # check for duplicates - keys = sorted([_validate_key(key)[0] for key in keys]) - prior_k = None - for k in keys: - if k == prior_k: - raise ValueError(f"duplicate key {k:d}") - prior_k = k - if k == ParamKey.MANDATORY: - raise ValueError("listed the mandatory key as mandatory") - self.keys = tuple(keys) - - @classmethod - def from_value(cls, value): - keys = [k.encode() for k in value.split(",")] - return cls(keys) - - def to_text(self): - return '"' + ",".join([key_to_text(key) for key in self.keys]) + '"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - keys = [] - last_key = -1 - while parser.remaining() > 0: - key = parser.get_uint16() - if key < last_key: - raise dns.exception.FormError("mandatory keys not ascending") - last_key = key - keys.append(key) - return cls(keys) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - for key in self.keys: - file.write(struct.pack("!H", key)) - - -@dns.immutable.immutable -class ALPNParam(Param): - def __init__(self, ids): - self.ids = dns.rdata.Rdata._as_tuple( - ids, lambda x: dns.rdata.Rdata._as_bytes(x, True, 255, False) - ) - - @classmethod - def 
from_value(cls, value): - return cls(_split(_unescape(value))) - - def to_text(self): - value = ",".join([_escapify(id) for id in self.ids]) - return '"' + dns.rdata._escapify(value.encode()) + '"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - ids = [] - while parser.remaining() > 0: - id = parser.get_counted_bytes() - ids.append(id) - return cls(ids) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - for id in self.ids: - file.write(struct.pack("!B", len(id))) - file.write(id) - - -@dns.immutable.immutable -class NoDefaultALPNParam(Param): - # We don't ever expect to instantiate this class, but we need - # a from_value() and a from_wire_parser(), so we just return None - # from the class methods when things are OK. - - @classmethod - def emptiness(cls): - return Emptiness.ALWAYS - - @classmethod - def from_value(cls, value): - if value is None or value == "": - return None - else: - raise ValueError("no-default-alpn with non-empty value") - - def to_text(self): - raise NotImplementedError # pragma: no cover - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - if parser.remaining() != 0: - raise dns.exception.FormError - return None - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - raise NotImplementedError # pragma: no cover - - -@dns.immutable.immutable -class PortParam(Param): - def __init__(self, port): - self.port = dns.rdata.Rdata._as_uint16(port) - - @classmethod - def from_value(cls, value): - value = int(value) - return cls(value) - - def to_text(self): - return f'"{self.port}"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - port = parser.get_uint16() - return cls(port) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - file.write(struct.pack("!H", self.port)) - - -@dns.immutable.immutable -class IPv4HintParam(Param): - def __init__(self, addresses): - self.addresses = dns.rdata.Rdata._as_tuple( - addresses, dns.rdata.Rdata._as_ipv4_address - ) - - @classmethod - def from_value(cls, value): - addresses = value.split(",") - return cls(addresses) - - def to_text(self): - return '"' + ",".join(self.addresses) + '"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - addresses = [] - while parser.remaining() > 0: - ip = parser.get_bytes(4) - addresses.append(dns.ipv4.inet_ntoa(ip)) - return cls(addresses) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - for address in self.addresses: - file.write(dns.ipv4.inet_aton(address)) - - -@dns.immutable.immutable -class IPv6HintParam(Param): - def __init__(self, addresses): - self.addresses = dns.rdata.Rdata._as_tuple( - addresses, dns.rdata.Rdata._as_ipv6_address - ) - - @classmethod - def from_value(cls, value): - addresses = value.split(",") - return cls(addresses) - - def to_text(self): - return '"' + ",".join(self.addresses) + '"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - addresses = [] - while parser.remaining() > 0: - ip = parser.get_bytes(16) - addresses.append(dns.ipv6.inet_ntoa(ip)) - return cls(addresses) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - for address in self.addresses: - file.write(dns.ipv6.inet_aton(address)) - - -@dns.immutable.immutable -class ECHParam(Param): - def __init__(self, ech): - self.ech = dns.rdata.Rdata._as_bytes(ech, True) - - @classmethod - def from_value(cls, value): - if "\\" in value: - 
raise ValueError("escape in ECH value") - value = base64.b64decode(value.encode()) - return cls(value) - - def to_text(self): - b64 = base64.b64encode(self.ech).decode("ascii") - return f'"{b64}"' - - @classmethod - def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 - value = parser.get_bytes(parser.remaining()) - return cls(value) - - def to_wire(self, file, origin=None): # pylint: disable=W0613 - file.write(self.ech) - - -_class_for_key = { - ParamKey.MANDATORY: MandatoryParam, - ParamKey.ALPN: ALPNParam, - ParamKey.NO_DEFAULT_ALPN: NoDefaultALPNParam, - ParamKey.PORT: PortParam, - ParamKey.IPV4HINT: IPv4HintParam, - ParamKey.ECH: ECHParam, - ParamKey.IPV6HINT: IPv6HintParam, -} - - -def _validate_and_define(params, key, value): - (key, force_generic) = _validate_key(_unescape(key)) - if key in params: - raise SyntaxError(f'duplicate key "{key:d}"') - cls = _class_for_key.get(key, GenericParam) - emptiness = cls.emptiness() - if value is None: - if emptiness == Emptiness.NEVER: - raise SyntaxError("value cannot be empty") - value = cls.from_value(value) - else: - if force_generic: - value = cls.from_wire_parser(dns.wire.Parser(_unescape(value))) - else: - value = cls.from_value(value) - params[key] = value - - -@dns.immutable.immutable -class SVCBBase(dns.rdata.Rdata): - - """Base class for SVCB-like records""" - - # see: draft-ietf-dnsop-svcb-https-11 - - __slots__ = ["priority", "target", "params"] - - def __init__(self, rdclass, rdtype, priority, target, params): - super().__init__(rdclass, rdtype) - self.priority = self._as_uint16(priority) - self.target = self._as_name(target) - for k, v in params.items(): - k = ParamKey.make(k) - if not isinstance(v, Param) and v is not None: - raise ValueError(f"{k:d} not a Param") - self.params = dns.immutable.Dict(params) - # Make sure any parameter listed as mandatory is present in the - # record. - mandatory = params.get(ParamKey.MANDATORY) - if mandatory: - for key in mandatory.keys: - # Note we have to say "not in" as we have None as a value - # so a get() and a not None test would be wrong. - if key not in params: - raise ValueError(f"key {key:d} declared mandatory but not present") - # The no-default-alpn parameter requires the alpn parameter. 
- if ParamKey.NO_DEFAULT_ALPN in params: - if ParamKey.ALPN not in params: - raise ValueError("no-default-alpn present, but alpn missing") - - def to_text(self, origin=None, relativize=True, **kw): - target = self.target.choose_relativity(origin, relativize) - params = [] - for key in sorted(self.params.keys()): - value = self.params[key] - if value is None: - params.append(key_to_text(key)) - else: - kv = key_to_text(key) + "=" + value.to_text() - params.append(kv) - if len(params) > 0: - space = " " - else: - space = "" - return "%d %s%s%s" % (self.priority, target, space, " ".join(params)) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - priority = tok.get_uint16() - target = tok.get_name(origin, relativize, relativize_to) - if priority == 0: - token = tok.get() - if not token.is_eol_or_eof(): - raise SyntaxError("parameters in AliasMode") - tok.unget(token) - params = {} - while True: - token = tok.get() - if token.is_eol_or_eof(): - tok.unget(token) - break - if token.ttype != dns.tokenizer.IDENTIFIER: - raise SyntaxError("parameter is not an identifier") - equals = token.value.find("=") - if equals == len(token.value) - 1: - # 'key=', so next token should be a quoted string without - # any intervening whitespace. - key = token.value[:-1] - token = tok.get(want_leading=True) - if token.ttype != dns.tokenizer.QUOTED_STRING: - raise SyntaxError("whitespace after =") - value = token.value - elif equals > 0: - # key=value - key = token.value[:equals] - value = token.value[equals + 1 :] - elif equals == 0: - # =key - raise SyntaxError('parameter cannot start with "="') - else: - # key - key = token.value - value = None - _validate_and_define(params, key, value) - return cls(rdclass, rdtype, priority, target, params) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - file.write(struct.pack("!H", self.priority)) - self.target.to_wire(file, None, origin, False) - for key in sorted(self.params): - file.write(struct.pack("!H", key)) - value = self.params[key] - # placeholder for length (or actual length of empty values) - file.write(struct.pack("!H", 0)) - if value is None: - continue - else: - start = file.tell() - value.to_wire(file, origin) - end = file.tell() - assert end - start < 65536 - file.seek(start - 2) - stuff = struct.pack("!H", end - start) - file.write(stuff) - file.seek(0, io.SEEK_END) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - priority = parser.get_uint16() - target = parser.get_name(origin) - if priority == 0 and parser.remaining() != 0: - raise dns.exception.FormError("parameters in AliasMode") - params = {} - prior_key = -1 - while parser.remaining() > 0: - key = parser.get_uint16() - if key < prior_key: - raise dns.exception.FormError("keys not in order") - prior_key = key - vlen = parser.get_uint16() - pcls = _class_for_key.get(key, GenericParam) - with parser.restrict_to(vlen): - value = pcls.from_wire_parser(parser, origin) - params[key] = value - return cls(rdclass, rdtype, priority, target, params) - - def _processing_priority(self): - return self.priority - - @classmethod - def _processing_order(cls, iterable): - return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/tlsabase.py b/server/venv/Lib/site-packages/dns/rdtypes/tlsabase.py deleted file mode 100644 index 4cdb7ab..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/tlsabase.py +++ /dev/null @@ -1,72 +0,0 @@ 
-# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import binascii -import struct - -import dns.immutable -import dns.rdata -import dns.rdatatype - - -@dns.immutable.immutable -class TLSABase(dns.rdata.Rdata): - - """Base class for TLSA and SMIMEA records""" - - # see: RFC 6698 - - __slots__ = ["usage", "selector", "mtype", "cert"] - - def __init__(self, rdclass, rdtype, usage, selector, mtype, cert): - super().__init__(rdclass, rdtype) - self.usage = self._as_uint8(usage) - self.selector = self._as_uint8(selector) - self.mtype = self._as_uint8(mtype) - self.cert = self._as_bytes(cert) - - def to_text(self, origin=None, relativize=True, **kw): - kw = kw.copy() - chunksize = kw.pop("chunksize", 128) - return "%d %d %d %s" % ( - self.usage, - self.selector, - self.mtype, - dns.rdata._hexify(self.cert, chunksize=chunksize, **kw), - ) - - @classmethod - def from_text( - cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None - ): - usage = tok.get_uint8() - selector = tok.get_uint8() - mtype = tok.get_uint8() - cert = tok.concatenate_remaining_identifiers().encode() - cert = binascii.unhexlify(cert) - return cls(rdclass, rdtype, usage, selector, mtype, cert) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - header = struct.pack("!BBB", self.usage, self.selector, self.mtype) - file.write(header) - file.write(self.cert) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - header = parser.get_struct("BBB") - cert = parser.get_remaining() - return cls(rdclass, rdtype, header[0], header[1], header[2], cert) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/txtbase.py b/server/venv/Lib/site-packages/dns/rdtypes/txtbase.py deleted file mode 100644 index fdbfb64..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/txtbase.py +++ /dev/null @@ -1,107 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""TXT-like base class.""" - -import struct -from typing import Any, Dict, Iterable, Optional, Tuple, Union - -import dns.exception -import dns.immutable -import dns.rdata -import dns.tokenizer - - -@dns.immutable.immutable -class TXTBase(dns.rdata.Rdata): - - """Base class for rdata that is like a TXT record (see RFC 1035).""" - - __slots__ = ["strings"] - - def __init__( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - strings: Iterable[Union[bytes, str]], - ): - """Initialize a TXT-like rdata. - - *rdclass*, an ``int`` is the rdataclass of the Rdata. - - *rdtype*, an ``int`` is the rdatatype of the Rdata. - - *strings*, a tuple of ``bytes`` - """ - super().__init__(rdclass, rdtype) - self.strings: Tuple[bytes] = self._as_tuple( - strings, lambda x: self._as_bytes(x, True, 255) - ) - - def to_text( - self, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - **kw: Dict[str, Any] - ) -> str: - txt = "" - prefix = "" - for s in self.strings: - txt += '{}"{}"'.format(prefix, dns.rdata._escapify(s)) - prefix = " " - return txt - - @classmethod - def from_text( - cls, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - tok: dns.tokenizer.Tokenizer, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, - ) -> dns.rdata.Rdata: - strings = [] - for token in tok.get_remaining(): - token = token.unescape_to_bytes() - # The 'if' below is always true in the current code, but we - # are leaving this check in in case things change some day. - if not ( - token.is_quoted_string() or token.is_identifier() - ): # pragma: no cover - raise dns.exception.SyntaxError("expected a string") - if len(token.value) > 255: - raise dns.exception.SyntaxError("string too long") - strings.append(token.value) - if len(strings) == 0: - raise dns.exception.UnexpectedEnd - return cls(rdclass, rdtype, strings) - - def _to_wire(self, file, compress=None, origin=None, canonicalize=False): - for s in self.strings: - l = len(s) - assert l < 256 - file.write(struct.pack("!B", l)) - file.write(s) - - @classmethod - def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): - strings = [] - while parser.remaining() > 0: - s = parser.get_counted_bytes() - strings.append(s) - return cls(rdclass, rdtype, strings) diff --git a/server/venv/Lib/site-packages/dns/rdtypes/util.py b/server/venv/Lib/site-packages/dns/rdtypes/util.py deleted file mode 100644 index 54908fd..0000000 --- a/server/venv/Lib/site-packages/dns/rdtypes/util.py +++ /dev/null @@ -1,257 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import collections -import random -import struct -from typing import Any, List - -import dns.exception -import dns.ipv4 -import dns.ipv6 -import dns.name -import dns.rdata - - -class Gateway: - """A helper class for the IPSECKEY gateway and AMTRELAY relay fields""" - - name = "" - - def __init__(self, type, gateway=None): - self.type = dns.rdata.Rdata._as_uint8(type) - self.gateway = gateway - self._check() - - @classmethod - def _invalid_type(cls, gateway_type): - return f"invalid {cls.name} type: {gateway_type}" - - def _check(self): - if self.type == 0: - if self.gateway not in (".", None): - raise SyntaxError(f"invalid {self.name} for type 0") - self.gateway = None - elif self.type == 1: - # check that it's OK - dns.ipv4.inet_aton(self.gateway) - elif self.type == 2: - # check that it's OK - dns.ipv6.inet_aton(self.gateway) - elif self.type == 3: - if not isinstance(self.gateway, dns.name.Name): - raise SyntaxError(f"invalid {self.name}; not a name") - else: - raise SyntaxError(self._invalid_type(self.type)) - - def to_text(self, origin=None, relativize=True): - if self.type == 0: - return "." - elif self.type in (1, 2): - return self.gateway - elif self.type == 3: - return str(self.gateway.choose_relativity(origin, relativize)) - else: - raise ValueError(self._invalid_type(self.type)) # pragma: no cover - - @classmethod - def from_text( - cls, gateway_type, tok, origin=None, relativize=True, relativize_to=None - ): - if gateway_type in (0, 1, 2): - gateway = tok.get_string() - elif gateway_type == 3: - gateway = tok.get_name(origin, relativize, relativize_to) - else: - raise dns.exception.SyntaxError( - cls._invalid_type(gateway_type) - ) # pragma: no cover - return cls(gateway_type, gateway) - - # pylint: disable=unused-argument - def to_wire(self, file, compress=None, origin=None, canonicalize=False): - if self.type == 0: - pass - elif self.type == 1: - file.write(dns.ipv4.inet_aton(self.gateway)) - elif self.type == 2: - file.write(dns.ipv6.inet_aton(self.gateway)) - elif self.type == 3: - self.gateway.to_wire(file, None, origin, False) - else: - raise ValueError(self._invalid_type(self.type)) # pragma: no cover - - # pylint: enable=unused-argument - - @classmethod - def from_wire_parser(cls, gateway_type, parser, origin=None): - if gateway_type == 0: - gateway = None - elif gateway_type == 1: - gateway = dns.ipv4.inet_ntoa(parser.get_bytes(4)) - elif gateway_type == 2: - gateway = dns.ipv6.inet_ntoa(parser.get_bytes(16)) - elif gateway_type == 3: - gateway = parser.get_name(origin) - else: - raise dns.exception.FormError(cls._invalid_type(gateway_type)) - return cls(gateway_type, gateway) - - -class Bitmap: - """A helper class for the NSEC/NSEC3/CSYNC type bitmaps""" - - type_name = "" - - def __init__(self, windows=None): - last_window = -1 - self.windows = windows - for window, bitmap in self.windows: - if not isinstance(window, int): - raise ValueError(f"bad {self.type_name} window type") - if window <= last_window: - raise ValueError(f"bad {self.type_name} window order") - if window > 256: - raise ValueError(f"bad {self.type_name} window number") - last_window = window - if not isinstance(bitmap, bytes): - raise ValueError(f"bad {self.type_name} octets 
type") - if len(bitmap) == 0 or len(bitmap) > 32: - raise ValueError(f"bad {self.type_name} octets") - - def to_text(self) -> str: - text = "" - for window, bitmap in self.windows: - bits = [] - for i, byte in enumerate(bitmap): - for j in range(0, 8): - if byte & (0x80 >> j): - rdtype = window * 256 + i * 8 + j - bits.append(dns.rdatatype.to_text(rdtype)) - text += " " + " ".join(bits) - return text - - @classmethod - def from_text(cls, tok: "dns.tokenizer.Tokenizer") -> "Bitmap": - rdtypes = [] - for token in tok.get_remaining(): - rdtype = dns.rdatatype.from_text(token.unescape().value) - if rdtype == 0: - raise dns.exception.SyntaxError(f"{cls.type_name} with bit 0") - rdtypes.append(rdtype) - return cls.from_rdtypes(rdtypes) - - @classmethod - def from_rdtypes(cls, rdtypes: List[dns.rdatatype.RdataType]) -> "Bitmap": - rdtypes = sorted(rdtypes) - window = 0 - octets = 0 - prior_rdtype = 0 - bitmap = bytearray(b"\0" * 32) - windows = [] - for rdtype in rdtypes: - if rdtype == prior_rdtype: - continue - prior_rdtype = rdtype - new_window = rdtype // 256 - if new_window != window: - if octets != 0: - windows.append((window, bytes(bitmap[0:octets]))) - bitmap = bytearray(b"\0" * 32) - window = new_window - offset = rdtype % 256 - byte = offset // 8 - bit = offset % 8 - octets = byte + 1 - bitmap[byte] = bitmap[byte] | (0x80 >> bit) - if octets != 0: - windows.append((window, bytes(bitmap[0:octets]))) - return cls(windows) - - def to_wire(self, file: Any) -> None: - for window, bitmap in self.windows: - file.write(struct.pack("!BB", window, len(bitmap))) - file.write(bitmap) - - @classmethod - def from_wire_parser(cls, parser: "dns.wire.Parser") -> "Bitmap": - windows = [] - while parser.remaining() > 0: - window = parser.get_uint8() - bitmap = parser.get_counted_bytes() - windows.append((window, bitmap)) - return cls(windows) - - -def _priority_table(items): - by_priority = collections.defaultdict(list) - for rdata in items: - by_priority[rdata._processing_priority()].append(rdata) - return by_priority - - -def priority_processing_order(iterable): - items = list(iterable) - if len(items) == 1: - return items - by_priority = _priority_table(items) - ordered = [] - for k in sorted(by_priority.keys()): - rdatas = by_priority[k] - random.shuffle(rdatas) - ordered.extend(rdatas) - return ordered - - -_no_weight = 0.1 - - -def weighted_processing_order(iterable): - items = list(iterable) - if len(items) == 1: - return items - by_priority = _priority_table(items) - ordered = [] - for k in sorted(by_priority.keys()): - rdatas = by_priority[k] - total = sum(rdata._processing_weight() or _no_weight for rdata in rdatas) - while len(rdatas) > 1: - r = random.uniform(0, total) - for n, rdata in enumerate(rdatas): - weight = rdata._processing_weight() or _no_weight - if weight > r: - break - r -= weight - total -= weight - ordered.append(rdata) # pylint: disable=undefined-loop-variable - del rdatas[n] # pylint: disable=undefined-loop-variable - ordered.append(rdatas[0]) - return ordered - - -def parse_formatted_hex(formatted, num_chunks, chunk_size, separator): - if len(formatted) != num_chunks * (chunk_size + 1) - 1: - raise ValueError("invalid formatted hex string") - value = b"" - for _ in range(num_chunks): - chunk = formatted[0:chunk_size] - value += int(chunk, 16).to_bytes(chunk_size // 2, "big") - formatted = formatted[chunk_size:] - if len(formatted) > 0 and formatted[0] != separator: - raise ValueError("invalid formatted hex string") - formatted = formatted[1:] - return value diff --git 
a/server/venv/Lib/site-packages/dns/renderer.py b/server/venv/Lib/site-packages/dns/renderer.py deleted file mode 100644 index 53e7c0f..0000000 --- a/server/venv/Lib/site-packages/dns/renderer.py +++ /dev/null @@ -1,324 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Help for building DNS wire format messages""" - -import contextlib -import io -import random -import struct -import time - -import dns.exception -import dns.tsig - -QUESTION = 0 -ANSWER = 1 -AUTHORITY = 2 -ADDITIONAL = 3 - - -class Renderer: - """Helper class for building DNS wire-format messages. - - Most applications can use the higher-level L{dns.message.Message} - class and its to_wire() method to generate wire-format messages. - This class is for those applications which need finer control - over the generation of messages. - - Typical use:: - - r = dns.renderer.Renderer(id=1, flags=0x80, max_size=512) - r.add_question(qname, qtype, qclass) - r.add_rrset(dns.renderer.ANSWER, rrset_1) - r.add_rrset(dns.renderer.ANSWER, rrset_2) - r.add_rrset(dns.renderer.AUTHORITY, ns_rrset) - r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_1) - r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_2) - r.add_edns(0, 0, 4096) - r.write_header() - r.add_tsig(keyname, secret, 300, 1, 0, '', request_mac) - wire = r.get_wire() - - If padding is going to be used, then the OPT record MUST be - written after everything else in the additional section except for - the TSIG (if any). - - output, an io.BytesIO, where rendering is written - - id: the message id - - flags: the message flags - - max_size: the maximum size of the message - - origin: the origin to use when rendering relative names - - compress: the compression table - - section: an int, the section currently being rendered - - counts: list of the number of RRs in each section - - mac: the MAC of the rendered message (if TSIG was used) - """ - - def __init__(self, id=None, flags=0, max_size=65535, origin=None): - """Initialize a new renderer.""" - - self.output = io.BytesIO() - if id is None: - self.id = random.randint(0, 65535) - else: - self.id = id - self.flags = flags - self.max_size = max_size - self.origin = origin - self.compress = {} - self.section = QUESTION - self.counts = [0, 0, 0, 0] - self.output.write(b"\x00" * 12) - self.mac = "" - self.reserved = 0 - self.was_padded = False - - def _rollback(self, where): - """Truncate the output buffer at offset *where*, and remove any - compression table entries that pointed beyond the truncation - point. 
- """ - - self.output.seek(where) - self.output.truncate() - keys_to_delete = [] - for k, v in self.compress.items(): - if v >= where: - keys_to_delete.append(k) - for k in keys_to_delete: - del self.compress[k] - - def _set_section(self, section): - """Set the renderer's current section. - - Sections must be rendered order: QUESTION, ANSWER, AUTHORITY, - ADDITIONAL. Sections may be empty. - - Raises dns.exception.FormError if an attempt was made to set - a section value less than the current section. - """ - - if self.section != section: - if self.section > section: - raise dns.exception.FormError - self.section = section - - @contextlib.contextmanager - def _track_size(self): - start = self.output.tell() - yield start - if self.output.tell() > self.max_size: - self._rollback(start) - raise dns.exception.TooBig - - def add_question(self, qname, rdtype, rdclass=dns.rdataclass.IN): - """Add a question to the message.""" - - self._set_section(QUESTION) - with self._track_size(): - qname.to_wire(self.output, self.compress, self.origin) - self.output.write(struct.pack("!HH", rdtype, rdclass)) - self.counts[QUESTION] += 1 - - def add_rrset(self, section, rrset, **kw): - """Add the rrset to the specified section. - - Any keyword arguments are passed on to the rdataset's to_wire() - routine. - """ - - self._set_section(section) - with self._track_size(): - n = rrset.to_wire(self.output, self.compress, self.origin, **kw) - self.counts[section] += n - - def add_rdataset(self, section, name, rdataset, **kw): - """Add the rdataset to the specified section, using the specified - name as the owner name. - - Any keyword arguments are passed on to the rdataset's to_wire() - routine. - """ - - self._set_section(section) - with self._track_size(): - n = rdataset.to_wire(name, self.output, self.compress, self.origin, **kw) - self.counts[section] += n - - def add_opt(self, opt, pad=0, opt_size=0, tsig_size=0): - """Add *opt* to the additional section, applying padding if desired. The - padding will take the specified precomputed OPT size and TSIG size into - account. 
- - Note that we don't have a reliable way of knowing how big a GSS-TSIG digest - might be, so we might not get an even multiple of the pad in that case.""" - if pad: - ttl = opt.ttl - assert opt_size >= 11 - opt_rdata = opt[0] - size_without_padding = self.output.tell() + opt_size + tsig_size - remainder = size_without_padding % pad - if remainder: - pad = b"\x00" * (pad - remainder) - else: - pad = b"" - options = list(opt_rdata.options) - options.append(dns.edns.GenericOption(dns.edns.OptionType.PADDING, pad)) - opt = dns.message.Message._make_opt(ttl, opt_rdata.rdclass, options) - self.was_padded = True - self.add_rrset(ADDITIONAL, opt) - - def add_edns(self, edns, ednsflags, payload, options=None): - """Add an EDNS OPT record to the message.""" - - # make sure the EDNS version in ednsflags agrees with edns - ednsflags &= 0xFF00FFFF - ednsflags |= edns << 16 - opt = dns.message.Message._make_opt(ednsflags, payload, options) - self.add_opt(opt) - - def add_tsig( - self, - keyname, - secret, - fudge, - id, - tsig_error, - other_data, - request_mac, - algorithm=dns.tsig.default_algorithm, - ): - """Add a TSIG signature to the message.""" - - s = self.output.getvalue() - - if isinstance(secret, dns.tsig.Key): - key = secret - else: - key = dns.tsig.Key(keyname, secret, algorithm) - tsig = dns.message.Message._make_tsig( - keyname, algorithm, 0, fudge, b"", id, tsig_error, other_data - ) - (tsig, _) = dns.tsig.sign(s, key, tsig[0], int(time.time()), request_mac) - self._write_tsig(tsig, keyname) - - def add_multi_tsig( - self, - ctx, - keyname, - secret, - fudge, - id, - tsig_error, - other_data, - request_mac, - algorithm=dns.tsig.default_algorithm, - ): - """Add a TSIG signature to the message. Unlike add_tsig(), this can be - used for a series of consecutive DNS envelopes, e.g. for a zone - transfer over TCP [RFC2845, 4.4]. - - For the first message in the sequence, give ctx=None. For each - subsequent message, give the ctx that was returned from the - add_multi_tsig() call for the previous message.""" - - s = self.output.getvalue() - - if isinstance(secret, dns.tsig.Key): - key = secret - else: - key = dns.tsig.Key(keyname, secret, algorithm) - tsig = dns.message.Message._make_tsig( - keyname, algorithm, 0, fudge, b"", id, tsig_error, other_data - ) - (tsig, ctx) = dns.tsig.sign( - s, key, tsig[0], int(time.time()), request_mac, ctx, True - ) - self._write_tsig(tsig, keyname) - return ctx - - def _write_tsig(self, tsig, keyname): - if self.was_padded: - compress = None - else: - compress = self.compress - self._set_section(ADDITIONAL) - with self._track_size(): - keyname.to_wire(self.output, compress, self.origin) - self.output.write( - struct.pack("!HHIH", dns.rdatatype.TSIG, dns.rdataclass.ANY, 0, 0) - ) - rdata_start = self.output.tell() - tsig.to_wire(self.output) - - after = self.output.tell() - self.output.seek(rdata_start - 2) - self.output.write(struct.pack("!H", after - rdata_start)) - self.counts[ADDITIONAL] += 1 - self.output.seek(10) - self.output.write(struct.pack("!H", self.counts[ADDITIONAL])) - self.output.seek(0, io.SEEK_END) - - def write_header(self): - """Write the DNS message header. - - Writing the DNS message header is done after all sections - have been rendered, but before the optional TSIG signature - is added. 
- """ - - self.output.seek(0) - self.output.write( - struct.pack( - "!HHHHHH", - self.id, - self.flags, - self.counts[0], - self.counts[1], - self.counts[2], - self.counts[3], - ) - ) - self.output.seek(0, io.SEEK_END) - - def get_wire(self): - """Return the wire format message.""" - - return self.output.getvalue() - - def reserve(self, size: int) -> None: - """Reserve *size* bytes.""" - if size < 0: - raise ValueError("reserved amount must be non-negative") - if size > self.max_size: - raise ValueError("cannot reserve more than the maximum size") - self.reserved += size - self.max_size -= size - - def release_reserved(self) -> None: - """Release the reserved bytes.""" - self.max_size += self.reserved - self.reserved = 0 diff --git a/server/venv/Lib/site-packages/dns/resolver.py b/server/venv/Lib/site-packages/dns/resolver.py deleted file mode 100644 index f08f824..0000000 --- a/server/venv/Lib/site-packages/dns/resolver.py +++ /dev/null @@ -1,2054 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""DNS stub resolver.""" - -import contextlib -import random -import socket -import sys -import threading -import time -import warnings -from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Union -from urllib.parse import urlparse - -import dns._ddr -import dns.edns -import dns.exception -import dns.flags -import dns.inet -import dns.ipv4 -import dns.ipv6 -import dns.message -import dns.name -import dns.nameserver -import dns.query -import dns.rcode -import dns.rdataclass -import dns.rdatatype -import dns.rdtypes.svcbbase -import dns.reversename -import dns.tsig - -if sys.platform == "win32": - import dns.win32util - - -class NXDOMAIN(dns.exception.DNSException): - """The DNS query name does not exist.""" - - supp_kwargs = {"qnames", "responses"} - fmt = None # we have our own __str__ implementation - - # pylint: disable=arguments-differ - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def _check_kwargs(self, qnames, responses=None): - if not isinstance(qnames, (list, tuple, set)): - raise AttributeError("qnames must be a list, tuple or set") - if len(qnames) == 0: - raise AttributeError("qnames must contain at least one element") - if responses is None: - responses = {} - elif not isinstance(responses, dict): - raise AttributeError("responses must be a dict(qname=response)") - kwargs = dict(qnames=qnames, responses=responses) - return kwargs - - def __str__(self) -> str: - if "qnames" not in self.kwargs: - return super().__str__() - qnames = self.kwargs["qnames"] - if len(qnames) > 1: - msg = "None of DNS query names exist" - else: - msg = "The DNS query name does not exist" - qnames = ", ".join(map(str, qnames)) - return "{}: {}".format(msg, qnames) - - @property - def canonical_name(self): - """Return the unresolved canonical name.""" - if "qnames" not in self.kwargs: - raise TypeError("parametrized exception required") - for qname in self.kwargs["qnames"]: - response = self.kwargs["responses"][qname] - try: - cname = response.canonical_name() - if cname != qname: - return cname - except Exception: - # We can just eat this exception as it means there was - # something wrong with the response. - pass - return self.kwargs["qnames"][0] - - def __add__(self, e_nx): - """Augment by results from another NXDOMAIN exception.""" - qnames0 = list(self.kwargs.get("qnames", [])) - responses0 = dict(self.kwargs.get("responses", {})) - responses1 = e_nx.kwargs.get("responses", {}) - for qname1 in e_nx.kwargs.get("qnames", []): - if qname1 not in qnames0: - qnames0.append(qname1) - if qname1 in responses1: - responses0[qname1] = responses1[qname1] - return NXDOMAIN(qnames=qnames0, responses=responses0) - - def qnames(self): - """All of the names that were tried. - - Returns a list of ``dns.name.Name``. - """ - return self.kwargs["qnames"] - - def responses(self): - """A map from queried names to their NXDOMAIN responses. - - Returns a dict mapping a ``dns.name.Name`` to a - ``dns.message.Message``. - """ - return self.kwargs["responses"] - - def response(self, qname): - """The response for query *qname*. - - Returns a ``dns.message.Message``. 
- """ - return self.kwargs["responses"][qname] - - -class YXDOMAIN(dns.exception.DNSException): - """The DNS query name is too long after DNAME substitution.""" - - -ErrorTuple = Tuple[ - Optional[str], - bool, - int, - Union[Exception, str], - Optional[dns.message.Message], -] - - -def _errors_to_text(errors: List[ErrorTuple]) -> List[str]: - """Turn a resolution errors trace into a list of text.""" - texts = [] - for err in errors: - texts.append("Server {} answered {}".format(err[0], err[3])) - return texts - - -class LifetimeTimeout(dns.exception.Timeout): - """The resolution lifetime expired.""" - - msg = "The resolution lifetime expired." - fmt = "%s after {timeout:.3f} seconds: {errors}" % msg[:-1] - supp_kwargs = {"timeout", "errors"} - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def _fmt_kwargs(self, **kwargs): - srv_msgs = _errors_to_text(kwargs["errors"]) - return super()._fmt_kwargs( - timeout=kwargs["timeout"], errors="; ".join(srv_msgs) - ) - - -# We added more detail to resolution timeouts, but they are still -# subclasses of dns.exception.Timeout for backwards compatibility. We also -# keep dns.resolver.Timeout defined for backwards compatibility. -Timeout = LifetimeTimeout - - -class NoAnswer(dns.exception.DNSException): - """The DNS response does not contain an answer to the question.""" - - fmt = "The DNS response does not contain an answer to the question: {query}" - supp_kwargs = {"response"} - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def _fmt_kwargs(self, **kwargs): - return super()._fmt_kwargs(query=kwargs["response"].question) - - def response(self): - return self.kwargs["response"] - - -class NoNameservers(dns.exception.DNSException): - """All nameservers failed to answer the query. - - errors: list of servers and respective errors - The type of errors is - [(server IP address, any object convertible to string)]. - Non-empty errors list will add explanatory message () - """ - - msg = "All nameservers failed to answer the query." - fmt = "%s {query}: {errors}" % msg[:-1] - supp_kwargs = {"request", "errors"} - - # We do this as otherwise mypy complains about unexpected keyword argument - # idna_exception - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - def _fmt_kwargs(self, **kwargs): - srv_msgs = _errors_to_text(kwargs["errors"]) - return super()._fmt_kwargs( - query=kwargs["request"].question, errors="; ".join(srv_msgs) - ) - - -class NotAbsolute(dns.exception.DNSException): - """An absolute domain name is required but a relative name was provided.""" - - -class NoRootSOA(dns.exception.DNSException): - """There is no SOA RR at the DNS root name. This should never happen!""" - - -class NoMetaqueries(dns.exception.DNSException): - """DNS metaqueries are not allowed.""" - - -class NoResolverConfiguration(dns.exception.DNSException): - """Resolver configuration could not be read or specified no nameservers.""" - - -class Answer: - """DNS stub resolver answer. - - Instances of this class bundle up the result of a successful DNS - resolution. - - For convenience, the answer object implements much of the sequence - protocol, forwarding to its ``rrset`` attribute. E.g. - ``for a in answer`` is equivalent to ``for a in answer.rrset``. 
- ``answer[i]`` is equivalent to ``answer.rrset[i]``, and - ``answer[i:j]`` is equivalent to ``answer.rrset[i:j]``. - - Note that CNAMEs or DNAMEs in the response may mean that the answer - RRset's name might not be the query name. - """ - - def __init__( - self, - qname: dns.name.Name, - rdtype: dns.rdatatype.RdataType, - rdclass: dns.rdataclass.RdataClass, - response: dns.message.QueryMessage, - nameserver: Optional[str] = None, - port: Optional[int] = None, - ) -> None: - self.qname = qname - self.rdtype = rdtype - self.rdclass = rdclass - self.response = response - self.nameserver = nameserver - self.port = port - self.chaining_result = response.resolve_chaining() - # Copy some attributes out of chaining_result for backwards - # compatibility and convenience. - self.canonical_name = self.chaining_result.canonical_name - self.rrset = self.chaining_result.answer - self.expiration = time.time() + self.chaining_result.minimum_ttl - - def __getattr__(self, attr): # pragma: no cover - if attr == "name": - return self.rrset.name - elif attr == "ttl": - return self.rrset.ttl - elif attr == "covers": - return self.rrset.covers - elif attr == "rdclass": - return self.rrset.rdclass - elif attr == "rdtype": - return self.rrset.rdtype - else: - raise AttributeError(attr) - - def __len__(self) -> int: - return self.rrset and len(self.rrset) or 0 - - def __iter__(self): - return self.rrset and iter(self.rrset) or iter(tuple()) - - def __getitem__(self, i): - if self.rrset is None: - raise IndexError - return self.rrset[i] - - def __delitem__(self, i): - if self.rrset is None: - raise IndexError - del self.rrset[i] - - -class Answers(dict): - """A dict of DNS stub resolver answers, indexed by type.""" - - -class HostAnswers(Answers): - """A dict of DNS stub resolver answers to a host name lookup, indexed by - type. - """ - - @classmethod - def make( - cls, - v6: Optional[Answer] = None, - v4: Optional[Answer] = None, - add_empty: bool = True, - ) -> "HostAnswers": - answers = HostAnswers() - if v6 is not None and (add_empty or v6.rrset): - answers[dns.rdatatype.AAAA] = v6 - if v4 is not None and (add_empty or v4.rrset): - answers[dns.rdatatype.A] = v4 - return answers - - # Returns pairs of (address, family) from this result, potentially - # filtering by address family. - def addresses_and_families( - self, family: int = socket.AF_UNSPEC - ) -> Iterator[Tuple[str, int]]: - if family == socket.AF_UNSPEC: - yield from self.addresses_and_families(socket.AF_INET6) - yield from self.addresses_and_families(socket.AF_INET) - return - elif family == socket.AF_INET6: - answer = self.get(dns.rdatatype.AAAA) - elif family == socket.AF_INET: - answer = self.get(dns.rdatatype.A) - else: - raise NotImplementedError(f"unknown address family {family}") - if answer: - for rdata in answer: - yield (rdata.address, family) - - # Returns addresses from this result, potentially filtering by - # address family. - def addresses(self, family: int = socket.AF_UNSPEC) -> Iterator[str]: - return (pair[0] for pair in self.addresses_and_families(family)) - - # Returns the canonical name from this result. 
- def canonical_name(self) -> dns.name.Name: - answer = self.get(dns.rdatatype.AAAA, self.get(dns.rdatatype.A)) - return answer.canonical_name - - -class CacheStatistics: - """Cache Statistics""" - - def __init__(self, hits: int = 0, misses: int = 0) -> None: - self.hits = hits - self.misses = misses - - def reset(self) -> None: - self.hits = 0 - self.misses = 0 - - def clone(self) -> "CacheStatistics": - return CacheStatistics(self.hits, self.misses) - - -class CacheBase: - def __init__(self) -> None: - self.lock = threading.Lock() - self.statistics = CacheStatistics() - - def reset_statistics(self) -> None: - """Reset all statistics to zero.""" - with self.lock: - self.statistics.reset() - - def hits(self) -> int: - """How many hits has the cache had?""" - with self.lock: - return self.statistics.hits - - def misses(self) -> int: - """How many misses has the cache had?""" - with self.lock: - return self.statistics.misses - - def get_statistics_snapshot(self) -> CacheStatistics: - """Return a consistent snapshot of all the statistics. - - If running with multiple threads, it's better to take a - snapshot than to call statistics methods such as hits() and - misses() individually. - """ - with self.lock: - return self.statistics.clone() - - -CacheKey = Tuple[dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass] - - -class Cache(CacheBase): - """Simple thread-safe DNS answer cache.""" - - def __init__(self, cleaning_interval: float = 300.0) -> None: - """*cleaning_interval*, a ``float`` is the number of seconds between - periodic cleanings. - """ - - super().__init__() - self.data: Dict[CacheKey, Answer] = {} - self.cleaning_interval = cleaning_interval - self.next_cleaning: float = time.time() + self.cleaning_interval - - def _maybe_clean(self) -> None: - """Clean the cache if it's time to do so.""" - - now = time.time() - if self.next_cleaning <= now: - keys_to_delete = [] - for k, v in self.data.items(): - if v.expiration <= now: - keys_to_delete.append(k) - for k in keys_to_delete: - del self.data[k] - now = time.time() - self.next_cleaning = now + self.cleaning_interval - - def get(self, key: CacheKey) -> Optional[Answer]: - """Get the answer associated with *key*. - - Returns None if no answer is cached for the key. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. - - Returns a ``dns.resolver.Answer`` or ``None``. - """ - - with self.lock: - self._maybe_clean() - v = self.data.get(key) - if v is None or v.expiration <= time.time(): - self.statistics.misses += 1 - return None - self.statistics.hits += 1 - return v - - def put(self, key: CacheKey, value: Answer) -> None: - """Associate key and value in the cache. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. - - *value*, a ``dns.resolver.Answer``, the answer. - """ - - with self.lock: - self._maybe_clean() - self.data[key] = value - - def flush(self, key: Optional[CacheKey] = None) -> None: - """Flush the cache. - - If *key* is not ``None``, only that item is flushed. Otherwise the entire cache - is flushed. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. 
- """ - - with self.lock: - if key is not None: - if key in self.data: - del self.data[key] - else: - self.data = {} - self.next_cleaning = time.time() + self.cleaning_interval - - -class LRUCacheNode: - """LRUCache node.""" - - def __init__(self, key, value): - self.key = key - self.value = value - self.hits = 0 - self.prev = self - self.next = self - - def link_after(self, node: "LRUCacheNode") -> None: - self.prev = node - self.next = node.next - node.next.prev = self - node.next = self - - def unlink(self) -> None: - self.next.prev = self.prev - self.prev.next = self.next - - -class LRUCache(CacheBase): - """Thread-safe, bounded, least-recently-used DNS answer cache. - - This cache is better than the simple cache (above) if you're - running a web crawler or other process that does a lot of - resolutions. The LRUCache has a maximum number of nodes, and when - it is full, the least-recently used node is removed to make space - for a new one. - """ - - def __init__(self, max_size: int = 100000) -> None: - """*max_size*, an ``int``, is the maximum number of nodes to cache; - it must be greater than 0. - """ - - super().__init__() - self.data: Dict[CacheKey, LRUCacheNode] = {} - self.set_max_size(max_size) - self.sentinel: LRUCacheNode = LRUCacheNode(None, None) - self.sentinel.prev = self.sentinel - self.sentinel.next = self.sentinel - - def set_max_size(self, max_size: int) -> None: - if max_size < 1: - max_size = 1 - self.max_size = max_size - - def get(self, key: CacheKey) -> Optional[Answer]: - """Get the answer associated with *key*. - - Returns None if no answer is cached for the key. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. - - Returns a ``dns.resolver.Answer`` or ``None``. - """ - - with self.lock: - node = self.data.get(key) - if node is None: - self.statistics.misses += 1 - return None - # Unlink because we're either going to move the node to the front - # of the LRU list or we're going to free it. - node.unlink() - if node.value.expiration <= time.time(): - del self.data[node.key] - self.statistics.misses += 1 - return None - node.link_after(self.sentinel) - self.statistics.hits += 1 - node.hits += 1 - return node.value - - def get_hits_for_key(self, key: CacheKey) -> int: - """Return the number of cache hits associated with the specified key.""" - with self.lock: - node = self.data.get(key) - if node is None or node.value.expiration <= time.time(): - return 0 - else: - return node.hits - - def put(self, key: CacheKey, value: Answer) -> None: - """Associate key and value in the cache. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. - - *value*, a ``dns.resolver.Answer``, the answer. - """ - - with self.lock: - node = self.data.get(key) - if node is not None: - node.unlink() - del self.data[node.key] - while len(self.data) >= self.max_size: - gnode = self.sentinel.prev - gnode.unlink() - del self.data[gnode.key] - node = LRUCacheNode(key, value) - node.link_after(self.sentinel) - self.data[key] = node - - def flush(self, key: Optional[CacheKey] = None) -> None: - """Flush the cache. - - If *key* is not ``None``, only that item is flushed. Otherwise the entire cache - is flushed. - - *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` - tuple whose values are the query name, rdtype, and rdclass respectively. 
- """ - - with self.lock: - if key is not None: - node = self.data.get(key) - if node is not None: - node.unlink() - del self.data[node.key] - else: - gnode = self.sentinel.next - while gnode != self.sentinel: - next = gnode.next - gnode.unlink() - gnode = next - self.data = {} - - -class _Resolution: - """Helper class for dns.resolver.Resolver.resolve(). - - All of the "business logic" of resolution is encapsulated in this - class, allowing us to have multiple resolve() implementations - using different I/O schemes without copying all of the - complicated logic. - - This class is a "friend" to dns.resolver.Resolver and manipulates - resolver data structures directly. - """ - - def __init__( - self, - resolver: "BaseResolver", - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - rdclass: Union[dns.rdataclass.RdataClass, str], - tcp: bool, - raise_on_no_answer: bool, - search: Optional[bool], - ) -> None: - if isinstance(qname, str): - qname = dns.name.from_text(qname, None) - rdtype = dns.rdatatype.RdataType.make(rdtype) - if dns.rdatatype.is_metatype(rdtype): - raise NoMetaqueries - rdclass = dns.rdataclass.RdataClass.make(rdclass) - if dns.rdataclass.is_metaclass(rdclass): - raise NoMetaqueries - self.resolver = resolver - self.qnames_to_try = resolver._get_qnames_to_try(qname, search) - self.qnames = self.qnames_to_try[:] - self.rdtype = rdtype - self.rdclass = rdclass - self.tcp = tcp - self.raise_on_no_answer = raise_on_no_answer - self.nxdomain_responses: Dict[dns.name.Name, dns.message.QueryMessage] = {} - # Initialize other things to help analysis tools - self.qname = dns.name.empty - self.nameservers: List[dns.nameserver.Nameserver] = [] - self.current_nameservers: List[dns.nameserver.Nameserver] = [] - self.errors: List[ErrorTuple] = [] - self.nameserver: Optional[dns.nameserver.Nameserver] = None - self.tcp_attempt = False - self.retry_with_tcp = False - self.request: Optional[dns.message.QueryMessage] = None - self.backoff = 0.0 - - def next_request( - self, - ) -> Tuple[Optional[dns.message.QueryMessage], Optional[Answer]]: - """Get the next request to send, and check the cache. - - Returns a (request, answer) tuple. At most one of request or - answer will not be None. - """ - - # We return a tuple instead of Union[Message,Answer] as it lets - # the caller avoid isinstance(). - - while len(self.qnames) > 0: - self.qname = self.qnames.pop(0) - - # Do we know the answer? - if self.resolver.cache: - answer = self.resolver.cache.get( - (self.qname, self.rdtype, self.rdclass) - ) - if answer is not None: - if answer.rrset is None and self.raise_on_no_answer: - raise NoAnswer(response=answer.response) - else: - return (None, answer) - answer = self.resolver.cache.get( - (self.qname, dns.rdatatype.ANY, self.rdclass) - ) - if answer is not None and answer.response.rcode() == dns.rcode.NXDOMAIN: - # cached NXDOMAIN; record it and continue to next - # name. 
- self.nxdomain_responses[self.qname] = answer.response - continue - - # Build the request - request = dns.message.make_query(self.qname, self.rdtype, self.rdclass) - if self.resolver.keyname is not None: - request.use_tsig( - self.resolver.keyring, - self.resolver.keyname, - algorithm=self.resolver.keyalgorithm, - ) - request.use_edns( - self.resolver.edns, - self.resolver.ednsflags, - self.resolver.payload, - options=self.resolver.ednsoptions, - ) - if self.resolver.flags is not None: - request.flags = self.resolver.flags - - self.nameservers = self.resolver._enrich_nameservers( - self.resolver._nameservers, - self.resolver.nameserver_ports, - self.resolver.port, - ) - if self.resolver.rotate: - random.shuffle(self.nameservers) - self.current_nameservers = self.nameservers[:] - self.errors = [] - self.nameserver = None - self.tcp_attempt = False - self.retry_with_tcp = False - self.request = request - self.backoff = 0.10 - - return (request, None) - - # - # We've tried everything and only gotten NXDOMAINs. (We know - # it's only NXDOMAINs as anything else would have returned - # before now.) - # - raise NXDOMAIN(qnames=self.qnames_to_try, responses=self.nxdomain_responses) - - def next_nameserver(self) -> Tuple[dns.nameserver.Nameserver, bool, float]: - if self.retry_with_tcp: - assert self.nameserver is not None - assert not self.nameserver.is_always_max_size() - self.tcp_attempt = True - self.retry_with_tcp = False - return (self.nameserver, True, 0) - - backoff = 0.0 - if not self.current_nameservers: - if len(self.nameservers) == 0: - # Out of things to try! - raise NoNameservers(request=self.request, errors=self.errors) - self.current_nameservers = self.nameservers[:] - backoff = self.backoff - self.backoff = min(self.backoff * 2, 2) - - self.nameserver = self.current_nameservers.pop(0) - self.tcp_attempt = self.tcp or self.nameserver.is_always_max_size() - return (self.nameserver, self.tcp_attempt, backoff) - - def query_result( - self, response: Optional[dns.message.Message], ex: Optional[Exception] - ) -> Tuple[Optional[Answer], bool]: - # - # returns an (answer: Answer, end_loop: bool) tuple. - # - assert self.nameserver is not None - if ex: - # Exception during I/O or from_wire() - assert response is None - self.errors.append( - ( - str(self.nameserver), - self.tcp_attempt, - self.nameserver.answer_port(), - ex, - response, - ) - ) - if ( - isinstance(ex, dns.exception.FormError) - or isinstance(ex, EOFError) - or isinstance(ex, OSError) - or isinstance(ex, NotImplementedError) - ): - # This nameserver is no good, take it out of the mix. - self.nameservers.remove(self.nameserver) - elif isinstance(ex, dns.message.Truncated): - if self.tcp_attempt: - # Truncation with TCP is no good! - self.nameservers.remove(self.nameserver) - else: - self.retry_with_tcp = True - return (None, False) - # We got an answer! - assert response is not None - assert isinstance(response, dns.message.QueryMessage) - rcode = response.rcode() - if rcode == dns.rcode.NOERROR: - try: - answer = Answer( - self.qname, - self.rdtype, - self.rdclass, - response, - self.nameserver.answer_nameserver(), - self.nameserver.answer_port(), - ) - except Exception as e: - self.errors.append( - ( - str(self.nameserver), - self.tcp_attempt, - self.nameserver.answer_port(), - e, - response, - ) - ) - # The nameserver is no good, take it out of the mix. 
- self.nameservers.remove(self.nameserver) - return (None, False) - if self.resolver.cache: - self.resolver.cache.put((self.qname, self.rdtype, self.rdclass), answer) - if answer.rrset is None and self.raise_on_no_answer: - raise NoAnswer(response=answer.response) - return (answer, True) - elif rcode == dns.rcode.NXDOMAIN: - # Further validate the response by making an Answer, even - # if we aren't going to cache it. - try: - answer = Answer( - self.qname, dns.rdatatype.ANY, dns.rdataclass.IN, response - ) - except Exception as e: - self.errors.append( - ( - str(self.nameserver), - self.tcp_attempt, - self.nameserver.answer_port(), - e, - response, - ) - ) - # The nameserver is no good, take it out of the mix. - self.nameservers.remove(self.nameserver) - return (None, False) - self.nxdomain_responses[self.qname] = response - if self.resolver.cache: - self.resolver.cache.put( - (self.qname, dns.rdatatype.ANY, self.rdclass), answer - ) - # Make next_nameserver() return None, so caller breaks its - # inner loop and calls next_request(). - return (None, True) - elif rcode == dns.rcode.YXDOMAIN: - yex = YXDOMAIN() - self.errors.append( - ( - str(self.nameserver), - self.tcp_attempt, - self.nameserver.answer_port(), - yex, - response, - ) - ) - raise yex - else: - # - # We got a response, but we're not happy with the - # rcode in it. - # - if rcode != dns.rcode.SERVFAIL or not self.resolver.retry_servfail: - self.nameservers.remove(self.nameserver) - self.errors.append( - ( - str(self.nameserver), - self.tcp_attempt, - self.nameserver.answer_port(), - dns.rcode.to_text(rcode), - response, - ) - ) - return (None, False) - - -class BaseResolver: - """DNS stub resolver.""" - - # We initialize in reset() - # - # pylint: disable=attribute-defined-outside-init - - domain: dns.name.Name - nameserver_ports: Dict[str, int] - port: int - search: List[dns.name.Name] - use_search_by_default: bool - timeout: float - lifetime: float - keyring: Optional[Any] - keyname: Optional[Union[dns.name.Name, str]] - keyalgorithm: Union[dns.name.Name, str] - edns: int - ednsflags: int - ednsoptions: Optional[List[dns.edns.Option]] - payload: int - cache: Any - flags: Optional[int] - retry_servfail: bool - rotate: bool - ndots: Optional[int] - _nameservers: Sequence[Union[str, dns.nameserver.Nameserver]] - - def __init__( - self, filename: str = "/etc/resolv.conf", configure: bool = True - ) -> None: - """*filename*, a ``str`` or file object, specifying a file - in standard /etc/resolv.conf format. This parameter is meaningful - only when *configure* is true and the platform is POSIX. - - *configure*, a ``bool``. If True (the default), the resolver - instance is configured in the normal fashion for the operating - system the resolver is running on. (I.e. by reading a - /etc/resolv.conf file on POSIX systems and from the registry - on Windows systems.) 
- """ - - self.reset() - if configure: - if sys.platform == "win32": - self.read_registry() - elif filename: - self.read_resolv_conf(filename) - - def reset(self) -> None: - """Reset all resolver configuration to the defaults.""" - - self.domain = dns.name.Name(dns.name.from_text(socket.gethostname())[1:]) - if len(self.domain) == 0: - self.domain = dns.name.root - self._nameservers = [] - self.nameserver_ports = {} - self.port = 53 - self.search = [] - self.use_search_by_default = False - self.timeout = 2.0 - self.lifetime = 5.0 - self.keyring = None - self.keyname = None - self.keyalgorithm = dns.tsig.default_algorithm - self.edns = -1 - self.ednsflags = 0 - self.ednsoptions = None - self.payload = 0 - self.cache = None - self.flags = None - self.retry_servfail = False - self.rotate = False - self.ndots = None - - def read_resolv_conf(self, f: Any) -> None: - """Process *f* as a file in the /etc/resolv.conf format. If f is - a ``str``, it is used as the name of the file to open; otherwise it - is treated as the file itself. - - Interprets the following items: - - - nameserver - name server IP address - - - domain - local domain name - - - search - search list for host-name lookup - - - options - supported options are rotate, timeout, edns0, and ndots - - """ - - nameservers = [] - if isinstance(f, str): - try: - cm: contextlib.AbstractContextManager = open(f) - except OSError: - # /etc/resolv.conf doesn't exist, can't be read, etc. - raise NoResolverConfiguration(f"cannot open {f}") - else: - cm = contextlib.nullcontext(f) - with cm as f: - for l in f: - if len(l) == 0 or l[0] == "#" or l[0] == ";": - continue - tokens = l.split() - - # Any line containing less than 2 tokens is malformed - if len(tokens) < 2: - continue - - if tokens[0] == "nameserver": - nameservers.append(tokens[1]) - elif tokens[0] == "domain": - self.domain = dns.name.from_text(tokens[1]) - # domain and search are exclusive - self.search = [] - elif tokens[0] == "search": - # the last search wins - self.search = [] - for suffix in tokens[1:]: - self.search.append(dns.name.from_text(suffix)) - # We don't set domain as it is not used if - # len(self.search) > 0 - elif tokens[0] == "options": - for opt in tokens[1:]: - if opt == "rotate": - self.rotate = True - elif opt == "edns0": - self.use_edns() - elif "timeout" in opt: - try: - self.timeout = int(opt.split(":")[1]) - except (ValueError, IndexError): - pass - elif "ndots" in opt: - try: - self.ndots = int(opt.split(":")[1]) - except (ValueError, IndexError): - pass - if len(nameservers) == 0: - raise NoResolverConfiguration("no nameservers") - # Assigning directly instead of appending means we invoke the - # setter logic, with additonal checking and enrichment. - self.nameservers = nameservers - - def read_registry(self) -> None: - """Extract resolver configuration from the Windows registry.""" - try: - info = dns.win32util.get_dns_info() # type: ignore - if info.domain is not None: - self.domain = info.domain - self.nameservers = info.nameservers - self.search = info.search - except AttributeError: - raise NotImplementedError - - def _compute_timeout( - self, - start: float, - lifetime: Optional[float] = None, - errors: Optional[List[ErrorTuple]] = None, - ) -> float: - lifetime = self.lifetime if lifetime is None else lifetime - now = time.time() - duration = now - start - if errors is None: - errors = [] - if duration < 0: - if duration < -1: - # Time going backwards is bad. Just give up. 
- raise LifetimeTimeout(timeout=duration, errors=errors) - else: - # Time went backwards, but only a little. This can - # happen, e.g. under vmware with older linux kernels. - # Pretend it didn't happen. - duration = 0 - if duration >= lifetime: - raise LifetimeTimeout(timeout=duration, errors=errors) - return min(lifetime - duration, self.timeout) - - def _get_qnames_to_try( - self, qname: dns.name.Name, search: Optional[bool] - ) -> List[dns.name.Name]: - # This is a separate method so we can unit test the search - # rules without requiring the Internet. - if search is None: - search = self.use_search_by_default - qnames_to_try = [] - if qname.is_absolute(): - qnames_to_try.append(qname) - else: - abs_qname = qname.concatenate(dns.name.root) - if search: - if len(self.search) > 0: - # There is a search list, so use it exclusively - search_list = self.search[:] - elif self.domain != dns.name.root and self.domain is not None: - # We have some notion of a domain that isn't the root, so - # use it as the search list. - search_list = [self.domain] - else: - search_list = [] - # Figure out the effective ndots (default is 1) - if self.ndots is None: - ndots = 1 - else: - ndots = self.ndots - for suffix in search_list: - qnames_to_try.append(qname + suffix) - if len(qname) > ndots: - # The name has at least ndots dots, so we should try an - # absolute query first. - qnames_to_try.insert(0, abs_qname) - else: - # The name has less than ndots dots, so we should search - # first, then try the absolute name. - qnames_to_try.append(abs_qname) - else: - qnames_to_try.append(abs_qname) - return qnames_to_try - - def use_tsig( - self, - keyring: Any, - keyname: Optional[Union[dns.name.Name, str]] = None, - algorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, - ) -> None: - """Add a TSIG signature to each query. - - The parameters are passed to ``dns.message.Message.use_tsig()``; - see its documentation for details. - """ - - self.keyring = keyring - self.keyname = keyname - self.keyalgorithm = algorithm - - def use_edns( - self, - edns: Optional[Union[int, bool]] = 0, - ednsflags: int = 0, - payload: int = dns.message.DEFAULT_EDNS_PAYLOAD, - options: Optional[List[dns.edns.Option]] = None, - ) -> None: - """Configure EDNS behavior. - - *edns*, an ``int``, is the EDNS level to use. Specifying - ``None``, ``False``, or ``-1`` means "do not use EDNS", and in this case - the other parameters are ignored. Specifying ``True`` is - equivalent to specifying 0, i.e. "use EDNS0". - - *ednsflags*, an ``int``, the EDNS flag values. - - *payload*, an ``int``, is the EDNS sender's payload field, which is the - maximum size of UDP datagram the sender can handle. I.e. how big - a response to this message can be. - - *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS - options. - """ - - if edns is None or edns is False: - edns = -1 - elif edns is True: - edns = 0 - self.edns = edns - self.ednsflags = ednsflags - self.payload = payload - self.ednsoptions = options - - def set_flags(self, flags: int) -> None: - """Overrides the default flags with your own. - - *flags*, an ``int``, the message flags to use. 
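# A sketch of the per-resolver knobs defined above. dns.flags.DO requests
# DNSSEC records via EDNS; 1232 bytes is a commonly recommended payload size
# (both values are illustrative, not defaults of this module).
import dns.flags
import dns.resolver

res = dns.resolver.Resolver()
res.use_edns(0, dns.flags.DO, 1232)   # EDNS0 with the DO bit and a 1232-byte payload
res.set_flags(dns.flags.RD)           # replace the default message flags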
- """ - - self.flags = flags - - @classmethod - def _enrich_nameservers( - cls, - nameservers: Sequence[Union[str, dns.nameserver.Nameserver]], - nameserver_ports: Dict[str, int], - default_port: int, - ) -> List[dns.nameserver.Nameserver]: - enriched_nameservers = [] - if isinstance(nameservers, list): - for nameserver in nameservers: - enriched_nameserver: dns.nameserver.Nameserver - if isinstance(nameserver, dns.nameserver.Nameserver): - enriched_nameserver = nameserver - elif dns.inet.is_address(nameserver): - port = nameserver_ports.get(nameserver, default_port) - enriched_nameserver = dns.nameserver.Do53Nameserver( - nameserver, port - ) - else: - try: - if urlparse(nameserver).scheme != "https": - raise NotImplementedError - except Exception: - raise ValueError( - f"nameserver {nameserver} is not a " - "dns.nameserver.Nameserver instance or text form, " - "IP address, nor a valid https URL" - ) - enriched_nameserver = dns.nameserver.DoHNameserver(nameserver) - enriched_nameservers.append(enriched_nameserver) - else: - raise ValueError( - "nameservers must be a list or tuple (not a {})".format( - type(nameservers) - ) - ) - return enriched_nameservers - - @property - def nameservers( - self, - ) -> Sequence[Union[str, dns.nameserver.Nameserver]]: - return self._nameservers - - @nameservers.setter - def nameservers( - self, nameservers: Sequence[Union[str, dns.nameserver.Nameserver]] - ) -> None: - """ - *nameservers*, a ``list`` of nameservers, where a nameserver is either - a string interpretable as a nameserver, or a ``dns.nameserver.Nameserver`` - instance. - - Raises ``ValueError`` if *nameservers* is not a list of nameservers. - """ - # We just call _enrich_nameservers() for checking - self._enrich_nameservers(nameservers, self.nameserver_ports, self.port) - self._nameservers = nameservers - - -class Resolver(BaseResolver): - """DNS stub resolver.""" - - def resolve( - self, - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - search: Optional[bool] = None, - ) -> Answer: # pylint: disable=arguments-differ - """Query nameservers to find the answer to the question. - - The *qname*, *rdtype*, and *rdclass* parameters may be objects - of the appropriate type, or strings that can be converted into objects - of the appropriate type. - - *qname*, a ``dns.name.Name`` or ``str``, the query name. - - *rdtype*, an ``int`` or ``str``, the query type. - - *rdclass*, an ``int`` or ``str``, the query class. - - *tcp*, a ``bool``. If ``True``, use TCP to make the query. - - *source*, a ``str`` or ``None``. If not ``None``, bind to this IP - address when making queries. - - *raise_on_no_answer*, a ``bool``. If ``True``, raise - ``dns.resolver.NoAnswer`` if there's no answer to the question. - - *source_port*, an ``int``, the port from which to send the message. - - *lifetime*, a ``float``, how many seconds a query should run - before timing out. - - *search*, a ``bool`` or ``None``, determines whether the - search list configured in the system's resolver configuration - are used for relative names, and whether the resolver's domain - may be added to relative names. The default is ``None``, - which causes the value of the resolver's - ``use_search_by_default`` attribute to be used. 
- - Raises ``dns.resolver.LifetimeTimeout`` if no answers could be found - in the specified lifetime. - - Raises ``dns.resolver.NXDOMAIN`` if the query name does not exist. - - Raises ``dns.resolver.YXDOMAIN`` if the query name is too long after - DNAME substitution. - - Raises ``dns.resolver.NoAnswer`` if *raise_on_no_answer* is - ``True`` and the query name exists but has no RRset of the - desired type and class. - - Raises ``dns.resolver.NoNameservers`` if no non-broken - nameservers are available to answer the question. - - Returns a ``dns.resolver.Answer`` instance. - - """ - - resolution = _Resolution( - self, qname, rdtype, rdclass, tcp, raise_on_no_answer, search - ) - start = time.time() - while True: - (request, answer) = resolution.next_request() - # Note we need to say "if answer is not None" and not just - # "if answer" because answer implements __len__, and python - # will call that. We want to return if we have an answer - # object, including in cases where its length is 0. - if answer is not None: - # cache hit! - return answer - assert request is not None # needed for type checking - done = False - while not done: - (nameserver, tcp, backoff) = resolution.next_nameserver() - if backoff: - time.sleep(backoff) - timeout = self._compute_timeout(start, lifetime, resolution.errors) - try: - response = nameserver.query( - request, - timeout=timeout, - source=source, - source_port=source_port, - max_size=tcp, - ) - except Exception as ex: - (_, done) = resolution.query_result(None, ex) - continue - (answer, done) = resolution.query_result(response, None) - # Note we need to say "if answer is not None" and not just - # "if answer" because answer implements __len__, and python - # will call that. We want to return if we have an answer - # object, including in cases where its length is 0. - if answer is not None: - return answer - - def query( - self, - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - ) -> Answer: # pragma: no cover - """Query nameservers to find the answer to the question. - - This method calls resolve() with ``search=True``, and is - provided for backwards compatibility with prior versions of - dnspython. See the documentation for the resolve() method for - further details. - """ - warnings.warn( - "please use dns.resolver.Resolver.resolve() instead", - DeprecationWarning, - stacklevel=2, - ) - return self.resolve( - qname, - rdtype, - rdclass, - tcp, - source, - raise_on_no_answer, - source_port, - lifetime, - True, - ) - - def resolve_address(self, ipaddr: str, *args: Any, **kwargs: Any) -> Answer: - """Use a resolver to run a reverse query for PTR records. - - This utilizes the resolve() method to perform a PTR lookup on the - specified IP address. - - *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get - the PTR record for. - - All other arguments that can be passed to the resolve() function - except for rdtype and rdclass are also supported by this - function. - """ - # We make a modified kwargs for type checking happiness, as otherwise - # we get a legit warning about possibly having rdtype and rdclass - # in the kwargs more than once. 
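# A sketch of the main query path and the exceptions documented above;
# "www.example.org" is a placeholder name.
import dns.resolver

res = dns.resolver.Resolver()
try:
    for rr in res.resolve("www.example.org", "A", lifetime=3.0):
        print(rr.address)
except dns.resolver.NXDOMAIN:
    print("no such name")
except dns.resolver.NoAnswer:
    print("name exists, but has no A records")
except dns.resolver.LifetimeTimeout:
    print("ran out of time")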
- modified_kwargs: Dict[str, Any] = {}
- modified_kwargs.update(kwargs)
- modified_kwargs["rdtype"] = dns.rdatatype.PTR
- modified_kwargs["rdclass"] = dns.rdataclass.IN
- return self.resolve(
- dns.reversename.from_address(ipaddr), *args, **modified_kwargs
- )
-
- def resolve_name(
- self,
- name: Union[dns.name.Name, str],
- family: int = socket.AF_UNSPEC,
- **kwargs: Any,
- ) -> HostAnswers:
- """Use a resolver to query for address records.
-
- This utilizes the resolve() method to perform A and/or AAAA lookups on
- the specified name.
-
- *name*, a ``dns.name.Name`` or ``str``, the name to resolve.
-
- *family*, an ``int``, the address family. If socket.AF_UNSPEC
- (the default), both A and AAAA records will be retrieved.
-
- All other arguments that can be passed to the resolve() function
- except for rdtype and rdclass are also supported by this
- function.
- """
- # We make a modified kwargs for type checking happiness, as otherwise
- # we get a legit warning about possibly having rdtype and rdclass
- # in the kwargs more than once.
- modified_kwargs: Dict[str, Any] = {}
- modified_kwargs.update(kwargs)
- modified_kwargs.pop("rdtype", None)
- modified_kwargs["rdclass"] = dns.rdataclass.IN
-
- if family == socket.AF_INET:
- v4 = self.resolve(name, dns.rdatatype.A, **modified_kwargs)
- return HostAnswers.make(v4=v4)
- elif family == socket.AF_INET6:
- v6 = self.resolve(name, dns.rdatatype.AAAA, **modified_kwargs)
- return HostAnswers.make(v6=v6)
- elif family != socket.AF_UNSPEC:
- raise NotImplementedError(f"unknown address family {family}")
-
- raise_on_no_answer = modified_kwargs.pop("raise_on_no_answer", True)
- lifetime = modified_kwargs.pop("lifetime", None)
- start = time.time()
- v6 = self.resolve(
- name,
- dns.rdatatype.AAAA,
- raise_on_no_answer=False,
- lifetime=self._compute_timeout(start, lifetime),
- **modified_kwargs,
- )
- # Note that setting name ensures we query the same name
- # for A as we did for AAAA. (This is just in case search lists
- # are active by default in the resolver configuration and
- # we might be talking to a server that says NXDOMAIN when it
- # wants to say NOERROR no data.)
- name = v6.qname
- v4 = self.resolve(
- name,
- dns.rdatatype.A,
- raise_on_no_answer=False,
- lifetime=self._compute_timeout(start, lifetime),
- **modified_kwargs,
- )
- answers = HostAnswers.make(v6=v6, v4=v4, add_empty=not raise_on_no_answer)
- if not answers:
- raise NoAnswer(response=v6.response)
- return answers
-
- # pylint: disable=redefined-outer-name
-
- def canonical_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name:
- """Determine the canonical name of *name*.
-
- The canonical name is the name the resolver uses for queries
- after all CNAME and DNAME renamings have been applied.
-
- *name*, a ``dns.name.Name`` or ``str``, the query name.
-
- This method can raise any exception that ``resolve()`` can
- raise, other than ``dns.resolver.NoAnswer`` and
- ``dns.resolver.NXDOMAIN``.
-
- Returns a ``dns.name.Name``.
- """
- try:
- answer = self.resolve(name, raise_on_no_answer=False)
- canonical_name = answer.canonical_name
- except dns.resolver.NXDOMAIN as e:
- canonical_name = e.canonical_name
- return canonical_name
-
- # pylint: enable=redefined-outer-name
-
- def try_ddr(self, lifetime: float = 5.0) -> None:
- """Try to update the resolver's nameservers using Discovery of Designated
- Resolvers (DDR). If successful, the resolver will subsequently use
- DNS-over-HTTPS or DNS-over-TLS for future queries.
-
- *lifetime*, a float, is the maximum time to spend attempting DDR. The default
- is 5 seconds.
-
- If the SVCB query is successful and results in a non-empty list of nameservers,
- then the resolver's nameservers are set to the returned servers in priority
- order.
-
- The current implementation does not use any address hints from the SVCB record,
- nor does it resolve addresses for the SVCB target name; rather, it assumes that
- the bootstrap nameserver will always be one of the addresses and uses it.
- A future revision to the code may offer fuller support. The code verifies that
- the bootstrap nameserver is in the Subject Alternative Name field of the
- TLS certificate.
- """
- try:
- expiration = time.time() + lifetime
- answer = self.resolve(
- dns._ddr._local_resolver_name, "SVCB", lifetime=lifetime
- )
- timeout = dns.query._remaining(expiration)
- nameservers = dns._ddr._get_nameservers_sync(answer, timeout)
- if len(nameservers) > 0:
- self.nameservers = nameservers
- except Exception:
- pass
-
-
-#: The default resolver.
-default_resolver: Optional[Resolver] = None
-
-
-def get_default_resolver() -> Resolver:
- """Get the default resolver, initializing it if necessary."""
- if default_resolver is None:
- reset_default_resolver()
- assert default_resolver is not None
- return default_resolver
-
-
-def reset_default_resolver() -> None:
- """Re-initialize default resolver.
-
- Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX
- systems) will be re-read immediately.
- """
-
- global default_resolver
- default_resolver = Resolver()
-
-
-def resolve(
- qname: Union[dns.name.Name, str],
- rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A,
- rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN,
- tcp: bool = False,
- source: Optional[str] = None,
- raise_on_no_answer: bool = True,
- source_port: int = 0,
- lifetime: Optional[float] = None,
- search: Optional[bool] = None,
-) -> Answer: # pragma: no cover
- """Query nameservers to find the answer to the question.
-
- This is a convenience function that uses the default resolver
- object to make the query.
-
- See ``dns.resolver.Resolver.resolve`` for more information on the
- parameters.
- """
-
- return get_default_resolver().resolve(
- qname,
- rdtype,
- rdclass,
- tcp,
- source,
- raise_on_no_answer,
- source_port,
- lifetime,
- search,
- )
-
-
-def query(
- qname: Union[dns.name.Name, str],
- rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A,
- rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN,
- tcp: bool = False,
- source: Optional[str] = None,
- raise_on_no_answer: bool = True,
- source_port: int = 0,
- lifetime: Optional[float] = None,
-) -> Answer: # pragma: no cover
- """Query nameservers to find the answer to the question.
-
- This method calls resolve() with ``search=True``, and is
- provided for backwards compatibility with prior versions of
- dnspython. See the documentation for the resolve() method for
- further details.
- """
- warnings.warn(
- "please use dns.resolver.resolve() instead", DeprecationWarning, stacklevel=2
- )
- return resolve(
- qname,
- rdtype,
- rdclass,
- tcp,
- source,
- raise_on_no_answer,
- source_port,
- lifetime,
- True,
- )
-
-
-def resolve_address(ipaddr: str, *args: Any, **kwargs: Any) -> Answer:
- """Use a resolver to run a reverse query for PTR records.
-
- See ``dns.resolver.Resolver.resolve_address`` for more information on the
- parameters.
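# A sketch of the module-level helpers here, which share one lazily created
# default resolver; names and addresses are placeholders.
import dns.resolver

mx = dns.resolver.resolve("example.org", "MX")     # uses the default resolver
ptr = dns.resolver.resolve_address("192.0.2.1")    # reverse (PTR) lookup
print(mx.canonical_name, ptr.rrset[0].target)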
- """ - - return get_default_resolver().resolve_address(ipaddr, *args, **kwargs) - - -def resolve_name( - name: Union[dns.name.Name, str], family: int = socket.AF_UNSPEC, **kwargs: Any -) -> HostAnswers: - """Use a resolver to query for address records. - - See ``dns.resolver.Resolver.resolve_name`` for more information on the - parameters. - """ - - return get_default_resolver().resolve_name(name, family, **kwargs) - - -def canonical_name(name: Union[dns.name.Name, str]) -> dns.name.Name: - """Determine the canonical name of *name*. - - See ``dns.resolver.Resolver.canonical_name`` for more information on the - parameters and possible exceptions. - """ - - return get_default_resolver().canonical_name(name) - - -def try_ddr(lifetime: float = 5.0) -> None: - """Try to update the default resolver's nameservers using Discovery of Designated - Resolvers (DDR). If successful, the resolver will subsequently use - DNS-over-HTTPS or DNS-over-TLS for future queries. - - See :py:func:`dns.resolver.Resolver.try_ddr` for more information. - """ - return get_default_resolver().try_ddr(lifetime) - - -def zone_for_name( - name: Union[dns.name.Name, str], - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, - tcp: bool = False, - resolver: Optional[Resolver] = None, - lifetime: Optional[float] = None, -) -> dns.name.Name: - """Find the name of the zone which contains the specified name. - - *name*, an absolute ``dns.name.Name`` or ``str``, the query name. - - *rdclass*, an ``int``, the query class. - - *tcp*, a ``bool``. If ``True``, use TCP to make the query. - - *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. - If ``None``, the default, then the default resolver is used. - - *lifetime*, a ``float``, the total time to allow for the queries needed - to determine the zone. If ``None``, the default, then only the individual - query limits of the resolver apply. - - Raises ``dns.resolver.NoRootSOA`` if there is no SOA RR at the DNS - root. (This is only likely to happen if you're using non-default - root servers in your network and they are misconfigured.) - - Raises ``dns.resolver.LifetimeTimeout`` if the answer could not be - found in the allotted lifetime. - - Returns a ``dns.name.Name``. - """ - - if isinstance(name, str): - name = dns.name.from_text(name, dns.name.root) - if resolver is None: - resolver = get_default_resolver() - if not name.is_absolute(): - raise NotAbsolute(name) - start = time.time() - expiration: Optional[float] - if lifetime is not None: - expiration = start + lifetime - else: - expiration = None - while 1: - try: - rlifetime: Optional[float] - if expiration is not None: - rlifetime = expiration - time.time() - if rlifetime <= 0: - rlifetime = 0 - else: - rlifetime = None - answer = resolver.resolve( - name, dns.rdatatype.SOA, rdclass, tcp, lifetime=rlifetime - ) - assert answer.rrset is not None - if answer.rrset.name == name: - return name - # otherwise we were CNAMEd or DNAMEd and need to look higher - except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as e: - if isinstance(e, dns.resolver.NXDOMAIN): - response = e.responses().get(name) - else: - response = e.response() # pylint: disable=no-value-for-parameter - if response: - for rrs in response.authority: - if rrs.rdtype == dns.rdatatype.SOA and rrs.rdclass == rdclass: - (nr, _, _) = rrs.name.fullcompare(name) - if nr == dns.name.NAMERELN_SUPERDOMAIN: - # We're doing a proper superdomain check as - # if the name were equal we ought to have gotten - # it in the answer section! 
We are ignoring the - # possibility that the authority is insane and - # is including multiple SOA RRs for different - # authorities. - return rrs.name - # we couldn't extract anything useful from the response (e.g. it's - # a type 3 NXDOMAIN) - try: - name = name.parent() - except dns.name.NoParent: - raise NoRootSOA - - -def make_resolver_at( - where: Union[dns.name.Name, str], - port: int = 53, - family: int = socket.AF_UNSPEC, - resolver: Optional[Resolver] = None, -) -> Resolver: - """Make a stub resolver using the specified destination as the full resolver. - - *where*, a ``dns.name.Name`` or ``str`` the domain name or IP address of the - full resolver. - - *port*, an ``int``, the port to use. If not specified, the default is 53. - - *family*, an ``int``, the address family to use. This parameter is used if - *where* is not an address. The default is ``socket.AF_UNSPEC`` in which case - the first address returned by ``resolve_name()`` will be used, otherwise the - first address of the specified family will be used. - - *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use for - resolution of hostnames. If not specified, the default resolver will be used. - - Returns a ``dns.resolver.Resolver`` or raises an exception. - """ - if resolver is None: - resolver = get_default_resolver() - nameservers: List[Union[str, dns.nameserver.Nameserver]] = [] - if isinstance(where, str) and dns.inet.is_address(where): - nameservers.append(dns.nameserver.Do53Nameserver(where, port)) - else: - for address in resolver.resolve_name(where, family).addresses(): - nameservers.append(dns.nameserver.Do53Nameserver(address, port)) - res = dns.resolver.Resolver(configure=False) - res.nameservers = nameservers - return res - - -def resolve_at( - where: Union[dns.name.Name, str], - qname: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, - rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, - tcp: bool = False, - source: Optional[str] = None, - raise_on_no_answer: bool = True, - source_port: int = 0, - lifetime: Optional[float] = None, - search: Optional[bool] = None, - port: int = 53, - family: int = socket.AF_UNSPEC, - resolver: Optional[Resolver] = None, -) -> Answer: - """Query nameservers to find the answer to the question. - - This is a convenience function that calls ``dns.resolver.make_resolver_at()`` to - make a resolver, and then uses it to resolve the query. - - See ``dns.resolver.Resolver.resolve`` for more information on the resolution - parameters, and ``dns.resolver.make_resolver_at`` for information about the resolver - parameters *where*, *port*, *family*, and *resolver*. - - If making more than one query, it is more efficient to call - ``dns.resolver.make_resolver_at()`` and then use that resolver for the queries - instead of calling ``resolve_at()`` multiple times. - """ - return make_resolver_at(where, port, family, resolver).resolve( - qname, - rdtype, - rdclass, - tcp, - source, - raise_on_no_answer, - source_port, - lifetime, - search, - ) - - -# -# Support for overriding the system resolver for all python code in the -# running process. 
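# A sketch of zone_for_name() and resolve_at() above; "ns.example.net" and the
# query names are placeholders.
import dns.resolver

print(dns.resolver.zone_for_name("www.example.org"))   # e.g. example.org.
ans = dns.resolver.resolve_at("ns.example.net", "www.example.org", "A")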
-# - -_protocols_for_socktype = { - socket.SOCK_DGRAM: [socket.SOL_UDP], - socket.SOCK_STREAM: [socket.SOL_TCP], -} - -_resolver = None -_original_getaddrinfo = socket.getaddrinfo -_original_getnameinfo = socket.getnameinfo -_original_getfqdn = socket.getfqdn -_original_gethostbyname = socket.gethostbyname -_original_gethostbyname_ex = socket.gethostbyname_ex -_original_gethostbyaddr = socket.gethostbyaddr - - -def _getaddrinfo( - host=None, service=None, family=socket.AF_UNSPEC, socktype=0, proto=0, flags=0 -): - if flags & socket.AI_NUMERICHOST != 0: - # Short circuit directly into the system's getaddrinfo(). We're - # not adding any value in this case, and this avoids infinite loops - # because dns.query.* needs to call getaddrinfo() for IPv6 scoping - # reasons. We will also do this short circuit below if we - # discover that the host is an address literal. - return _original_getaddrinfo(host, service, family, socktype, proto, flags) - if flags & (socket.AI_ADDRCONFIG | socket.AI_V4MAPPED) != 0: - # Not implemented. We raise a gaierror as opposed to a - # NotImplementedError as it helps callers handle errors more - # appropriately. [Issue #316] - # - # We raise EAI_FAIL as opposed to EAI_SYSTEM because there is - # no EAI_SYSTEM on Windows [Issue #416]. We didn't go for - # EAI_BADFLAGS as the flags aren't bad, we just don't - # implement them. - raise socket.gaierror( - socket.EAI_FAIL, "Non-recoverable failure in name resolution" - ) - if host is None and service is None: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - addrs = [] - canonical_name = None # pylint: disable=redefined-outer-name - # Is host None or an address literal? If so, use the system's - # getaddrinfo(). - if host is None: - return _original_getaddrinfo(host, service, family, socktype, proto, flags) - try: - # We don't care about the result of af_for_address(), we're just - # calling it so it raises an exception if host is not an IPv4 or - # IPv6 address. - dns.inet.af_for_address(host) - return _original_getaddrinfo(host, service, family, socktype, proto, flags) - except Exception: - pass - # Something needs resolution! - try: - answers = _resolver.resolve_name(host, family) - addrs = answers.addresses_and_families() - canonical_name = answers.canonical_name().to_text(True) - except dns.resolver.NXDOMAIN: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - except Exception: - # We raise EAI_AGAIN here as the failure may be temporary - # (e.g. a timeout) and EAI_SYSTEM isn't defined on Windows. - # [Issue #416] - raise socket.gaierror(socket.EAI_AGAIN, "Temporary failure in name resolution") - port = None - try: - # Is it a port literal? 
- if service is None: - port = 0 - else: - port = int(service) - except Exception: - if flags & socket.AI_NUMERICSERV == 0: - try: - port = socket.getservbyname(service) - except Exception: - pass - if port is None: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - tuples = [] - if socktype == 0: - socktypes = [socket.SOCK_DGRAM, socket.SOCK_STREAM] - else: - socktypes = [socktype] - if flags & socket.AI_CANONNAME != 0: - cname = canonical_name - else: - cname = "" - for addr, af in addrs: - for socktype in socktypes: - for proto in _protocols_for_socktype[socktype]: - addr_tuple = dns.inet.low_level_address_tuple((addr, port), af) - tuples.append((af, socktype, proto, cname, addr_tuple)) - if len(tuples) == 0: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - return tuples - - -def _getnameinfo(sockaddr, flags=0): - host = sockaddr[0] - port = sockaddr[1] - if len(sockaddr) == 4: - scope = sockaddr[3] - family = socket.AF_INET6 - else: - scope = None - family = socket.AF_INET - tuples = _getaddrinfo(host, port, family, socket.SOCK_STREAM, socket.SOL_TCP, 0) - if len(tuples) > 1: - raise socket.error("sockaddr resolved to multiple addresses") - addr = tuples[0][4][0] - if flags & socket.NI_DGRAM: - pname = "udp" - else: - pname = "tcp" - qname = dns.reversename.from_address(addr) - if flags & socket.NI_NUMERICHOST == 0: - try: - answer = _resolver.resolve(qname, "PTR") - hostname = answer.rrset[0].target.to_text(True) - except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): - if flags & socket.NI_NAMEREQD: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - hostname = addr - if scope is not None: - hostname += "%" + str(scope) - else: - hostname = addr - if scope is not None: - hostname += "%" + str(scope) - if flags & socket.NI_NUMERICSERV: - service = str(port) - else: - service = socket.getservbyport(port, pname) - return (hostname, service) - - -def _getfqdn(name=None): - if name is None: - name = socket.gethostname() - try: - (name, _, _) = _gethostbyaddr(name) - # Python's version checks aliases too, but our gethostbyname - # ignores them, so we do so here as well. - except Exception: - pass - return name - - -def _gethostbyname(name): - return _gethostbyname_ex(name)[2][0] - - -def _gethostbyname_ex(name): - aliases = [] - addresses = [] - tuples = _getaddrinfo( - name, 0, socket.AF_INET, socket.SOCK_STREAM, socket.SOL_TCP, socket.AI_CANONNAME - ) - canonical = tuples[0][3] - for item in tuples: - addresses.append(item[4][0]) - # XXX we just ignore aliases - return (canonical, aliases, addresses) - - -def _gethostbyaddr(ip): - try: - dns.ipv6.inet_aton(ip) - sockaddr = (ip, 80, 0, 0) - family = socket.AF_INET6 - except Exception: - try: - dns.ipv4.inet_aton(ip) - except Exception: - raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") - sockaddr = (ip, 80) - family = socket.AF_INET - (name, _) = _getnameinfo(sockaddr, socket.NI_NAMEREQD) - aliases = [] - addresses = [] - tuples = _getaddrinfo( - name, 0, family, socket.SOCK_STREAM, socket.SOL_TCP, socket.AI_CANONNAME - ) - canonical = tuples[0][3] - # We only want to include an address from the tuples if it's the - # same as the one we asked about. We do this comparison in binary - # to avoid any differences in text representations. 
- bin_ip = dns.inet.inet_pton(family, ip) - for item in tuples: - addr = item[4][0] - bin_addr = dns.inet.inet_pton(family, addr) - if bin_ip == bin_addr: - addresses.append(addr) - # XXX we just ignore aliases - return (canonical, aliases, addresses) - - -def override_system_resolver(resolver: Optional[Resolver] = None) -> None: - """Override the system resolver routines in the socket module with - versions which use dnspython's resolver. - - This can be useful in testing situations where you want to control - the resolution behavior of python code without having to change - the system's resolver settings (e.g. /etc/resolv.conf). - - The resolver to use may be specified; if it's not, the default - resolver will be used. - - resolver, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. - """ - - if resolver is None: - resolver = get_default_resolver() - global _resolver - _resolver = resolver - socket.getaddrinfo = _getaddrinfo - socket.getnameinfo = _getnameinfo - socket.getfqdn = _getfqdn - socket.gethostbyname = _gethostbyname - socket.gethostbyname_ex = _gethostbyname_ex - socket.gethostbyaddr = _gethostbyaddr - - -def restore_system_resolver() -> None: - """Undo the effects of prior override_system_resolver().""" - - global _resolver - _resolver = None - socket.getaddrinfo = _original_getaddrinfo - socket.getnameinfo = _original_getnameinfo - socket.getfqdn = _original_getfqdn - socket.gethostbyname = _original_gethostbyname - socket.gethostbyname_ex = _original_gethostbyname_ex - socket.gethostbyaddr = _original_gethostbyaddr diff --git a/server/venv/Lib/site-packages/dns/reversename.py b/server/venv/Lib/site-packages/dns/reversename.py deleted file mode 100644 index 8236c71..0000000 --- a/server/venv/Lib/site-packages/dns/reversename.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2006-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Reverse Map Names.""" - -import binascii - -import dns.ipv4 -import dns.ipv6 -import dns.name - -ipv4_reverse_domain = dns.name.from_text("in-addr.arpa.") -ipv6_reverse_domain = dns.name.from_text("ip6.arpa.") - - -def from_address( - text: str, - v4_origin: dns.name.Name = ipv4_reverse_domain, - v6_origin: dns.name.Name = ipv6_reverse_domain, -) -> dns.name.Name: - """Convert an IPv4 or IPv6 address in textual form into a Name object whose - value is the reverse-map domain name of the address. - - *text*, a ``str``, is an IPv4 or IPv6 address in textual form - (e.g. '127.0.0.1', '::1') - - *v4_origin*, a ``dns.name.Name`` to append to the labels corresponding to - the address if the address is an IPv4 address, instead of the default - (in-addr.arpa.) 
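# A sketch of the override machinery above: route the socket module's name
# resolution through dnspython, then put the originals back.
import socket

import dns.resolver

dns.resolver.override_system_resolver()    # socket.* now resolves via dnspython
print(socket.gethostbyname("www.example.org"))
dns.resolver.restore_system_resolver()     # original socket functions restored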
-
- *v6_origin*, a ``dns.name.Name`` to append to the labels corresponding to
- the address if the address is an IPv6 address, instead of the default
- (ip6.arpa.)
-
- Raises ``dns.exception.SyntaxError`` if the address is badly formed.
-
- Returns a ``dns.name.Name``.
- """
-
- try:
- v6 = dns.ipv6.inet_aton(text)
- if dns.ipv6.is_mapped(v6):
- parts = ["%d" % byte for byte in v6[12:]]
- origin = v4_origin
- else:
- parts = [x for x in str(binascii.hexlify(v6).decode())]
- origin = v6_origin
- except Exception:
- parts = ["%d" % byte for byte in dns.ipv4.inet_aton(text)]
- origin = v4_origin
- return dns.name.from_text(".".join(reversed(parts)), origin=origin)
-
-
-def to_address(
- name: dns.name.Name,
- v4_origin: dns.name.Name = ipv4_reverse_domain,
- v6_origin: dns.name.Name = ipv6_reverse_domain,
-) -> str:
- """Convert a reverse map domain name into textual address form.
-
- *name*, a ``dns.name.Name``, an IPv4 or IPv6 address in reverse-map name
- form.
-
- *v4_origin*, a ``dns.name.Name`` representing the top-level domain for
- IPv4 addresses, instead of the default (in-addr.arpa.)
-
- *v6_origin*, a ``dns.name.Name`` representing the top-level domain for
- IPv6 addresses, instead of the default (ip6.arpa.)
-
- Raises ``dns.exception.SyntaxError`` if the name does not have a
- reverse-map form.
-
- Returns a ``str``.
- """
-
- if name.is_subdomain(v4_origin):
- name = name.relativize(v4_origin)
- text = b".".join(reversed(name.labels))
- # run through inet_ntoa() to check syntax and make pretty.
- return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text))
- elif name.is_subdomain(v6_origin):
- name = name.relativize(v6_origin)
- labels = list(reversed(name.labels))
- parts = []
- for i in range(0, len(labels), 4):
- parts.append(b"".join(labels[i : i + 4]))
- text = b":".join(parts)
- # run through inet_ntoa() to check syntax and make pretty.
- return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text))
- else:
- raise dns.exception.SyntaxError("unknown reverse-map address family")
diff --git a/server/venv/Lib/site-packages/dns/rrset.py b/server/venv/Lib/site-packages/dns/rrset.py
deleted file mode 100644
index 350de13..0000000
--- a/server/venv/Lib/site-packages/dns/rrset.py
+++ /dev/null
@@ -1,286 +0,0 @@
-# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
-
-# Copyright (C) 2003-2017 Nominum, Inc.
-#
-# Permission to use, copy, modify, and distribute this software and its
-# documentation for any purpose with or without fee is hereby granted,
-# provided that the above copyright notice and this permission notice
-# appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""DNS RRsets (an RRset is a named rdataset)"""
-
-from typing import Any, Collection, Dict, Optional, Union, cast
-
-import dns.name
-import dns.rdataclass
-import dns.rdataset
-import dns.renderer
-
-
-class RRset(dns.rdataset.Rdataset):
-
- """A DNS RRset (named rdataset).
-
- RRset inherits from Rdataset, and RRsets can be treated as
- Rdatasets in most cases. There are, however, a few notable
- exceptions.
- RRsets have different to_wire() and to_text() method
- arguments, reflecting the fact that RRsets always have an owner
- name.
- """
-
- __slots__ = ["name", "deleting"]
-
- def __init__(
- self,
- name: dns.name.Name,
- rdclass: dns.rdataclass.RdataClass,
- rdtype: dns.rdatatype.RdataType,
- covers: dns.rdatatype.RdataType = dns.rdatatype.NONE,
- deleting: Optional[dns.rdataclass.RdataClass] = None,
- ):
- """Create a new RRset."""
-
- super().__init__(rdclass, rdtype, covers)
- self.name = name
- self.deleting = deleting
-
- def _clone(self):
- obj = super()._clone()
- obj.name = self.name
- obj.deleting = self.deleting
- return obj
-
- def __repr__(self):
- if self.covers == 0:
- ctext = ""
- else:
- ctext = "(" + dns.rdatatype.to_text(self.covers) + ")"
- if self.deleting is not None:
- dtext = " delete=" + dns.rdataclass.to_text(self.deleting)
- else:
- dtext = ""
- return (
- "<DNS "
- + str(self.name)
- + " "
- + dns.rdataclass.to_text(self.rdclass)
- + " "
- + dns.rdatatype.to_text(self.rdtype)
- + ctext
- + dtext
- + " RRset: "
- + self._rdata_repr()
- + ">"
- )
-
- def __str__(self):
- return self.to_text()
-
- def __eq__(self, other):
- if isinstance(other, RRset):
- if self.name != other.name:
- return False
- elif not isinstance(other, dns.rdataset.Rdataset):
- return False
- return super().__eq__(other)
-
- def match(self, *args: Any, **kwargs: Any) -> bool: # type: ignore[override]
- """Does this rrset match the specified attributes?
-
- Behaves as :py:func:`full_match()` if the first argument is a
- ``dns.name.Name``, and as :py:func:`dns.rdataset.Rdataset.match()`
- otherwise.
-
- (This behavior fixes a design mistake where the signature of this
- method became incompatible with that of its superclass. The fix
- makes RRsets matchable as Rdatasets while preserving backwards
- compatibility.)
- """
- if isinstance(args[0], dns.name.Name):
- return self.full_match(*args, **kwargs) # type: ignore[arg-type]
- else:
- return super().match(*args, **kwargs) # type: ignore[arg-type]
-
- def full_match(
- self,
- name: dns.name.Name,
- rdclass: dns.rdataclass.RdataClass,
- rdtype: dns.rdatatype.RdataType,
- covers: dns.rdatatype.RdataType,
- deleting: Optional[dns.rdataclass.RdataClass] = None,
- ) -> bool:
- """Returns ``True`` if this rrset matches the specified name, class,
- type, covers, and deletion state.
- """
- if not super().match(rdclass, rdtype, covers):
- return False
- if self.name != name or self.deleting != deleting:
- return False
- return True
-
- # pylint: disable=arguments-differ
-
- def to_text( # type: ignore[override]
- self,
- origin: Optional[dns.name.Name] = None,
- relativize: bool = True,
- **kw: Dict[str, Any]
- ) -> str:
- """Convert the RRset into DNS zone file format.
-
- See ``dns.name.Name.choose_relativity`` for more information
- on how *origin* and *relativize* determine the way names
- are emitted.
-
- Any additional keyword arguments are passed on to the rdata
- ``to_text()`` method.
-
- *origin*, a ``dns.name.Name`` or ``None``, the origin for relative
- names.
-
- *relativize*, a ``bool``. If ``True``, names will be relativized
- to *origin*.
- """
-
- return super().to_text(
- self.name, origin, relativize, self.deleting, **kw # type: ignore
- )
-
- def to_wire( # type: ignore[override]
- self,
- file: Any,
- compress: Optional[dns.name.CompressType] = None, # type: ignore
- origin: Optional[dns.name.Name] = None,
- **kw: Dict[str, Any]
- ) -> int:
- """Convert the RRset to wire format.
-
- All keyword arguments are passed to ``dns.rdataset.to_wire()``; see
- that function for details.
-
- Returns an ``int``, the number of records emitted.
- """ - - return super().to_wire( - self.name, file, compress, origin, self.deleting, **kw # type:ignore - ) - - # pylint: enable=arguments-differ - - def to_rdataset(self) -> dns.rdataset.Rdataset: - """Convert an RRset into an Rdataset. - - Returns a ``dns.rdataset.Rdataset``. - """ - return dns.rdataset.from_rdata_list(self.ttl, list(self)) - - -def from_text_list( - name: Union[dns.name.Name, str], - ttl: int, - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - text_rdatas: Collection[str], - idna_codec: Optional[dns.name.IDNACodec] = None, - origin: Optional[dns.name.Name] = None, - relativize: bool = True, - relativize_to: Optional[dns.name.Name] = None, -) -> RRset: - """Create an RRset with the specified name, TTL, class, and type, and with - the specified list of rdatas in text format. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder to use; if ``None``, the default IDNA 2003 - encoder/decoder is used. - - *origin*, a ``dns.name.Name`` (or ``None``), the - origin to use for relative names. - - *relativize*, a ``bool``. If true, name will be relativized. - - *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use - when relativizing names. If not set, the *origin* value will be used. - - Returns a ``dns.rrset.RRset`` object. - """ - - if isinstance(name, str): - name = dns.name.from_text(name, None, idna_codec=idna_codec) - rdclass = dns.rdataclass.RdataClass.make(rdclass) - rdtype = dns.rdatatype.RdataType.make(rdtype) - r = RRset(name, rdclass, rdtype) - r.update_ttl(ttl) - for t in text_rdatas: - rd = dns.rdata.from_text( - r.rdclass, r.rdtype, t, origin, relativize, relativize_to, idna_codec - ) - r.add(rd) - return r - - -def from_text( - name: Union[dns.name.Name, str], - ttl: int, - rdclass: Union[dns.rdataclass.RdataClass, str], - rdtype: Union[dns.rdatatype.RdataType, str], - *text_rdatas: Any -) -> RRset: - """Create an RRset with the specified name, TTL, class, and type and with - the specified rdatas in text format. - - Returns a ``dns.rrset.RRset`` object. - """ - - return from_text_list( - name, ttl, rdclass, rdtype, cast(Collection[str], text_rdatas) - ) - - -def from_rdata_list( - name: Union[dns.name.Name, str], - ttl: int, - rdatas: Collection[dns.rdata.Rdata], - idna_codec: Optional[dns.name.IDNACodec] = None, -) -> RRset: - """Create an RRset with the specified name and TTL, and with - the specified list of rdata objects. - - *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA - encoder/decoder to use; if ``None``, the default IDNA 2003 - encoder/decoder is used. - - Returns a ``dns.rrset.RRset`` object. - - """ - - if isinstance(name, str): - name = dns.name.from_text(name, None, idna_codec=idna_codec) - - if len(rdatas) == 0: - raise ValueError("rdata list must not be empty") - r = None - for rd in rdatas: - if r is None: - r = RRset(name, rd.rdclass, rd.rdtype) - r.update_ttl(ttl) - r.add(rd) - assert r is not None - return r - - -def from_rdata(name: Union[dns.name.Name, str], ttl: int, *rdatas: Any) -> RRset: - """Create an RRset with the specified name and TTL, and with - the specified rdata objects. - - Returns a ``dns.rrset.RRset`` object. 
- """ - - return from_rdata_list(name, ttl, cast(Collection[dns.rdata.Rdata], rdatas)) diff --git a/server/venv/Lib/site-packages/dns/serial.py b/server/venv/Lib/site-packages/dns/serial.py deleted file mode 100644 index 3417299..0000000 --- a/server/venv/Lib/site-packages/dns/serial.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -"""Serial Number Arthimetic from RFC 1982""" - - -class Serial: - def __init__(self, value: int, bits: int = 32): - self.value = value % 2**bits - self.bits = bits - - def __repr__(self): - return f"dns.serial.Serial({self.value}, {self.bits})" - - def __eq__(self, other): - if isinstance(other, int): - other = Serial(other, self.bits) - elif not isinstance(other, Serial) or other.bits != self.bits: - return NotImplemented - return self.value == other.value - - def __ne__(self, other): - if isinstance(other, int): - other = Serial(other, self.bits) - elif not isinstance(other, Serial) or other.bits != self.bits: - return NotImplemented - return self.value != other.value - - def __lt__(self, other): - if isinstance(other, int): - other = Serial(other, self.bits) - elif not isinstance(other, Serial) or other.bits != self.bits: - return NotImplemented - if self.value < other.value and other.value - self.value < 2 ** (self.bits - 1): - return True - elif self.value > other.value and self.value - other.value > 2 ** ( - self.bits - 1 - ): - return True - else: - return False - - def __le__(self, other): - return self == other or self < other - - def __gt__(self, other): - if isinstance(other, int): - other = Serial(other, self.bits) - elif not isinstance(other, Serial) or other.bits != self.bits: - return NotImplemented - if self.value < other.value and other.value - self.value > 2 ** (self.bits - 1): - return True - elif self.value > other.value and self.value - other.value < 2 ** ( - self.bits - 1 - ): - return True - else: - return False - - def __ge__(self, other): - return self == other or self > other - - def __add__(self, other): - v = self.value - if isinstance(other, Serial): - delta = other.value - elif isinstance(other, int): - delta = other - else: - raise ValueError - if abs(delta) > (2 ** (self.bits - 1) - 1): - raise ValueError - v += delta - v = v % 2**self.bits - return Serial(v, self.bits) - - def __iadd__(self, other): - v = self.value - if isinstance(other, Serial): - delta = other.value - elif isinstance(other, int): - delta = other - else: - raise ValueError - if abs(delta) > (2 ** (self.bits - 1) - 1): - raise ValueError - v += delta - v = v % 2**self.bits - self.value = v - return self - - def __sub__(self, other): - v = self.value - if isinstance(other, Serial): - delta = other.value - elif isinstance(other, int): - delta = other - else: - raise ValueError - if abs(delta) > (2 ** (self.bits - 1) - 1): - raise ValueError - v -= delta - v = v % 2**self.bits - return Serial(v, self.bits) - - def __isub__(self, other): - v = self.value - if isinstance(other, Serial): - delta = other.value - elif isinstance(other, int): - delta = other - else: - raise ValueError - if abs(delta) > (2 ** (self.bits - 1) - 1): - raise ValueError - v -= delta - v = v % 2**self.bits - self.value = v - return self diff --git a/server/venv/Lib/site-packages/dns/set.py b/server/venv/Lib/site-packages/dns/set.py deleted file mode 100644 index fa50ed9..0000000 --- a/server/venv/Lib/site-packages/dns/set.py +++ /dev/null @@ -1,308 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC 
diff --git a/server/venv/Lib/site-packages/dns/set.py b/server/venv/Lib/site-packages/dns/set.py
deleted file mode 100644
index fa50ed9..0000000
--- a/server/venv/Lib/site-packages/dns/set.py
+++ /dev/null
@@ -1,308 +0,0 @@
-# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
-
-# Copyright (C) 2003-2017 Nominum, Inc.
-#
-# Permission to use, copy, modify, and distribute this software and its
-# documentation for any purpose with or without fee is hereby granted,
-# provided that the above copyright notice and this permission notice
-# appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
-# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
-# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-import itertools
-
-
-class Set:
-
- """A simple set class.
-
- This class was originally used to deal with sets being missing in
- ancient versions of Python, but dnspython will continue to use it
- as these sets are based on lists and are thus indexable, and this
- ability is widely used in dnspython applications.
- """
-
- __slots__ = ["items"]
-
- def __init__(self, items=None):
- """Initialize the set.
-
- *items*, an iterable or ``None``, the initial set of items.
- """
-
- self.items = dict()
- if items is not None:
- for item in items:
- # This is safe for how we use set, but if other code
- # subclasses it could be a legitimate issue.
- self.add(item) # lgtm[py/init-calls-subclass]
-
- def __repr__(self):
- return "dns.set.Set(%s)" % repr(list(self.items.keys()))
-
- def add(self, item):
- """Add an item to the set."""
-
- if item not in self.items:
- self.items[item] = None
-
- def remove(self, item):
- """Remove an item from the set."""
-
- try:
- del self.items[item]
- except KeyError:
- raise ValueError
-
- def discard(self, item):
- """Remove an item from the set if present."""
-
- self.items.pop(item, None)
-
- def pop(self):
- """Remove an arbitrary item from the set."""
- (k, _) = self.items.popitem()
- return k
-
- def _clone(self) -> "Set":
- """Make a (shallow) copy of the set.
-
- There is a 'clone protocol' that subclasses of this class
- should use. To make a copy, first call your super's _clone()
- method, and use the object returned as the new instance. Then
- make shallow copies of the attributes defined in the subclass.
-
- This protocol allows us to write the set algorithms that
- return new instances (e.g. union) once, and keep using them in
- subclasses.
- """
-
- if hasattr(self, "_clone_class"):
- cls = self._clone_class # type: ignore
- else:
- cls = self.__class__
- obj = cls.__new__(cls)
- obj.items = dict()
- obj.items.update(self.items)
- return obj
-
- def __copy__(self):
- """Make a (shallow) copy of the set."""
-
- return self._clone()
-
- def copy(self):
- """Make a (shallow) copy of the set."""
-
- return self._clone()
-
- def union_update(self, other):
- """Update the set, adding any elements from other which are not
- already in the set.
- """
-
- if not isinstance(other, Set):
- raise ValueError("other must be a Set instance")
- if self is other: # lgtm[py/comparison-using-is]
- return
- for item in other.items:
- self.add(item)
-
- def intersection_update(self, other):
- """Update the set, removing any elements from other which are not
- in both sets.
- """ - - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - if self is other: # lgtm[py/comparison-using-is] - return - # we make a copy of the list so that we can remove items from - # the list without breaking the iterator. - for item in list(self.items): - if item not in other.items: - del self.items[item] - - def difference_update(self, other): - """Update the set, removing any elements from other which are in - the set. - """ - - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - if self is other: # lgtm[py/comparison-using-is] - self.items.clear() - else: - for item in other.items: - self.discard(item) - - def symmetric_difference_update(self, other): - """Update the set, retaining only elements unique to both sets.""" - - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - if self is other: # lgtm[py/comparison-using-is] - self.items.clear() - else: - overlap = self.intersection(other) - self.union_update(other) - self.difference_update(overlap) - - def union(self, other): - """Return a new set which is the union of ``self`` and ``other``. - - Returns the same Set type as this set. - """ - - obj = self._clone() - obj.union_update(other) - return obj - - def intersection(self, other): - """Return a new set which is the intersection of ``self`` and - ``other``. - - Returns the same Set type as this set. - """ - - obj = self._clone() - obj.intersection_update(other) - return obj - - def difference(self, other): - """Return a new set which ``self`` - ``other``, i.e. the items - in ``self`` which are not also in ``other``. - - Returns the same Set type as this set. - """ - - obj = self._clone() - obj.difference_update(other) - return obj - - def symmetric_difference(self, other): - """Return a new set which (``self`` - ``other``) | (``other`` - - ``self), ie: the items in either ``self`` or ``other`` which - are not contained in their intersection. - - Returns the same Set type as this set. - """ - - obj = self._clone() - obj.symmetric_difference_update(other) - return obj - - def __or__(self, other): - return self.union(other) - - def __and__(self, other): - return self.intersection(other) - - def __add__(self, other): - return self.union(other) - - def __sub__(self, other): - return self.difference(other) - - def __xor__(self, other): - return self.symmetric_difference(other) - - def __ior__(self, other): - self.union_update(other) - return self - - def __iand__(self, other): - self.intersection_update(other) - return self - - def __iadd__(self, other): - self.union_update(other) - return self - - def __isub__(self, other): - self.difference_update(other) - return self - - def __ixor__(self, other): - self.symmetric_difference_update(other) - return self - - def update(self, other): - """Update the set, adding any elements from other which are not - already in the set. - - *other*, the collection of items with which to update the set, which - may be any iterable type. 
- """ - - for item in other: - self.add(item) - - def clear(self): - """Make the set empty.""" - self.items.clear() - - def __eq__(self, other): - return self.items == other.items - - def __ne__(self, other): - return not self.__eq__(other) - - def __len__(self): - return len(self.items) - - def __iter__(self): - return iter(self.items) - - def __getitem__(self, i): - if isinstance(i, slice): - return list(itertools.islice(self.items, i.start, i.stop, i.step)) - else: - return next(itertools.islice(self.items, i, i + 1)) - - def __delitem__(self, i): - if isinstance(i, slice): - for elt in list(self[i]): - del self.items[elt] - else: - del self.items[self[i]] - - def issubset(self, other): - """Is this set a subset of *other*? - - Returns a ``bool``. - """ - - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - for item in self.items: - if item not in other.items: - return False - return True - - def issuperset(self, other): - """Is this set a superset of *other*? - - Returns a ``bool``. - """ - - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - for item in other.items: - if item not in self.items: - return False - return True - - def isdisjoint(self, other): - if not isinstance(other, Set): - raise ValueError("other must be a Set instance") - for item in other.items: - if item in self.items: - return False - return True diff --git a/server/venv/Lib/site-packages/dns/tokenizer.py b/server/venv/Lib/site-packages/dns/tokenizer.py deleted file mode 100644 index 454cac4..0000000 --- a/server/venv/Lib/site-packages/dns/tokenizer.py +++ /dev/null @@ -1,708 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""Tokenize DNS zone file format""" - -import io -import sys -from typing import Any, List, Optional, Tuple - -import dns.exception -import dns.name -import dns.ttl - -_DELIMITERS = {" ", "\t", "\n", ";", "(", ")", '"'} -_QUOTING_DELIMITERS = {'"'} - -EOF = 0 -EOL = 1 -WHITESPACE = 2 -IDENTIFIER = 3 -QUOTED_STRING = 4 -COMMENT = 5 -DELIMITER = 6 - - -class UngetBufferFull(dns.exception.DNSException): - """An attempt was made to unget a token when the unget buffer was full.""" - - -class Token: - """A DNS zone file format token. - - ttype: The token type - value: The token value - has_escape: Does the token value contain escapes? 
- """ - - def __init__( - self, - ttype: int, - value: Any = "", - has_escape: bool = False, - comment: Optional[str] = None, - ): - """Initialize a token instance.""" - - self.ttype = ttype - self.value = value - self.has_escape = has_escape - self.comment = comment - - def is_eof(self) -> bool: - return self.ttype == EOF - - def is_eol(self) -> bool: - return self.ttype == EOL - - def is_whitespace(self) -> bool: - return self.ttype == WHITESPACE - - def is_identifier(self) -> bool: - return self.ttype == IDENTIFIER - - def is_quoted_string(self) -> bool: - return self.ttype == QUOTED_STRING - - def is_comment(self) -> bool: - return self.ttype == COMMENT - - def is_delimiter(self) -> bool: # pragma: no cover (we don't return delimiters yet) - return self.ttype == DELIMITER - - def is_eol_or_eof(self) -> bool: - return self.ttype == EOL or self.ttype == EOF - - def __eq__(self, other): - if not isinstance(other, Token): - return False - return self.ttype == other.ttype and self.value == other.value - - def __ne__(self, other): - if not isinstance(other, Token): - return True - return self.ttype != other.ttype or self.value != other.value - - def __str__(self): - return '%d "%s"' % (self.ttype, self.value) - - def unescape(self) -> "Token": - if not self.has_escape: - return self - unescaped = "" - l = len(self.value) - i = 0 - while i < l: - c = self.value[i] - i += 1 - if c == "\\": - if i >= l: # pragma: no cover (can't happen via get()) - raise dns.exception.UnexpectedEnd - c = self.value[i] - i += 1 - if c.isdigit(): - if i >= l: - raise dns.exception.UnexpectedEnd - c2 = self.value[i] - i += 1 - if i >= l: - raise dns.exception.UnexpectedEnd - c3 = self.value[i] - i += 1 - if not (c2.isdigit() and c3.isdigit()): - raise dns.exception.SyntaxError - codepoint = int(c) * 100 + int(c2) * 10 + int(c3) - if codepoint > 255: - raise dns.exception.SyntaxError - c = chr(codepoint) - unescaped += c - return Token(self.ttype, unescaped) - - def unescape_to_bytes(self) -> "Token": - # We used to use unescape() for TXT-like records, but this - # caused problems as we'd process DNS escapes into Unicode code - # points instead of byte values, and then a to_text() of the - # processed data would not equal the original input. For - # example, \226 in the TXT record would have a to_text() of - # \195\162 because we applied UTF-8 encoding to Unicode code - # point 226. - # - # We now apply escapes while converting directly to bytes, - # avoiding this double encoding. - # - # This code also handles cases where the unicode input has - # non-ASCII code-points in it by converting it to UTF-8. TXT - # records aren't defined for Unicode, but this is the best we - # can do to preserve meaning. 
For example, - # - # foo\u200bbar - # - # (where \u200b is Unicode code point 0x200b) will be treated - # as if the input had been the UTF-8 encoding of that string, - # namely: - # - # foo\226\128\139bar - # - unescaped = b"" - l = len(self.value) - i = 0 - while i < l: - c = self.value[i] - i += 1 - if c == "\\": - if i >= l: # pragma: no cover (can't happen via get()) - raise dns.exception.UnexpectedEnd - c = self.value[i] - i += 1 - if c.isdigit(): - if i >= l: - raise dns.exception.UnexpectedEnd - c2 = self.value[i] - i += 1 - if i >= l: - raise dns.exception.UnexpectedEnd - c3 = self.value[i] - i += 1 - if not (c2.isdigit() and c3.isdigit()): - raise dns.exception.SyntaxError - codepoint = int(c) * 100 + int(c2) * 10 + int(c3) - if codepoint > 255: - raise dns.exception.SyntaxError - unescaped += b"%c" % (codepoint) - else: - # Note that as mentioned above, if c is a Unicode - # code point outside of the ASCII range, then this - # += is converting that code point to its UTF-8 - # encoding and appending multiple bytes to - # unescaped. - unescaped += c.encode() - else: - unescaped += c.encode() - return Token(self.ttype, bytes(unescaped)) - - -class Tokenizer: - """A DNS zone file format tokenizer. - - A token object is basically a (type, value) tuple. The valid - types are EOF, EOL, WHITESPACE, IDENTIFIER, QUOTED_STRING, - COMMENT, and DELIMITER. - - file: The file to tokenize - - ungotten_char: The most recently ungotten character, or None. - - ungotten_token: The most recently ungotten token, or None. - - multiline: The current multiline level. This value is increased - by one every time a '(' delimiter is read, and decreased by one every time - a ')' delimiter is read. - - quoting: This variable is true if the tokenizer is currently - reading a quoted string. - - eof: This variable is true if the tokenizer has encountered EOF. - - delimiters: The current delimiter dictionary. - - line_number: The current line number - - filename: A filename that will be returned by the where() method. - - idna_codec: A dns.name.IDNACodec, specifies the IDNA - encoder/decoder. If None, the default IDNA 2003 - encoder/decoder is used. - """ - - def __init__( - self, - f: Any = sys.stdin, - filename: Optional[str] = None, - idna_codec: Optional[dns.name.IDNACodec] = None, - ): - """Initialize a tokenizer instance. - - f: The file to tokenize. The default is sys.stdin. - This parameter may also be a string, in which case the tokenizer - will take its input from the contents of the string. - - filename: the name of the filename that the where() method - will return. - - idna_codec: A dns.name.IDNACodec, specifies the IDNA - encoder/decoder. If None, the default IDNA 2003 - encoder/decoder is used. 
- """ - - if isinstance(f, str): - f = io.StringIO(f) - if filename is None: - filename = "" - elif isinstance(f, bytes): - f = io.StringIO(f.decode()) - if filename is None: - filename = "" - else: - if filename is None: - if f is sys.stdin: - filename = "" - else: - filename = "" - self.file = f - self.ungotten_char: Optional[str] = None - self.ungotten_token: Optional[Token] = None - self.multiline = 0 - self.quoting = False - self.eof = False - self.delimiters = _DELIMITERS - self.line_number = 1 - assert filename is not None - self.filename = filename - if idna_codec is None: - self.idna_codec: dns.name.IDNACodec = dns.name.IDNA_2003 - else: - self.idna_codec = idna_codec - - def _get_char(self) -> str: - """Read a character from input.""" - - if self.ungotten_char is None: - if self.eof: - c = "" - else: - c = self.file.read(1) - if c == "": - self.eof = True - elif c == "\n": - self.line_number += 1 - else: - c = self.ungotten_char - self.ungotten_char = None - return c - - def where(self) -> Tuple[str, int]: - """Return the current location in the input. - - Returns a (string, int) tuple. The first item is the filename of - the input, the second is the current line number. - """ - - return (self.filename, self.line_number) - - def _unget_char(self, c: str) -> None: - """Unget a character. - - The unget buffer for characters is only one character large; it is - an error to try to unget a character when the unget buffer is not - empty. - - c: the character to unget - raises UngetBufferFull: there is already an ungotten char - """ - - if self.ungotten_char is not None: - # this should never happen! - raise UngetBufferFull # pragma: no cover - self.ungotten_char = c - - def skip_whitespace(self) -> int: - """Consume input until a non-whitespace character is encountered. - - The non-whitespace character is then ungotten, and the number of - whitespace characters consumed is returned. - - If the tokenizer is in multiline mode, then newlines are whitespace. - - Returns the number of characters skipped. - """ - - skipped = 0 - while True: - c = self._get_char() - if c != " " and c != "\t": - if (c != "\n") or not self.multiline: - self._unget_char(c) - return skipped - skipped += 1 - - def get(self, want_leading: bool = False, want_comment: bool = False) -> Token: - """Get the next token. - - want_leading: If True, return a WHITESPACE token if the - first character read is whitespace. The default is False. - - want_comment: If True, return a COMMENT token if the - first token read is a comment. The default is False. - - Raises dns.exception.UnexpectedEnd: input ended prematurely - - Raises dns.exception.SyntaxError: input was badly formed - - Returns a Token. 
- """ - - if self.ungotten_token is not None: - utoken = self.ungotten_token - self.ungotten_token = None - if utoken.is_whitespace(): - if want_leading: - return utoken - elif utoken.is_comment(): - if want_comment: - return utoken - else: - return utoken - skipped = self.skip_whitespace() - if want_leading and skipped > 0: - return Token(WHITESPACE, " ") - token = "" - ttype = IDENTIFIER - has_escape = False - while True: - c = self._get_char() - if c == "" or c in self.delimiters: - if c == "" and self.quoting: - raise dns.exception.UnexpectedEnd - if token == "" and ttype != QUOTED_STRING: - if c == "(": - self.multiline += 1 - self.skip_whitespace() - continue - elif c == ")": - if self.multiline <= 0: - raise dns.exception.SyntaxError - self.multiline -= 1 - self.skip_whitespace() - continue - elif c == '"': - if not self.quoting: - self.quoting = True - self.delimiters = _QUOTING_DELIMITERS - ttype = QUOTED_STRING - continue - else: - self.quoting = False - self.delimiters = _DELIMITERS - self.skip_whitespace() - continue - elif c == "\n": - return Token(EOL, "\n") - elif c == ";": - while 1: - c = self._get_char() - if c == "\n" or c == "": - break - token += c - if want_comment: - self._unget_char(c) - return Token(COMMENT, token) - elif c == "": - if self.multiline: - raise dns.exception.SyntaxError( - "unbalanced parentheses" - ) - return Token(EOF, comment=token) - elif self.multiline: - self.skip_whitespace() - token = "" - continue - else: - return Token(EOL, "\n", comment=token) - else: - # This code exists in case we ever want a - # delimiter to be returned. It never produces - # a token currently. - token = c - ttype = DELIMITER - else: - self._unget_char(c) - break - elif self.quoting and c == "\n": - raise dns.exception.SyntaxError("newline in quoted string") - elif c == "\\": - # - # It's an escape. Put it and the next character into - # the token; it will be checked later for goodness. - # - token += c - has_escape = True - c = self._get_char() - if c == "" or (c == "\n" and not self.quoting): - raise dns.exception.UnexpectedEnd - token += c - if token == "" and ttype != QUOTED_STRING: - if self.multiline: - raise dns.exception.SyntaxError("unbalanced parentheses") - ttype = EOF - return Token(ttype, token, has_escape) - - def unget(self, token: Token) -> None: - """Unget a token. - - The unget buffer for tokens is only one token large; it is - an error to try to unget a token when the unget buffer is not - empty. - - token: the token to unget - - Raises UngetBufferFull: there is already an ungotten token - """ - - if self.ungotten_token is not None: - raise UngetBufferFull - self.ungotten_token = token - - def next(self): - """Return the next item in an iteration. - - Returns a Token. - """ - - token = self.get() - if token.is_eof(): - raise StopIteration - return token - - __next__ = next - - def __iter__(self): - return self - - # Helpers - - def get_int(self, base: int = 10) -> int: - """Read the next token and interpret it as an unsigned integer. - - Raises dns.exception.SyntaxError if not an unsigned integer. - - Returns an int. - """ - - token = self.get().unescape() - if not token.is_identifier(): - raise dns.exception.SyntaxError("expecting an identifier") - if not token.value.isdigit(): - raise dns.exception.SyntaxError("expecting an integer") - return int(token.value, base) - - def get_uint8(self) -> int: - """Read the next token and interpret it as an 8-bit unsigned - integer. - - Raises dns.exception.SyntaxError if not an 8-bit unsigned integer. 
- - Returns an int. - """ - - value = self.get_int() - if value < 0 or value > 255: - raise dns.exception.SyntaxError( - "%d is not an unsigned 8-bit integer" % value - ) - return value - - def get_uint16(self, base: int = 10) -> int: - """Read the next token and interpret it as a 16-bit unsigned - integer. - - Raises dns.exception.SyntaxError if not a 16-bit unsigned integer. - - Returns an int. - """ - - value = self.get_int(base=base) - if value < 0 or value > 65535: - if base == 8: - raise dns.exception.SyntaxError( - "%o is not an octal unsigned 16-bit integer" % value - ) - else: - raise dns.exception.SyntaxError( - "%d is not an unsigned 16-bit integer" % value - ) - return value - - def get_uint32(self, base: int = 10) -> int: - """Read the next token and interpret it as a 32-bit unsigned - integer. - - Raises dns.exception.SyntaxError if not a 32-bit unsigned integer. - - Returns an int. - """ - - value = self.get_int(base=base) - if value < 0 or value > 4294967295: - raise dns.exception.SyntaxError( - "%d is not an unsigned 32-bit integer" % value - ) - return value - - def get_uint48(self, base: int = 10) -> int: - """Read the next token and interpret it as a 48-bit unsigned - integer. - - Raises dns.exception.SyntaxError if not a 48-bit unsigned integer. - - Returns an int. - """ - - value = self.get_int(base=base) - if value < 0 or value > 281474976710655: - raise dns.exception.SyntaxError( - "%d is not an unsigned 48-bit integer" % value - ) - return value - - def get_string(self, max_length: Optional[int] = None) -> str: - """Read the next token and interpret it as a string. - - Raises dns.exception.SyntaxError if not a string. - Raises dns.exception.SyntaxError if token value length - exceeds max_length (if specified). - - Returns a string. - """ - - token = self.get().unescape() - if not (token.is_identifier() or token.is_quoted_string()): - raise dns.exception.SyntaxError("expecting a string") - if max_length and len(token.value) > max_length: - raise dns.exception.SyntaxError("string too long") - return token.value - - def get_identifier(self) -> str: - """Read the next token, which should be an identifier. - - Raises dns.exception.SyntaxError if not an identifier. - - Returns a string. - """ - - token = self.get().unescape() - if not token.is_identifier(): - raise dns.exception.SyntaxError("expecting an identifier") - return token.value - - def get_remaining(self, max_tokens: Optional[int] = None) -> List[Token]: - """Return the remaining tokens on the line, until an EOL or EOF is seen. - - max_tokens: If not None, stop after this number of tokens. - - Returns a list of tokens. - """ - - tokens = [] - while True: - token = self.get() - if token.is_eol_or_eof(): - self.unget(token) - break - tokens.append(token) - if len(tokens) == max_tokens: - break - return tokens - - def concatenate_remaining_identifiers(self, allow_empty: bool = False) -> str: - """Read the remaining tokens on the line, which should be identifiers. - - Raises dns.exception.SyntaxError if there are no remaining tokens, - unless `allow_empty=True` is given. - - Raises dns.exception.SyntaxError if a token is seen that is not an - identifier. - - Returns a string containing a concatenation of the remaining - identifiers. 
- """ - s = "" - while True: - token = self.get().unescape() - if token.is_eol_or_eof(): - self.unget(token) - break - if not token.is_identifier(): - raise dns.exception.SyntaxError - s += token.value - if not (allow_empty or s): - raise dns.exception.SyntaxError("expecting another identifier") - return s - - def as_name( - self, - token: Token, - origin: Optional[dns.name.Name] = None, - relativize: bool = False, - relativize_to: Optional[dns.name.Name] = None, - ) -> dns.name.Name: - """Try to interpret the token as a DNS name. - - Raises dns.exception.SyntaxError if not a name. - - Returns a dns.name.Name. - """ - if not token.is_identifier(): - raise dns.exception.SyntaxError("expecting an identifier") - name = dns.name.from_text(token.value, origin, self.idna_codec) - return name.choose_relativity(relativize_to or origin, relativize) - - def get_name( - self, - origin: Optional[dns.name.Name] = None, - relativize: bool = False, - relativize_to: Optional[dns.name.Name] = None, - ) -> dns.name.Name: - """Read the next token and interpret it as a DNS name. - - Raises dns.exception.SyntaxError if not a name. - - Returns a dns.name.Name. - """ - - token = self.get() - return self.as_name(token, origin, relativize, relativize_to) - - def get_eol_as_token(self) -> Token: - """Read the next token and raise an exception if it isn't EOL or - EOF. - - Returns a string. - """ - - token = self.get() - if not token.is_eol_or_eof(): - raise dns.exception.SyntaxError( - 'expected EOL or EOF, got %d "%s"' % (token.ttype, token.value) - ) - return token - - def get_eol(self) -> str: - return self.get_eol_as_token().value - - def get_ttl(self) -> int: - """Read the next token and interpret it as a DNS TTL. - - Raises dns.exception.SyntaxError or dns.ttl.BadTTL if not an - identifier or badly formed. - - Returns an int. - """ - - token = self.get().unescape() - if not token.is_identifier(): - raise dns.exception.SyntaxError("expecting an identifier") - return dns.ttl.from_text(token.value) diff --git a/server/venv/Lib/site-packages/dns/transaction.py b/server/venv/Lib/site-packages/dns/transaction.py deleted file mode 100644 index 21dea77..0000000 --- a/server/venv/Lib/site-packages/dns/transaction.py +++ /dev/null @@ -1,651 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import collections -from typing import Any, Callable, Iterator, List, Optional, Tuple, Union - -import dns.exception -import dns.name -import dns.node -import dns.rdataclass -import dns.rdataset -import dns.rdatatype -import dns.rrset -import dns.serial -import dns.ttl - - -class TransactionManager: - def reader(self) -> "Transaction": - """Begin a read-only transaction.""" - raise NotImplementedError # pragma: no cover - - def writer(self, replacement: bool = False) -> "Transaction": - """Begin a writable transaction. - - *replacement*, a ``bool``. If `True`, the content of the - transaction completely replaces any prior content. If False, - the default, then the content of the transaction updates the - existing content. - """ - raise NotImplementedError # pragma: no cover - - def origin_information( - self, - ) -> Tuple[Optional[dns.name.Name], bool, Optional[dns.name.Name]]: - """Returns a tuple - - (absolute_origin, relativize, effective_origin) - - giving the absolute name of the default origin for any - relative domain names, the "effective origin", and whether - names should be relativized. 
The "effective origin" is the - absolute origin if relativize is False, and the empty name if - relativize is true. (The effective origin is provided even - though it can be computed from the absolute_origin and - relativize setting because it avoids a lot of code - duplication.) - - If the returned names are `None`, then no origin information is - available. - - This information is used by code working with transactions to - allow it to coordinate relativization. The transaction code - itself takes what it gets (i.e. does not change name - relativity). - - """ - raise NotImplementedError # pragma: no cover - - def get_class(self) -> dns.rdataclass.RdataClass: - """The class of the transaction manager.""" - raise NotImplementedError # pragma: no cover - - def from_wire_origin(self) -> Optional[dns.name.Name]: - """Origin to use in from_wire() calls.""" - (absolute_origin, relativize, _) = self.origin_information() - if relativize: - return absolute_origin - else: - return None - - -class DeleteNotExact(dns.exception.DNSException): - """Existing data did not match data specified by an exact delete.""" - - -class ReadOnly(dns.exception.DNSException): - """Tried to write to a read-only transaction.""" - - -class AlreadyEnded(dns.exception.DNSException): - """Tried to use an already-ended transaction.""" - - -def _ensure_immutable_rdataset(rdataset): - if rdataset is None or isinstance(rdataset, dns.rdataset.ImmutableRdataset): - return rdataset - return dns.rdataset.ImmutableRdataset(rdataset) - - -def _ensure_immutable_node(node): - if node is None or node.is_immutable(): - return node - return dns.node.ImmutableNode(node) - - -CheckPutRdatasetType = Callable[ - ["Transaction", dns.name.Name, dns.rdataset.Rdataset], None -] -CheckDeleteRdatasetType = Callable[ - ["Transaction", dns.name.Name, dns.rdatatype.RdataType, dns.rdatatype.RdataType], - None, -] -CheckDeleteNameType = Callable[["Transaction", dns.name.Name], None] - - -class Transaction: - def __init__( - self, - manager: TransactionManager, - replacement: bool = False, - read_only: bool = False, - ): - self.manager = manager - self.replacement = replacement - self.read_only = read_only - self._ended = False - self._check_put_rdataset: List[CheckPutRdatasetType] = [] - self._check_delete_rdataset: List[CheckDeleteRdatasetType] = [] - self._check_delete_name: List[CheckDeleteNameType] = [] - - # - # This is the high level API - # - # Note that we currently use non-immutable types in the return type signature to - # avoid covariance problems, e.g. if the caller has a List[Rdataset], mypy will be - # unhappy if we return an ImmutableRdataset. - - def get( - self, - name: Optional[Union[dns.name.Name, str]], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> dns.rdataset.Rdataset: - """Return the rdataset associated with *name*, *rdtype*, and *covers*, - or `None` if not found. - - Note that the returned rdataset is immutable. - """ - self._check_ended() - if isinstance(name, str): - name = dns.name.from_text(name, None) - rdtype = dns.rdatatype.RdataType.make(rdtype) - covers = dns.rdatatype.RdataType.make(covers) - rdataset = self._get_rdataset(name, rdtype, covers) - return _ensure_immutable_rdataset(rdataset) - - def get_node(self, name: dns.name.Name) -> Optional[dns.node.Node]: - """Return the node at *name*, if any. - - Returns an immutable node or ``None``. 
- """ - return _ensure_immutable_node(self._get_node(name)) - - def _check_read_only(self) -> None: - if self.read_only: - raise ReadOnly - - def add(self, *args: Any) -> None: - """Add records. - - The arguments may be: - - - rrset - - - name, rdataset... - - - name, ttl, rdata... - """ - self._check_ended() - self._check_read_only() - self._add(False, args) - - def replace(self, *args: Any) -> None: - """Replace the existing rdataset at the name with the specified - rdataset, or add the specified rdataset if there was no existing - rdataset. - - The arguments may be: - - - rrset - - - name, rdataset... - - - name, ttl, rdata... - - Note that if you want to replace the entire node, you should do - a delete of the name followed by one or more calls to add() or - replace(). - """ - self._check_ended() - self._check_read_only() - self._add(True, args) - - def delete(self, *args: Any) -> None: - """Delete records. - - It is not an error if some of the records are not in the existing - set. - - The arguments may be: - - - rrset - - - name - - - name, rdataclass, rdatatype, [covers] - - - name, rdataset... - - - name, rdata... - """ - self._check_ended() - self._check_read_only() - self._delete(False, args) - - def delete_exact(self, *args: Any) -> None: - """Delete records. - - The arguments may be: - - - rrset - - - name - - - name, rdataclass, rdatatype, [covers] - - - name, rdataset... - - - name, rdata... - - Raises dns.transaction.DeleteNotExact if some of the records - are not in the existing set. - - """ - self._check_ended() - self._check_read_only() - self._delete(True, args) - - def name_exists(self, name: Union[dns.name.Name, str]) -> bool: - """Does the specified name exist?""" - self._check_ended() - if isinstance(name, str): - name = dns.name.from_text(name, None) - return self._name_exists(name) - - def update_serial( - self, - value: int = 1, - relative: bool = True, - name: dns.name.Name = dns.name.empty, - ) -> None: - """Update the serial number. - - *value*, an `int`, is an increment if *relative* is `True`, or the - actual value to set if *relative* is `False`. - - Raises `KeyError` if there is no SOA rdataset at *name*. - - Raises `ValueError` if *value* is negative or if the increment is - so large that it would cause the new serial to be less than the - prior value. - """ - self._check_ended() - if value < 0: - raise ValueError("negative update_serial() value") - if isinstance(name, str): - name = dns.name.from_text(name, None) - rdataset = self._get_rdataset(name, dns.rdatatype.SOA, dns.rdatatype.NONE) - if rdataset is None or len(rdataset) == 0: - raise KeyError - if relative: - serial = dns.serial.Serial(rdataset[0].serial) + value - else: - serial = dns.serial.Serial(value) - serial = serial.value # convert back to int - if serial == 0: - serial = 1 - rdata = rdataset[0].replace(serial=serial) - new_rdataset = dns.rdataset.from_rdata(rdataset.ttl, rdata) - self.replace(name, new_rdataset) - - def __iter__(self): - self._check_ended() - return self._iterate_rdatasets() - - def changed(self) -> bool: - """Has this transaction changed anything? - - For read-only transactions, the result is always `False`. - - For writable transactions, the result is `True` if at some time - during the life of the transaction, the content was changed. - """ - self._check_ended() - return self._changed() - - def commit(self) -> None: - """Commit the transaction. 
- - Normally transactions are used as context managers and commit - or roll back automatically, but it may be done explicitly if needed. - A ``dns.transaction.AlreadyEnded`` exception will be raised if you try - to use a transaction after it has been committed or rolled back. - - Raises an exception if the commit fails (in which case the transaction - is also rolled back). - """ - self._end(True) - - def rollback(self) -> None: - """Roll back the transaction. - - Normally transactions are used as context managers and commit - or roll back automatically, but it may be done explicitly if needed. - A ``dns.transaction.AlreadyEnded`` exception will be raised if you try - to use a transaction after it has been committed or rolled back. - - Rollback cannot otherwise fail. - """ - self._end(False) - - def check_put_rdataset(self, check: CheckPutRdatasetType) -> None: - """Call *check* before putting (storing) an rdataset. - - The function is called with the transaction, the name, and the rdataset. - - The check function may safely make non-mutating transaction method - calls, but behavior is undefined if mutating transaction methods are - called. The check function should raise an exception if it objects to - the put, and otherwise should return ``None``. - """ - self._check_put_rdataset.append(check) - - def check_delete_rdataset(self, check: CheckDeleteRdatasetType) -> None: - """Call *check* before deleting an rdataset. - - The function is called with the transaction, the name, the rdatatype, - and the covered rdatatype. - - The check function may safely make non-mutating transaction method - calls, but behavior is undefined if mutating transaction methods are - called. The check function should raise an exception if it objects to - the delete, and otherwise should return ``None``. - """ - self._check_delete_rdataset.append(check) - - def check_delete_name(self, check: CheckDeleteNameType) -> None: - """Call *check* before deleting a name. - - The function is called with the transaction and the name. - - The check function may safely make non-mutating transaction method - calls, but behavior is undefined if mutating transaction methods are - called. The check function should raise an exception if it objects to - the delete, and otherwise should return ``None``. - """ - self._check_delete_name.append(check) - - def iterate_rdatasets( - self, - ) -> Iterator[Tuple[dns.name.Name, dns.rdataset.Rdataset]]: - """Iterate all the rdatasets in the transaction, returning - (`dns.name.Name`, `dns.rdataset.Rdataset`) tuples. - - Note that as is usual with python iterators, adding or removing items - while iterating will invalidate the iterator and may raise `RuntimeError` - or fail to iterate over all entries.""" - self._check_ended() - return self._iterate_rdatasets() - - def iterate_names(self) -> Iterator[dns.name.Name]: - """Iterate all the names in the transaction. 
- - Note that as is usual with python iterators, adding or removing names - while iterating will invalidate the iterator and may raise `RuntimeError` - or fail to iterate over all entries.""" - self._check_ended() - return self._iterate_names() - - # - # Helper methods - # - - def _raise_if_not_empty(self, method, args): - if len(args) != 0: - raise TypeError(f"extra parameters to {method}") - - def _rdataset_from_args(self, method, deleting, args): - try: - arg = args.popleft() - if isinstance(arg, dns.rrset.RRset): - rdataset = arg.to_rdataset() - elif isinstance(arg, dns.rdataset.Rdataset): - rdataset = arg - else: - if deleting: - ttl = 0 - else: - if isinstance(arg, int): - ttl = arg - if ttl > dns.ttl.MAX_TTL: - raise ValueError(f"{method}: TTL value too big") - else: - raise TypeError(f"{method}: expected a TTL") - arg = args.popleft() - if isinstance(arg, dns.rdata.Rdata): - rdataset = dns.rdataset.from_rdata(ttl, arg) - else: - raise TypeError(f"{method}: expected an Rdata") - return rdataset - except IndexError: - if deleting: - return None - else: - # reraise - raise TypeError(f"{method}: expected more arguments") - - def _add(self, replace, args): - try: - args = collections.deque(args) - if replace: - method = "replace()" - else: - method = "add()" - arg = args.popleft() - if isinstance(arg, str): - arg = dns.name.from_text(arg, None) - if isinstance(arg, dns.name.Name): - name = arg - rdataset = self._rdataset_from_args(method, False, args) - elif isinstance(arg, dns.rrset.RRset): - rrset = arg - name = rrset.name - # rrsets are also rdatasets, but they don't print the - # same and can't be stored in nodes, so convert. - rdataset = rrset.to_rdataset() - else: - raise TypeError( - f"{method} requires a name or RRset as the first argument" - ) - if rdataset.rdclass != self.manager.get_class(): - raise ValueError(f"{method} has objects of wrong RdataClass") - if rdataset.rdtype == dns.rdatatype.SOA: - (_, _, origin) = self._origin_information() - if name != origin: - raise ValueError(f"{method} has non-origin SOA") - self._raise_if_not_empty(method, args) - if not replace: - existing = self._get_rdataset(name, rdataset.rdtype, rdataset.covers) - if existing is not None: - if isinstance(existing, dns.rdataset.ImmutableRdataset): - trds = dns.rdataset.Rdataset( - existing.rdclass, existing.rdtype, existing.covers - ) - trds.update(existing) - existing = trds - rdataset = existing.union(rdataset) - self._checked_put_rdataset(name, rdataset) - except IndexError: - raise TypeError(f"not enough parameters to {method}") - - def _delete(self, exact, args): - try: - args = collections.deque(args) - if exact: - method = "delete_exact()" - else: - method = "delete()" - arg = args.popleft() - if isinstance(arg, str): - arg = dns.name.from_text(arg, None) - if isinstance(arg, dns.name.Name): - name = arg - if len(args) > 0 and ( - isinstance(args[0], int) or isinstance(args[0], str) - ): - # deleting by type and (optionally) covers - rdtype = dns.rdatatype.RdataType.make(args.popleft()) - if len(args) > 0: - covers = dns.rdatatype.RdataType.make(args.popleft()) - else: - covers = dns.rdatatype.NONE - self._raise_if_not_empty(method, args) - existing = self._get_rdataset(name, rdtype, covers) - if existing is None: - if exact: - raise DeleteNotExact(f"{method}: missing rdataset") - else: - self._delete_rdataset(name, rdtype, covers) - return - else: - rdataset = self._rdataset_from_args(method, True, args) - elif isinstance(arg, dns.rrset.RRset): - rdataset = arg # rrsets are also rdatasets - 
name = rdataset.name - else: - raise TypeError( - f"{method} requires a name or RRset as the first argument" - ) - self._raise_if_not_empty(method, args) - if rdataset: - if rdataset.rdclass != self.manager.get_class(): - raise ValueError(f"{method} has objects of wrong RdataClass") - existing = self._get_rdataset(name, rdataset.rdtype, rdataset.covers) - if existing is not None: - if exact: - intersection = existing.intersection(rdataset) - if intersection != rdataset: - raise DeleteNotExact(f"{method}: missing rdatas") - rdataset = existing.difference(rdataset) - if len(rdataset) == 0: - self._checked_delete_rdataset( - name, rdataset.rdtype, rdataset.covers - ) - else: - self._checked_put_rdataset(name, rdataset) - elif exact: - raise DeleteNotExact(f"{method}: missing rdataset") - else: - if exact and not self._name_exists(name): - raise DeleteNotExact(f"{method}: name not known") - self._checked_delete_name(name) - except IndexError: - raise TypeError(f"not enough parameters to {method}") - - def _check_ended(self): - if self._ended: - raise AlreadyEnded - - def _end(self, commit): - self._check_ended() - if self._ended: - raise AlreadyEnded - try: - self._end_transaction(commit) - finally: - self._ended = True - - def _checked_put_rdataset(self, name, rdataset): - for check in self._check_put_rdataset: - check(self, name, rdataset) - self._put_rdataset(name, rdataset) - - def _checked_delete_rdataset(self, name, rdtype, covers): - for check in self._check_delete_rdataset: - check(self, name, rdtype, covers) - self._delete_rdataset(name, rdtype, covers) - - def _checked_delete_name(self, name): - for check in self._check_delete_name: - check(self, name) - self._delete_name(name) - - # - # Transactions are context managers. - # - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if not self._ended: - if exc_type is None: - self.commit() - else: - self.rollback() - return False - - # - # This is the low level API, which must be implemented by subclasses - # of Transaction. - # - - def _get_rdataset(self, name, rdtype, covers): - """Return the rdataset associated with *name*, *rdtype*, and *covers*, - or `None` if not found. - """ - raise NotImplementedError # pragma: no cover - - def _put_rdataset(self, name, rdataset): - """Store the rdataset.""" - raise NotImplementedError # pragma: no cover - - def _delete_name(self, name): - """Delete all data associated with *name*. - - It is not an error if the name does not exist. - """ - raise NotImplementedError # pragma: no cover - - def _delete_rdataset(self, name, rdtype, covers): - """Delete all data associated with *name*, *rdtype*, and *covers*. - - It is not an error if the rdataset does not exist. - """ - raise NotImplementedError # pragma: no cover - - def _name_exists(self, name): - """Does name exist? - - Returns a bool. - """ - raise NotImplementedError # pragma: no cover - - def _changed(self): - """Has this transaction changed anything?""" - raise NotImplementedError # pragma: no cover - - def _end_transaction(self, commit): - """End the transaction. - - *commit*, a bool. If ``True``, commit the transaction, otherwise - roll it back. - - If committing and the commit fails, then roll back and raise an - exception. - """ - raise NotImplementedError # pragma: no cover - - def _set_origin(self, origin): - """Set the origin. - - This method is called when reading a possibly relativized - source, and an origin setting operation occurs (e.g. $ORIGIN - in a zone file). 
- """ - raise NotImplementedError # pragma: no cover - - def _iterate_rdatasets(self): - """Return an iterator that yields (name, rdataset) tuples.""" - raise NotImplementedError # pragma: no cover - - def _iterate_names(self): - """Return an iterator that yields a name.""" - raise NotImplementedError # pragma: no cover - - def _get_node(self, name): - """Return the node at *name*, if any. - - Returns a node or ``None``. - """ - raise NotImplementedError # pragma: no cover - - # - # Low-level API with a default implementation, in case a subclass needs - # to override. - # - - def _origin_information(self): - # This is only used by _add() - return self.manager.origin_information() diff --git a/server/venv/Lib/site-packages/dns/tsig.py b/server/venv/Lib/site-packages/dns/tsig.py deleted file mode 100644 index 58760f5..0000000 --- a/server/venv/Lib/site-packages/dns/tsig.py +++ /dev/null @@ -1,361 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""DNS TSIG support.""" - -import base64 -import hashlib -import hmac -import struct - -import dns.exception -import dns.name -import dns.rcode -import dns.rdataclass - - -class BadTime(dns.exception.DNSException): - - """The current time is not within the TSIG's validity time.""" - - -class BadSignature(dns.exception.DNSException): - - """The TSIG signature fails to verify.""" - - -class BadKey(dns.exception.DNSException): - - """The TSIG record owner name does not match the key.""" - - -class BadAlgorithm(dns.exception.DNSException): - - """The TSIG algorithm does not match the key.""" - - -class PeerError(dns.exception.DNSException): - - """Base class for all TSIG errors generated by the remote peer""" - - -class PeerBadKey(PeerError): - - """The peer didn't know the key we used""" - - -class PeerBadSignature(PeerError): - - """The peer didn't like the signature we sent""" - - -class PeerBadTime(PeerError): - - """The peer didn't like the time we sent""" - - -class PeerBadTruncation(PeerError): - - """The peer didn't like amount of truncation in the TSIG we sent""" - - -# TSIG Algorithms - -HMAC_MD5 = dns.name.from_text("HMAC-MD5.SIG-ALG.REG.INT") -HMAC_SHA1 = dns.name.from_text("hmac-sha1") -HMAC_SHA224 = dns.name.from_text("hmac-sha224") -HMAC_SHA256 = dns.name.from_text("hmac-sha256") -HMAC_SHA256_128 = dns.name.from_text("hmac-sha256-128") -HMAC_SHA384 = dns.name.from_text("hmac-sha384") -HMAC_SHA384_192 = dns.name.from_text("hmac-sha384-192") -HMAC_SHA512 = dns.name.from_text("hmac-sha512") -HMAC_SHA512_256 = dns.name.from_text("hmac-sha512-256") -GSS_TSIG = dns.name.from_text("gss-tsig") - -default_algorithm = HMAC_SHA256 - -mac_sizes = { - HMAC_SHA1: 20, - HMAC_SHA224: 28, - HMAC_SHA256: 32, - HMAC_SHA256_128: 16, - HMAC_SHA384: 48, - HMAC_SHA384_192: 24, - HMAC_SHA512: 64, - HMAC_SHA512_256: 32, - HMAC_MD5: 16, - GSS_TSIG: 128, # This is what we assume to be the worst case! -} - - -class GSSTSig: - """ - GSS-TSIG TSIG implementation. This uses the GSS-API context established - in the TKEY message handshake to sign messages using GSS-API message - integrity codes, per the RFC. - - In order to avoid a direct GSSAPI dependency, the keyring holds a ref - to the GSSAPI object required, rather than the key itself. 
- """ - - def __init__(self, gssapi_context): - self.gssapi_context = gssapi_context - self.data = b"" - self.name = "gss-tsig" - - def update(self, data): - self.data += data - - def sign(self): - # defer to the GSSAPI function to sign - return self.gssapi_context.get_signature(self.data) - - def verify(self, expected): - try: - # defer to the GSSAPI function to verify - return self.gssapi_context.verify_signature(self.data, expected) - except Exception: - # note the usage of a bare exception - raise BadSignature - - -class GSSTSigAdapter: - def __init__(self, keyring): - self.keyring = keyring - - def __call__(self, message, keyname): - if keyname in self.keyring: - key = self.keyring[keyname] - if isinstance(key, Key) and key.algorithm == GSS_TSIG: - if message: - GSSTSigAdapter.parse_tkey_and_step(key, message, keyname) - return key - else: - return None - - @classmethod - def parse_tkey_and_step(cls, key, message, keyname): - # if the message is a TKEY type, absorb the key material - # into the context using step(); this is used to allow the - # client to complete the GSSAPI negotiation before attempting - # to verify the signed response to a TKEY message exchange - try: - rrset = message.find_rrset( - message.answer, keyname, dns.rdataclass.ANY, dns.rdatatype.TKEY - ) - if rrset: - token = rrset[0].key - gssapi_context = key.secret - return gssapi_context.step(token) - except KeyError: - pass - - -class HMACTSig: - """ - HMAC TSIG implementation. This uses the HMAC python module to handle the - sign/verify operations. - """ - - _hashes = { - HMAC_SHA1: hashlib.sha1, - HMAC_SHA224: hashlib.sha224, - HMAC_SHA256: hashlib.sha256, - HMAC_SHA256_128: (hashlib.sha256, 128), - HMAC_SHA384: hashlib.sha384, - HMAC_SHA384_192: (hashlib.sha384, 192), - HMAC_SHA512: hashlib.sha512, - HMAC_SHA512_256: (hashlib.sha512, 256), - HMAC_MD5: hashlib.md5, - } - - def __init__(self, key, algorithm): - try: - hashinfo = self._hashes[algorithm] - except KeyError: - raise NotImplementedError(f"TSIG algorithm {algorithm} is not supported") - - # create the HMAC context - if isinstance(hashinfo, tuple): - self.hmac_context = hmac.new(key, digestmod=hashinfo[0]) - self.size = hashinfo[1] - else: - self.hmac_context = hmac.new(key, digestmod=hashinfo) - self.size = None - self.name = self.hmac_context.name - if self.size: - self.name += f"-{self.size}" - - def update(self, data): - return self.hmac_context.update(data) - - def sign(self): - # defer to the HMAC digest() function for that digestmod - digest = self.hmac_context.digest() - if self.size: - digest = digest[: (self.size // 8)] - return digest - - def verify(self, expected): - # re-digest and compare the results - mac = self.sign() - if not hmac.compare_digest(mac, expected): - raise BadSignature - - -def _digest(wire, key, rdata, time=None, request_mac=None, ctx=None, multi=None): - """Return a context containing the TSIG rdata for the input parameters - @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object - @raises ValueError: I{other_data} is too long - @raises NotImplementedError: I{algorithm} is not supported - """ - - first = not (ctx and multi) - if first: - ctx = get_context(key) - if request_mac: - ctx.update(struct.pack("!H", len(request_mac))) - ctx.update(request_mac) - ctx.update(struct.pack("!H", rdata.original_id)) - ctx.update(wire[2:]) - if first: - ctx.update(key.name.to_digestable()) - ctx.update(struct.pack("!H", dns.rdataclass.ANY)) - ctx.update(struct.pack("!I", 0)) - if time is None: - time = rdata.time_signed - upper_time = 
(time >> 32) & 0xFFFF - lower_time = time & 0xFFFFFFFF - time_encoded = struct.pack("!HIH", upper_time, lower_time, rdata.fudge) - other_len = len(rdata.other) - if other_len > 65535: - raise ValueError("TSIG Other Data is > 65535 bytes") - if first: - ctx.update(key.algorithm.to_digestable() + time_encoded) - ctx.update(struct.pack("!HH", rdata.error, other_len) + rdata.other) - else: - ctx.update(time_encoded) - return ctx - - -def _maybe_start_digest(key, mac, multi): - """If this is the first message in a multi-message sequence, - start a new context. - @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object - """ - if multi: - ctx = get_context(key) - ctx.update(struct.pack("!H", len(mac))) - ctx.update(mac) - return ctx - else: - return None - - -def sign(wire, key, rdata, time=None, request_mac=None, ctx=None, multi=False): - """Return a (tsig_rdata, ctx) tuple containing the signed TSIG rdata, - with its MAC calculated by applying the TSIG signature algorithm, and - the TSIG digest context for the next message in a multi-message - sequence (``None`` unless *multi* is true). - @rtype: (dns.rdata.Rdata, dns.tsig.HMACTSig or dns.tsig.GSSTSig object) - @raises ValueError: I{other_data} is too long - @raises NotImplementedError: I{algorithm} is not supported - """ - - ctx = _digest(wire, key, rdata, time, request_mac, ctx, multi) - mac = ctx.sign() - tsig = rdata.replace(time_signed=time, mac=mac) - - return (tsig, _maybe_start_digest(key, mac, multi)) - - -def validate( - wire, key, owner, rdata, now, request_mac, tsig_start, ctx=None, multi=False -): - """Validate the specified TSIG rdata against the other input parameters. - - @raises FormError: The TSIG is badly formed. - @raises BadTime: There is too much time skew between the client and the - server. - @raises BadSignature: The TSIG signature did not validate. - @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object""" - - (adcount,) = struct.unpack("!H", wire[10:12]) - if adcount == 0: - raise dns.exception.FormError - adcount -= 1 - new_wire = wire[0:10] + struct.pack("!H", adcount) + wire[12:tsig_start] - if rdata.error != 0: - if rdata.error == dns.rcode.BADSIG: - raise PeerBadSignature - elif rdata.error == dns.rcode.BADKEY: - raise PeerBadKey - elif rdata.error == dns.rcode.BADTIME: - raise PeerBadTime - elif rdata.error == dns.rcode.BADTRUNC: - raise PeerBadTruncation - else: - raise PeerError("unknown TSIG error code %d" % rdata.error) - if abs(rdata.time_signed - now) > rdata.fudge: - raise BadTime - if key.name != owner: - raise BadKey - if key.algorithm != rdata.algorithm: - raise BadAlgorithm - ctx = _digest(new_wire, key, rdata, None, request_mac, ctx, multi) - ctx.verify(rdata.mac) - return _maybe_start_digest(key, rdata.mac, multi) - - -def get_context(key): - """Returns an HMAC context for the specified key. 
- - @rtype: HMAC context - @raises NotImplementedError: I{algorithm} is not supported - """ - - if key.algorithm == GSS_TSIG: - return GSSTSig(key.secret) - else: - return HMACTSig(key.secret, key.algorithm) - - -class Key: - def __init__(self, name, secret, algorithm=default_algorithm): - if isinstance(name, str): - name = dns.name.from_text(name) - self.name = name - if isinstance(secret, str): - secret = base64.decodebytes(secret.encode()) - self.secret = secret - if isinstance(algorithm, str): - algorithm = dns.name.from_text(algorithm) - self.algorithm = algorithm - - def __eq__(self, other): - return ( - isinstance(other, Key) - and self.name == other.name - and self.secret == other.secret - and self.algorithm == other.algorithm - ) - - def __repr__(self): - r = f"<DNS key name='{self.name}', " + f"algorithm='{self.algorithm}'" - if self.algorithm != GSS_TSIG: - r += f", secret='{base64.b64encode(self.secret).decode()}'" - r += ">" - return r diff --git a/server/venv/Lib/site-packages/dns/tsigkeyring.py b/server/venv/Lib/site-packages/dns/tsigkeyring.py deleted file mode 100644 --- a/server/venv/Lib/site-packages/dns/tsigkeyring.py +++ /dev/null -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -"""A place to store TSIG keys.""" - -import base64 -from typing import Any, Dict - -import dns.name -import dns.tsig - - -def from_text(textring: Dict[str, Any]) -> Dict[dns.name.Name, dns.tsig.Key]: - """Convert a dictionary containing (textual DNS name, base64 secret) - pairs into a binary keyring which has (dns.name.Name, bytes) pairs, or - a dictionary containing (textual DNS name, (algorithm, base64 secret)) - pairs into a binary keyring which has (dns.name.Name, dns.tsig.Key) pairs. - @rtype: dict""" - - keyring = {} - for name, value in textring.items(): - kname = dns.name.from_text(name) - if isinstance(value, str): - keyring[kname] = dns.tsig.Key(kname, value).secret - else: - (algorithm, secret) = value - keyring[kname] = dns.tsig.Key(kname, secret, algorithm) - return keyring - - -def to_text(keyring: Dict[dns.name.Name, Any]) -> Dict[str, Any]: - """Convert a dictionary containing (dns.name.Name, dns.tsig.Key) pairs - into a text keyring which has (textual DNS name, (textual algorithm, - base64 secret)) pairs, or a dictionary containing (dns.name.Name, bytes) - pairs into a text keyring which has (textual DNS name, base64 secret) pairs. - @rtype: dict""" - - textring = {} - - def b64encode(secret): - return base64.encodebytes(secret).decode().rstrip() - - for name, key in keyring.items(): - tname = name.to_text() - if isinstance(key, bytes): - textring[tname] = b64encode(key) - else: - if isinstance(key.secret, bytes): - text_secret = b64encode(key.secret) - else: - text_secret = str(key.secret) - - textring[tname] = (key.algorithm.to_text(), text_secret) - return textring diff --git a/server/venv/Lib/site-packages/dns/ttl.py b/server/venv/Lib/site-packages/dns/ttl.py deleted file mode 100644 index 264b033..0000000 --- a/server/venv/Lib/site-packages/dns/ttl.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
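A quick illustration of the two keyring shapes handled by the ``from_text()``/``to_text()`` pair in the tsigkeyring module deleted above. This is a sketch, not part of the vendored source; the key name and base64 secret are made up:

    import dns.tsigkeyring

    # (textual name, (algorithm, base64 secret)) pairs become dns.tsig.Key values.
    keyring = dns.tsigkeyring.from_text(
        {"keyname.example.": ("hmac-sha256", "MTIzNDU2Nzg5MGFiY2RlZg==")}
    )

    # Plain (textual name, base64 secret) pairs become raw bytes secrets.
    raw_ring = dns.tsigkeyring.from_text(
        {"keyname.example.": "MTIzNDU2Nzg5MGFiY2RlZg=="}
    )

    # And back: Key values round-trip to (textual algorithm, base64 secret) tuples.
    text_ring = dns.tsigkeyring.to_text(keyring)
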
- -"""DNS TTL conversion.""" - -from typing import Union - -import dns.exception - -# Technically TTLs are supposed to be between 0 and 2**31 - 1, with values -# greater than that interpreted as 0, but we do not impose this policy here -# as values > 2**31 - 1 occur in real world data. -# -# We leave it to applications to impose tighter bounds if desired. -MAX_TTL = 2**32 - 1 - - -class BadTTL(dns.exception.SyntaxError): - """DNS TTL value is not well-formed.""" - - -def from_text(text: str) -> int: - """Convert the text form of a TTL to an integer. - - The BIND 8 units syntax for TTLs (e.g. '1w6d4h3m10s') is supported. - - *text*, a ``str``, the textual TTL. - - Raises ``dns.ttl.BadTTL`` if the TTL is not well-formed. - - Returns an ``int``. - """ - - if text.isdigit(): - total = int(text) - elif len(text) == 0: - raise BadTTL - else: - total = 0 - current = 0 - need_digit = True - for c in text: - if c.isdigit(): - current *= 10 - current += int(c) - need_digit = False - else: - if need_digit: - raise BadTTL - c = c.lower() - if c == "w": - total += current * 604800 - elif c == "d": - total += current * 86400 - elif c == "h": - total += current * 3600 - elif c == "m": - total += current * 60 - elif c == "s": - total += current - else: - raise BadTTL("unknown unit '%s'" % c) - current = 0 - need_digit = True - if not current == 0: - raise BadTTL("trailing integer") - if total < 0 or total > MAX_TTL: - raise BadTTL("TTL should be between 0 and 2**32 - 1 (inclusive)") - return total - - -def make(value: Union[int, str]) -> int: - if isinstance(value, int): - return value - elif isinstance(value, str): - return dns.ttl.from_text(value) - else: - raise ValueError("cannot convert value to TTL") diff --git a/server/venv/Lib/site-packages/dns/update.py b/server/venv/Lib/site-packages/dns/update.py deleted file mode 100644 index bf1157a..0000000 --- a/server/venv/Lib/site-packages/dns/update.py +++ /dev/null @@ -1,386 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -"""DNS Dynamic Update Support""" - -from typing import Any, List, Optional, Union - -import dns.message -import dns.name -import dns.opcode -import dns.rdata -import dns.rdataclass -import dns.rdataset -import dns.rdatatype -import dns.tsig - - -class UpdateSection(dns.enum.IntEnum): - """Update sections""" - - ZONE = 0 - PREREQ = 1 - UPDATE = 2 - ADDITIONAL = 3 - - @classmethod - def _maximum(cls): - return 3 - - -class UpdateMessage(dns.message.Message): # lgtm[py/missing-equals] - # ignore the mypy error here as we mean to use a different enum - _section_enum = UpdateSection # type: ignore - - def __init__( - self, - zone: Optional[Union[dns.name.Name, str]] = None, - rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, - keyring: Optional[Any] = None, - keyname: Optional[dns.name.Name] = None, - keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, - id: Optional[int] = None, - ): - """Initialize a new DNS Update object. - - See the documentation of the Message class for a complete - description of the keyring dictionary. - - *zone*, a ``dns.name.Name``, ``str``, or ``None``, the zone - which is being updated. ``None`` should only be used by dnspython's - message constructors, as a zone is required for the convenience - methods like ``add()``, ``replace()``, etc. - - *rdclass*, an ``int`` or ``str``, the class of the zone. - - The *keyring*, *keyname*, and *keyalgorithm* parameters are passed to - ``use_tsig()``; see its documentation for details. - """ - super().__init__(id=id) - self.flags |= dns.opcode.to_flags(dns.opcode.UPDATE) - if isinstance(zone, str): - zone = dns.name.from_text(zone) - self.origin = zone - rdclass = dns.rdataclass.RdataClass.make(rdclass) - self.zone_rdclass = rdclass - if self.origin: - self.find_rrset( - self.zone, - self.origin, - rdclass, - dns.rdatatype.SOA, - create=True, - force_unique=True, - ) - if keyring is not None: - self.use_tsig(keyring, keyname, algorithm=keyalgorithm) - - @property - def zone(self) -> List[dns.rrset.RRset]: - """The zone section.""" - return self.sections[0] - - @zone.setter - def zone(self, v): - self.sections[0] = v - - @property - def prerequisite(self) -> List[dns.rrset.RRset]: - """The prerequisite section.""" - return self.sections[1] - - @prerequisite.setter - def prerequisite(self, v): - self.sections[1] = v - - @property - def update(self) -> List[dns.rrset.RRset]: - """The update section.""" - return self.sections[2] - - @update.setter - def update(self, v): - self.sections[2] = v - - def _add_rr(self, name, ttl, rd, deleting=None, section=None): - """Add a single RR to the update section.""" - - if section is None: - section = self.update - covers = rd.covers() - rrset = self.find_rrset( - section, name, self.zone_rdclass, rd.rdtype, covers, deleting, True, True - ) - rrset.add(rd, ttl) - - def _add(self, replace, section, name, *args): - """Add records. - - *replace* is the replacement mode. If ``False``, - RRs are added to an existing RRset; if ``True``, the RRset - is replaced with the specified contents. The second - argument is the section to add to. The third argument - is always a name. The other arguments can be: - - - rdataset... - - - ttl, rdata... - - - ttl, rdtype, string... 
- """ - - if isinstance(name, str): - name = dns.name.from_text(name, None) - if isinstance(args[0], dns.rdataset.Rdataset): - for rds in args: - if replace: - self.delete(name, rds.rdtype) - for rd in rds: - self._add_rr(name, rds.ttl, rd, section=section) - else: - args = list(args) - ttl = int(args.pop(0)) - if isinstance(args[0], dns.rdata.Rdata): - if replace: - self.delete(name, args[0].rdtype) - for rd in args: - self._add_rr(name, ttl, rd, section=section) - else: - rdtype = dns.rdatatype.RdataType.make(args.pop(0)) - if replace: - self.delete(name, rdtype) - for s in args: - rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s, self.origin) - self._add_rr(name, ttl, rd, section=section) - - def add(self, name: Union[dns.name.Name, str], *args: Any) -> None: - """Add records. - - The first argument is always a name. The other - arguments can be: - - - rdataset... - - - ttl, rdata... - - - ttl, rdtype, string... - """ - - self._add(False, self.update, name, *args) - - def delete(self, name: Union[dns.name.Name, str], *args: Any) -> None: - """Delete records. - - The first argument is always a name. The other - arguments can be: - - - *empty* - - - rdataset... - - - rdata... - - - rdtype, [string...] - """ - - if isinstance(name, str): - name = dns.name.from_text(name, None) - if len(args) == 0: - self.find_rrset( - self.update, - name, - dns.rdataclass.ANY, - dns.rdatatype.ANY, - dns.rdatatype.NONE, - dns.rdataclass.ANY, - True, - True, - ) - elif isinstance(args[0], dns.rdataset.Rdataset): - for rds in args: - for rd in rds: - self._add_rr(name, 0, rd, dns.rdataclass.NONE) - else: - largs = list(args) - if isinstance(largs[0], dns.rdata.Rdata): - for rd in largs: - self._add_rr(name, 0, rd, dns.rdataclass.NONE) - else: - rdtype = dns.rdatatype.RdataType.make(largs.pop(0)) - if len(largs) == 0: - self.find_rrset( - self.update, - name, - self.zone_rdclass, - rdtype, - dns.rdatatype.NONE, - dns.rdataclass.ANY, - True, - True, - ) - else: - for s in largs: - rd = dns.rdata.from_text( - self.zone_rdclass, - rdtype, - s, # type: ignore[arg-type] - self.origin, - ) - self._add_rr(name, 0, rd, dns.rdataclass.NONE) - - def replace(self, name: Union[dns.name.Name, str], *args: Any) -> None: - """Replace records. - - The first argument is always a name. The other - arguments can be: - - - rdataset... - - - ttl, rdata... - - - ttl, rdtype, string... - - Note that if you want to replace the entire node, you should do - a delete of the name followed by one or more calls to add. - """ - - self._add(True, self.update, name, *args) - - def present(self, name: Union[dns.name.Name, str], *args: Any) -> None: - """Require that an owner name (and optionally an rdata type, - or specific rdataset) exists as a prerequisite to the - execution of the update. - - The first argument is always a name. - The other arguments can be: - - - rdataset... - - - rdata... - - - rdtype, string... 
- """ - - if isinstance(name, str): - name = dns.name.from_text(name, None) - if len(args) == 0: - self.find_rrset( - self.prerequisite, - name, - dns.rdataclass.ANY, - dns.rdatatype.ANY, - dns.rdatatype.NONE, - None, - True, - True, - ) - elif ( - isinstance(args[0], dns.rdataset.Rdataset) - or isinstance(args[0], dns.rdata.Rdata) - or len(args) > 1 - ): - if not isinstance(args[0], dns.rdataset.Rdataset): - # Add a 0 TTL - largs = list(args) - largs.insert(0, 0) # type: ignore[arg-type] - self._add(False, self.prerequisite, name, *largs) - else: - self._add(False, self.prerequisite, name, *args) - else: - rdtype = dns.rdatatype.RdataType.make(args[0]) - self.find_rrset( - self.prerequisite, - name, - dns.rdataclass.ANY, - rdtype, - dns.rdatatype.NONE, - None, - True, - True, - ) - - def absent( - self, - name: Union[dns.name.Name, str], - rdtype: Optional[Union[dns.rdatatype.RdataType, str]] = None, - ) -> None: - """Require that an owner name (and optionally an rdata type) does - not exist as a prerequisite to the execution of the update.""" - - if isinstance(name, str): - name = dns.name.from_text(name, None) - if rdtype is None: - self.find_rrset( - self.prerequisite, - name, - dns.rdataclass.NONE, - dns.rdatatype.ANY, - dns.rdatatype.NONE, - None, - True, - True, - ) - else: - rdtype = dns.rdatatype.RdataType.make(rdtype) - self.find_rrset( - self.prerequisite, - name, - dns.rdataclass.NONE, - rdtype, - dns.rdatatype.NONE, - None, - True, - True, - ) - - def _get_one_rr_per_rrset(self, value): - # Updates are always one_rr_per_rrset - return True - - def _parse_rr_header(self, section, name, rdclass, rdtype): - deleting = None - empty = False - if section == UpdateSection.ZONE: - if ( - dns.rdataclass.is_metaclass(rdclass) - or rdtype != dns.rdatatype.SOA - or self.zone - ): - raise dns.exception.FormError - else: - if not self.zone: - raise dns.exception.FormError - if rdclass in (dns.rdataclass.ANY, dns.rdataclass.NONE): - deleting = rdclass - rdclass = self.zone[0].rdclass - empty = ( - deleting == dns.rdataclass.ANY or section == UpdateSection.PREREQ - ) - return (rdclass, rdtype, deleting, empty) - - -# backwards compatibility -Update = UpdateMessage - -### BEGIN generated UpdateSection constants - -ZONE = UpdateSection.ZONE -PREREQ = UpdateSection.PREREQ -UPDATE = UpdateSection.UPDATE -ADDITIONAL = UpdateSection.ADDITIONAL - -### END generated UpdateSection constants diff --git a/server/venv/Lib/site-packages/dns/version.py b/server/venv/Lib/site-packages/dns/version.py deleted file mode 100644 index 1f1fbf2..0000000 --- a/server/venv/Lib/site-packages/dns/version.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
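The UpdateMessage class in dns/update.py above assembles RFC 2136 dynamic updates from prerequisites plus add/replace/delete operations. A minimal sketch; the zone name, record data, and the server address 10.0.0.1 are hypothetical placeholders:

```python
import dns.query
import dns.rcode
import dns.update

update = dns.update.UpdateMessage("example.")
update.present("mail")                       # prerequisite: 'mail' must exist
update.absent("new", "A")                    # prerequisite: no A records at 'new'
update.replace("www", 300, "A", "10.0.0.2")  # delete, then add, the A rrset
update.add("new", 300, "A", "10.0.0.3")
update.delete("old", "TXT")

# The whole update travels in one request.
response = dns.query.udp(update, "10.0.0.1")
print(dns.rcode.to_text(response.rcode()))
```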
-
-"""dnspython release version information."""
-
-#: MAJOR
-MAJOR = 2
-#: MINOR
-MINOR = 4
-#: MICRO
-MICRO = 2
-#: RELEASELEVEL
-RELEASELEVEL = 0x0F
-#: SERIAL
-SERIAL = 0
-
-if RELEASELEVEL == 0x0F:  # pragma: no cover lgtm[py/unreachable-statement]
-    #: version
-    version = "%d.%d.%d" % (MAJOR, MINOR, MICRO)  # lgtm[py/unreachable-statement]
-elif RELEASELEVEL == 0x00:  # pragma: no cover lgtm[py/unreachable-statement]
-    version = "%d.%d.%ddev%d" % (
-        MAJOR,
-        MINOR,
-        MICRO,
-        SERIAL,
-    )  # lgtm[py/unreachable-statement]
-elif RELEASELEVEL == 0x0C:  # pragma: no cover lgtm[py/unreachable-statement]
-    version = "%d.%d.%drc%d" % (
-        MAJOR,
-        MINOR,
-        MICRO,
-        SERIAL,
-    )  # lgtm[py/unreachable-statement]
-else:  # pragma: no cover lgtm[py/unreachable-statement]
-    version = "%d.%d.%d%x%d" % (
-        MAJOR,
-        MINOR,
-        MICRO,
-        RELEASELEVEL,
-        SERIAL,
-    )  # lgtm[py/unreachable-statement]
-
-#: hexversion
-hexversion = MAJOR << 24 | MINOR << 16 | MICRO << 8 | RELEASELEVEL << 4 | SERIAL
diff --git a/server/venv/Lib/site-packages/dns/versioned.py b/server/venv/Lib/site-packages/dns/versioned.py
deleted file mode 100644
index fd78e67..0000000
--- a/server/venv/Lib/site-packages/dns/versioned.py
+++ /dev/null
@@ -1,318 +0,0 @@
-# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
-
-"""DNS Versioned Zones."""
-
-import collections
-import threading
-from typing import Callable, Deque, Optional, Set, Union
-
-import dns.exception
-import dns.immutable
-import dns.name
-import dns.node
-import dns.rdataclass
-import dns.rdataset
-import dns.rdatatype
-import dns.rdtypes.ANY.SOA
-import dns.zone
-
-
-class UseTransaction(dns.exception.DNSException):
-    """To alter a versioned zone, use a transaction."""
-
-
-# Backwards compatibility
-Node = dns.zone.VersionedNode
-ImmutableNode = dns.zone.ImmutableVersionedNode
-Version = dns.zone.Version
-WritableVersion = dns.zone.WritableVersion
-ImmutableVersion = dns.zone.ImmutableVersion
-Transaction = dns.zone.Transaction
-
-
-class Zone(dns.zone.Zone):  # lgtm[py/missing-equals]
-    __slots__ = [
-        "_versions",
-        "_versions_lock",
-        "_write_txn",
-        "_write_waiters",
-        "_write_event",
-        "_pruning_policy",
-        "_readers",
-    ]
-
-    node_factory = Node
-
-    def __init__(
-        self,
-        origin: Optional[Union[dns.name.Name, str]],
-        rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
-        relativize: bool = True,
-        pruning_policy: Optional[Callable[["Zone", Version], Optional[bool]]] = None,
-    ):
-        """Initialize a versioned zone object.
-
-        *origin* is the origin of the zone. It may be a ``dns.name.Name``,
-        a ``str``, or ``None``. If ``None``, then the zone's origin will
-        be set by the first ``$ORIGIN`` line in a zone file.
-
-        *rdclass*, an ``int``, the zone's rdata class; the default is class IN.
-
-        *relativize*, a ``bool``, determines whether domain names are
-        relativized to the zone's origin. The default is ``True``.
-
-        *pruning_policy*, a function taking a ``Zone`` and a ``Version`` and
-        returning a ``bool``, or ``None``. Should the version be pruned? If
-        ``None``, the default policy, which retains one version, is used.
- """ - super().__init__(origin, rdclass, relativize) - self._versions: Deque[Version] = collections.deque() - self._version_lock = threading.Lock() - if pruning_policy is None: - self._pruning_policy = self._default_pruning_policy - else: - self._pruning_policy = pruning_policy - self._write_txn: Optional[Transaction] = None - self._write_event: Optional[threading.Event] = None - self._write_waiters: Deque[threading.Event] = collections.deque() - self._readers: Set[Transaction] = set() - self._commit_version_unlocked( - None, WritableVersion(self, replacement=True), origin - ) - - def reader( - self, id: Optional[int] = None, serial: Optional[int] = None - ) -> Transaction: # pylint: disable=arguments-differ - if id is not None and serial is not None: - raise ValueError("cannot specify both id and serial") - with self._version_lock: - if id is not None: - version = None - for v in reversed(self._versions): - if v.id == id: - version = v - break - if version is None: - raise KeyError("version not found") - elif serial is not None: - if self.relativize: - oname = dns.name.empty - else: - assert self.origin is not None - oname = self.origin - version = None - for v in reversed(self._versions): - n = v.nodes.get(oname) - if n: - rds = n.get_rdataset(self.rdclass, dns.rdatatype.SOA) - if rds and rds[0].serial == serial: - version = v - break - if version is None: - raise KeyError("serial not found") - else: - version = self._versions[-1] - txn = Transaction(self, False, version) - self._readers.add(txn) - return txn - - def writer(self, replacement: bool = False) -> Transaction: - event = None - while True: - with self._version_lock: - # Checking event == self._write_event ensures that either - # no one was waiting before we got lucky and found no write - # txn, or we were the one who was waiting and got woken up. - # This prevents "taking cuts" when creating a write txn. - if self._write_txn is None and event == self._write_event: - # Creating the transaction defers version setup - # (i.e. copying the nodes dictionary) until we - # give up the lock, so that we hold the lock as - # short a time as possible. This is why we call - # _setup_version() below. - self._write_txn = Transaction( - self, replacement, make_immutable=True - ) - # give up our exclusive right to make a Transaction - self._write_event = None - break - # Someone else is writing already, so we will have to - # wait, but we want to do the actual wait outside the - # lock. - event = threading.Event() - self._write_waiters.append(event) - # wait (note we gave up the lock!) - # - # We only wake one sleeper at a time, so it's important - # that no event waiter can exit this method (e.g. via - # cancellation) without returning a transaction or waking - # someone else up. - # - # This is not a problem with Threading module threads as - # they cannot be canceled, but could be an issue with trio - # tasks when we do the async version of writer(). - # I.e. we'd need to do something like: - # - # try: - # event.wait() - # except trio.Cancelled: - # with self._version_lock: - # self._maybe_wakeup_one_waiter_unlocked() - # raise - # - event.wait() - # Do the deferred version setup. 
- self._write_txn._setup_version() - return self._write_txn - - def _maybe_wakeup_one_waiter_unlocked(self): - if len(self._write_waiters) > 0: - self._write_event = self._write_waiters.popleft() - self._write_event.set() - - # pylint: disable=unused-argument - def _default_pruning_policy(self, zone, version): - return True - - # pylint: enable=unused-argument - - def _prune_versions_unlocked(self): - assert len(self._versions) > 0 - # Don't ever prune a version greater than or equal to one that - # a reader has open. This pins versions in memory while the - # reader is open, and importantly lets the reader open a txn on - # a successor version (e.g. if generating an IXFR). - # - # Note our definition of least_kept also ensures we do not try to - # delete the greatest version. - if len(self._readers) > 0: - least_kept = min(txn.version.id for txn in self._readers) - else: - least_kept = self._versions[-1].id - while self._versions[0].id < least_kept and self._pruning_policy( - self, self._versions[0] - ): - self._versions.popleft() - - def set_max_versions(self, max_versions: Optional[int]) -> None: - """Set a pruning policy that retains up to the specified number - of versions - """ - if max_versions is not None and max_versions < 1: - raise ValueError("max versions must be at least 1") - if max_versions is None: - - def policy(zone, _): # pylint: disable=unused-argument - return False - - else: - - def policy(zone, _): - return len(zone._versions) > max_versions - - self.set_pruning_policy(policy) - - def set_pruning_policy( - self, policy: Optional[Callable[["Zone", Version], Optional[bool]]] - ) -> None: - """Set the pruning policy for the zone. - - The *policy* function takes a `Version` and returns `True` if - the version should be pruned, and `False` otherwise. `None` - may also be specified for policy, in which case the default policy - is used. - - Pruning checking proceeds from the least version and the first - time the function returns `False`, the checking stops. I.e. the - retained versions are always a consecutive sequence. - """ - if policy is None: - policy = self._default_pruning_policy - with self._version_lock: - self._pruning_policy = policy - self._prune_versions_unlocked() - - def _end_read(self, txn): - with self._version_lock: - self._readers.remove(txn) - self._prune_versions_unlocked() - - def _end_write_unlocked(self, txn): - assert self._write_txn == txn - self._write_txn = None - self._maybe_wakeup_one_waiter_unlocked() - - def _end_write(self, txn): - with self._version_lock: - self._end_write_unlocked(txn) - - def _commit_version_unlocked(self, txn, version, origin): - self._versions.append(version) - self._prune_versions_unlocked() - self.nodes = version.nodes - if self.origin is None: - self.origin = origin - # txn can be None in __init__ when we make the empty version. 
- if txn is not None: - self._end_write_unlocked(txn) - - def _commit_version(self, txn, version, origin): - with self._version_lock: - self._commit_version_unlocked(txn, version, origin) - - def _get_next_version_id(self): - if len(self._versions) > 0: - id = self._versions[-1].id + 1 - else: - id = 1 - return id - - def find_node( - self, name: Union[dns.name.Name, str], create: bool = False - ) -> dns.node.Node: - if create: - raise UseTransaction - return super().find_node(name) - - def delete_node(self, name: Union[dns.name.Name, str]) -> None: - raise UseTransaction - - def find_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - create: bool = False, - ) -> dns.rdataset.Rdataset: - if create: - raise UseTransaction - rdataset = super().find_rdataset(name, rdtype, covers) - return dns.rdataset.ImmutableRdataset(rdataset) - - def get_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - create: bool = False, - ) -> Optional[dns.rdataset.Rdataset]: - if create: - raise UseTransaction - rdataset = super().get_rdataset(name, rdtype, covers) - if rdataset is not None: - return dns.rdataset.ImmutableRdataset(rdataset) - else: - return None - - def delete_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> None: - raise UseTransaction - - def replace_rdataset( - self, name: Union[dns.name.Name, str], replacement: dns.rdataset.Rdataset - ) -> None: - raise UseTransaction diff --git a/server/venv/Lib/site-packages/dns/win32util.py b/server/venv/Lib/site-packages/dns/win32util.py deleted file mode 100644 index b2ca61d..0000000 --- a/server/venv/Lib/site-packages/dns/win32util.py +++ /dev/null @@ -1,249 +0,0 @@ -import sys - -if sys.platform == "win32": - from typing import Any - - import dns.name - - _prefer_wmi = True - - import winreg # pylint: disable=import-error - - # Keep pylint quiet on non-windows. - try: - WindowsError is None # pylint: disable=used-before-assignment - except KeyError: - WindowsError = Exception - - try: - import threading - - import pythoncom # pylint: disable=import-error - import wmi # pylint: disable=import-error - - _have_wmi = True - except Exception: - _have_wmi = False - - def _config_domain(domain): - # Sometimes DHCP servers add a '.' prefix to the default domain, and - # Windows just stores such values in the registry (see #687). - # Check for this and fix it. 
- if domain.startswith("."): - domain = domain[1:] - return dns.name.from_text(domain) - - class DnsInfo: - def __init__(self): - self.domain = None - self.nameservers = [] - self.search = [] - - if _have_wmi: - - class _WMIGetter(threading.Thread): - def __init__(self): - super().__init__() - self.info = DnsInfo() - - def run(self): - pythoncom.CoInitialize() - try: - system = wmi.WMI() - for interface in system.Win32_NetworkAdapterConfiguration(): - if interface.IPEnabled and interface.DNSDomain: - self.info.domain = _config_domain(interface.DNSDomain) - self.info.nameservers = list(interface.DNSServerSearchOrder) - if interface.DNSDomainSuffixSearchOrder: - self.info.search = [ - _config_domain(x) - for x in interface.DNSDomainSuffixSearchOrder - ] - break - finally: - pythoncom.CoUninitialize() - - def get(self): - # We always run in a separate thread to avoid any issues with - # the COM threading model. - self.start() - self.join() - return self.info - - else: - - class _WMIGetter: # type: ignore - pass - - class _RegistryGetter: - def __init__(self): - self.info = DnsInfo() - - def _determine_split_char(self, entry): - # - # The windows registry irritatingly changes the list element - # delimiter in between ' ' and ',' (and vice-versa) in various - # versions of windows. - # - if entry.find(" ") >= 0: - split_char = " " - elif entry.find(",") >= 0: - split_char = "," - else: - # probably a singleton; treat as a space-separated list. - split_char = " " - return split_char - - def _config_nameservers(self, nameservers): - split_char = self._determine_split_char(nameservers) - ns_list = nameservers.split(split_char) - for ns in ns_list: - if ns not in self.info.nameservers: - self.info.nameservers.append(ns) - - def _config_search(self, search): - split_char = self._determine_split_char(search) - search_list = search.split(split_char) - for s in search_list: - s = _config_domain(s) - if s not in self.info.search: - self.info.search.append(s) - - def _config_fromkey(self, key, always_try_domain): - try: - servers, _ = winreg.QueryValueEx(key, "NameServer") - except WindowsError: - servers = None - if servers: - self._config_nameservers(servers) - if servers or always_try_domain: - try: - dom, _ = winreg.QueryValueEx(key, "Domain") - if dom: - self.info.domain = _config_domain(dom) - except WindowsError: - pass - else: - try: - servers, _ = winreg.QueryValueEx(key, "DhcpNameServer") - except WindowsError: - servers = None - if servers: - self._config_nameservers(servers) - try: - dom, _ = winreg.QueryValueEx(key, "DhcpDomain") - if dom: - self.info.domain = _config_domain(dom) - except WindowsError: - pass - try: - search, _ = winreg.QueryValueEx(key, "SearchList") - except WindowsError: - search = None - if search is None: - try: - search, _ = winreg.QueryValueEx(key, "DhcpSearchList") - except WindowsError: - search = None - if search: - self._config_search(search) - - def _is_nic_enabled(self, lm, guid): - # Look in the Windows Registry to determine whether the network - # interface corresponding to the given guid is enabled. - # - # (Code contributed by Paul Marks, thanks!) - # - try: - # This hard-coded location seems to be consistent, at least - # from Windows 2000 through Vista. 
- connection_key = winreg.OpenKey( - lm, - r"SYSTEM\CurrentControlSet\Control\Network" - r"\{4D36E972-E325-11CE-BFC1-08002BE10318}" - r"\%s\Connection" % guid, - ) - - try: - # The PnpInstanceID points to a key inside Enum - (pnp_id, ttype) = winreg.QueryValueEx( - connection_key, "PnpInstanceID" - ) - - if ttype != winreg.REG_SZ: - raise ValueError # pragma: no cover - - device_key = winreg.OpenKey( - lm, r"SYSTEM\CurrentControlSet\Enum\%s" % pnp_id - ) - - try: - # Get ConfigFlags for this device - (flags, ttype) = winreg.QueryValueEx(device_key, "ConfigFlags") - - if ttype != winreg.REG_DWORD: - raise ValueError # pragma: no cover - - # Based on experimentation, bit 0x1 indicates that the - # device is disabled. - # - # XXXRTH I suspect we really want to & with 0x03 so - # that CONFIGFLAGS_REMOVED devices are also ignored, - # but we're shifting to WMI as ConfigFlags is not - # supposed to be used. - return not flags & 0x1 - - finally: - device_key.Close() - finally: - connection_key.Close() - except Exception: # pragma: no cover - return False - - def get(self): - """Extract resolver configuration from the Windows registry.""" - - lm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) - try: - tcp_params = winreg.OpenKey( - lm, r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters" - ) - try: - self._config_fromkey(tcp_params, True) - finally: - tcp_params.Close() - interfaces = winreg.OpenKey( - lm, - r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces", - ) - try: - i = 0 - while True: - try: - guid = winreg.EnumKey(interfaces, i) - i += 1 - key = winreg.OpenKey(interfaces, guid) - try: - if not self._is_nic_enabled(lm, guid): - continue - self._config_fromkey(key, False) - finally: - key.Close() - except EnvironmentError: - break - finally: - interfaces.Close() - finally: - lm.Close() - return self.info - - _getter_class: Any - if _have_wmi and _prefer_wmi: - _getter_class = _WMIGetter - else: - _getter_class = _RegistryGetter - - def get_dns_info(): - """Extract resolver configuration.""" - getter = _getter_class() - return getter.get() diff --git a/server/venv/Lib/site-packages/dns/wire.py b/server/venv/Lib/site-packages/dns/wire.py deleted file mode 100644 index 9f9b157..0000000 --- a/server/venv/Lib/site-packages/dns/wire.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -import contextlib -import struct -from typing import Iterator, Optional, Tuple - -import dns.exception -import dns.name - - -class Parser: - def __init__(self, wire: bytes, current: int = 0): - self.wire = wire - self.current = 0 - self.end = len(self.wire) - if current: - self.seek(current) - self.furthest = current - - def remaining(self) -> int: - return self.end - self.current - - def get_bytes(self, size: int) -> bytes: - assert size >= 0 - if size > self.remaining(): - raise dns.exception.FormError - output = self.wire[self.current : self.current + size] - self.current += size - self.furthest = max(self.furthest, self.current) - return output - - def get_counted_bytes(self, length_size: int = 1) -> bytes: - length = int.from_bytes(self.get_bytes(length_size), "big") - return self.get_bytes(length) - - def get_remaining(self) -> bytes: - return self.get_bytes(self.remaining()) - - def get_uint8(self) -> int: - return struct.unpack("!B", self.get_bytes(1))[0] - - def get_uint16(self) -> int: - return struct.unpack("!H", self.get_bytes(2))[0] - - def get_uint32(self) -> int: - return struct.unpack("!I", self.get_bytes(4))[0] - - 
def get_uint48(self) -> int: - return int.from_bytes(self.get_bytes(6), "big") - - def get_struct(self, format: str) -> Tuple: - return struct.unpack(format, self.get_bytes(struct.calcsize(format))) - - def get_name(self, origin: Optional["dns.name.Name"] = None) -> "dns.name.Name": - name = dns.name.from_wire_parser(self) - if origin: - name = name.relativize(origin) - return name - - def seek(self, where: int) -> None: - # Note that seeking to the end is OK! (If you try to read - # after such a seek, you'll get an exception as expected.) - if where < 0 or where > self.end: - raise dns.exception.FormError - self.current = where - - @contextlib.contextmanager - def restrict_to(self, size: int) -> Iterator: - assert size >= 0 - if size > self.remaining(): - raise dns.exception.FormError - saved_end = self.end - try: - self.end = self.current + size - yield - # We make this check here and not in the finally as we - # don't want to raise if we're already raising for some - # other reason. - if self.current != self.end: - raise dns.exception.FormError - finally: - self.end = saved_end - - @contextlib.contextmanager - def restore_furthest(self) -> Iterator: - try: - yield None - finally: - self.current = self.furthest diff --git a/server/venv/Lib/site-packages/dns/xfr.py b/server/venv/Lib/site-packages/dns/xfr.py deleted file mode 100644 index dd247d3..0000000 --- a/server/venv/Lib/site-packages/dns/xfr.py +++ /dev/null @@ -1,343 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2017 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -from typing import Any, List, Optional, Tuple, Union - -import dns.exception -import dns.message -import dns.name -import dns.rcode -import dns.rdataset -import dns.rdatatype -import dns.serial -import dns.transaction -import dns.tsig -import dns.zone - - -class TransferError(dns.exception.DNSException): - """A zone transfer response got a non-zero rcode.""" - - def __init__(self, rcode): - message = "Zone transfer error: %s" % dns.rcode.to_text(rcode) - super().__init__(message) - self.rcode = rcode - - -class SerialWentBackwards(dns.exception.FormError): - """The current serial number is less than the serial we know.""" - - -class UseTCP(dns.exception.DNSException): - """This IXFR cannot be completed with UDP.""" - - -class Inbound: - """ - State machine for zone transfers. - """ - - def __init__( - self, - txn_manager: dns.transaction.TransactionManager, - rdtype: dns.rdatatype.RdataType = dns.rdatatype.AXFR, - serial: Optional[int] = None, - is_udp: bool = False, - ): - """Initialize an inbound zone transfer. - - *txn_manager* is a :py:class:`dns.transaction.TransactionManager`. 
-
-        *rdtype* can be `dns.rdatatype.AXFR` or `dns.rdatatype.IXFR`
-
-        *serial* is the base serial number for IXFRs, and is required in
-        that case.
-
-        *is_udp*, a ``bool``, indicates if UDP is being used for this
-        XFR.
-        """
-        self.txn_manager = txn_manager
-        self.txn: Optional[dns.transaction.Transaction] = None
-        self.rdtype = rdtype
-        if rdtype == dns.rdatatype.IXFR:
-            if serial is None:
-                raise ValueError("a starting serial must be supplied for IXFRs")
-        elif is_udp:
-            raise ValueError("is_udp specified for AXFR")
-        self.serial = serial
-        self.is_udp = is_udp
-        (_, _, self.origin) = txn_manager.origin_information()
-        self.soa_rdataset: Optional[dns.rdataset.Rdataset] = None
-        self.done = False
-        self.expecting_SOA = False
-        self.delete_mode = False
-
-    def process_message(self, message: dns.message.Message) -> bool:
-        """Process one message in the transfer.
-
-        The message should have the same relativization as was specified when
-        the `dns.xfr.Inbound` was created.  The message should also have been
-        created with `one_rr_per_rrset=True` because order matters.
-
-        Returns `True` if the transfer is complete, and `False` otherwise.
-        """
-        if self.txn is None:
-            replacement = self.rdtype == dns.rdatatype.AXFR
-            self.txn = self.txn_manager.writer(replacement)
-        rcode = message.rcode()
-        if rcode != dns.rcode.NOERROR:
-            raise TransferError(rcode)
-        #
-        # We don't require a question section, but if it is present it
-        # should be correct.
-        #
-        if len(message.question) > 0:
-            if message.question[0].name != self.origin:
-                raise dns.exception.FormError("wrong question name")
-            if message.question[0].rdtype != self.rdtype:
-                raise dns.exception.FormError("wrong question rdatatype")
-        answer_index = 0
-        if self.soa_rdataset is None:
-            #
-            # This is the first message.  We're expecting an SOA at
-            # the origin.
-            #
-            if not message.answer or message.answer[0].name != self.origin:
-                raise dns.exception.FormError("No answer or RRset not for zone origin")
-            rrset = message.answer[0]
-            rdataset = rrset
-            if rdataset.rdtype != dns.rdatatype.SOA:
-                raise dns.exception.FormError("first RRset is not an SOA")
-            answer_index = 1
-            self.soa_rdataset = rdataset.copy()
-            if self.rdtype == dns.rdatatype.IXFR:
-                if self.soa_rdataset[0].serial == self.serial:
-                    #
-                    # We're already up-to-date.
-                    #
-                    self.done = True
-                elif dns.serial.Serial(self.soa_rdataset[0].serial) < self.serial:
-                    # It went backwards!
-                    raise SerialWentBackwards
-                else:
-                    if self.is_udp and len(message.answer[answer_index:]) == 0:
-                        #
-                        # There are no more records, so this is the
-                        # "truncated" response.  Say to use TCP
-                        #
-                        raise UseTCP
-                    #
-                    # Note we're expecting another SOA so we can detect
-                    # if this IXFR response is an AXFR-style response.
-                    #
-                    self.expecting_SOA = True
-        #
-        # Process the answer section (other than the initial SOA in
-        # the first message).
-        #
-        for rrset in message.answer[answer_index:]:
-            name = rrset.name
-            rdataset = rrset
-            if self.done:
-                raise dns.exception.FormError("answers after final SOA")
-            assert self.txn is not None  # for mypy
-            if rdataset.rdtype == dns.rdatatype.SOA and name == self.origin:
-                #
-                # Every time we see an origin SOA delete_mode inverts
-                #
-                if self.rdtype == dns.rdatatype.IXFR:
-                    self.delete_mode = not self.delete_mode
-                #
-                # If this SOA Rdataset is equal to the first we saw
-                # then we're finished.  If this is an IXFR we also
-                # check that we're seeing the record in the expected
-                # part of the response.
- # - if rdataset == self.soa_rdataset and ( - self.rdtype == dns.rdatatype.AXFR - or (self.rdtype == dns.rdatatype.IXFR and self.delete_mode) - ): - # - # This is the final SOA - # - if self.expecting_SOA: - # We got an empty IXFR sequence! - raise dns.exception.FormError("empty IXFR sequence") - if ( - self.rdtype == dns.rdatatype.IXFR - and self.serial != rdataset[0].serial - ): - raise dns.exception.FormError("unexpected end of IXFR sequence") - self.txn.replace(name, rdataset) - self.txn.commit() - self.txn = None - self.done = True - else: - # - # This is not the final SOA - # - self.expecting_SOA = False - if self.rdtype == dns.rdatatype.IXFR: - if self.delete_mode: - # This is the start of an IXFR deletion set - if rdataset[0].serial != self.serial: - raise dns.exception.FormError( - "IXFR base serial mismatch" - ) - else: - # This is the start of an IXFR addition set - self.serial = rdataset[0].serial - self.txn.replace(name, rdataset) - else: - # We saw a non-final SOA for the origin in an AXFR. - raise dns.exception.FormError("unexpected origin SOA in AXFR") - continue - if self.expecting_SOA: - # - # We made an IXFR request and are expecting another - # SOA RR, but saw something else, so this must be an - # AXFR response. - # - self.rdtype = dns.rdatatype.AXFR - self.expecting_SOA = False - self.delete_mode = False - self.txn.rollback() - self.txn = self.txn_manager.writer(True) - # - # Note we are falling through into the code below - # so whatever rdataset this was gets written. - # - # Add or remove the data - if self.delete_mode: - self.txn.delete_exact(name, rdataset) - else: - self.txn.add(name, rdataset) - if self.is_udp and not self.done: - # - # This is a UDP IXFR and we didn't get to done, and we didn't - # get the proper "truncated" response - # - raise dns.exception.FormError("unexpected end of UDP IXFR") - return self.done - - # - # Inbounds are context managers. - # - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if self.txn: - self.txn.rollback() - return False - - -def make_query( - txn_manager: dns.transaction.TransactionManager, - serial: Optional[int] = 0, - use_edns: Optional[Union[int, bool]] = None, - ednsflags: Optional[int] = None, - payload: Optional[int] = None, - request_payload: Optional[int] = None, - options: Optional[List[dns.edns.Option]] = None, - keyring: Any = None, - keyname: Optional[dns.name.Name] = None, - keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, -) -> Tuple[dns.message.QueryMessage, Optional[int]]: - """Make an AXFR or IXFR query. - - *txn_manager* is a ``dns.transaction.TransactionManager``, typically a - ``dns.zone.Zone``. - - *serial* is an ``int`` or ``None``. If 0, then IXFR will be - attempted using the most recent serial number from the - *txn_manager*; it is the caller's responsibility to ensure there - are no write transactions active that could invalidate the - retrieved serial. If a serial cannot be determined, AXFR will be - forced. Other integer values are the starting serial to use. - ``None`` forces an AXFR. - - Please see the documentation for :py:func:`dns.message.make_query` and - :py:func:`dns.message.Message.use_tsig` for details on the other parameters - to this function. - - Returns a `(query, serial)` tuple. 
- """ - (zone_origin, _, origin) = txn_manager.origin_information() - if zone_origin is None: - raise ValueError("no zone origin") - if serial is None: - rdtype = dns.rdatatype.AXFR - elif not isinstance(serial, int): - raise ValueError("serial is not an integer") - elif serial == 0: - with txn_manager.reader() as txn: - rdataset = txn.get(origin, "SOA") - if rdataset: - serial = rdataset[0].serial - rdtype = dns.rdatatype.IXFR - else: - serial = None - rdtype = dns.rdatatype.AXFR - elif serial > 0 and serial < 4294967296: - rdtype = dns.rdatatype.IXFR - else: - raise ValueError("serial out-of-range") - rdclass = txn_manager.get_class() - q = dns.message.make_query( - zone_origin, - rdtype, - rdclass, - use_edns, - False, - ednsflags, - payload, - request_payload, - options, - ) - if serial is not None: - rdata = dns.rdata.from_text(rdclass, "SOA", f". . {serial} 0 0 0 0") - rrset = q.find_rrset( - q.authority, zone_origin, rdclass, dns.rdatatype.SOA, create=True - ) - rrset.add(rdata, 0) - if keyring is not None: - q.use_tsig(keyring, keyname, algorithm=keyalgorithm) - return (q, serial) - - -def extract_serial_from_query(query: dns.message.Message) -> Optional[int]: - """Extract the SOA serial number from query if it is an IXFR and return - it, otherwise return None. - - *query* is a dns.message.QueryMessage that is an IXFR or AXFR request. - - Raises if the query is not an IXFR or AXFR, or if an IXFR doesn't have - an appropriate SOA RRset in the authority section. - """ - if not isinstance(query, dns.message.QueryMessage): - raise ValueError("query not a QueryMessage") - question = query.question[0] - if question.rdtype == dns.rdatatype.AXFR: - return None - elif question.rdtype != dns.rdatatype.IXFR: - raise ValueError("query is not an AXFR or IXFR") - soa = query.find_rrset( - query.authority, question.name, question.rdclass, dns.rdatatype.SOA - ) - return soa[0].serial diff --git a/server/venv/Lib/site-packages/dns/zone.py b/server/venv/Lib/site-packages/dns/zone.py deleted file mode 100644 index 9e763f5..0000000 --- a/server/venv/Lib/site-packages/dns/zone.py +++ /dev/null @@ -1,1395 +0,0 @@ -# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license - -# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose with or without fee is hereby granted, -# provided that the above copyright notice and this permission notice -# appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
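dns/xfr.py above separates protocol logic from transport: make_query() builds the AXFR or IXFR request, and Inbound validates and applies each response message that a transport feeds it. A sketch using dns.query.inbound_xfr as that transport; 10.0.0.1 stands in for a real primary server:

```python
import dns.query
import dns.rdatatype
import dns.xfr
import dns.zone

# A freshly created zone has no SOA to take a serial from, so
# make_query() falls back to a full AXFR.
zone = dns.zone.Zone("example.")
query, serial = dns.xfr.make_query(zone)
assert query.question[0].rdtype == dns.rdatatype.AXFR and serial is None

# inbound_xfr() runs the Inbound state machine over TCP and commits the
# transferred records into the zone.
dns.query.inbound_xfr("10.0.0.1", zone)
```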
-
-"""DNS Zones."""
-
-import contextlib
-import io
-import os
-import struct
-from typing import Any, Dict, Iterable, Iterator, List, Optional, Set, Tuple, Union
-
-import dns.exception
-import dns.grange
-import dns.immutable
-import dns.name
-import dns.node
-import dns.rdata
-import dns.rdataclass
-import dns.rdataset
-import dns.rdatatype
-import dns.rdtypes.ANY.SOA
-import dns.rdtypes.ANY.ZONEMD
-import dns.rrset
-import dns.tokenizer
-import dns.transaction
-import dns.ttl
-import dns.zonefile
-from dns.zonetypes import DigestHashAlgorithm, DigestScheme, _digest_hashers
-
-
-class BadZone(dns.exception.DNSException):
-
-    """The DNS zone is malformed."""
-
-
-class NoSOA(BadZone):
-
-    """The DNS zone has no SOA RR at its origin."""
-
-
-class NoNS(BadZone):
-
-    """The DNS zone has no NS RRset at its origin."""
-
-
-class UnknownOrigin(BadZone):
-
-    """The DNS zone's origin is unknown."""
-
-
-class UnsupportedDigestScheme(dns.exception.DNSException):
-
-    """The zone digest's scheme is unsupported."""
-
-
-class UnsupportedDigestHashAlgorithm(dns.exception.DNSException):
-
-    """The zone digest's hash algorithm is unsupported."""
-
-
-class NoDigest(dns.exception.DNSException):
-
-    """The DNS zone has no ZONEMD RRset at its origin."""
-
-
-class DigestVerificationFailure(dns.exception.DNSException):
-
-    """The ZONEMD digest failed to verify."""
-
-
-class Zone(dns.transaction.TransactionManager):
-
-    """A DNS zone.
-
-    A ``Zone`` is a mapping from names to nodes.  The zone object may be
-    treated like a Python dictionary, e.g. ``zone[name]`` will retrieve
-    the node associated with that name.  The *name* may be a
-    ``dns.name.Name`` object, or it may be a string.  In either case,
-    if the name is relative it is treated as relative to the origin of
-    the zone.
-    """
-
-    node_factory = dns.node.Node
-
-    __slots__ = ["rdclass", "origin", "nodes", "relativize"]
-
-    def __init__(
-        self,
-        origin: Optional[Union[dns.name.Name, str]],
-        rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
-        relativize: bool = True,
-    ):
-        """Initialize a zone object.
-
-        *origin* is the origin of the zone.  It may be a ``dns.name.Name``,
-        a ``str``, or ``None``.  If ``None``, then the zone's origin will
-        be set by the first ``$ORIGIN`` line in a zone file.
-
-        *rdclass*, an ``int``, the zone's rdata class; the default is class IN.
-
-        *relativize*, a ``bool``, determines whether domain names are
-        relativized to the zone's origin.  The default is ``True``.
-        """
-
-        if origin is not None:
-            if isinstance(origin, str):
-                origin = dns.name.from_text(origin)
-            elif not isinstance(origin, dns.name.Name):
-                raise ValueError("origin parameter must be convertible to a DNS name")
-            if not origin.is_absolute():
-                raise ValueError("origin parameter must be an absolute name")
-        self.origin = origin
-        self.rdclass = rdclass
-        self.nodes: Dict[dns.name.Name, dns.node.Node] = {}
-        self.relativize = relativize
-
-    def __eq__(self, other):
-        """Two zones are equal if they have the same origin, class, and
-        nodes.
-
-        Returns a ``bool``.
-        """
-
-        if not isinstance(other, Zone):
-            return False
-        if (
-            self.rdclass != other.rdclass
-            or self.origin != other.origin
-            or self.nodes != other.nodes
-        ):
-            return False
-        return True
-
-    def __ne__(self, other):
-        """Are two zones not equal?
-
-        Returns a ``bool``.
- """ - - return not self.__eq__(other) - - def _validate_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name: - if isinstance(name, str): - name = dns.name.from_text(name, None) - elif not isinstance(name, dns.name.Name): - raise KeyError("name parameter must be convertible to a DNS name") - if name.is_absolute(): - if self.origin is None: - # This should probably never happen as other code (e.g. - # _rr_line) will notice the lack of an origin before us, but - # we check just in case! - raise KeyError("no zone origin is defined") - if not name.is_subdomain(self.origin): - raise KeyError("name parameter must be a subdomain of the zone origin") - if self.relativize: - name = name.relativize(self.origin) - elif not self.relativize: - # We have a relative name in a non-relative zone, so derelativize. - if self.origin is None: - raise KeyError("no zone origin is defined") - name = name.derelativize(self.origin) - return name - - def __getitem__(self, key): - key = self._validate_name(key) - return self.nodes[key] - - def __setitem__(self, key, value): - key = self._validate_name(key) - self.nodes[key] = value - - def __delitem__(self, key): - key = self._validate_name(key) - del self.nodes[key] - - def __iter__(self): - return self.nodes.__iter__() - - def keys(self): - return self.nodes.keys() - - def values(self): - return self.nodes.values() - - def items(self): - return self.nodes.items() - - def get(self, key): - key = self._validate_name(key) - return self.nodes.get(key) - - def __contains__(self, key): - key = self._validate_name(key) - return key in self.nodes - - def find_node( - self, name: Union[dns.name.Name, str], create: bool = False - ) -> dns.node.Node: - """Find a node in the zone, possibly creating it. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *create*, a ``bool``. If true, the node will be created if it does - not exist. - - Raises ``KeyError`` if the name is not known and create was - not specified, or if the name was not a subdomain of the origin. - - Returns a ``dns.node.Node``. - """ - - name = self._validate_name(name) - node = self.nodes.get(name) - if node is None: - if not create: - raise KeyError - node = self.node_factory() - self.nodes[name] = node - return node - - def get_node( - self, name: Union[dns.name.Name, str], create: bool = False - ) -> Optional[dns.node.Node]: - """Get a node in the zone, possibly creating it. - - This method is like ``find_node()``, except it returns None instead - of raising an exception if the node does not exist and creation - has not been requested. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *create*, a ``bool``. If true, the node will be created if it does - not exist. - - Raises ``KeyError`` if the name is not known and create was - not specified, or if the name was not a subdomain of the origin. - - Returns a ``dns.node.Node`` or ``None``. - """ - - try: - node = self.find_node(name, create) - except KeyError: - node = None - return node - - def delete_node(self, name: Union[dns.name.Name, str]) -> None: - """Delete the specified node if it exists. - - *name*: the name of the node to find. 
- The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - It is not an error if the node does not exist. - """ - - name = self._validate_name(name) - if name in self.nodes: - del self.nodes[name] - - def find_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - create: bool = False, - ) -> dns.rdataset.Rdataset: - """Look for an rdataset with the specified name and type in the zone, - and return an rdataset encapsulating it. - - The rdataset returned is not a copy; changes to it will change - the zone. - - KeyError is raised if the name or type are not found. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdatatype.RdataType`` or ``str`` the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). - This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - - *create*, a ``bool``. If true, the node will be created if it does - not exist. - - Raises ``KeyError`` if the name is not known and create was - not specified, or if the name was not a subdomain of the origin. - - Returns a ``dns.rdataset.Rdataset``. - """ - - name = self._validate_name(name) - rdtype = dns.rdatatype.RdataType.make(rdtype) - covers = dns.rdatatype.RdataType.make(covers) - node = self.find_node(name, create) - return node.find_rdataset(self.rdclass, rdtype, covers, create) - - def get_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - create: bool = False, - ) -> Optional[dns.rdataset.Rdataset]: - """Look for an rdataset with the specified name and type in the zone. - - This method is like ``find_rdataset()``, except it returns None instead - of raising an exception if the rdataset does not exist and creation - has not been requested. - - The rdataset returned is not a copy; changes to it will change - the zone. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). 
- This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - - *create*, a ``bool``. If true, the node will be created if it does - not exist. - - Raises ``KeyError`` if the name is not known and create was - not specified, or if the name was not a subdomain of the origin. - - Returns a ``dns.rdataset.Rdataset`` or ``None``. - """ - - try: - rdataset = self.find_rdataset(name, rdtype, covers, create) - except KeyError: - rdataset = None - return rdataset - - def delete_rdataset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> None: - """Delete the rdataset matching *rdtype* and *covers*, if it - exists at the node specified by *name*. - - It is not an error if the node does not exist, or if there is no matching - rdataset at the node. - - If the node has no rdatasets after the deletion, it will itself be deleted. - - *name*: the name of the node to find. The value may be a ``dns.name.Name`` or a - ``str``. If absolute, the name must be a subdomain of the zone's origin. If - ``zone.relativize`` is ``True``, then the name will be relativized. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdatatype.RdataType`` or ``str`` or ``None``, the covered - type. Usually this value is ``dns.rdatatype.NONE``, but if the rdtype is - ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, then the covers value will be - the rdata type the SIG/RRSIG covers. The library treats the SIG and RRSIG types - as if they were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This - makes RRSIGs much easier to work with than if RRSIGs covering different rdata - types were aggregated into a single RRSIG rdataset. - """ - - name = self._validate_name(name) - rdtype = dns.rdatatype.RdataType.make(rdtype) - covers = dns.rdatatype.RdataType.make(covers) - node = self.get_node(name) - if node is not None: - node.delete_rdataset(self.rdclass, rdtype, covers) - if len(node) == 0: - self.delete_node(name) - - def replace_rdataset( - self, name: Union[dns.name.Name, str], replacement: dns.rdataset.Rdataset - ) -> None: - """Replace an rdataset at name. - - It is not an error if there is no rdataset matching I{replacement}. - - Ownership of the *replacement* object is transferred to the zone; - in other words, this method does not store a copy of *replacement* - at the node, it stores *replacement* itself. - - If the node does not exist, it is created. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *replacement*, a ``dns.rdataset.Rdataset``, the replacement rdataset. - """ - - if replacement.rdclass != self.rdclass: - raise ValueError("replacement.rdclass != zone.rdclass") - node = self.find_node(name, True) - node.replace_rdataset(replacement) - - def find_rrset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> dns.rrset.RRset: - """Look for an rdataset with the specified name and type in the zone, - and return an RRset encapsulating it. 
- - This method is less efficient than the similar - ``find_rdataset()`` because it creates an RRset instead of - returning the matching rdataset. It may be more convenient - for some uses since it returns an object which binds the owner - name to the rdataset. - - This method may not be used to create new nodes or rdatasets; - use ``find_rdataset`` instead. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). - This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - - *create*, a ``bool``. If true, the node will be created if it does - not exist. - - Raises ``KeyError`` if the name is not known and create was - not specified, or if the name was not a subdomain of the origin. - - Returns a ``dns.rrset.RRset`` or ``None``. - """ - - vname = self._validate_name(name) - rdtype = dns.rdatatype.RdataType.make(rdtype) - covers = dns.rdatatype.RdataType.make(covers) - rdataset = self.nodes[vname].find_rdataset(self.rdclass, rdtype, covers) - rrset = dns.rrset.RRset(vname, self.rdclass, rdtype, covers) - rrset.update(rdataset) - return rrset - - def get_rrset( - self, - name: Union[dns.name.Name, str], - rdtype: Union[dns.rdatatype.RdataType, str], - covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, - ) -> Optional[dns.rrset.RRset]: - """Look for an rdataset with the specified name and type in the zone, - and return an RRset encapsulating it. - - This method is less efficient than the similar ``get_rdataset()`` - because it creates an RRset instead of returning the matching - rdataset. It may be more convenient for some uses since it - returns an object which binds the owner name to the rdataset. - - This method may not be used to create new nodes or rdatasets; - use ``get_rdataset()`` instead. - - *name*: the name of the node to find. - The value may be a ``dns.name.Name`` or a ``str``. If absolute, the - name must be a subdomain of the zone's origin. If ``zone.relativize`` - is ``True``, then the name will be relativized. - - *rdtype*, a ``dns.rdataset.Rdataset`` or ``str``, the rdata type desired. - - *covers*, a ``dns.rdataset.Rdataset`` or ``str``, the covered type. - Usually this value is ``dns.rdatatype.NONE``, but if the - rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, - then the covers value will be the rdata type the SIG/RRSIG - covers. The library treats the SIG and RRSIG types as if they - were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). - This makes RRSIGs much easier to work with than if RRSIGs - covering different rdata types were aggregated into a single - RRSIG rdataset. - - *create*, a ``bool``. If true, the node will be created if it does - not exist. - - Raises ``KeyError`` if the name is not known and create was - not specified, or if the name was not a subdomain of the origin. 
-
-        Returns a ``dns.rrset.RRset`` or ``None``.
-        """
-
-        try:
-            rrset = self.find_rrset(name, rdtype, covers)
-        except KeyError:
-            rrset = None
-        return rrset
-
-    def iterate_rdatasets(
-        self,
-        rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.ANY,
-        covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE,
-    ) -> Iterator[Tuple[dns.name.Name, dns.rdataset.Rdataset]]:
-        """Return a generator which yields (name, rdataset) tuples for
-        all rdatasets in the zone which have the specified *rdtype*
-        and *covers*.  If *rdtype* is ``dns.rdatatype.ANY``, the default,
-        then all rdatasets will be matched.
-
-        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
-
-        *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type.
-        Usually this value is ``dns.rdatatype.NONE``, but if the
-        rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``,
-        then the covers value will be the rdata type the SIG/RRSIG
-        covers.  The library treats the SIG and RRSIG types as if they
-        were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).
-        This makes RRSIGs much easier to work with than if RRSIGs
-        covering different rdata types were aggregated into a single
-        RRSIG rdataset.
-        """
-
-        rdtype = dns.rdatatype.RdataType.make(rdtype)
-        covers = dns.rdatatype.RdataType.make(covers)
-        for name, node in self.items():
-            for rds in node:
-                if rdtype == dns.rdatatype.ANY or (
-                    rds.rdtype == rdtype and rds.covers == covers
-                ):
-                    yield (name, rds)
-
-    def iterate_rdatas(
-        self,
-        rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.ANY,
-        covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE,
-    ) -> Iterator[Tuple[dns.name.Name, int, dns.rdata.Rdata]]:
-        """Return a generator which yields (name, ttl, rdata) tuples for
-        all rdatas in the zone which have the specified *rdtype*
-        and *covers*.  If *rdtype* is ``dns.rdatatype.ANY``, the default,
-        then all rdatas will be matched.
-
-        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
-
-        *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type.
-        Usually this value is ``dns.rdatatype.NONE``, but if the
-        rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``,
-        then the covers value will be the rdata type the SIG/RRSIG
-        covers.  The library treats the SIG and RRSIG types as if they
-        were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).
-        This makes RRSIGs much easier to work with than if RRSIGs
-        covering different rdata types were aggregated into a single
-        RRSIG rdataset.
-        """
-
-        rdtype = dns.rdatatype.RdataType.make(rdtype)
-        covers = dns.rdatatype.RdataType.make(covers)
-        for name, node in self.items():
-            for rds in node:
-                if rdtype == dns.rdatatype.ANY or (
-                    rds.rdtype == rdtype and rds.covers == covers
-                ):
-                    for rdata in rds:
-                        yield (name, rds.ttl, rdata)
-
-    def to_file(
-        self,
-        f: Any,
-        sorted: bool = True,
-        relativize: bool = True,
-        nl: Optional[str] = None,
-        want_comments: bool = False,
-        want_origin: bool = False,
-    ) -> None:
-        """Write a zone to a file.
-
-        *f*, a file or `str`.  If *f* is a string, it is treated
-        as the name of a file to open.
-
-        *sorted*, a ``bool``.  If True, the default, then the file
-        will be written with the names sorted in DNSSEC order from
-        least to greatest.  Otherwise the names will be written in
-        whatever order they happen to have in the zone's dictionary.
-
-        *relativize*, a ``bool``.  If True, the default, then domain
-        names in the output will be relativized to the zone's origin
-        if possible.
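A minimal sketch of the iteration helpers above (invented zone data); ``iterate_rdatas`` flattens the zone into (name, ttl, rdata) tuples:

```python
import dns.rdatatype
import dns.zone

zone = dns.zone.from_text(
    "@ 300 IN SOA ns1 hostmaster 1 7200 900 1209600 300\n"
    "@ 300 IN NS ns1\n"
    "ns1 300 IN A 10.0.0.1\n"
    "www 300 IN A 10.0.0.2\n",
    origin="example.com.",
)

# Every A record in the zone, one (name, ttl, rdata) tuple per rdata.
for name, ttl, rdata in zone.iterate_rdatas(dns.rdatatype.A):
    print(name, ttl, rdata.address)
```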
-
-        *nl*, a ``str`` or ``None``.  The end of line string.  If ``None``,
-        the output will use the platform's native end-of-line marker
-        (i.e. LF on POSIX, CRLF on Windows).
-
-        *want_comments*, a ``bool``.  If ``True``, emit end-of-line comments
-        as part of writing the file.  If ``False``, the default, do not
-        emit them.
-
-        *want_origin*, a ``bool``.  If ``True``, emit a $ORIGIN line at
-        the start of the file.  If ``False``, the default, do not emit
-        one.
-        """
-
-        if isinstance(f, str):
-            cm: contextlib.AbstractContextManager = open(f, "wb")
-        else:
-            cm = contextlib.nullcontext(f)
-        with cm as f:
-            # must be done this way, as f.encoding may be None, or the
-            # attribute may not exist at all
-            file_enc = getattr(f, "encoding", None)
-            if file_enc is None:
-                file_enc = "utf-8"
-
-            if nl is None:
-                # binary mode, '\n' is not enough
-                nl_b = os.linesep.encode(file_enc)
-                nl = "\n"
-            elif isinstance(nl, str):
-                nl_b = nl.encode(file_enc)
-            else:
-                nl_b = nl
-                nl = nl.decode()
-
-            if want_origin:
-                assert self.origin is not None
-                l = "$ORIGIN " + self.origin.to_text()
-                l_b = l.encode(file_enc)
-                try:
-                    f.write(l_b)
-                    f.write(nl_b)
-                except TypeError:  # textual mode
-                    f.write(l)
-                    f.write(nl)
-
-            if sorted:
-                names = list(self.keys())
-                names.sort()
-            else:
-                names = self.keys()
-            for n in names:
-                l = self[n].to_text(
-                    n,
-                    origin=self.origin,
-                    relativize=relativize,
-                    want_comments=want_comments,
-                )
-                l_b = l.encode(file_enc)
-
-                try:
-                    f.write(l_b)
-                    f.write(nl_b)
-                except TypeError:  # textual mode
-                    f.write(l)
-                    f.write(nl)
-
-    def to_text(
-        self,
-        sorted: bool = True,
-        relativize: bool = True,
-        nl: Optional[str] = None,
-        want_comments: bool = False,
-        want_origin: bool = False,
-    ) -> str:
-        """Return a zone's text as though it were written to a file.
-
-        *sorted*, a ``bool``.  If True, the default, then the file
-        will be written with the names sorted in DNSSEC order from
-        least to greatest.  Otherwise the names will be written in
-        whatever order they happen to have in the zone's dictionary.
-
-        *relativize*, a ``bool``.  If True, the default, then domain
-        names in the output will be relativized to the zone's origin
-        if possible.
-
-        *nl*, a ``str`` or ``None``.  The end of line string.  If ``None``,
-        the output will use the platform's native end-of-line marker
-        (i.e. LF on POSIX, CRLF on Windows).
-
-        *want_comments*, a ``bool``.  If ``True``, emit end-of-line comments
-        as part of writing the file.  If ``False``, the default, do not
-        emit them.
-
-        *want_origin*, a ``bool``.  If ``True``, emit a $ORIGIN line at
-        the start of the output.  If ``False``, the default, do not emit
-        one.
-
-        Returns a ``str``.
-        """
-        temp_buffer = io.StringIO()
-        self.to_file(temp_buffer, sorted, relativize, nl, want_comments, want_origin)
-        return_value = temp_buffer.getvalue()
-        temp_buffer.close()
-        return return_value
-
-    def check_origin(self) -> None:
-        """Do some simple checking of the zone's origin.
-
-        Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
-
-        Raises ``dns.zone.NoNS`` if there is no NS RRset.
-
-        Raises ``KeyError`` if there is no origin node.
-        """
-        if self.relativize:
-            name = dns.name.empty
-        else:
-            assert self.origin is not None
-            name = self.origin
-        if self.get_rdataset(name, dns.rdatatype.SOA) is None:
-            raise NoSOA
-        if self.get_rdataset(name, dns.rdatatype.NS) is None:
-            raise NoNS
-
-    def get_soa(
-        self, txn: Optional[dns.transaction.Transaction] = None
-    ) -> dns.rdtypes.ANY.SOA.SOA:
-        """Get the zone SOA rdata.
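A small sketch of the serialization helpers (invented zone): ``to_text()`` is ``to_file()`` into a string buffer, and ``get_soa()`` is a convenient way to read the serial.

```python
import dns.zone

zone = dns.zone.from_text(
    "@ 300 IN SOA ns1 hostmaster 2023120101 7200 900 1209600 300\n"
    "@ 300 IN NS ns1\n",
    origin="example.com.",
)

print(zone.to_text())                  # relativized names, DNSSEC sort order
print(zone.to_text(relativize=False))  # absolute owner names
print(zone.get_soa().serial)           # 2023120101
```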
- - Raises ``dns.zone.NoSOA`` if there is no SOA RRset. - - Returns a ``dns.rdtypes.ANY.SOA.SOA`` Rdata. - """ - if self.relativize: - origin_name = dns.name.empty - else: - if self.origin is None: - # get_soa() has been called very early, and there must not be - # an SOA if there is no origin. - raise NoSOA - origin_name = self.origin - soa: Optional[dns.rdataset.Rdataset] - if txn: - soa = txn.get(origin_name, dns.rdatatype.SOA) - else: - soa = self.get_rdataset(origin_name, dns.rdatatype.SOA) - if soa is None: - raise NoSOA - return soa[0] - - def _compute_digest( - self, - hash_algorithm: DigestHashAlgorithm, - scheme: DigestScheme = DigestScheme.SIMPLE, - ) -> bytes: - hashinfo = _digest_hashers.get(hash_algorithm) - if not hashinfo: - raise UnsupportedDigestHashAlgorithm - if scheme != DigestScheme.SIMPLE: - raise UnsupportedDigestScheme - - if self.relativize: - origin_name = dns.name.empty - else: - assert self.origin is not None - origin_name = self.origin - hasher = hashinfo() - for name, node in sorted(self.items()): - rrnamebuf = name.to_digestable(self.origin) - for rdataset in sorted(node, key=lambda rds: (rds.rdtype, rds.covers)): - if name == origin_name and dns.rdatatype.ZONEMD in ( - rdataset.rdtype, - rdataset.covers, - ): - continue - rrfixed = struct.pack( - "!HHI", rdataset.rdtype, rdataset.rdclass, rdataset.ttl - ) - rdatas = [rdata.to_digestable(self.origin) for rdata in rdataset] - for rdata in sorted(rdatas): - rrlen = struct.pack("!H", len(rdata)) - hasher.update(rrnamebuf + rrfixed + rrlen + rdata) - return hasher.digest() - - def compute_digest( - self, - hash_algorithm: DigestHashAlgorithm, - scheme: DigestScheme = DigestScheme.SIMPLE, - ) -> dns.rdtypes.ANY.ZONEMD.ZONEMD: - serial = self.get_soa().serial - digest = self._compute_digest(hash_algorithm, scheme) - return dns.rdtypes.ANY.ZONEMD.ZONEMD( - self.rdclass, dns.rdatatype.ZONEMD, serial, scheme, hash_algorithm, digest - ) - - def verify_digest( - self, zonemd: Optional[dns.rdtypes.ANY.ZONEMD.ZONEMD] = None - ) -> None: - digests: Union[dns.rdataset.Rdataset, List[dns.rdtypes.ANY.ZONEMD.ZONEMD]] - if zonemd: - digests = [zonemd] - else: - assert self.origin is not None - rds = self.get_rdataset(self.origin, dns.rdatatype.ZONEMD) - if rds is None: - raise NoDigest - digests = rds - for digest in digests: - try: - computed = self._compute_digest(digest.hash_algorithm, digest.scheme) - if computed == digest.digest: - return - except Exception: - pass - raise DigestVerificationFailure - - # TransactionManager methods - - def reader(self) -> "Transaction": - return Transaction(self, False, Version(self, 1, self.nodes, self.origin)) - - def writer(self, replacement: bool = False) -> "Transaction": - txn = Transaction(self, replacement) - txn._setup_version() - return txn - - def origin_information( - self, - ) -> Tuple[Optional[dns.name.Name], bool, Optional[dns.name.Name]]: - effective: Optional[dns.name.Name] - if self.relativize: - effective = dns.name.empty - else: - effective = self.origin - return (self.origin, self.relativize, effective) - - def get_class(self): - return self.rdclass - - # Transaction methods - - def _end_read(self, txn): - pass - - def _end_write(self, txn): - pass - - def _commit_version(self, _, version, origin): - self.nodes = version.nodes - if self.origin is None: - self.origin = origin - - def _get_next_version_id(self): - # Versions are ephemeral and all have id 1 - return 1 - - -# These classes used to be in dns.versioned, but have moved here so we can use -# the copy-on-write 
transaction mechanism for both kinds of zones. In a -# regular zone, the version only exists during the transaction, and the nodes -# are regular dns.node.Nodes. - -# A node with a version id. - - -class VersionedNode(dns.node.Node): # lgtm[py/missing-equals] - __slots__ = ["id"] - - def __init__(self): - super().__init__() - # A proper id will get set by the Version - self.id = 0 - - -@dns.immutable.immutable -class ImmutableVersionedNode(VersionedNode): - def __init__(self, node): - super().__init__() - self.id = node.id - self.rdatasets = tuple( - [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets] - ) - - def find_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> dns.rdataset.Rdataset: - if create: - raise TypeError("immutable") - return super().find_rdataset(rdclass, rdtype, covers, False) - - def get_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - create: bool = False, - ) -> Optional[dns.rdataset.Rdataset]: - if create: - raise TypeError("immutable") - return super().get_rdataset(rdclass, rdtype, covers, False) - - def delete_rdataset( - self, - rdclass: dns.rdataclass.RdataClass, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, - ) -> None: - raise TypeError("immutable") - - def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: - raise TypeError("immutable") - - def is_immutable(self) -> bool: - return True - - -class Version: - def __init__( - self, - zone: Zone, - id: int, - nodes: Optional[Dict[dns.name.Name, dns.node.Node]] = None, - origin: Optional[dns.name.Name] = None, - ): - self.zone = zone - self.id = id - if nodes is not None: - self.nodes = nodes - else: - self.nodes = {} - self.origin = origin - - def _validate_name(self, name: dns.name.Name) -> dns.name.Name: - if name.is_absolute(): - if self.origin is None: - # This should probably never happen as other code (e.g. - # _rr_line) will notice the lack of an origin before us, but - # we check just in case! - raise KeyError("no zone origin is defined") - if not name.is_subdomain(self.origin): - raise KeyError("name is not a subdomain of the zone origin") - if self.zone.relativize: - name = name.relativize(self.origin) - elif not self.zone.relativize: - # We have a relative name in a non-relative zone, so derelativize. - if self.origin is None: - raise KeyError("no zone origin is defined") - name = name.derelativize(self.origin) - return name - - def get_node(self, name: dns.name.Name) -> Optional[dns.node.Node]: - name = self._validate_name(name) - return self.nodes.get(name) - - def get_rdataset( - self, - name: dns.name.Name, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType, - ) -> Optional[dns.rdataset.Rdataset]: - node = self.get_node(name) - if node is None: - return None - return node.get_rdataset(self.zone.rdclass, rdtype, covers) - - def keys(self): - return self.nodes.keys() - - def items(self): - return self.nodes.items() - - -class WritableVersion(Version): - def __init__(self, zone: Zone, replacement: bool = False): - # The zone._versions_lock must be held by our caller in a versioned - # zone. 
- id = zone._get_next_version_id() - super().__init__(zone, id) - if not replacement: - # We copy the map, because that gives us a simple and thread-safe - # way of doing versions, and we have a garbage collector to help - # us. We only make new node objects if we actually change the - # node. - self.nodes.update(zone.nodes) - # We have to copy the zone origin as it may be None in the first - # version, and we don't want to mutate the zone until we commit. - self.origin = zone.origin - self.changed: Set[dns.name.Name] = set() - - def _maybe_cow(self, name: dns.name.Name) -> dns.node.Node: - name = self._validate_name(name) - node = self.nodes.get(name) - if node is None or name not in self.changed: - new_node = self.zone.node_factory() - if hasattr(new_node, "id"): - # We keep doing this for backwards compatibility, as earlier - # code used new_node.id != self.id for the "do we need to CoW?" - # test. Now we use the changed set as this works with both - # regular zones and versioned zones. - # - # We ignore the mypy error as this is safe but it doesn't see it. - new_node.id = self.id # type: ignore - if node is not None: - # moo! copy on write! - new_node.rdatasets.extend(node.rdatasets) - self.nodes[name] = new_node - self.changed.add(name) - return new_node - else: - return node - - def delete_node(self, name: dns.name.Name) -> None: - name = self._validate_name(name) - if name in self.nodes: - del self.nodes[name] - self.changed.add(name) - - def put_rdataset( - self, name: dns.name.Name, rdataset: dns.rdataset.Rdataset - ) -> None: - node = self._maybe_cow(name) - node.replace_rdataset(rdataset) - - def delete_rdataset( - self, - name: dns.name.Name, - rdtype: dns.rdatatype.RdataType, - covers: dns.rdatatype.RdataType, - ) -> None: - node = self._maybe_cow(name) - node.delete_rdataset(self.zone.rdclass, rdtype, covers) - if len(node) == 0: - del self.nodes[name] - - -@dns.immutable.immutable -class ImmutableVersion(Version): - def __init__(self, version: WritableVersion): - # We tell super() that it's a replacement as we don't want it - # to copy the nodes, as we're about to do that with an - # immutable Dict. - super().__init__(version.zone, True) - # set the right id! - self.id = version.id - # keep the origin - self.origin = version.origin - # Make changed nodes immutable - for name in version.changed: - node = version.nodes.get(name) - # it might not exist if we deleted it in the version - if node: - version.nodes[name] = ImmutableVersionedNode(node) - # We're changing the type of the nodes dictionary here on purpose, so - # we ignore the mypy error. 
-        self.nodes = dns.immutable.Dict(version.nodes, True)  # type: ignore
-
-
-class Transaction(dns.transaction.Transaction):
-    def __init__(self, zone, replacement, version=None, make_immutable=False):
-        read_only = version is not None
-        super().__init__(zone, replacement, read_only)
-        self.version = version
-        self.make_immutable = make_immutable
-
-    @property
-    def zone(self):
-        return self.manager
-
-    def _setup_version(self):
-        assert self.version is None
-        self.version = WritableVersion(self.zone, self.replacement)
-
-    def _get_rdataset(self, name, rdtype, covers):
-        return self.version.get_rdataset(name, rdtype, covers)
-
-    def _put_rdataset(self, name, rdataset):
-        assert not self.read_only
-        self.version.put_rdataset(name, rdataset)
-
-    def _delete_name(self, name):
-        assert not self.read_only
-        self.version.delete_node(name)
-
-    def _delete_rdataset(self, name, rdtype, covers):
-        assert not self.read_only
-        self.version.delete_rdataset(name, rdtype, covers)
-
-    def _name_exists(self, name):
-        return self.version.get_node(name) is not None
-
-    def _changed(self):
-        if self.read_only:
-            return False
-        else:
-            return len(self.version.changed) > 0
-
-    def _end_transaction(self, commit):
-        if self.read_only:
-            self.zone._end_read(self)
-        elif commit and len(self.version.changed) > 0:
-            if self.make_immutable:
-                version = ImmutableVersion(self.version)
-            else:
-                version = self.version
-            self.zone._commit_version(self, version, self.version.origin)
-        else:
-            # rollback
-            self.zone._end_write(self)
-
-    def _set_origin(self, origin):
-        if self.version.origin is None:
-            self.version.origin = origin
-
-    def _iterate_rdatasets(self):
-        for name, node in self.version.items():
-            for rdataset in node:
-                yield (name, rdataset)
-
-    def _iterate_names(self):
-        return self.version.keys()
-
-    def _get_node(self, name):
-        return self.version.get_node(name)
-
-    def _origin_information(self):
-        (absolute, relativize, effective) = self.manager.origin_information()
-        if absolute is None and self.version.origin is not None:
-            # No origin has been committed yet, but we've learned one as part of
-            # this txn.  Use it.
-            absolute = self.version.origin
-            if relativize:
-                effective = dns.name.empty
-            else:
-                effective = absolute
-        return (absolute, relativize, effective)
-
-
-def from_text(
-    text: str,
-    origin: Optional[Union[dns.name.Name, str]] = None,
-    rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
-    relativize: bool = True,
-    zone_factory: Any = Zone,
-    filename: Optional[str] = None,
-    allow_include: bool = False,
-    check_origin: bool = True,
-    idna_codec: Optional[dns.name.IDNACodec] = None,
-    allow_directives: Union[bool, Iterable[str]] = True,
-) -> Zone:
-    """Build a zone object from a zone file format string.
-
-    *text*, a ``str``, the zone file format input.
-
-    *origin*, a ``dns.name.Name``, a ``str``, or ``None``.  The origin
-    of the zone; if not specified, the first ``$ORIGIN`` statement in the
-    zone file will determine the origin of the zone.
-
-    *rdclass*, a ``dns.rdataclass.RdataClass``, the zone's rdata class; the default is
-    class IN.
-
-    *relativize*, a ``bool``, determines whether domain names are
-    relativized to the zone's origin.  The default is ``True``.
-
-    *zone_factory*, the zone factory to use or ``None``.  If ``None``, then
-    ``dns.zone.Zone`` will be used.  The value may be any class or callable
-    that returns a subclass of ``dns.zone.Zone``.
-
-    *filename*, a ``str`` or ``None``, the filename to emit when
-    describing where an error occurred; the default is ``'<string>'``.
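A brief sketch of the transaction interface implemented above (invented record data): writes made through ``writer()`` are applied to the zone only when the transaction commits on a clean exit.

```python
import dns.rdata
import dns.rdataclass
import dns.rdatatype
import dns.zone

zone = dns.zone.from_text(
    "@ 300 IN SOA ns1 hostmaster 1 7200 900 1209600 300\n"
    "@ 300 IN NS ns1\n",
    origin="example.com.",
)

with zone.writer() as txn:
    rdata = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, "10.0.0.9")
    txn.add("www", 300, rdata)  # committed atomically at the end of the block

print(zone.get_rdataset("www", dns.rdatatype.A))
```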
-
-    *allow_include*, a ``bool``.  If ``True``, then ``$INCLUDE`` directives
-    are permitted.  If ``False``, the default, then encountering a
-    ``$INCLUDE`` will raise a ``SyntaxError`` exception.
-
-    *check_origin*, a ``bool``.  If ``True``, the default, then sanity
-    checks of the origin node will be made by calling the zone's
-    ``check_origin()`` method.
-
-    *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA
-    encoder/decoder.  If ``None``, the default IDNA 2003 encoder/decoder
-    is used.
-
-    *allow_directives*, a ``bool`` or an iterable of `str`.  If ``True``, the default,
-    then directives are permitted, and the *allow_include* parameter controls whether
-    ``$INCLUDE`` is permitted.  If ``False`` or an empty iterable, then no directive
-    processing is done and any directive-like text will be treated as a regular owner
-    name.  If a non-empty iterable, then only the listed directives (including the
-    ``$``) are allowed.
-
-    Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
-
-    Raises ``dns.zone.NoNS`` if there is no NS RRset.
-
-    Raises ``KeyError`` if there is no origin node.
-
-    Returns a subclass of ``dns.zone.Zone``.
-    """
-
-    # 'text' can also be a file, but we don't publish that fact
-    # since it's an implementation detail.  The official file
-    # interface is from_file().
-
-    if filename is None:
-        filename = "<string>"
-    zone = zone_factory(origin, rdclass, relativize=relativize)
-    with zone.writer(True) as txn:
-        tok = dns.tokenizer.Tokenizer(text, filename, idna_codec=idna_codec)
-        reader = dns.zonefile.Reader(
-            tok,
-            rdclass,
-            txn,
-            allow_include=allow_include,
-            allow_directives=allow_directives,
-        )
-        try:
-            reader.read()
-        except dns.zonefile.UnknownOrigin:
-            # for backwards compatibility
-            raise dns.zone.UnknownOrigin
-    # Now that we're done reading, do some basic checking of the zone.
-    if check_origin:
-        zone.check_origin()
-    return zone
-
-
-def from_file(
-    f: Any,
-    origin: Optional[Union[dns.name.Name, str]] = None,
-    rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
-    relativize: bool = True,
-    zone_factory: Any = Zone,
-    filename: Optional[str] = None,
-    allow_include: bool = True,
-    check_origin: bool = True,
-    idna_codec: Optional[dns.name.IDNACodec] = None,
-    allow_directives: Union[bool, Iterable[str]] = True,
-) -> Zone:
-    """Read a zone file and build a zone object.
-
-    *f*, a file or ``str``.  If *f* is a string, it is treated
-    as the name of a file to open.
-
-    *origin*, a ``dns.name.Name``, a ``str``, or ``None``.  The origin
-    of the zone; if not specified, the first ``$ORIGIN`` statement in the
-    zone file will determine the origin of the zone.
-
-    *rdclass*, a ``dns.rdataclass.RdataClass``, the zone's rdata class; the
-    default is class IN.
-
-    *relativize*, a ``bool``, determines whether domain names are
-    relativized to the zone's origin.  The default is ``True``.
-
-    *zone_factory*, the zone factory to use or ``None``.  If ``None``, then
-    ``dns.zone.Zone`` will be used.  The value may be any class or callable
-    that returns a subclass of ``dns.zone.Zone``.
-
-    *filename*, a ``str`` or ``None``, the filename to emit when
-    describing where an error occurred; the default is *f* if *f* is a
-    ``str``, otherwise ``'<string>'``.
-
-    *allow_include*, a ``bool``.  If ``True``, the default, then ``$INCLUDE``
-    directives are permitted.  If ``False``, then encountering a ``$INCLUDE``
-    will raise a ``SyntaxError`` exception.
-
-    *check_origin*, a ``bool``.  If ``True``, the default, then sanity
-    checks of the origin node will be made by calling the zone's
-    ``check_origin()`` method.
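A one-line sketch of ``from_file()``; the file name here is hypothetical and would need to contain a zone with SOA and NS records for the default origin check to pass.

```python
import dns.zone

zone = dns.zone.from_file("example.com.zone", origin="example.com.")
print(zone.origin, len(zone.nodes))
```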
-
-    *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA
-    encoder/decoder.  If ``None``, the default IDNA 2003 encoder/decoder
-    is used.
-
-    *allow_directives*, a ``bool`` or an iterable of `str`.  If ``True``, the default,
-    then directives are permitted, and the *allow_include* parameter controls whether
-    ``$INCLUDE`` is permitted.  If ``False`` or an empty iterable, then no directive
-    processing is done and any directive-like text will be treated as a regular owner
-    name.  If a non-empty iterable, then only the listed directives (including the
-    ``$``) are allowed.
-
-    Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
-
-    Raises ``dns.zone.NoNS`` if there is no NS RRset.
-
-    Raises ``KeyError`` if there is no origin node.
-
-    Returns a subclass of ``dns.zone.Zone``.
-    """
-
-    if isinstance(f, str):
-        if filename is None:
-            filename = f
-        cm: contextlib.AbstractContextManager = open(f)
-    else:
-        cm = contextlib.nullcontext(f)
-    with cm as f:
-        return from_text(
-            f,
-            origin,
-            rdclass,
-            relativize,
-            zone_factory,
-            filename,
-            allow_include,
-            check_origin,
-            idna_codec,
-            allow_directives,
-        )
-    assert False  # make mypy happy lgtm[py/unreachable-statement]
-
-
-def from_xfr(
-    xfr: Any,
-    zone_factory: Any = Zone,
-    relativize: bool = True,
-    check_origin: bool = True,
-) -> Zone:
-    """Convert the output of a zone transfer generator into a zone object.
-
-    *xfr*, a generator of ``dns.message.Message`` objects, typically
-    ``dns.query.xfr()``.
-
-    *relativize*, a ``bool``, determines whether domain names are
-    relativized to the zone's origin.  The default is ``True``.
-    It is essential that the relativize setting matches the one specified
-    to the generator.
-
-    *check_origin*, a ``bool``.  If ``True``, the default, then sanity
-    checks of the origin node will be made by calling the zone's
-    ``check_origin()`` method.
-
-    Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
-
-    Raises ``dns.zone.NoNS`` if there is no NS RRset.
-
-    Raises ``KeyError`` if there is no origin node.
-
-    Raises ``ValueError`` if no messages are yielded by the generator.
-
-    Returns a subclass of ``dns.zone.Zone``.
-    """
-
-    z = None
-    for r in xfr:
-        if z is None:
-            if relativize:
-                origin = r.origin
-            else:
-                origin = r.answer[0].name
-            rdclass = r.answer[0].rdclass
-            z = zone_factory(origin, rdclass, relativize=relativize)
-        for rrset in r.answer:
-            znode = z.nodes.get(rrset.name)
-            if not znode:
-                znode = z.node_factory()
-                z.nodes[rrset.name] = znode
-            zrds = znode.find_rdataset(rrset.rdclass, rrset.rdtype, rrset.covers, True)
-            zrds.update_ttl(rrset.ttl)
-            for rd in rrset:
-                zrds.add(rd)
-    if z is None:
-        raise ValueError("empty transfer")
-    if check_origin:
-        z.check_origin()
-    return z
diff --git a/server/venv/Lib/site-packages/dns/zonefile.py b/server/venv/Lib/site-packages/dns/zonefile.py
deleted file mode 100644
index 27f0492..0000000
--- a/server/venv/Lib/site-packages/dns/zonefile.py
+++ /dev/null
@@ -1,747 +0,0 @@
-# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
-
-# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
-#
-# Permission to use, copy, modify, and distribute this software and its
-# documentation for any purpose with or without fee is hereby granted,
-# provided that the above copyright notice and this permission notice
-# appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
-# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-# MERCHANTABILITY AND FITNESS.
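A sketch of ``from_xfr()`` fed by ``dns.query.xfr()``; the 10.0.0.53 address stands in for a server that permits AXFR of the (invented) zone.

```python
import dns.query
import dns.zone

zone = dns.zone.from_xfr(dns.query.xfr("10.0.0.53", "example.com."))
print(zone.get_soa().serial)
```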
IN NO EVENT SHALL NOMINUM BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -"""DNS Zones.""" - -import re -import sys -from typing import Any, Iterable, List, Optional, Set, Tuple, Union - -import dns.exception -import dns.grange -import dns.name -import dns.node -import dns.rdata -import dns.rdataclass -import dns.rdatatype -import dns.rdtypes.ANY.SOA -import dns.rrset -import dns.tokenizer -import dns.transaction -import dns.ttl - - -class UnknownOrigin(dns.exception.DNSException): - """Unknown origin""" - - -class CNAMEAndOtherData(dns.exception.DNSException): - """A node has a CNAME and other data""" - - -def _check_cname_and_other_data(txn, name, rdataset): - rdataset_kind = dns.node.NodeKind.classify_rdataset(rdataset) - node = txn.get_node(name) - if node is None: - # empty nodes are neutral. - return - node_kind = node.classify() - if ( - node_kind == dns.node.NodeKind.CNAME - and rdataset_kind == dns.node.NodeKind.REGULAR - ): - raise CNAMEAndOtherData("rdataset type is not compatible with a CNAME node") - elif ( - node_kind == dns.node.NodeKind.REGULAR - and rdataset_kind == dns.node.NodeKind.CNAME - ): - raise CNAMEAndOtherData( - "CNAME rdataset is not compatible with a regular data node" - ) - # Otherwise at least one of the node and the rdataset is neutral, so - # adding the rdataset is ok - - -SavedStateType = Tuple[ - dns.tokenizer.Tokenizer, - Optional[dns.name.Name], # current_origin - Optional[dns.name.Name], # last_name - Optional[Any], # current_file - int, # last_ttl - bool, # last_ttl_known - int, # default_ttl - bool, -] # default_ttl_known - - -def _upper_dollarize(s): - s = s.upper() - if not s.startswith("$"): - s = "$" + s - return s - - -class Reader: - - """Read a DNS zone file into a transaction.""" - - def __init__( - self, - tok: dns.tokenizer.Tokenizer, - rdclass: dns.rdataclass.RdataClass, - txn: dns.transaction.Transaction, - allow_include: bool = False, - allow_directives: Union[bool, Iterable[str]] = True, - force_name: Optional[dns.name.Name] = None, - force_ttl: Optional[int] = None, - force_rdclass: Optional[dns.rdataclass.RdataClass] = None, - force_rdtype: Optional[dns.rdatatype.RdataType] = None, - default_ttl: Optional[int] = None, - ): - self.tok = tok - (self.zone_origin, self.relativize, _) = txn.manager.origin_information() - self.current_origin = self.zone_origin - self.last_ttl = 0 - self.last_ttl_known = False - if force_ttl is not None: - default_ttl = force_ttl - if default_ttl is None: - self.default_ttl = 0 - self.default_ttl_known = False - else: - self.default_ttl = default_ttl - self.default_ttl_known = True - self.last_name = self.current_origin - self.zone_rdclass = rdclass - self.txn = txn - self.saved_state: List[SavedStateType] = [] - self.current_file: Optional[Any] = None - self.allowed_directives: Set[str] - if allow_directives is True: - self.allowed_directives = {"$GENERATE", "$ORIGIN", "$TTL"} - if allow_include: - self.allowed_directives.add("$INCLUDE") - elif allow_directives is False: - # allow_include was ignored in earlier releases if allow_directives was - # False, so we continue that. - self.allowed_directives = set() - else: - # Note that if directives are explicitly specified, then allow_include - # is ignored. 
- self.allowed_directives = set(_upper_dollarize(d) for d in allow_directives) - self.force_name = force_name - self.force_ttl = force_ttl - self.force_rdclass = force_rdclass - self.force_rdtype = force_rdtype - self.txn.check_put_rdataset(_check_cname_and_other_data) - - def _eat_line(self): - while 1: - token = self.tok.get() - if token.is_eol_or_eof(): - break - - def _get_identifier(self): - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - return token - - def _rr_line(self): - """Process one line from a DNS zone file.""" - token = None - # Name - if self.force_name is not None: - name = self.force_name - else: - if self.current_origin is None: - raise UnknownOrigin - token = self.tok.get(want_leading=True) - if not token.is_whitespace(): - self.last_name = self.tok.as_name(token, self.current_origin) - else: - token = self.tok.get() - if token.is_eol_or_eof(): - # treat leading WS followed by EOL/EOF as if they were EOL/EOF. - return - self.tok.unget(token) - name = self.last_name - if not name.is_subdomain(self.zone_origin): - self._eat_line() - return - if self.relativize: - name = name.relativize(self.zone_origin) - - # TTL - if self.force_ttl is not None: - ttl = self.force_ttl - self.last_ttl = ttl - self.last_ttl_known = True - else: - token = self._get_identifier() - ttl = None - try: - ttl = dns.ttl.from_text(token.value) - self.last_ttl = ttl - self.last_ttl_known = True - token = None - except dns.ttl.BadTTL: - self.tok.unget(token) - - # Class - if self.force_rdclass is not None: - rdclass = self.force_rdclass - else: - token = self._get_identifier() - try: - rdclass = dns.rdataclass.from_text(token.value) - except dns.exception.SyntaxError: - raise - except Exception: - rdclass = self.zone_rdclass - self.tok.unget(token) - if rdclass != self.zone_rdclass: - raise dns.exception.SyntaxError("RR class is not zone's class") - - if ttl is None: - # support for syntax - token = self._get_identifier() - ttl = None - try: - ttl = dns.ttl.from_text(token.value) - self.last_ttl = ttl - self.last_ttl_known = True - token = None - except dns.ttl.BadTTL: - if self.default_ttl_known: - ttl = self.default_ttl - elif self.last_ttl_known: - ttl = self.last_ttl - self.tok.unget(token) - - # Type - if self.force_rdtype is not None: - rdtype = self.force_rdtype - else: - token = self._get_identifier() - try: - rdtype = dns.rdatatype.from_text(token.value) - except Exception: - raise dns.exception.SyntaxError("unknown rdatatype '%s'" % token.value) - - try: - rd = dns.rdata.from_text( - rdclass, - rdtype, - self.tok, - self.current_origin, - self.relativize, - self.zone_origin, - ) - except dns.exception.SyntaxError: - # Catch and reraise. - raise - except Exception: - # All exceptions that occur in the processing of rdata - # are treated as syntax errors. This is not strictly - # correct, but it is correct almost all of the time. - # We convert them to syntax errors so that we can emit - # helpful filename:line info. - (ty, va) = sys.exc_info()[:2] - raise dns.exception.SyntaxError( - "caught exception {}: {}".format(str(ty), str(va)) - ) - - if not self.default_ttl_known and rdtype == dns.rdatatype.SOA: - # The pre-RFC2308 and pre-BIND9 behavior inherits the zone default - # TTL from the SOA minttl if no $TTL statement is present before the - # SOA is parsed. - self.default_ttl = rd.minimum - self.default_ttl_known = True - if ttl is None: - # if we didn't have a TTL on the SOA, set it! - ttl = rd.minimum - - # TTL check. 
We had to wait until now to do this as the SOA RR's - # own TTL can be inferred from its minimum. - if ttl is None: - raise dns.exception.SyntaxError("Missing default TTL value") - - self.txn.add(name, ttl, rd) - - def _parse_modify(self, side: str) -> Tuple[str, str, int, int, str]: - # Here we catch everything in '{' '}' in a group so we can replace it - # with ''. - is_generate1 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+),(.)}).*$") - is_generate2 = re.compile(r"^.*\$({(\+|-?)(\d+)}).*$") - is_generate3 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+)}).*$") - # Sometimes there are modifiers in the hostname. These come after - # the dollar sign. They are in the form: ${offset[,width[,base]]}. - # Make names - g1 = is_generate1.match(side) - if g1: - mod, sign, offset, width, base = g1.groups() - if sign == "": - sign = "+" - g2 = is_generate2.match(side) - if g2: - mod, sign, offset = g2.groups() - if sign == "": - sign = "+" - width = 0 - base = "d" - g3 = is_generate3.match(side) - if g3: - mod, sign, offset, width = g3.groups() - if sign == "": - sign = "+" - base = "d" - - if not (g1 or g2 or g3): - mod = "" - sign = "+" - offset = 0 - width = 0 - base = "d" - - offset = int(offset) - width = int(width) - - if sign not in ["+", "-"]: - raise dns.exception.SyntaxError("invalid offset sign %s" % sign) - if base not in ["d", "o", "x", "X", "n", "N"]: - raise dns.exception.SyntaxError("invalid type %s" % base) - - return mod, sign, offset, width, base - - def _generate_line(self): - # range lhs [ttl] [class] type rhs [ comment ] - """Process one line containing the GENERATE statement from a DNS - zone file.""" - if self.current_origin is None: - raise UnknownOrigin - - token = self.tok.get() - # Range (required) - try: - start, stop, step = dns.grange.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except Exception: - raise dns.exception.SyntaxError - - # lhs (required) - try: - lhs = token.value - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except Exception: - raise dns.exception.SyntaxError - - # TTL - try: - ttl = dns.ttl.from_text(token.value) - self.last_ttl = ttl - self.last_ttl_known = True - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.ttl.BadTTL: - if not (self.last_ttl_known or self.default_ttl_known): - raise dns.exception.SyntaxError("Missing default TTL value") - if self.default_ttl_known: - ttl = self.default_ttl - elif self.last_ttl_known: - ttl = self.last_ttl - # Class - try: - rdclass = dns.rdataclass.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except dns.exception.SyntaxError: - raise dns.exception.SyntaxError - except Exception: - rdclass = self.zone_rdclass - if rdclass != self.zone_rdclass: - raise dns.exception.SyntaxError("RR class is not zone's class") - # Type - try: - rdtype = dns.rdatatype.from_text(token.value) - token = self.tok.get() - if not token.is_identifier(): - raise dns.exception.SyntaxError - except Exception: - raise dns.exception.SyntaxError("unknown rdatatype '%s'" % token.value) - - # rhs (required) - rhs = token.value - - def _calculate_index(counter: int, offset_sign: str, offset: int) -> int: - """Calculate the index from the counter and offset.""" - if offset_sign == "-": - offset *= -1 - return counter + offset - - def _format_index(index: int, base: str, width: int) -> str: - """Format the index with the given 
base, and zero-fill it - to the given width.""" - if base in ["d", "o", "x", "X"]: - return format(index, base).zfill(width) - - # base can only be n or N here - hexa = _format_index(index, "x", width) - nibbles = ".".join(hexa[::-1])[:width] - if base == "N": - nibbles = nibbles.upper() - return nibbles - - lmod, lsign, loffset, lwidth, lbase = self._parse_modify(lhs) - rmod, rsign, roffset, rwidth, rbase = self._parse_modify(rhs) - for i in range(start, stop + 1, step): - # +1 because bind is inclusive and python is exclusive - - lindex = _calculate_index(i, lsign, loffset) - rindex = _calculate_index(i, rsign, roffset) - - lzfindex = _format_index(lindex, lbase, lwidth) - rzfindex = _format_index(rindex, rbase, rwidth) - - name = lhs.replace("$%s" % (lmod), lzfindex) - rdata = rhs.replace("$%s" % (rmod), rzfindex) - - self.last_name = dns.name.from_text( - name, self.current_origin, self.tok.idna_codec - ) - name = self.last_name - if not name.is_subdomain(self.zone_origin): - self._eat_line() - return - if self.relativize: - name = name.relativize(self.zone_origin) - - try: - rd = dns.rdata.from_text( - rdclass, - rdtype, - rdata, - self.current_origin, - self.relativize, - self.zone_origin, - ) - except dns.exception.SyntaxError: - # Catch and reraise. - raise - except Exception: - # All exceptions that occur in the processing of rdata - # are treated as syntax errors. This is not strictly - # correct, but it is correct almost all of the time. - # We convert them to syntax errors so that we can emit - # helpful filename:line info. - (ty, va) = sys.exc_info()[:2] - raise dns.exception.SyntaxError( - "caught exception %s: %s" % (str(ty), str(va)) - ) - - self.txn.add(name, ttl, rd) - - def read(self) -> None: - """Read a DNS zone file and build a zone object. 
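A minimal sketch of the ``$GENERATE`` handling implemented above (invented names and addresses); ``check_origin=False`` skips the SOA/NS checks so the fragment stays short.

```python
import dns.zone

text = """
$TTL 300
$GENERATE 1-3 host$ A 10.0.0.$
"""
zone = dns.zone.from_text(text, origin="example.com.", check_origin=False)
for name, ttl, rdata in zone.iterate_rdatas("A"):
    print(name, rdata.address)  # host1 10.0.0.1 ... host3 10.0.0.3
```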
-
-        Raises ``dns.zone.NoSOA`` if no SOA RR was found at the zone origin.
-
-        Raises ``dns.zone.NoNS`` if no NS RRset was found at the zone origin.
-        """
-
-        try:
-            while 1:
-                token = self.tok.get(True, True)
-                if token.is_eof():
-                    if self.current_file is not None:
-                        self.current_file.close()
-                    if len(self.saved_state) > 0:
-                        (
-                            self.tok,
-                            self.current_origin,
-                            self.last_name,
-                            self.current_file,
-                            self.last_ttl,
-                            self.last_ttl_known,
-                            self.default_ttl,
-                            self.default_ttl_known,
-                        ) = self.saved_state.pop(-1)
-                        continue
-                    break
-                elif token.is_eol():
-                    continue
-                elif token.is_comment():
-                    self.tok.get_eol()
-                    continue
-                elif token.value[0] == "$" and len(self.allowed_directives) > 0:
-                    # Note that we only run directive processing code if at least
-                    # one directive is allowed in order to be backwards compatible
-                    c = token.value.upper()
-                    if c not in self.allowed_directives:
-                        raise dns.exception.SyntaxError(
-                            f"zone file directive '{c}' is not allowed"
-                        )
-                    if c == "$TTL":
-                        token = self.tok.get()
-                        if not token.is_identifier():
-                            raise dns.exception.SyntaxError("bad $TTL")
-                        self.default_ttl = dns.ttl.from_text(token.value)
-                        self.default_ttl_known = True
-                        self.tok.get_eol()
-                    elif c == "$ORIGIN":
-                        self.current_origin = self.tok.get_name()
-                        self.tok.get_eol()
-                        if self.zone_origin is None:
-                            self.zone_origin = self.current_origin
-                        self.txn._set_origin(self.current_origin)
-                    elif c == "$INCLUDE":
-                        token = self.tok.get()
-                        filename = token.value
-                        token = self.tok.get()
-                        new_origin: Optional[dns.name.Name]
-                        if token.is_identifier():
-                            new_origin = dns.name.from_text(
-                                token.value, self.current_origin, self.tok.idna_codec
-                            )
-                            self.tok.get_eol()
-                        elif not token.is_eol_or_eof():
-                            raise dns.exception.SyntaxError("bad origin in $INCLUDE")
-                        else:
-                            new_origin = self.current_origin
-                        self.saved_state.append(
-                            (
-                                self.tok,
-                                self.current_origin,
-                                self.last_name,
-                                self.current_file,
-                                self.last_ttl,
-                                self.last_ttl_known,
-                                self.default_ttl,
-                                self.default_ttl_known,
-                            )
-                        )
-                        self.current_file = open(filename, "r")
-                        self.tok = dns.tokenizer.Tokenizer(self.current_file, filename)
-                        self.current_origin = new_origin
-                    elif c == "$GENERATE":
-                        self._generate_line()
-                    else:
-                        raise dns.exception.SyntaxError(
-                            f"Unknown zone file directive '{c}'"
-                        )
-                    continue
-                self.tok.unget(token)
-                self._rr_line()
-        except dns.exception.SyntaxError as detail:
-            (filename, line_number) = self.tok.where()
-            if detail is None:
-                detail = "syntax error"
-            ex = dns.exception.SyntaxError(
-                "%s:%d: %s" % (filename, line_number, detail)
-            )
-            tb = sys.exc_info()[2]
-            raise ex.with_traceback(tb) from None
-
-
-class RRsetsReaderTransaction(dns.transaction.Transaction):
-    def __init__(self, manager, replacement, read_only):
-        assert not read_only
-        super().__init__(manager, replacement, read_only)
-        self.rdatasets = {}
-
-    def _get_rdataset(self, name, rdtype, covers):
-        return self.rdatasets.get((name, rdtype, covers))
-
-    def _get_node(self, name):
-        rdatasets = []
-        for (rdataset_name, _, _), rdataset in self.rdatasets.items():
-            if name == rdataset_name:
-                rdatasets.append(rdataset)
-        if len(rdatasets) == 0:
-            return None
-        node = dns.node.Node()
-        node.rdatasets = rdatasets
-        return node
-
-    def _put_rdataset(self, name, rdataset):
-        self.rdatasets[(name, rdataset.rdtype, rdataset.covers)] = rdataset
-
-    def _delete_name(self, name):
-        # First remove any changes involving the name
-        remove = []
-        for key in self.rdatasets:
-            if key[0] == name:
-                remove.append(key)
-        if len(remove) > 0:
-            for key in remove:
-                del self.rdatasets[key]
-
-    def _delete_rdataset(self, name, rdtype, covers):
-        try:
-            del self.rdatasets[(name, rdtype, covers)]
-        except KeyError:
-            pass
-
-    def _name_exists(self, name):
-        for n, _, _ in self.rdatasets:
-            if n == name:
-                return True
-        return False
-
-    def _changed(self):
-        return len(self.rdatasets) > 0
-
-    def _end_transaction(self, commit):
-        if commit and self._changed():
-            rrsets = []
-            for (name, _, _), rdataset in self.rdatasets.items():
-                rrset = dns.rrset.RRset(
-                    name, rdataset.rdclass, rdataset.rdtype, rdataset.covers
-                )
-                rrset.update(rdataset)
-                rrsets.append(rrset)
-            self.manager.set_rrsets(rrsets)
-
-    def _set_origin(self, origin):
-        pass
-
-    def _iterate_rdatasets(self):
-        raise NotImplementedError  # pragma: no cover
-
-    def _iterate_names(self):
-        raise NotImplementedError  # pragma: no cover
-
-
-class RRSetsReaderManager(dns.transaction.TransactionManager):
-    def __init__(
-        self, origin=dns.name.root, relativize=False, rdclass=dns.rdataclass.IN
-    ):
-        self.origin = origin
-        self.relativize = relativize
-        self.rdclass = rdclass
-        self.rrsets = []
-
-    def reader(self):  # pragma: no cover
-        raise NotImplementedError
-
-    def writer(self, replacement=False):
-        assert replacement is True
-        return RRsetsReaderTransaction(self, True, False)
-
-    def get_class(self):
-        return self.rdclass
-
-    def origin_information(self):
-        if self.relativize:
-            effective = dns.name.empty
-        else:
-            effective = self.origin
-        return (self.origin, self.relativize, effective)
-
-    def set_rrsets(self, rrsets):
-        self.rrsets = rrsets
-
-
-def read_rrsets(
-    text: Any,
-    name: Optional[Union[dns.name.Name, str]] = None,
-    ttl: Optional[int] = None,
-    rdclass: Optional[Union[dns.rdataclass.RdataClass, str]] = dns.rdataclass.IN,
-    default_rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN,
-    rdtype: Optional[Union[dns.rdatatype.RdataType, str]] = None,
-    default_ttl: Optional[Union[int, str]] = None,
-    idna_codec: Optional[dns.name.IDNACodec] = None,
-    origin: Optional[Union[dns.name.Name, str]] = dns.name.root,
-    relativize: bool = False,
-) -> List[dns.rrset.RRset]:
-    """Read one or more rrsets from the specified text, possibly subject
-    to restrictions.
-
-    *text*, a file object or a string, is the input to process.
-
-    *name*, a string, ``dns.name.Name``, or ``None``, is the owner name of
-    the rrset.  If not ``None``, then the owner name is "forced", and the
-    input must not specify an owner name.  If ``None``, then any owner names
-    are allowed and must be present in the input.
-
-    *ttl*, an ``int``, string, or ``None``.  If not ``None``, the TTL is
-    forced to be the specified value and the input must not specify a TTL.
-    If ``None``, then a TTL may be specified in the input.  If it is not
-    specified, then the *default_ttl* will be used.
-
-    *rdclass*, a ``dns.rdataclass.RdataClass``, string, or ``None``.  If
-    not ``None``, then the class is forced to the specified value, and the
-    input must not specify a class.  If ``None``, then the input may specify
-    a class that matches *default_rdclass*.  Note that it is not possible to
-    return rrsets with differing classes; specifying ``None`` for the class
-    simply allows the user to optionally type a class as that may be convenient
-    when cutting and pasting.
-
-    *default_rdclass*, a ``dns.rdataclass.RdataClass`` or string.  The class
-    of the returned rrsets.
-
-    *rdtype*, a ``dns.rdatatype.RdataType``, string, or ``None``.  If not
-    ``None``, then the type is forced to the specified value, and the
-    input must not specify a type.  If ``None``, then a type must be present
-    for each RR.
-
-    *default_ttl*, an ``int``, string, or ``None``.  If not ``None``, then if
-    the TTL is not forced and is not specified, this value will be used.
-    If ``None``, then an error will occur if the TTL is neither forced nor
-    specified.
-
-    *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA
-    encoder/decoder.  If ``None``, the default IDNA 2003 encoder/decoder
-    is used.  Note that codecs only apply to the owner name; dnspython does
-    not do IDNA for names in rdata, as there is no IDNA zonefile format.
-
-    *origin*, a string, ``dns.name.Name``, or ``None``, is the origin for any
-    relative names in the input, and also the origin to relativize to if
-    *relativize* is ``True``.
-
-    *relativize*, a bool.  If ``True``, names are relativized to the *origin*;
-    if ``False`` then any relative names in the input are made absolute by
-    appending the *origin*.
-    """
-    if isinstance(origin, str):
-        origin = dns.name.from_text(origin, dns.name.root, idna_codec)
-    if isinstance(name, str):
-        name = dns.name.from_text(name, origin, idna_codec)
-    if isinstance(ttl, str):
-        ttl = dns.ttl.from_text(ttl)
-    if isinstance(default_ttl, str):
-        default_ttl = dns.ttl.from_text(default_ttl)
-    if rdclass is not None:
-        rdclass = dns.rdataclass.RdataClass.make(rdclass)
-    else:
-        rdclass = None
-    default_rdclass = dns.rdataclass.RdataClass.make(default_rdclass)
-    if rdtype is not None:
-        rdtype = dns.rdatatype.RdataType.make(rdtype)
-    else:
-        rdtype = None
-    manager = RRSetsReaderManager(origin, relativize, default_rdclass)
-    with manager.writer(True) as txn:
-        tok = dns.tokenizer.Tokenizer(text, "<input>", idna_codec=idna_codec)
-        reader = Reader(
-            tok,
-            default_rdclass,
-            txn,
-            allow_directives=False,
-            force_name=name,
-            force_ttl=ttl,
-            force_rdclass=rdclass,
-            force_rdtype=rdtype,
-            default_ttl=default_ttl,
-        )
-        reader.read()
-    return manager.rrsets
diff --git a/server/venv/Lib/site-packages/dns/zonetypes.py b/server/venv/Lib/site-packages/dns/zonetypes.py
deleted file mode 100644
index 195ee2e..0000000
--- a/server/venv/Lib/site-packages/dns/zonetypes.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
-
-"""Common zone-related types."""
-
-# This is a separate file to avoid import circularity between dns.zone and
-# the implementation of the ZONEMD type.
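A short sketch of the ``read_rrsets()`` function defined above (invented records); rdatas sharing an owner name and type are merged into a single RRset, and with the default ``relativize=False`` the owner names come back absolute.

```python
import dns.zonefile

rrsets = dns.zonefile.read_rrsets(
    "www 300 IN A 10.0.0.2\n"
    "www 300 IN A 10.0.0.3\n",
    origin="example.com.",
)
for rrset in rrsets:
    print(rrset.name, rrset.ttl, [str(rdata) for rdata in rrset])
```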
- -import hashlib - -import dns.enum - - -class DigestScheme(dns.enum.IntEnum): - """ZONEMD Scheme""" - - SIMPLE = 1 - - @classmethod - def _maximum(cls): - return 255 - - -class DigestHashAlgorithm(dns.enum.IntEnum): - """ZONEMD Hash Algorithm""" - - SHA384 = 1 - SHA512 = 2 - - @classmethod - def _maximum(cls): - return 255 - - -_digest_hashers = { - DigestHashAlgorithm.SHA384: hashlib.sha384, - DigestHashAlgorithm.SHA512: hashlib.sha512, -} diff --git a/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/INSTALLER b/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/LICENSE b/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/LICENSE deleted file mode 100644 index 390a726..0000000 --- a/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/LICENSE +++ /dev/null @@ -1,35 +0,0 @@ -ISC License - -Copyright (C) Dnspython Contributors - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all -copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL -DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR -PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. - - - -Copyright (C) 2001-2017 Nominum, Inc. -Copyright (C) Google Inc. - -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose with or without fee is hereby granted, -provided that the above copyright notice and this permission notice -appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
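The ``DigestScheme`` and ``DigestHashAlgorithm`` enums in dns/zonetypes.py above plug into ``dns.zone.Zone.compute_digest()`` and ``verify_digest()`` (shown earlier in this diff). A minimal sketch with an invented zone:

```python
import dns.zone
import dns.zonetypes

zone = dns.zone.from_text(
    "@ 300 IN SOA ns1 hostmaster 1 7200 900 1209600 300\n"
    "@ 300 IN NS ns1\n",
    origin="example.com.",
)

zonemd = zone.compute_digest(dns.zonetypes.DigestHashAlgorithm.SHA384)
print(zonemd)  # ZONEMD rdata: serial, scheme, algorithm, digest

# verify_digest() raises DigestVerificationFailure on a mismatch.
zone.verify_digest(zonemd)
```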
diff --git a/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/METADATA b/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/METADATA deleted file mode 100644 index a69819a..0000000 --- a/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/METADATA +++ /dev/null @@ -1,128 +0,0 @@ -Metadata-Version: 2.1 -Name: dnspython -Version: 2.4.2 -Summary: DNS toolkit -Home-page: https://www.dnspython.org -License: ISC -Author: Bob Halley -Author-email: halley@dnspython.org -Requires-Python: >=3.8,<4.0 -Classifier: License :: OSI Approved -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Provides-Extra: dnssec -Provides-Extra: doh -Provides-Extra: doq -Provides-Extra: idna -Provides-Extra: trio -Provides-Extra: wmi -Requires-Dist: aioquic (>=0.9.20) ; extra == "doq" -Requires-Dist: cryptography (>=2.6,<42.0) ; extra == "dnssec" -Requires-Dist: h2 (>=4.1.0) ; extra == "doh" -Requires-Dist: httpcore (>=0.17.3) ; extra == "doh" -Requires-Dist: httpx (>=0.24.1) ; extra == "doh" -Requires-Dist: idna (>=2.1,<4.0) ; extra == "idna" -Requires-Dist: trio (>=0.14,<0.23) ; extra == "trio" -Requires-Dist: wmi (>=1.5.1,<2.0.0) ; extra == "wmi" -Project-URL: Bug Tracker, https://github.com/rthalley/dnspython/issues -Project-URL: Documentation, https://dnspython.readthedocs.io/en/stable/ -Project-URL: Repository, https://github.com/rthalley/dnspython.git -Description-Content-Type: text/markdown - -# dnspython - -[![Build Status](https://github.com/rthalley/dnspython/actions/workflows/python-package.yml/badge.svg)](https://github.com/rthalley/dnspython/actions/) -[![Documentation Status](https://readthedocs.org/projects/dnspython/badge/?version=latest)](https://dnspython.readthedocs.io/en/latest/?badge=latest) -[![PyPI version](https://badge.fury.io/py/dnspython.svg)](https://badge.fury.io/py/dnspython) -[![License: ISC](https://img.shields.io/badge/License-ISC-brightgreen.svg)](https://opensource.org/licenses/ISC) -[![Coverage](https://codecov.io/github/rthalley/dnspython/coverage.svg?branch=master)](https://codecov.io/github/rthalley/dnspython) -[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) - -## INTRODUCTION - -dnspython is a DNS toolkit for Python. It supports almost all record types. It -can be used for queries, zone transfers, and dynamic updates. It supports TSIG -authenticated messages and EDNS0. - -dnspython provides both high and low level access to DNS. The high level classes -perform queries for data of a given name, type, and class, and return an answer -set. The low level classes allow direct manipulation of DNS zones, messages, -names, and records. - -To see a few of the ways dnspython can be used, look in the `examples/` -directory. - -dnspython is a utility to work with DNS, `/etc/hosts` is thus not used. For -simple forward DNS lookups, it's better to use `socket.getaddrinfo()` or -`socket.gethostbyname()`. - -dnspython originated at Nominum where it was developed -to facilitate the testing of DNS software. - -## ABOUT THIS RELEASE - -This is dnspython 2.4.2. -Please read -[What's New](https://dnspython.readthedocs.io/en/stable/whatsnew.html) for -information about the changes in this release. - -## INSTALLATION - -* Many distributions have dnspython packaged for you, so you should - check there first. 
-* To use a wheel downloaded from PyPI, run:
-
-    pip install dnspython
-
-* To install from the source code, go into the top-level of the source code
-  and run:
-
-```
-    pip install --upgrade pip build
-    python -m build
-    pip install dist/*.whl
-```
-
-* To install the latest from the master branch, run `pip install git+https://github.com/rthalley/dnspython.git`
-
-Dnspython's default installation does not depend on any modules other than
-those in the Python standard library.  To use some features, additional modules
-must be installed.  For convenience, pip options are defined for the
-requirements.
-
-If you want to use DNS-over-HTTPS, run
-`pip install dnspython[doh]`.
-
-If you want to use DNSSEC functionality, run
-`pip install dnspython[dnssec]`.
-
-If you want to use internationalized domain names (IDNA)
-functionality, you must run
-`pip install dnspython[idna]`
-
-If you want to use the Trio asynchronous I/O package, run
-`pip install dnspython[trio]`.
-
-If you want to use WMI on Windows to determine the active DNS settings
-instead of the default registry scanning method, run
-`pip install dnspython[wmi]`.
-
-If you want to try the experimental DNS-over-QUIC code, run
-`pip install dnspython[doq]`.
-
-Note that you can install any combination of the above, e.g.:
-`pip install dnspython[doh,dnssec,idna]`
-
-### Notices
-
-Python 2.x support ended with the release of 1.16.0.  Dnspython 2.0.0 through
-2.2.x support Python 3.6 and later.  For dnspython 2.3.x, the minimum
-supported Python version is 3.7, and for 2.4.x the minimum supported version is 3.8.
-We plan to align future support with the lifetime of the Python 3 versions.
-
-Documentation has moved to
-[dnspython.readthedocs.io](https://dnspython.readthedocs.io).
-
diff --git a/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/RECORD b/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/RECORD
deleted file mode 100644
index c2117c5..0000000
--- a/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/RECORD
+++ /dev/null
@@ -1,289 +0,0 @@
-dns/__init__.py,sha256=YJZtDG14Idw5ui3h1nWooSwPM9gsxQgB8M0GBZ3aly0,1663
-dns/__pycache__/__init__.cpython-311.pyc,,
-dns/__pycache__/_asyncbackend.cpython-311.pyc,,
-dns/__pycache__/_asyncio_backend.cpython-311.pyc,,
-dns/__pycache__/_ddr.cpython-311.pyc,,
-dns/__pycache__/_immutable_ctx.cpython-311.pyc,,
-dns/__pycache__/_trio_backend.cpython-311.pyc,,
-dns/__pycache__/asyncbackend.cpython-311.pyc,,
-dns/__pycache__/asyncquery.cpython-311.pyc,,
-dns/__pycache__/asyncresolver.cpython-311.pyc,,
-dns/__pycache__/dnssec.cpython-311.pyc,,
-dns/__pycache__/dnssectypes.cpython-311.pyc,,
-dns/__pycache__/e164.cpython-311.pyc,,
-dns/__pycache__/edns.cpython-311.pyc,,
-dns/__pycache__/entropy.cpython-311.pyc,,
-dns/__pycache__/enum.cpython-311.pyc,,
-dns/__pycache__/exception.cpython-311.pyc,,
-dns/__pycache__/flags.cpython-311.pyc,,
-dns/__pycache__/grange.cpython-311.pyc,,
-dns/__pycache__/immutable.cpython-311.pyc,,
-dns/__pycache__/inet.cpython-311.pyc,,
-dns/__pycache__/ipv4.cpython-311.pyc,,
-dns/__pycache__/ipv6.cpython-311.pyc,,
-dns/__pycache__/message.cpython-311.pyc,,
-dns/__pycache__/name.cpython-311.pyc,,
-dns/__pycache__/namedict.cpython-311.pyc,,
-dns/__pycache__/nameserver.cpython-311.pyc,,
-dns/__pycache__/node.cpython-311.pyc,,
-dns/__pycache__/opcode.cpython-311.pyc,,
-dns/__pycache__/query.cpython-311.pyc,,
-dns/__pycache__/rcode.cpython-311.pyc,,
-dns/__pycache__/rdata.cpython-311.pyc,,
-dns/__pycache__/rdataclass.cpython-311.pyc,,
-dns/__pycache__/rdataset.cpython-311.pyc,, -dns/__pycache__/rdatatype.cpython-311.pyc,, -dns/__pycache__/renderer.cpython-311.pyc,, -dns/__pycache__/resolver.cpython-311.pyc,, -dns/__pycache__/reversename.cpython-311.pyc,, -dns/__pycache__/rrset.cpython-311.pyc,, -dns/__pycache__/serial.cpython-311.pyc,, -dns/__pycache__/set.cpython-311.pyc,, -dns/__pycache__/tokenizer.cpython-311.pyc,, -dns/__pycache__/transaction.cpython-311.pyc,, -dns/__pycache__/tsig.cpython-311.pyc,, -dns/__pycache__/tsigkeyring.cpython-311.pyc,, -dns/__pycache__/ttl.cpython-311.pyc,, -dns/__pycache__/update.cpython-311.pyc,, -dns/__pycache__/version.cpython-311.pyc,, -dns/__pycache__/versioned.cpython-311.pyc,, -dns/__pycache__/win32util.cpython-311.pyc,, -dns/__pycache__/wire.cpython-311.pyc,, -dns/__pycache__/xfr.cpython-311.pyc,, -dns/__pycache__/zone.cpython-311.pyc,, -dns/__pycache__/zonefile.cpython-311.pyc,, -dns/__pycache__/zonetypes.cpython-311.pyc,, -dns/_asyncbackend.py,sha256=Ny0kGesm9wbLBnt-0u-tANOKsxcYt2jbMuRoRz_JZUA,2360 -dns/_asyncio_backend.py,sha256=Il3f2OGH0Q2EnToE4F52p-FQFrIw3WgpsyNZG1JNUFQ,8970 -dns/_ddr.py,sha256=rHXKC8kncCTT9N4KBh1flicl79nyDjQ-DDvq30MJ3B8,5247 -dns/_immutable_ctx.py,sha256=gtoCLMmdHXI23zt5lRSIS3A4Ca3jZJngebdoFFOtiwU,2459 -dns/_trio_backend.py,sha256=GVVnbYqhUyvKbwMiDH_8YSZ2B_XYyjlZ6dRoGAYD5vw,8150 -dns/asyncbackend.py,sha256=F8YnEkFKXAhJ-2DM3_EQ1o7UGq-8FpEj4arSyMvqkO8,2794 -dns/asyncquery.py,sha256=AZ4nIpOMzZJfIb-Y8Ba0We6VQ7kTU7qFQtS5NP_lazw,26057 -dns/asyncresolver.py,sha256=GD86dCyW9YGKs6SggWXwBKEXifW7Qdx4cEAGFKY6fA4,17852 -dns/dnssec.py,sha256=uk3eiNbhHR8OmZJpqh2NLrBRV2329RukbuFNSC0whpA,40663 -dns/dnssecalgs/__init__.py,sha256=qU3J8IH7DiGMGHOoXLM7cPdbm9sXVkUcHjK9rnPtSPM,4270 -dns/dnssecalgs/__pycache__/__init__.cpython-311.pyc,, -dns/dnssecalgs/__pycache__/base.cpython-311.pyc,, -dns/dnssecalgs/__pycache__/cryptography.cpython-311.pyc,, -dns/dnssecalgs/__pycache__/dsa.cpython-311.pyc,, -dns/dnssecalgs/__pycache__/ecdsa.cpython-311.pyc,, -dns/dnssecalgs/__pycache__/eddsa.cpython-311.pyc,, -dns/dnssecalgs/__pycache__/rsa.cpython-311.pyc,, -dns/dnssecalgs/base.py,sha256=hsFHFr_eCYeDcI0eU6_WiLlOYL0GR4QJ__sXoMrIAfE,2446 -dns/dnssecalgs/cryptography.py,sha256=3uqMfRm-zCkJPOrxUqlu9CmdxIMy71dVor9eAHi0wZM,2425 -dns/dnssecalgs/dsa.py,sha256=hklh_HkT_ZffQBHQ7t6pKUStTH4x5nXlz8R9RUP72aY,3497 -dns/dnssecalgs/ecdsa.py,sha256=GWrJgEXAK08MCdbLk7LQcD2ajKqW_dbONWXh3wieLzw,3016 -dns/dnssecalgs/eddsa.py,sha256=9lQQZ92f2PiIhhylieInO-19aSTDQiyoY8X2kTkGlcs,1914 -dns/dnssecalgs/rsa.py,sha256=jWkhWKByylIo7Y9gAiiO8t8bowF8IZ0siVjgZpdhLSE,3555 -dns/dnssectypes.py,sha256=CyeuGTS_rM3zXr8wD9qMT9jkzvVfTY2JWckUcogG83E,1799 -dns/e164.py,sha256=EsK8cnOtOx7kQ0DmSwibcwkzp6efMWjbRiTyHZO8Q-M,3978 -dns/edns.py,sha256=zV-_hBJXyo573G5RuADpiKcJPAEM_bO-Rhbi1rW9iKM,14004 -dns/entropy.py,sha256=qkG8hXDLzrJS6R5My26iA59c0RhPwJNzuOhOCAZU5Bw,4242 -dns/enum.py,sha256=EepaunPKixTSrascy7iAe9UQEXXxP_MB5Gx4jUpHIhg,3691 -dns/exception.py,sha256=FphWy-JLRG06UUUq2VmUGwdPA1xWja_8YfrcffRFlQs,5957 -dns/flags.py,sha256=cQ3kTFyvcKiWHAxI5AwchNqxVOrsIrgJ6brgrH42Wq8,2750 -dns/grange.py,sha256=HA623Mv2mZDmOK_BZNDDakT0L6EHsMQU9lFFkE8dKr0,2148 -dns/immutable.py,sha256=vvdKWL9jeOCWGb3vBHcjdJqO7ZqY7Cc3jlG7alFFFOo,1836 -dns/inet.py,sha256=pSjZ7qnGQ3sfqcnHuwBaX3NezrOwNRrDV3pZV73Je-w,5278 -dns/ipv4.py,sha256=E3P-j6Hq40CROhx5TPJHyL2hZwheLXzEuo4oJRFFqeg,2064 -dns/ipv6.py,sha256=5EgxQ3q_LNNPqXETkueZQQUl6LYj0HK8FRofwtILfR0,6192 -dns/message.py,sha256=6OXRaNfhSJ_xrTuhUZJ3CdN_kLJ_s78J3kh4O5GHJ_s,63161 
-dns/name.py,sha256=sdI8_WQizVfBGUEg_oS0a7qHdCpiNEX9mGbwXF3uxAY,34424 -dns/namedict.py,sha256=hJRYpKeQv6Bd2LaUOPV0L_a0eXEIuqgggPXaH4c3Tow,4000 -dns/nameserver.py,sha256=NqCanrWjj3kQWWsjHgCQ5B3xVOlg5URhYeS1rc7RPZI,9096 -dns/node.py,sha256=58G51FNCWcXqJMUMefs9K1dYPAIpFDS-s-1YAVoqFkM,12664 -dns/opcode.py,sha256=I6JyuFUL0msja_BYm6bzXHfbbfqUod_69Ss4xcv8xWQ,2730 -dns/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -dns/query.py,sha256=9HRjCP8qHMoUCP6F6GS-ASv_sVXEEoCanP-Vvx8FFuE,51935 -dns/quic/__init__.py,sha256=cV_5B_I8JjgfuVUuTItyYyEqhMNl1RDK6abTal1RuBY,2163 -dns/quic/__pycache__/__init__.cpython-311.pyc,, -dns/quic/__pycache__/_asyncio.cpython-311.pyc,, -dns/quic/__pycache__/_common.cpython-311.pyc,, -dns/quic/__pycache__/_sync.cpython-311.pyc,, -dns/quic/__pycache__/_trio.cpython-311.pyc,, -dns/quic/_asyncio.py,sha256=1MqoikjGT4be9-aZG1Vq_ZMOY44oAKwLMoQdGkbW0yE,8003 -dns/quic/_common.py,sha256=7GORU3PVSMD8GpN4oR36HHjSHwAGz2tAqtXLF93GxNo,5503 -dns/quic/_sync.py,sha256=VWFW5ssX6O-g8Nipc_RzPSc5lFv9Tnv88CqbME1Uaes,7707 -dns/quic/_trio.py,sha256=hnz1WCPU8FYWZdbq97_Grq4nmDlOm1qSUrbk0CTkjxc,6544 -dns/rcode.py,sha256=N6JjrIQjCdJy0boKIp8Hcky5tm__LSDscpDz3rE_sgU,4156 -dns/rdata.py,sha256=brq_h01u_bHuLJ_xT1iue8HYXtNjdDqqTELLDRt5QYc,29557 -dns/rdataclass.py,sha256=TK4W4ywB1L_X7EZqk2Gmwnu7vdQpolQF5DtQWyNk5xo,2984 -dns/rdataset.py,sha256=pHywgIeDtJtTTnSb7x5bXzbNv3ORCBuw-ks74EW8qeo,17049 -dns/rdatatype.py,sha256=gIdYZ0iHRlgiTEO-ftobUANmaAmjTnNc4JljMaP1OnQ,7339 -dns/rdtypes/ANY/AFSDB.py,sha256=BpmfO1dxhDxaMCG2kejYQQVNvVmS-PKgwuZpDWBdWTc,1662 -dns/rdtypes/ANY/AMTRELAY.py,sha256=YO4zGhp8SBFibhNYu12Z3xZImvGicWfTcwS3ISiPgYc,3382 -dns/rdtypes/ANY/AVC.py,sha256=D6d0Ex0BThmFXmCWvN2Spa84siVZPiQZeXjuv0TLZrY,1025 -dns/rdtypes/ANY/CAA.py,sha256=fqSrgAtNrWeJMbQTLiJuCKyUGuUMeyrI0dHxRcG0NYk,2512 -dns/rdtypes/ANY/CDNSKEY.py,sha256=behtB7NJWa2I05fLoHrLktqNNjRvBxm5q84GQf3DkLM,1226 -dns/rdtypes/ANY/CDS.py,sha256=BSB8MHej1iN8Qbr2mY2hjJ4o8wXZxJ9yS_rIHCs_pBI,1164 -dns/rdtypes/ANY/CERT.py,sha256=w5tC8POadB_WesP9x2Ir7FhHusZ2WprI-nKPjedgJhc,3534 -dns/rdtypes/ANY/CNAME.py,sha256=dnligzy3ItR3cAh-BW8CkBOIN1iMUuuVwfkhah87G_c,1207 -dns/rdtypes/ANY/CSYNC.py,sha256=cvOarNvap-_4YxdvpkWplwWKoGgkB1trlmvKOp5yiQo,2440 -dns/rdtypes/ANY/DLV.py,sha256=CypU0r_3pU3f1_-lSBywvXjNXu1IFCEDvKELDP-5_qk,987 -dns/rdtypes/ANY/DNAME.py,sha256=0UalrV9mwXfTNRm1CBwu2KywSdchKVJ3w02dY8xXsg0,1151 -dns/rdtypes/ANY/DNSKEY.py,sha256=W47nY1dSxcO3TaXrEo8cWDcsYgPwQDT8h9s9PBKfANU,1224 -dns/rdtypes/ANY/DS.py,sha256=ZkA31leBr2FbB4HmuWK9yuLzkMFqSY9s4Q_8eEmBDvY,996 -dns/rdtypes/ANY/EUI48.py,sha256=j_VNoznxW9legrRI21qHXOTUoKcN1prwtRBx0jfmorI,1152 -dns/rdtypes/ANY/EUI64.py,sha256=QX7_T0t9dmqje7NcQU55qlAV0k7ZnlfaTrEc8H41KRo,1162 -dns/rdtypes/ANY/GPOS.py,sha256=KO16H4VcRe7P2o67jD4eMV1MuVK8W_-xtJeZSUKer3E,4434 -dns/rdtypes/ANY/HINFO.py,sha256=uzMjQoc286m7jFcZ7Bc388NIyG_6lrUM4GrskN6wUMM,2250 -dns/rdtypes/ANY/HIP.py,sha256=JSMKOVkAdM2jV4IoxSnRlIcEtldimL5z_6J1kMiHnRk,3229 -dns/rdtypes/ANY/ISDN.py,sha256=mOI9rSD8BqkdwvNKWwS580Rp-yq13Z_Q3MVoI0aXn_k,2714 -dns/rdtypes/ANY/L32.py,sha256=dF4DNMVOubJysF1YuoIDqpGhzEC7MDIK8r5MJS5WVLw,1287 -dns/rdtypes/ANY/L64.py,sha256=BQC32j-ED1jgNJkZuQ6cP1qpmPQrJU_WwYN4wXAujoE,1593 -dns/rdtypes/ANY/LOC.py,sha256=rr9WaEDZaT9akesOxGeSscXzPVkQO8e9OpVm9CJTsEc,12025 -dns/rdtypes/ANY/LP.py,sha256=Gc9WDLUJDLCVBiBv9OHHsASELdd7IFJ0LXUN8Ud6a8A,1339 -dns/rdtypes/ANY/MX.py,sha256=RqJSBgnejy70cBmhNLrDiTZxVpZnomujroJBWiozSb4,996 -dns/rdtypes/ANY/NID.py,sha256=ezcvntoK3FQ_LbtzVDkUWP1bIqmqOq2COcQ1EEQf_Uc,1545 
-dns/rdtypes/ANY/NINFO.py,sha256=-WoY0aN8T4fOFQPj2xUGpAwL-0RpH08jDZ5y-CLlb2Q,1042 -dns/rdtypes/ANY/NS.py,sha256=Y97NCXGAWRi5o8wcsr_5HDXVrU67wfmPYAPRLzs--AY,996 -dns/rdtypes/ANY/NSEC.py,sha256=gasas9ITgOCnXpcq-GDzOfmgHX-K4XQJ00aAirovXGs,2476 -dns/rdtypes/ANY/NSEC3.py,sha256=7N1X8XENEMy8Eu-OS-Sv1iR2IML-LL4IFgfpZE_uBP0,4152 -dns/rdtypes/ANY/NSEC3PARAM.py,sha256=wbiKFzM-d43zd2Yuy7lGwObXDufy7P9fDCypYKQmbXE,2636 -dns/rdtypes/ANY/OPENPGPKEY.py,sha256=rhcB9knQVTcSoS3yOamXzNUzxNuSbYQP0WkEUNF3k_g,1852 -dns/rdtypes/ANY/OPT.py,sha256=W9rUHHVxYyKE0cQnrojh2UQnEz5H0TCXtN5ag-KEpZU,2562 -dns/rdtypes/ANY/PTR.py,sha256=cUwuFKXckmAjabsBT3lMaHyHzA_24QDHUi5goFMXXuc,998 -dns/rdtypes/ANY/RP.py,sha256=FmX8WQ_XaGXYUoArud1T4lTi2yXrSqqxaguHhB9y7MY,2185 -dns/rdtypes/ANY/RRSIG.py,sha256=oyYoYdjv421AQ_w7Kylq3Kp6BAO6sINqo3ZtcWZfHZ8,4924 -dns/rdtypes/ANY/RT.py,sha256=sErLRUOGk3H_jQ1-Pu2h30HqqFMqIQaWuasRx6OH-wU,1014 -dns/rdtypes/ANY/SMIMEA.py,sha256=6yjHuVDfIEodBU9wxbCGCDZ5cWYwyY6FCk-aq2VNU0s,222 -dns/rdtypes/ANY/SOA.py,sha256=mnfEFZZ-et0Euz1o3bDUxWXEtVmTzQ2VkPwVDUVIogM,3146 -dns/rdtypes/ANY/SPF.py,sha256=M1_Nm5l4pHPlhUT13oTnSRdpI-gpTpM5myUE0Ym00V0,1023 -dns/rdtypes/ANY/SSHFP.py,sha256=avOC-M3V3-ukuo1Ej7nNwjzLjrCLDGLRrvMgw_wwLWE,2531 -dns/rdtypes/ANY/TKEY.py,sha256=PxytD37IfJDH3jDI4Tvv6X3lrP5ldMFn0y_nO6WGxIY,4932 -dns/rdtypes/ANY/TLSA.py,sha256=EYP7AXBh4zMtBgnfz828qfoLkFLCm5g8vR-6oX_DTbE,219 -dns/rdtypes/ANY/TSIG.py,sha256=zrA3aWrgmL1Wf-svd1gHx4Us1QNA0qI1YcsVAybulSk,4751 -dns/rdtypes/ANY/TXT.py,sha256=jKTLk67pF0MaiGllLsBXgH_Z-PJjLFKTnI5mO36i7_0,1001 -dns/rdtypes/ANY/URI.py,sha256=wSGmh6pStbLtttwD196POVeHvGqcsEBKp5G4TYPyCwA,2922 -dns/rdtypes/ANY/X25.py,sha256=KLZT5BRBMor8GRlhqNSnjd5zVym0yihMt0MOuP97h2I,1945 -dns/rdtypes/ANY/ZONEMD.py,sha256=tiOqxbp_A3CSmeZ5rO4aixiWAfzgUzmPOEjfo1ecj40,2394 -dns/rdtypes/ANY/__init__.py,sha256=Pox71HfsEnGGB1PGU44pwrrmjxPLQlA-IbX6nQRoA2M,1497 -dns/rdtypes/ANY/__pycache__/AFSDB.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/AVC.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/CAA.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/CDS.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/CERT.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/CNAME.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/CSYNC.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/DLV.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/DNAME.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/DS.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/EUI48.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/EUI64.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/GPOS.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/HINFO.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/HIP.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/ISDN.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/L32.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/L64.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/LOC.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/LP.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/MX.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/NID.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/NINFO.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/NS.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/NSEC.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/NSEC3.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/OPT.cpython-311.pyc,, 
-dns/rdtypes/ANY/__pycache__/PTR.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/RP.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/RRSIG.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/RT.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/SOA.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/SPF.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/SSHFP.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/TKEY.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/TLSA.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/TSIG.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/TXT.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/URI.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/X25.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-311.pyc,, -dns/rdtypes/ANY/__pycache__/__init__.cpython-311.pyc,, -dns/rdtypes/CH/A.py,sha256=hOLY9c7VkGMQZkKqjclUMj2s4M5wD1-M8F738FwyTFw,2217 -dns/rdtypes/CH/__init__.py,sha256=GD9YeDKb9VBDo-J5rrChX1MWEGyQXuR9Htnbhg_iYLc,923 -dns/rdtypes/CH/__pycache__/A.cpython-311.pyc,, -dns/rdtypes/CH/__pycache__/__init__.cpython-311.pyc,, -dns/rdtypes/IN/A.py,sha256=pq9G7ZZrCCEBWOFWvLmivtu8b_9ZAFIJU0oQUMHHudE,1815 -dns/rdtypes/IN/AAAA.py,sha256=GvQ1So05gExl1pw0HxO4ypfrT-EfnVCwBVVqgSvq5zk,1821 -dns/rdtypes/IN/APL.py,sha256=0GaLpxZ1jFI82NUJ2-t23PRd9q-678X9Z8iJZBVzxtA,5099 -dns/rdtypes/IN/DHCID.py,sha256=x-JxOiEbaOUREw2QXBLKvGTrljn7mrs8fO2jjlmc4AI,1857 -dns/rdtypes/IN/HTTPS.py,sha256=P-IjwcvDQMmtoBgsDHglXF7KgLX73G6jEDqCKsnaGpQ,220 -dns/rdtypes/IN/IPSECKEY.py,sha256=E1jjo99dVZ1GiB6sGioPZR9sG1aP9aZbifq7F91IjOI,3291 -dns/rdtypes/IN/KX.py,sha256=q8Ns4jiCuxqkYZzfkihS2WXaQ7qxQY7SSzibJu_oNQ0,1014 -dns/rdtypes/IN/NAPTR.py,sha256=MxuMrWCAR3p7vHPFddtWa2uQxvXTPKjkawbT_JxyZ0A,3751 -dns/rdtypes/IN/NSAP.py,sha256=bHxNkjZYbq5MrkWQC8ILoTrqpoNzRfCrWtOFgSjoO60,2166 -dns/rdtypes/IN/NSAP_PTR.py,sha256=HX32Hz7pop1a8amu8hLvi2-5YtzbROBUsbGmbOngcZ0,1016 -dns/rdtypes/IN/PX.py,sha256=g8OQekugan0lUTEjwRmApDKqSlEzFa_-FtM4Z9w7TnU,2757 -dns/rdtypes/IN/SRV.py,sha256=OTMa1rnbWtBf19ft7uAcjGAEtUwk-C-rDR9pAaUvBMA,2770 -dns/rdtypes/IN/SVCB.py,sha256=HeFmi2v01F00Hott8FlvQ4R7aPxFmT7RF-gt45R5K_M,218 -dns/rdtypes/IN/WKS.py,sha256=sE-jEDIrKzchRA2-KqChOTSRSDrueINDhD_hPH4IjR0,3653 -dns/rdtypes/IN/__init__.py,sha256=HbI8aw9HWroI6SgEvl8Sx6FdkDswCCXMbSRuJy5o8LQ,1083 -dns/rdtypes/IN/__pycache__/A.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/AAAA.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/APL.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/DHCID.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/HTTPS.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/KX.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/NAPTR.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/NSAP.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/PX.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/SRV.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/SVCB.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/WKS.cpython-311.pyc,, -dns/rdtypes/IN/__pycache__/__init__.cpython-311.pyc,, -dns/rdtypes/__init__.py,sha256=NYizfGglJfhqt_GMtSSXf7YQXIEHHCiJ_Y_qaLVeiOI,1073 -dns/rdtypes/__pycache__/__init__.cpython-311.pyc,, -dns/rdtypes/__pycache__/dnskeybase.cpython-311.pyc,, -dns/rdtypes/__pycache__/dsbase.cpython-311.pyc,, -dns/rdtypes/__pycache__/euibase.cpython-311.pyc,, -dns/rdtypes/__pycache__/mxbase.cpython-311.pyc,, -dns/rdtypes/__pycache__/nsbase.cpython-311.pyc,, -dns/rdtypes/__pycache__/svcbbase.cpython-311.pyc,, 
-dns/rdtypes/__pycache__/tlsabase.cpython-311.pyc,, -dns/rdtypes/__pycache__/txtbase.cpython-311.pyc,, -dns/rdtypes/__pycache__/util.cpython-311.pyc,, -dns/rdtypes/dnskeybase.py,sha256=-KMVEf-AKcfaQo4GXkQYz68PH-3nqJYJsl31Gtv2Vv8,2857 -dns/rdtypes/dsbase.py,sha256=_LkKfZ5rA0eV-AVvkhWjZ8niiDQQRdvTPGKmmFeGDiM,3428 -dns/rdtypes/euibase.py,sha256=uPt1qsqMB32T7LOxMsSfEtakF4A4prWLoZR-_MU4Hp8,2631 -dns/rdtypes/mxbase.py,sha256=iPJcRxNljkrXN3oi29_VE6qkTrOqpeNsp6pRo_F9h9M,3199 -dns/rdtypes/nsbase.py,sha256=X52rY0FrKpg38TvUIN_0wQJJpBTMgbuBDMEIF1cFXF4,2325 -dns/rdtypes/svcbbase.py,sha256=hO3JFkPJ1YV60AfIhJy9qXu4glZagRTapuhV6sPyjNo,16952 -dns/rdtypes/tlsabase.py,sha256=QtyXSL4wxPU-1LfiU6Pj-wrvaFg4_qNjvrGX5g0JbVA,2597 -dns/rdtypes/txtbase.py,sha256=xdGVjo817CYrbpUsC00LsBIbnFCnXJZr6ENcdQpSqnA,3630 -dns/rdtypes/util.py,sha256=6AGQ-k3mLNlx4Ep_FiDABj1WVumUUGs3zQ6X-2iISec,9003 -dns/renderer.py,sha256=44AhGs5mkMDDck2WZ6ScTyCYGPh7UrOEiCQRsdqs0uI,10673 -dns/resolver.py,sha256=wagpUIu8Oh12O-zk48U30A6VQQOspjfibU4Ls2So-kM,73552 -dns/reversename.py,sha256=zoqXEbMZXm6R13nXbJHgTsf6L2C6uReODj6mqSHrTiE,3828 -dns/rrset.py,sha256=QMz3uoWt8X0tmbVHOCrOuUozL795A-aT3RyzaPyv98M,9168 -dns/serial.py,sha256=-t5rPW-TcJwzBMfIJo7Tl-uDtaYtpqOfCVYx9dMaDCY,3606 -dns/set.py,sha256=R8LN8_aUrvOfav5Za7VcSrdi0D10jJsSrNewdrc8Lwg,9089 -dns/tokenizer.py,sha256=Dcc3lQgEIHCVZBuO6FaKWEojtPSd3EuaUC4vQA-spnk,23583 -dns/transaction.py,sha256=b-jo7wxbkB1bxVRw0a0hE0lZRveKV6WcYKOUVctUpdw,22660 -dns/tsig.py,sha256=1YzDVByrQ_qpITVQjO4fIkZUqY9d6lMJwv_ePeQNPmE,11422 -dns/tsigkeyring.py,sha256=Z0xZemcU3XjZ9HlxBYv2E2PSuIhaFreqLDlD7HcmZDA,2633 -dns/ttl.py,sha256=fWFkw8qfk6saTp7lAPxZOuD3U3TRxVRvIpljQnG-01I,2979 -dns/update.py,sha256=y9d6LOO8xrUaH2UrZhy3ssnx8bJEsxqTArw5V8XqBRs,12243 -dns/version.py,sha256=tV2AtPHOnpAh4Tt6Xz7VjswJej9aIiOOAm-voiNqxro,1926 -dns/versioned.py,sha256=3YQj8mzGmZEsjnuVJJjcWopVmDKYLhEj4hEGTLEwzco,11765 -dns/win32util.py,sha256=l6OznV8cDXxuqB6yBFhYt_-P1v_ENnS-8DCQqNw8DWI,9005 -dns/wire.py,sha256=vy0SolgECbO1UXB4dnhXhDeFKOJT29nQxXvSfKOgA5s,2830 -dns/xfr.py,sha256=FKkKO-kSpyE1vHU5mnoPIP4YxiCl5gG7E5wOgY_4GO8,13273 -dns/zone.py,sha256=0PJLT0gqTJ4Woq37ucN80k_-xBm0oeqk7JBnJwpCca0,51093 -dns/zonefile.py,sha256=rJEY8rEvDG9BETsJsb-TozRe5ccAkgSNs0yb9vx-_uU,27930 -dns/zonetypes.py,sha256=HrQNZxZ_gWLWI9dskix71msi9wkYK5pgrBBbPb1T74Y,690 -dnspython-2.4.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -dnspython-2.4.2.dist-info/LICENSE,sha256=w-o_9WVLMpwZ07xfdIGvYjw93tSmFFWFSZ-EOtPXQc0,1526 -dnspython-2.4.2.dist-info/METADATA,sha256=aQdMbHIJB5rtUWHQyRB5EAPB7km6lV3ohDC5cpygnbs,4892 -dnspython-2.4.2.dist-info/RECORD,, -dnspython-2.4.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -dnspython-2.4.2.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88 diff --git a/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/REQUESTED b/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/WHEEL b/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/WHEEL deleted file mode 100644 index 258a6ff..0000000 --- a/server/venv/Lib/site-packages/dnspython-2.4.2.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: poetry-core 1.6.1 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/INSTALLER b/server/venv/Lib/site-packages/docker-6.1.3.dist-info/INSTALLER deleted file mode 100644 index 
a1b589e..0000000 --- a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/LICENSE b/server/venv/Lib/site-packages/docker-6.1.3.dist-info/LICENSE deleted file mode 100644 index 75191a4..0000000 --- a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/LICENSE +++ /dev/null @@ -1,191 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - Copyright 2016 Docker, Inc. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/METADATA b/server/venv/Lib/site-packages/docker-6.1.3.dist-info/METADATA deleted file mode 100644 index da998f0..0000000 --- a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/METADATA +++ /dev/null @@ -1,114 +0,0 @@ -Metadata-Version: 2.1 -Name: docker -Version: 6.1.3 -Summary: A Python library for the Docker Engine API. -Home-page: https://github.com/docker/docker-py -Maintainer: Ulysses Souza -Maintainer-email: ulysses.souza@docker.com -License: Apache License 2.0 -Project-URL: Documentation, https://docker-py.readthedocs.io -Project-URL: Changelog, https://docker-py.readthedocs.io/en/stable/change-log.html -Project-URL: Source, https://github.com/docker/docker-py -Project-URL: Tracker, https://github.com/docker/docker-py/issues -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Other Environment -Classifier: Intended Audience :: Developers -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Topic :: Software Development -Classifier: Topic :: Utilities -Classifier: License :: OSI Approved :: Apache Software License -Requires-Python: >=3.7 -Description-Content-Type: text/markdown -License-File: LICENSE -Requires-Dist: packaging (>=14.0) -Requires-Dist: requests (>=2.26.0) -Requires-Dist: urllib3 (>=1.26.0) -Requires-Dist: websocket-client (>=0.32.0) -Requires-Dist: pywin32 (>=304) ; sys_platform == "win32" -Provides-Extra: ssh -Requires-Dist: paramiko (>=2.4.3) ; extra == 'ssh' -Provides-Extra: tls - -# Docker SDK for Python - -[![Build Status](https://github.com/docker/docker-py/actions/workflows/ci.yml/badge.svg?branch=master)](https://github.com/docker/docker-py/actions/workflows/ci.yml/) - -A Python library for the Docker Engine API. It lets you do anything the `docker` command does, but from within Python apps – run containers, manage containers, manage Swarms, etc. - -## Installation - -The latest stable version [is available on PyPI](https://pypi.python.org/pypi/docker/). Either add `docker` to your `requirements.txt` file or install with pip: - - pip install docker - -> Older versions (< 6.0) required installing `docker[tls]` for SSL/TLS support. -> This is no longer necessary and is a no-op, but is supported for backwards compatibility. 
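The extras listed in the metadata above map to optional transports in the client itself; the `ssh` extra, for instance, pulls in `paramiko` so that `base_url` can point at a remote engine over SSH (the adapter wiring is visible in `docker/api/client.py` later in this diff). A minimal sketch, assuming a hypothetical host `example.com` whose Docker daemon is reachable over SSH and that `pip install docker[ssh]` has been run:

```python
import docker

# Hypothetical remote engine; requires the ssh extra (paramiko):
#   pip install docker[ssh]
client = docker.DockerClient(base_url="ssh://user@example.com")

# Every subsequent call travels over the SSH transport, e.g.:
print(client.version()["ApiVersion"])
```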
- -## Usage - -Connect to Docker using the default socket or the configuration in your environment: - -```python -import docker -client = docker.from_env() -``` - -You can run containers: - -```python ->>> client.containers.run("ubuntu:latest", "echo hello world") -'hello world\n' -``` - -You can run containers in the background: - -```python ->>> client.containers.run("bfirsh/reticulate-splines", detach=True) -<Container '45e6d2de7c54'> -``` - -You can manage containers: - -```python ->>> client.containers.list() -[<Container '45e6d2de7c54'>, <Container 'db18e4f20eaa'>, ...] - ->>> container = client.containers.get('45e6d2de7c54') - ->>> container.attrs['Config']['Image'] -"bfirsh/reticulate-splines" - ->>> container.logs() -"Reticulating spline 1...\n" - ->>> container.stop() -``` - -You can stream logs: - -```python ->>> for line in container.logs(stream=True): -... print(line.strip()) -Reticulating spline 2... -Reticulating spline 3... -... -``` - -You can manage images: - -```python ->>> client.images.pull('nginx') -<Image 'nginx'> - ->>> client.images.list() -[<Image 'ubuntu'>, <Image 'nginx'>, ...] -``` - -[Read the full documentation](https://docker-py.readthedocs.io) to see everything you can do. diff --git a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/RECORD b/server/venv/Lib/site-packages/docker-6.1.3.dist-info/RECORD deleted file mode 100644 index 975660f..0000000 --- a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/RECORD +++ /dev/null @@ -1,143 +0,0 @@ -docker-6.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -docker-6.1.3.dist-info/LICENSE,sha256=8vCwf6XkksEdJ6oNLz8aDmS50X8y2KpImuKvlgmvM7I,10758 -docker-6.1.3.dist-info/METADATA,sha256=YZu5A7oG4YgSiPcaXXm9S0PegRNAA2I2SJMdQ9X5xgE,3480 -docker-6.1.3.dist-info/RECORD,, -docker-6.1.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -docker-6.1.3.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 -docker-6.1.3.dist-info/top_level.txt,sha256=ANFR59OS5o4sdWpvxCZAAG3cCpjTfbo_kKe3P2MYi70,7 -docker/__init__.py,sha256=Dd9AdZduT0TnVrecd5fF3zk4GGl-aFlfdN2oKB_EvmQ,228 -docker/__pycache__/__init__.cpython-311.pyc,, -docker/__pycache__/_version.cpython-311.pyc,, -docker/__pycache__/auth.cpython-311.pyc,, -docker/__pycache__/client.cpython-311.pyc,, -docker/__pycache__/constants.cpython-311.pyc,, -docker/__pycache__/errors.cpython-311.pyc,, -docker/__pycache__/tls.cpython-311.pyc,, -docker/__pycache__/version.cpython-311.pyc,, -docker/_version.py,sha256=TmxyIydO4sd1hsJxGpGmUq9EnsrBkhLTcGLWcgADhOs,160 -docker/api/__init__.py,sha256=BiXMFMoFV_JnfVjY0tADBBs3E7B8D0ZIgzihIlKQueQ,45 -docker/api/__pycache__/__init__.cpython-311.pyc,, -docker/api/__pycache__/build.cpython-311.pyc,, -docker/api/__pycache__/client.cpython-311.pyc,, -docker/api/__pycache__/config.cpython-311.pyc,, -docker/api/__pycache__/container.cpython-311.pyc,, -docker/api/__pycache__/daemon.cpython-311.pyc,, -docker/api/__pycache__/exec_api.cpython-311.pyc,, -docker/api/__pycache__/image.cpython-311.pyc,, -docker/api/__pycache__/network.cpython-311.pyc,, -docker/api/__pycache__/plugin.cpython-311.pyc,, -docker/api/__pycache__/secret.cpython-311.pyc,, -docker/api/__pycache__/service.cpython-311.pyc,, -docker/api/__pycache__/swarm.cpython-311.pyc,, -docker/api/__pycache__/volume.cpython-311.pyc,, -docker/api/build.py,sha256=CUfl3lxBn8h3AHpY8sf_B13JRNb6t7NAtm5BacpLbXc,14667 -docker/api/client.py,sha256=wm0DcMx1Ct7JOZjwF1CKo-oET7DC7aIbSxVqsODNBig,19128 -docker/api/config.py,sha256=B1xH9KGgMFGJ9oz-R7IvlW5FUWuFxXDkNnM5YfgPDrQ,2706 -docker/api/container.py,sha256=0eTJe5u-ML_GTTR7OTaDZnpqtbC15tNTtJXjEEgXTOo,52534
-docker/api/daemon.py,sha256=QHMYZH0w4xhOeWWDyr5ae13ktKu8-tnG7U44dWl9PxQ,6008 -docker/api/exec_api.py,sha256=2QMkgH-XMK-aMOPdvu7HQzI0Ax4oi7wA_U67S_pLLPk,6224 -docker/api/image.py,sha256=nBShNQKZZi34BPpPfilXS_-Woigp0TC359Sjlbrk9oM,20255 -docker/api/network.py,sha256=Z46lVwBwPwVzCv8UgQ_Olt3f_7ZKfdeu4XCzrriHvWY,10691 -docker/api/plugin.py,sha256=eR6GI12ECXg_vLrQ9GBgMuiUvtcqmDxzhmw59ymOVZw,8982 -docker/api/secret.py,sha256=NUpa0WihkiGWAVA_29GI45Hp1t4NSFQeVgMOpTj_18U,2883 -docker/api/service.py,sha256=VJAX7J_-g6Mc2kr4ko0agheW2_3t-YDU_TFrjZ796po,19255 -docker/api/swarm.py,sha256=Ke3hZxGRTuEh8a5ypTGNwO0TjvOtmv3mjRahLt9Fka8,18116 -docker/api/volume.py,sha256=ov_7O6nWCpKYSRkP2hquj8OVldrthquNxqxOgbH68ao,5133 -docker/auth.py,sha256=ir4c2aVd5Hf_HaHQB774b6KtvnBn12dEGENGrbiIE9U,13241 -docker/client.py,sha256=bbQWWb0s3uHB5w2Sdu1465jQNUuIOMIOSK3mAfzJg2E,7832 -docker/constants.py,sha256=yHgWs9dSA35LHDWflSgR0_0zW4JIZzC_0UvnVsLAebk,1194 -docker/context/__init__.py,sha256=29uRKZpoa9S3HKIyooMVzzYZKH6SKzcGqmRwJDjh7FY,72 -docker/context/__pycache__/__init__.cpython-311.pyc,, -docker/context/__pycache__/api.cpython-311.pyc,, -docker/context/__pycache__/config.cpython-311.pyc,, -docker/context/__pycache__/context.cpython-311.pyc,, -docker/context/api.py,sha256=zgZzUoY3Kfs8MWpdcDyPzF7TjDQaELzHUEpssDmwhP0,6433 -docker/context/config.py,sha256=VzVr3UY_ivzJSPI-5QvHywAV3oYU1N5CcXuuO-UQ7o0,2149 -docker/context/context.py,sha256=UuF5sd_KJTU-_EhN4ycpbovSI1mbxRMnev13K7mrIgI,7906 -docker/credentials/__init__.py,sha256=w5nswhHJJItLlGXWaB8xmSKG7HnMZ3ti4bSwLaxlpQc,117 -docker/credentials/__pycache__/__init__.cpython-311.pyc,, -docker/credentials/__pycache__/constants.cpython-311.pyc,, -docker/credentials/__pycache__/errors.cpython-311.pyc,, -docker/credentials/__pycache__/store.cpython-311.pyc,, -docker/credentials/__pycache__/utils.cpython-311.pyc,, -docker/credentials/constants.py,sha256=5HjRGorpVvkamvLBI9yLPvur1E0glaq_ZhuRfbVA_bE,142 -docker/credentials/errors.py,sha256=_MM0oT3Vny_HLVNZvrRkBW0NAEU-Mtb-Go9dKIZeAjs,573 -docker/credentials/store.py,sha256=J0l7uefxKCRyt9k8i0eyJTu_vn8VF66B8BmUTXTJIX0,3441 -docker/credentials/utils.py,sha256=0xhfjlCxa2_CyRGCSKzPvuAJzV-Ty7t-O5MKnyEq1us,224 -docker/errors.py,sha256=a_yDDkeASPf3RZ0XIc5Tx470Pg43aW8_7fDMyfcLcJE,5344 -docker/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -docker/models/__pycache__/__init__.cpython-311.pyc,, -docker/models/__pycache__/configs.cpython-311.pyc,, -docker/models/__pycache__/containers.cpython-311.pyc,, -docker/models/__pycache__/images.cpython-311.pyc,, -docker/models/__pycache__/networks.cpython-311.pyc,, -docker/models/__pycache__/nodes.cpython-311.pyc,, -docker/models/__pycache__/plugins.cpython-311.pyc,, -docker/models/__pycache__/resource.cpython-311.pyc,, -docker/models/__pycache__/secrets.cpython-311.pyc,, -docker/models/__pycache__/services.cpython-311.pyc,, -docker/models/__pycache__/swarm.cpython-311.pyc,, -docker/models/__pycache__/volumes.cpython-311.pyc,, -docker/models/configs.py,sha256=2XSscrUDpeMuhSiKEdJyLkTIlBDbL8aBhzpa76AhpqM,1781 -docker/models/containers.py,sha256=2Bzqz2UDy1sShuXX_euczdBPnuwRPe9sODUdYmeXnVo,46077 -docker/models/images.py,sha256=7jWtXpPvnR_VB5EZnXIFtH8WK0sVEMLKC-Si1mHAr10,18010 -docker/models/networks.py,sha256=t7oxs3hN_S-SbSCFOZ_N-V-M_b0Rpzt1F8PVv5jNYPQ,7962 -docker/models/nodes.py,sha256=Ft1oDseNJH_9UNRtwdSCB-xomomBbgsqybzuP86kEA8,2928 -docker/models/plugins.py,sha256=v1WJFuHHzBpnYXIzfj6ZUdxWh3GMVgfganKiJ41fUxc,5970 
-docker/models/resource.py,sha256=MOcmZcH3jDT_zi_G409Pvbkm6lDGdDXYCGhQGoBsxrA,2611 -docker/models/secrets.py,sha256=tz-5w4c51JhLJUwgIMl2FUbfkt8t4OobI_yONqD5FAU,1845 -docker/models/services.py,sha256=Au8FS5P_rd99kQLS4MbZR_FWBMplQg5lG8OkVtYUWx8,13922 -docker/models/swarm.py,sha256=v_c8sC0UXVRIkgBuK-Byq3kwTKHfteI3p5lQ0A8j3JM,8289 -docker/models/volumes.py,sha256=5Z5msY57CqNEKTfKWWwxgApndbZLzOeRkYvOOXSdMLM,2848 -docker/tls.py,sha256=Wy__hWzqK55yvyB4S-xCVKPzUFNbiXr94UsoErMFFoA,3267 -docker/transport/__init__.py,sha256=OBWoUBeQ2JNkUhy_qBvHyxcli3AFAzURNNCJgr9o-es,286 -docker/transport/__pycache__/__init__.cpython-311.pyc,, -docker/transport/__pycache__/basehttpadapter.cpython-311.pyc,, -docker/transport/__pycache__/npipeconn.cpython-311.pyc,, -docker/transport/__pycache__/npipesocket.cpython-311.pyc,, -docker/transport/__pycache__/sshconn.cpython-311.pyc,, -docker/transport/__pycache__/ssladapter.cpython-311.pyc,, -docker/transport/__pycache__/unixconn.cpython-311.pyc,, -docker/transport/basehttpadapter.py,sha256=j3J3XH4lYe1ZFnfy_pc5krujFdL7rpo5EjScUj9MVPc,192 -docker/transport/npipeconn.py,sha256=ghBqfBCrXLRWp9QSM8ioG1P8QxwPCuO6dZYQ5jr5M5A,3518 -docker/transport/npipesocket.py,sha256=zvetvWHmY4m6sWbMYjPMWZI-VpIXIKPUrgIKw-xNf_g,6585 -docker/transport/sshconn.py,sha256=F-ulwzToFXsZGMNnNX3MCyViK2ew-rgvlM8Eh6_HArg,7915 -docker/transport/ssladapter.py,sha256=u7_m4hSPd8nOAsntml9-k7aCUNRUFJfvRwgyirn2New,2172 -docker/transport/unixconn.py,sha256=HN-DmSEXjiwBzrMAVVhUxGLiN5LeCA-EaVioGYP4gLs,2932 -docker/types/__init__.py,sha256=OeZzEkI1283hH5HrErmHmk8mcAI6Mk0kpE5IIBWQyVw,595 -docker/types/__pycache__/__init__.cpython-311.pyc,, -docker/types/__pycache__/base.cpython-311.pyc,, -docker/types/__pycache__/containers.cpython-311.pyc,, -docker/types/__pycache__/daemon.cpython-311.pyc,, -docker/types/__pycache__/healthcheck.cpython-311.pyc,, -docker/types/__pycache__/networks.cpython-311.pyc,, -docker/types/__pycache__/services.cpython-311.pyc,, -docker/types/__pycache__/swarm.cpython-311.pyc,, -docker/types/base.py,sha256=shDFz5VEx-mfdtMtJQQIpT8GG6rZ-NmfUS6uFvSr3ig,110 -docker/types/containers.py,sha256=ooZGpCHlHxvrOn0PW-p7lRZA_X6CTQVd28acm_jXZb0,27481 -docker/types/daemon.py,sha256=2wYCRaAt3DI74HAk6CxIWqSoKWuhr29J9RjgkU8MtYw,1923 -docker/types/healthcheck.py,sha256=leuqfN6oUwlxSd_iMGFPfRiTB2ZipzESHtBj8NxzVv4,2776 -docker/types/networks.py,sha256=7I82VYPQd7Rjqfwx0-Q2jOIwGK-K8UiS0K61YkZtuuw,4240 -docker/types/services.py,sha256=rnz5mNfFVG8tVW_izFuU-gFy9kRVDiuVwJ1AaPl2nfA,33158 -docker/types/swarm.py,sha256=-xhjuOgXBE3Ct9zsuD0r-k8jROL-XETmRTWlRgGZTG4,4653 -docker/utils/__init__.py,sha256=GtbFn1E_1wlRDGEvrNX7I7tCmCEt3Kc3xJ3ictrGX2g,606 -docker/utils/__pycache__/__init__.cpython-311.pyc,, -docker/utils/__pycache__/build.cpython-311.pyc,, -docker/utils/__pycache__/config.cpython-311.pyc,, -docker/utils/__pycache__/decorators.cpython-311.pyc,, -docker/utils/__pycache__/fnmatch.cpython-311.pyc,, -docker/utils/__pycache__/json_stream.cpython-311.pyc,, -docker/utils/__pycache__/ports.cpython-311.pyc,, -docker/utils/__pycache__/proxy.cpython-311.pyc,, -docker/utils/__pycache__/socket.cpython-311.pyc,, -docker/utils/__pycache__/utils.cpython-311.pyc,, -docker/utils/build.py,sha256=vkPbg8CLdjNHvKupbWCj0yC7Wfp-VKADFeedbgpT_Bw,8019 -docker/utils/config.py,sha256=f5YJSiRJSGiOhwSTQJz_7HsA9AHSENib692TXNld6w0,1724 -docker/utils/decorators.py,sha256=csomwcPuekUQgzyKrhBlA5A3AlKeAa1-3Oem9N1tVaQ,1547 -docker/utils/fnmatch.py,sha256=CkbGWzQvoOLotOH7E909rhj1cC5x64r8rLwSn7MQPM8,3295 
-docker/utils/json_stream.py,sha256=CEEgxwKOGzb_Uizjiu6C5Biztumo3KHZ1snPHQ1CQvw,2178 -docker/utils/ports.py,sha256=ZDksUk-1qZqEuvg1sUijN1BBgTeGNeD93d02WdIGTa4,2800 -docker/utils/proxy.py,sha256=eqJGUaXPLnn5IB8NAM_EZgAmoAFVcHeq-xyMhEMCFRE,2203 -docker/utils/socket.py,sha256=9Ug_bOzAMolKgKRt0l5OejCMmPIkxQoAiLSsvDNLlhQ,5093 -docker/utils/utils.py,sha256=sx_syPTLuMvJ4QM3MW_6EID9Cuk2BLqQPLNKQ65WHUY,13741 -docker/version.py,sha256=Thhl2KxMoJZ89xTdnDx-PzH3zYSmKJcyteygC50UFUI,536 diff --git a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/REQUESTED b/server/venv/Lib/site-packages/docker-6.1.3.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/WHEEL b/server/venv/Lib/site-packages/docker-6.1.3.dist-info/WHEEL deleted file mode 100644 index 1f37c02..0000000 --- a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.40.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/top_level.txt b/server/venv/Lib/site-packages/docker-6.1.3.dist-info/top_level.txt deleted file mode 100644 index bdb9670..0000000 --- a/server/venv/Lib/site-packages/docker-6.1.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -docker diff --git a/server/venv/Lib/site-packages/docker/__init__.py b/server/venv/Lib/site-packages/docker/__init__.py deleted file mode 100644 index 46beb53..0000000 --- a/server/venv/Lib/site-packages/docker/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# flake8: noqa -from .api import APIClient -from .client import DockerClient, from_env -from .context import Context -from .context import ContextAPI -from .tls import TLSConfig -from .version import __version__ - -__title__ = 'docker' diff --git a/server/venv/Lib/site-packages/docker/_version.py b/server/venv/Lib/site-packages/docker/_version.py deleted file mode 100644 index e41a81a..0000000 --- a/server/venv/Lib/site-packages/docker/_version.py +++ /dev/null @@ -1,4 +0,0 @@ -# file generated by setuptools_scm -# don't change, don't track in version control -__version__ = version = '6.1.3' -__version_tuple__ = version_tuple = (6, 1, 3) diff --git a/server/venv/Lib/site-packages/docker/api/__init__.py b/server/venv/Lib/site-packages/docker/api/__init__.py deleted file mode 100644 index ff51844..0000000 --- a/server/venv/Lib/site-packages/docker/api/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# flake8: noqa -from .client import APIClient diff --git a/server/venv/Lib/site-packages/docker/api/build.py b/server/venv/Lib/site-packages/docker/api/build.py deleted file mode 100644 index 3a1a3d9..0000000 --- a/server/venv/Lib/site-packages/docker/api/build.py +++ /dev/null @@ -1,360 +0,0 @@ -import json -import logging -import os -import random - -from .. import auth -from .. import constants -from .. import errors -from .. import utils - - -log = logging.getLogger(__name__) - - -class BuildApiMixin: - def build(self, path=None, tag=None, quiet=False, fileobj=None, - nocache=False, rm=False, timeout=None, - custom_context=False, encoding=None, pull=False, - forcerm=False, dockerfile=None, container_limits=None, - decode=False, buildargs=None, gzip=False, shmsize=None, - labels=None, cache_from=None, target=None, network_mode=None, - squash=None, extra_hosts=None, platform=None, isolation=None, - use_config_proxy=True): - """ - Similar to the ``docker build`` command. Either ``path`` or ``fileobj`` - needs to be set. 
``path`` can be a local path (to a directory - containing a Dockerfile) or a remote URL. ``fileobj`` must be a - readable file-like object to a Dockerfile. - - If you have a tar file for the Docker build context (including a - Dockerfile) already, pass a readable file-like object to ``fileobj`` - and also pass ``custom_context=True``. If the stream is compressed - also, set ``encoding`` to the correct value (e.g ``gzip``). - - Example: - >>> from io import BytesIO - >>> from docker import APIClient - >>> dockerfile = ''' - ... # Shared Volume - ... FROM busybox:buildroot-2014.02 - ... VOLUME /data - ... CMD ["/bin/sh"] - ... ''' - >>> f = BytesIO(dockerfile.encode('utf-8')) - >>> cli = APIClient(base_url='tcp://127.0.0.1:2375') - >>> response = [line for line in cli.build( - ... fileobj=f, rm=True, tag='yourname/volume' - ... )] - >>> response - ['{"stream":" ---\\u003e a9eb17255234\\n"}', - '{"stream":"Step 1 : VOLUME /data\\n"}', - '{"stream":" ---\\u003e Running in abdc1e6896c6\\n"}', - '{"stream":" ---\\u003e 713bca62012e\\n"}', - '{"stream":"Removing intermediate container abdc1e6896c6\\n"}', - '{"stream":"Step 2 : CMD [\\"/bin/sh\\"]\\n"}', - '{"stream":" ---\\u003e Running in dba30f2a1a7e\\n"}', - '{"stream":" ---\\u003e 032b8b2855fc\\n"}', - '{"stream":"Removing intermediate container dba30f2a1a7e\\n"}', - '{"stream":"Successfully built 032b8b2855fc\\n"}'] - - Args: - path (str): Path to the directory containing the Dockerfile - fileobj: A file object to use as the Dockerfile. (Or a file-like - object) - tag (str): A tag to add to the final image - quiet (bool): Whether to return the status - nocache (bool): Don't use the cache when set to ``True`` - rm (bool): Remove intermediate containers. The ``docker build`` - command now defaults to ``--rm=true``, but we have kept the old - default of `False` to preserve backward compatibility - timeout (int): HTTP timeout - custom_context (bool): Optional if using ``fileobj`` - encoding (str): The encoding for a stream. Set to ``gzip`` for - compressing - pull (bool): Downloads any updates to the FROM image in Dockerfiles - forcerm (bool): Always remove intermediate containers, even after - unsuccessful builds - dockerfile (str): path within the build context to the Dockerfile - gzip (bool): If set to ``True``, gzip compression/encoding is used - buildargs (dict): A dictionary of build arguments - container_limits (dict): A dictionary of limits applied to each - container created by the build process. Valid keys: - - - memory (int): set memory limit for build - - memswap (int): Total memory (memory + swap), -1 to disable - swap - - cpushares (int): CPU shares (relative weight) - - cpusetcpus (str): CPUs in which to allow execution, e.g., - ``"0-3"``, ``"0,1"`` - decode (bool): If set to ``True``, the returned stream will be - decoded into dicts on the fly. Default ``False`` - shmsize (int): Size of `/dev/shm` in bytes. The size must be - greater than 0. If omitted the system uses 64MB - labels (dict): A dictionary of labels to set on the image - cache_from (:py:class:`list`): A list of images used for build - cache resolution - target (str): Name of the build-stage to build in a multi-stage - Dockerfile - network_mode (str): networking mode for the run commands during - build - squash (bool): Squash the resulting images layers into a - single layer. - extra_hosts (dict): Extra hosts to add to /etc/hosts in building - containers, as a mapping of hostname to IP address. 
- platform (str): Platform in the format ``os[/arch[/variant]]`` - isolation (str): Isolation technology used during build. - Default: `None`. - use_config_proxy (bool): If ``True``, and if the docker client - configuration file (``~/.docker/config.json`` by default) - contains a proxy configuration, the corresponding environment - variables will be set in the container being built. - - Returns: - A generator for the build output. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - ``TypeError`` - If neither ``path`` nor ``fileobj`` is specified. - """ - remote = context = None - headers = {} - container_limits = container_limits or {} - buildargs = buildargs or {} - if path is None and fileobj is None: - raise TypeError("Either path or fileobj needs to be provided.") - if gzip and encoding is not None: - raise errors.DockerException( - 'Can not use custom encoding if gzip is enabled' - ) - - for key in container_limits.keys(): - if key not in constants.CONTAINER_LIMITS_KEYS: - raise errors.DockerException( - f'Invalid container_limits key {key}' - ) - - if custom_context: - if not fileobj: - raise TypeError("You must specify fileobj with custom_context") - context = fileobj - elif fileobj is not None: - context = utils.mkbuildcontext(fileobj) - elif path.startswith(('http://', 'https://', - 'git://', 'github.com/', 'git@')): - remote = path - elif not os.path.isdir(path): - raise TypeError("You must specify a directory to build in path") - else: - dockerignore = os.path.join(path, '.dockerignore') - exclude = None - if os.path.exists(dockerignore): - with open(dockerignore) as f: - exclude = list(filter( - lambda x: x != '' and x[0] != '#', - [line.strip() for line in f.read().splitlines()] - )) - dockerfile = process_dockerfile(dockerfile, path) - context = utils.tar( - path, exclude=exclude, dockerfile=dockerfile, gzip=gzip - ) - encoding = 'gzip' if gzip else encoding - - u = self._url('/build') - params = { - 't': tag, - 'remote': remote, - 'q': quiet, - 'nocache': nocache, - 'rm': rm, - 'forcerm': forcerm, - 'pull': pull, - 'dockerfile': dockerfile, - } - params.update(container_limits) - - if use_config_proxy: - proxy_args = self._proxy_configs.get_environment() - for k, v in proxy_args.items(): - buildargs.setdefault(k, v) - if buildargs: - params.update({'buildargs': json.dumps(buildargs)}) - - if shmsize: - if utils.version_gte(self._version, '1.22'): - params.update({'shmsize': shmsize}) - else: - raise errors.InvalidVersion( - 'shmsize was only introduced in API version 1.22' - ) - - if labels: - if utils.version_gte(self._version, '1.23'): - params.update({'labels': json.dumps(labels)}) - else: - raise errors.InvalidVersion( - 'labels was only introduced in API version 1.23' - ) - - if cache_from: - if utils.version_gte(self._version, '1.25'): - params.update({'cachefrom': json.dumps(cache_from)}) - else: - raise errors.InvalidVersion( - 'cache_from was only introduced in API version 1.25' - ) - - if target: - if utils.version_gte(self._version, '1.29'): - params.update({'target': target}) - else: - raise errors.InvalidVersion( - 'target was only introduced in API version 1.29' - ) - - if network_mode: - if utils.version_gte(self._version, '1.25'): - params.update({'networkmode': network_mode}) - else: - raise errors.InvalidVersion( - 'network_mode was only introduced in API version 1.25' - ) - - if squash: - if utils.version_gte(self._version, '1.25'): - params.update({'squash': squash}) - else: - raise errors.InvalidVersion( - 'squash was 
only introduced in API version 1.25' - ) - - if extra_hosts is not None: - if utils.version_lt(self._version, '1.27'): - raise errors.InvalidVersion( - 'extra_hosts was only introduced in API version 1.27' - ) - - if isinstance(extra_hosts, dict): - extra_hosts = utils.format_extra_hosts(extra_hosts) - params.update({'extrahosts': extra_hosts}) - - if platform is not None: - if utils.version_lt(self._version, '1.32'): - raise errors.InvalidVersion( - 'platform was only introduced in API version 1.32' - ) - params['platform'] = platform - - if isolation is not None: - if utils.version_lt(self._version, '1.24'): - raise errors.InvalidVersion( - 'isolation was only introduced in API version 1.24' - ) - params['isolation'] = isolation - - if context is not None: - headers = {'Content-Type': 'application/tar'} - if encoding: - headers['Content-Encoding'] = encoding - - self._set_auth_headers(headers) - - response = self._post( - u, - data=context, - params=params, - headers=headers, - stream=True, - timeout=timeout, - ) - - if context is not None and not custom_context: - context.close() - - return self._stream_helper(response, decode=decode) - - @utils.minimum_version('1.31') - def prune_builds(self): - """ - Delete the builder cache - - Returns: - (dict): A dictionary containing information about the operation's - result. The ``SpaceReclaimed`` key indicates the amount of - bytes of disk space reclaimed. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url("/build/prune") - return self._result(self._post(url), True) - - def _set_auth_headers(self, headers): - log.debug('Looking for auth config') - - # If we don't have any auth data so far, try reloading the config - # file one more time in case anything showed up in there. - if not self._auth_configs or self._auth_configs.is_empty: - log.debug("No auth config in memory - loading from filesystem") - self._auth_configs = auth.load_config( - credstore_env=self.credstore_env - ) - - # Send the full auth configuration (if any exists), since the build - # could use any (or all) of the registries. 
- if self._auth_configs: - auth_data = self._auth_configs.get_all_credentials() - - # See https://github.com/docker/docker-py/issues/1683 - if (auth.INDEX_URL not in auth_data and - auth.INDEX_NAME in auth_data): - auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {}) - - log.debug( - 'Sending auth config ({})'.format( - ', '.join(repr(k) for k in auth_data.keys()) - ) - ) - - if auth_data: - headers['X-Registry-Config'] = auth.encode_header( - auth_data - ) - else: - log.debug('No auth config found') - - -def process_dockerfile(dockerfile, path): - if not dockerfile: - return (None, None) - - abs_dockerfile = dockerfile - if not os.path.isabs(dockerfile): - abs_dockerfile = os.path.join(path, dockerfile) - if constants.IS_WINDOWS_PLATFORM and path.startswith( - constants.WINDOWS_LONGPATH_PREFIX): - abs_dockerfile = '{}{}'.format( - constants.WINDOWS_LONGPATH_PREFIX, - os.path.normpath( - abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):] - ) - ) - if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or - os.path.relpath(abs_dockerfile, path).startswith('..')): - # Dockerfile not in context - read data to insert into tar later - with open(abs_dockerfile) as df: - return ( - f'.dockerfile.{random.getrandbits(160):x}', - df.read() - ) - - # Dockerfile is inside the context - return path relative to context root - if dockerfile == abs_dockerfile: - # Only calculate relpath if necessary to avoid errors - # on Windows client -> Linux Docker - # see https://github.com/docker/compose/issues/5969 - dockerfile = os.path.relpath(abs_dockerfile, path) - return (dockerfile, None) diff --git a/server/venv/Lib/site-packages/docker/api/client.py b/server/venv/Lib/site-packages/docker/api/client.py deleted file mode 100644 index 65b9d9d..0000000 --- a/server/venv/Lib/site-packages/docker/api/client.py +++ /dev/null @@ -1,516 +0,0 @@ -import json -import struct -import urllib -from functools import partial - -import requests -import requests.exceptions -import websocket - -from .. 
import auth -from ..constants import (DEFAULT_NUM_POOLS, DEFAULT_NUM_POOLS_SSH, - DEFAULT_MAX_POOL_SIZE, DEFAULT_TIMEOUT_SECONDS, - DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM, - MINIMUM_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES) -from ..errors import (DockerException, InvalidVersion, TLSParameterError, - create_api_error_from_http_exception) -from ..tls import TLSConfig -from ..transport import SSLHTTPAdapter, UnixHTTPAdapter -from ..utils import check_resource, config, update_headers, utils -from ..utils.json_stream import json_stream -from ..utils.proxy import ProxyConfig -from ..utils.socket import consume_socket_output, demux_adaptor, frames_iter -from .build import BuildApiMixin -from .config import ConfigApiMixin -from .container import ContainerApiMixin -from .daemon import DaemonApiMixin -from .exec_api import ExecApiMixin -from .image import ImageApiMixin -from .network import NetworkApiMixin -from .plugin import PluginApiMixin -from .secret import SecretApiMixin -from .service import ServiceApiMixin -from .swarm import SwarmApiMixin -from .volume import VolumeApiMixin - -try: - from ..transport import NpipeHTTPAdapter -except ImportError: - pass - -try: - from ..transport import SSHHTTPAdapter -except ImportError: - pass - - -class APIClient( - requests.Session, - BuildApiMixin, - ConfigApiMixin, - ContainerApiMixin, - DaemonApiMixin, - ExecApiMixin, - ImageApiMixin, - NetworkApiMixin, - PluginApiMixin, - SecretApiMixin, - ServiceApiMixin, - SwarmApiMixin, - VolumeApiMixin): - """ - A low-level client for the Docker Engine API. - - Example: - - >>> import docker - >>> client = docker.APIClient(base_url='unix://var/run/docker.sock') - >>> client.version() - {u'ApiVersion': u'1.33', - u'Arch': u'amd64', - u'BuildTime': u'2017-11-19T18:46:37.000000000+00:00', - u'GitCommit': u'f4ffd2511c', - u'GoVersion': u'go1.9.2', - u'KernelVersion': u'4.14.3-1-ARCH', - u'MinAPIVersion': u'1.12', - u'Os': u'linux', - u'Version': u'17.10.0-ce'} - - Args: - base_url (str): URL to the Docker server. For example, - ``unix:///var/run/docker.sock`` or ``tcp://127.0.0.1:1234``. - version (str): The version of the API to use. Set to ``auto`` to - automatically detect the server's version. Default: ``1.35`` - timeout (int): Default timeout for API calls, in seconds. - tls (bool or :py:class:`~docker.tls.TLSConfig`): Enable TLS. Pass - ``True`` to enable it with default options, or pass a - :py:class:`~docker.tls.TLSConfig` object to use custom - configuration. - user_agent (str): Set a custom user agent for requests to the server. - credstore_env (dict): Override environment variables when calling the - credential store process. - use_ssh_client (bool): If set to `True`, an ssh connection is made - via shelling out to the ssh client. Ensure the ssh client is - installed and configured on the host. - max_pool_size (int): The maximum number of connections - to save in the pool. - """ - - __attrs__ = requests.Session.__attrs__ + ['_auth_configs', - '_general_configs', - '_version', - 'base_url', - 'timeout'] - - def __init__(self, base_url=None, version=None, - timeout=DEFAULT_TIMEOUT_SECONDS, tls=False, - user_agent=DEFAULT_USER_AGENT, num_pools=None, - credstore_env=None, use_ssh_client=False, - max_pool_size=DEFAULT_MAX_POOL_SIZE): - super().__init__() - - if tls and not base_url: - raise TLSParameterError( - 'If using TLS, the base_url argument must be provided.' 
- ) - - self.base_url = base_url - self.timeout = timeout - self.headers['User-Agent'] = user_agent - - self._general_configs = config.load_general_config() - - proxy_config = self._general_configs.get('proxies', {}) - try: - proxies = proxy_config[base_url] - except KeyError: - proxies = proxy_config.get('default', {}) - - self._proxy_configs = ProxyConfig.from_dict(proxies) - - self._auth_configs = auth.load_config( - config_dict=self._general_configs, credstore_env=credstore_env, - ) - self.credstore_env = credstore_env - - base_url = utils.parse_host( - base_url, IS_WINDOWS_PLATFORM, tls=bool(tls) - ) - # SSH has a different default for num_pools to all other adapters - num_pools = num_pools or DEFAULT_NUM_POOLS_SSH if \ - base_url.startswith('ssh://') else DEFAULT_NUM_POOLS - - if base_url.startswith('http+unix://'): - self._custom_adapter = UnixHTTPAdapter( - base_url, timeout, pool_connections=num_pools, - max_pool_size=max_pool_size - ) - self.mount('http+docker://', self._custom_adapter) - self._unmount('http://', 'https://') - # host part of URL should be unused, but is resolved by requests - # module in proxy_bypass_macosx_sysconf() - self.base_url = 'http+docker://localhost' - elif base_url.startswith('npipe://'): - if not IS_WINDOWS_PLATFORM: - raise DockerException( - 'The npipe:// protocol is only supported on Windows' - ) - try: - self._custom_adapter = NpipeHTTPAdapter( - base_url, timeout, pool_connections=num_pools, - max_pool_size=max_pool_size - ) - except NameError: - raise DockerException( - 'Install pypiwin32 package to enable npipe:// support' - ) - self.mount('http+docker://', self._custom_adapter) - self.base_url = 'http+docker://localnpipe' - elif base_url.startswith('ssh://'): - try: - self._custom_adapter = SSHHTTPAdapter( - base_url, timeout, pool_connections=num_pools, - max_pool_size=max_pool_size, shell_out=use_ssh_client - ) - except NameError: - raise DockerException( - 'Install paramiko package to enable ssh:// support' - ) - self.mount('http+docker://ssh', self._custom_adapter) - self._unmount('http://', 'https://') - self.base_url = 'http+docker://ssh' - else: - # Use SSLAdapter for the ability to specify SSL version - if isinstance(tls, TLSConfig): - tls.configure_client(self) - elif tls: - self._custom_adapter = SSLHTTPAdapter( - pool_connections=num_pools) - self.mount('https://', self._custom_adapter) - self.base_url = base_url - - # version detection needs to be after unix adapter mounting - if version is None or (isinstance( - version, - str - ) and version.lower() == 'auto'): - self._version = self._retrieve_server_version() - else: - self._version = version - if not isinstance(self._version, str): - raise DockerException( - 'Version parameter must be a string or None. Found {}'.format( - type(version).__name__ - ) - ) - if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION): - raise InvalidVersion( - 'API versions below {} are no longer supported by this ' - 'library.'.format(MINIMUM_DOCKER_API_VERSION) - ) - - def _retrieve_server_version(self): - try: - return self.version(api_version=False)["ApiVersion"] - except KeyError: - raise DockerException( - 'Invalid response from docker daemon: key "ApiVersion"' - ' is missing.' 
- ) - except Exception as e: - raise DockerException( - f'Error while fetching server API version: {e}' - ) - - def _set_request_timeout(self, kwargs): - """Prepare the kwargs for an HTTP request by inserting the timeout - parameter, if not already present.""" - kwargs.setdefault('timeout', self.timeout) - return kwargs - - @update_headers - def _post(self, url, **kwargs): - return self.post(url, **self._set_request_timeout(kwargs)) - - @update_headers - def _get(self, url, **kwargs): - return self.get(url, **self._set_request_timeout(kwargs)) - - @update_headers - def _put(self, url, **kwargs): - return self.put(url, **self._set_request_timeout(kwargs)) - - @update_headers - def _delete(self, url, **kwargs): - return self.delete(url, **self._set_request_timeout(kwargs)) - - def _url(self, pathfmt, *args, **kwargs): - for arg in args: - if not isinstance(arg, str): - raise ValueError( - 'Expected a string but found {} ({}) ' - 'instead'.format(arg, type(arg)) - ) - - quote_f = partial(urllib.parse.quote, safe="/:") - args = map(quote_f, args) - - if kwargs.get('versioned_api', True): - return '{}/v{}{}'.format( - self.base_url, self._version, pathfmt.format(*args) - ) - else: - return f'{self.base_url}{pathfmt.format(*args)}' - - def _raise_for_status(self, response): - """Raises stored :class:`APIError`, if one occurred.""" - try: - response.raise_for_status() - except requests.exceptions.HTTPError as e: - raise create_api_error_from_http_exception(e) from e - - def _result(self, response, json=False, binary=False): - assert not (json and binary) - self._raise_for_status(response) - - if json: - return response.json() - if binary: - return response.content - return response.text - - def _post_json(self, url, data, **kwargs): - # Go <1.1 can't unserialize null to a string - # so we do this disgusting thing here. - data2 = {} - if data is not None and isinstance(data, dict): - for k, v in iter(data.items()): - if v is not None: - data2[k] = v - elif data is not None: - data2 = data - - if 'headers' not in kwargs: - kwargs['headers'] = {} - kwargs['headers']['Content-Type'] = 'application/json' - return self._post(url, data=json.dumps(data2), **kwargs) - - def _attach_params(self, override=None): - return override or { - 'stdout': 1, - 'stderr': 1, - 'stream': 1 - } - - @check_resource('container') - def _attach_websocket(self, container, params=None): - url = self._url("/containers/{0}/attach/ws", container) - req = requests.Request("POST", url, params=self._attach_params(params)) - full_url = req.prepare().url - full_url = full_url.replace("http://", "ws://", 1) - full_url = full_url.replace("https://", "wss://", 1) - return self._create_websocket_connection(full_url) - - def _create_websocket_connection(self, url): - return websocket.create_connection(url) - - def _get_raw_response_socket(self, response): - self._raise_for_status(response) - if self.base_url == "http+docker://localnpipe": - sock = response.raw._fp.fp.raw.sock - elif self.base_url.startswith('http+docker://ssh'): - sock = response.raw._fp.fp.channel - else: - sock = response.raw._fp.fp.raw - if self.base_url.startswith("https://"): - sock = sock._sock - try: - # Keep a reference to the response to stop it being garbage - # collected. If the response is garbage collected, it will - # close TLS sockets. 
- sock._response = response - except AttributeError: - # UNIX sockets can't have attributes set on them, but that's - # fine because we won't be doing TLS over them - pass - - return sock - - def _stream_helper(self, response, decode=False): - """Generator for data coming from a chunked-encoded HTTP response.""" - - if response.raw._fp.chunked: - if decode: - yield from json_stream(self._stream_helper(response, False)) - else: - reader = response.raw - while not reader.closed: - # this read call will block until we get a chunk - data = reader.read(1) - if not data: - break - if reader._fp.chunk_left: - data += reader.read(reader._fp.chunk_left) - yield data - else: - # Response isn't chunked, meaning we probably - # encountered an error immediately - yield self._result(response, json=decode) - - def _multiplexed_buffer_helper(self, response): - """A generator of multiplexed data blocks read from a buffered - response.""" - buf = self._result(response, binary=True) - buf_length = len(buf) - walker = 0 - while True: - if buf_length - walker < STREAM_HEADER_SIZE_BYTES: - break - header = buf[walker:walker + STREAM_HEADER_SIZE_BYTES] - _, length = struct.unpack_from('>BxxxL', header) - start = walker + STREAM_HEADER_SIZE_BYTES - end = start + length - walker = end - yield buf[start:end] - - def _multiplexed_response_stream_helper(self, response): - """A generator of multiplexed data blocks coming from a response - stream.""" - - # Disable timeout on the underlying socket to prevent - # Read timed out(s) for long running processes - socket = self._get_raw_response_socket(response) - self._disable_socket_timeout(socket) - - while True: - header = response.raw.read(STREAM_HEADER_SIZE_BYTES) - if not header: - break - _, length = struct.unpack('>BxxxL', header) - if not length: - continue - data = response.raw.read(length) - if not data: - break - yield data - - def _stream_raw_result(self, response, chunk_size=1, decode=True): - ''' Stream result for TTY-enabled container and raw binary data''' - self._raise_for_status(response) - - # Disable timeout on the underlying socket to prevent - # Read timed out(s) for long running processes - socket = self._get_raw_response_socket(response) - self._disable_socket_timeout(socket) - - yield from response.iter_content(chunk_size, decode) - - def _read_from_socket(self, response, stream, tty=True, demux=False): - """Consume all data from the socket, close the response and return the - data. If stream=True, then a generator is returned instead and the - caller is responsible for closing the response. - """ - socket = self._get_raw_response_socket(response) - - gen = frames_iter(socket, tty) - - if demux: - # The generator will output tuples (stdout, stderr) - gen = (demux_adaptor(*frame) for frame in gen) - else: - # The generator will output strings - gen = (data for (_, data) in gen) - - if stream: - return gen - else: - try: - # Wait for all frames, concatenate them, and return the result - return consume_socket_output(gen, demux=demux) - finally: - response.close() - - def _disable_socket_timeout(self, socket): - """ Depending on the combination of python version and whether we're - connecting over http or https, we might need to access _sock, which - may or may not exist; or we may need to just settimeout on socket - itself, which also may or may not have settimeout on it. To avoid - missing the correct one, we try both. 
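The ``>BxxxL`` format used by the multiplexed helpers above is the attach protocol's 8-byte frame header: one byte for the stream type (0 = stdin, 1 = stdout, 2 = stderr), three padding bytes, then a big-endian 32-bit payload length. A self-contained sketch of that parsing step (the sample frame bytes are illustrative):

.. code-block:: python

    import struct

    STREAM_HEADER_SIZE_BYTES = 8  # 1 type byte + 3 pad bytes + 4 length bytes

    # An illustrative stdout frame carrying b"hello\n":
    frame = b"\x01\x00\x00\x00" + struct.pack(">L", 6) + b"hello\n"

    stream_type, length = struct.unpack_from(">BxxxL", frame)
    payload = frame[STREAM_HEADER_SIZE_BYTES:STREAM_HEADER_SIZE_BYTES + length]
    print(stream_type, length, payload)  # -> 1 6 b'hello\n'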
- - We also do not want to set the timeout if it is already disabled, as - you run the risk of changing a socket that was non-blocking to - blocking, for example when using gevent. - """ - sockets = [socket, getattr(socket, '_sock', None)] - - for s in sockets: - if not hasattr(s, 'settimeout'): - continue - - timeout = -1 - - if hasattr(s, 'gettimeout'): - timeout = s.gettimeout() - - # Don't change the timeout if it is already disabled. - if timeout is None or timeout == 0.0: - continue - - s.settimeout(None) - - @check_resource('container') - def _check_is_tty(self, container): - cont = self.inspect_container(container) - return cont['Config']['Tty'] - - def _get_result(self, container, stream, res): - return self._get_result_tty(stream, res, self._check_is_tty(container)) - - def _get_result_tty(self, stream, res, is_tty): - # We should also use raw streaming (without keep-alives) - # if we're dealing with a tty-enabled container. - if is_tty: - return self._stream_raw_result(res) if stream else \ - self._result(res, binary=True) - - self._raise_for_status(res) - sep = b'' - if stream: - return self._multiplexed_response_stream_helper(res) - else: - return sep.join( - [x for x in self._multiplexed_buffer_helper(res)] - ) - - def _unmount(self, *args): - for proto in args: - self.adapters.pop(proto) - - def get_adapter(self, url): - try: - return super().get_adapter(url) - except requests.exceptions.InvalidSchema as e: - if self._custom_adapter: - return self._custom_adapter - else: - raise e - - @property - def api_version(self): - return self._version - - def reload_config(self, dockercfg_path=None): - """ - Force a reload of the auth configuration - - Args: - dockercfg_path (str): Use a custom path for the Docker config file - (default ``$HOME/.docker/config.json`` if present, - otherwise ``$HOME/.dockercfg``) - - Returns: - None - """ - self._auth_configs = auth.load_config( - dockercfg_path, credstore_env=self.credstore_env - ) diff --git a/server/venv/Lib/site-packages/docker/api/config.py b/server/venv/Lib/site-packages/docker/api/config.py deleted file mode 100644 index 88c367e..0000000 --- a/server/venv/Lib/site-packages/docker/api/config.py +++ /dev/null @@ -1,92 +0,0 @@ -import base64 - -from .. 
import utils - - -class ConfigApiMixin: - @utils.minimum_version('1.30') - def create_config(self, name, data, labels=None, templating=None): - """ - Create a config - - Args: - name (string): Name of the config - data (bytes): Config data to be stored - labels (dict): A mapping of labels to assign to the config - templating (dict): dictionary containing the name of the - templating driver to be used expressed as - { name: } - - Returns (dict): ID of the newly created config - """ - if not isinstance(data, bytes): - data = data.encode('utf-8') - - data = base64.b64encode(data) - data = data.decode('ascii') - body = { - 'Data': data, - 'Name': name, - 'Labels': labels, - 'Templating': templating - } - - url = self._url('/configs/create') - return self._result( - self._post_json(url, data=body), True - ) - - @utils.minimum_version('1.30') - @utils.check_resource('id') - def inspect_config(self, id): - """ - Retrieve config metadata - - Args: - id (string): Full ID of the config to inspect - - Returns (dict): A dictionary of metadata - - Raises: - :py:class:`docker.errors.NotFound` - if no config with that ID exists - """ - url = self._url('/configs/{0}', id) - return self._result(self._get(url), True) - - @utils.minimum_version('1.30') - @utils.check_resource('id') - def remove_config(self, id): - """ - Remove a config - - Args: - id (string): Full ID of the config to remove - - Returns (boolean): True if successful - - Raises: - :py:class:`docker.errors.NotFound` - if no config with that ID exists - """ - url = self._url('/configs/{0}', id) - res = self._delete(url) - self._raise_for_status(res) - return True - - @utils.minimum_version('1.30') - def configs(self, filters=None): - """ - List configs - - Args: - filters (dict): A map of filters to process on the configs - list. Available filters: ``names`` - - Returns (list): A list of configs - """ - url = self._url('/configs') - params = {} - if filters: - params['filters'] = utils.convert_filters(filters) - return self._result(self._get(url, params=params), True) diff --git a/server/venv/Lib/site-packages/docker/api/container.py b/server/venv/Lib/site-packages/docker/api/container.py deleted file mode 100644 index 40607e7..0000000 --- a/server/venv/Lib/site-packages/docker/api/container.py +++ /dev/null @@ -1,1339 +0,0 @@ -from datetime import datetime - -from .. import errors -from .. import utils -from ..constants import DEFAULT_DATA_CHUNK_SIZE -from ..types import CancellableStream -from ..types import ContainerConfig -from ..types import EndpointConfig -from ..types import HostConfig -from ..types import NetworkingConfig - - -class ContainerApiMixin: - @utils.check_resource('container') - def attach(self, container, stdout=True, stderr=True, - stream=False, logs=False, demux=False): - """ - Attach to a container. - - The ``.logs()`` function is a wrapper around this method, which you can - use instead if you want to fetch/stream container output without first - retrieving the entire backlog. - - Args: - container (str): The container to attach to. - stdout (bool): Include stdout. - stderr (bool): Include stderr. - stream (bool): Return container output progressively as an iterator - of strings, rather than a single string. - logs (bool): Include the container's previous output. - demux (bool): Keep stdout and stderr separate. - - Returns: - By default, the container's output as a single string (two if - ``demux=True``: one for stdout and one for stderr). - - If ``stream=True``, an iterator of output strings. 
If - ``demux=True``, two iterators are returned: one for stdout and one - for stderr. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - params = { - 'logs': logs and 1 or 0, - 'stdout': stdout and 1 or 0, - 'stderr': stderr and 1 or 0, - 'stream': stream and 1 or 0 - } - - headers = { - 'Connection': 'Upgrade', - 'Upgrade': 'tcp' - } - - u = self._url("/containers/{0}/attach", container) - response = self._post(u, headers=headers, params=params, stream=True) - - output = self._read_from_socket( - response, stream, self._check_is_tty(container), demux=demux) - - if stream: - return CancellableStream(output, response) - else: - return output - - @utils.check_resource('container') - def attach_socket(self, container, params=None, ws=False): - """ - Like ``attach``, but returns the underlying socket-like object for the - HTTP request. - - Args: - container (str): The container to attach to. - params (dict): Dictionary of request parameters (e.g. ``stdout``, - ``stderr``, ``stream``). - For ``detachKeys``, ~/.docker/config.json is used by default. - ws (bool): Use websockets instead of raw HTTP. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - if params is None: - params = { - 'stdout': 1, - 'stderr': 1, - 'stream': 1 - } - - if 'detachKeys' not in params \ - and 'detachKeys' in self._general_configs: - - params['detachKeys'] = self._general_configs['detachKeys'] - - if ws: - return self._attach_websocket(container, params) - - headers = { - 'Connection': 'Upgrade', - 'Upgrade': 'tcp' - } - - u = self._url("/containers/{0}/attach", container) - return self._get_raw_response_socket( - self.post( - u, None, params=self._attach_params(params), stream=True, - headers=headers - ) - ) - - @utils.check_resource('container') - def commit(self, container, repository=None, tag=None, message=None, - author=None, changes=None, conf=None): - """ - Commit a container to an image. Similar to the ``docker commit`` - command. - - Args: - container (str): The image hash of the container - repository (str): The repository to push the image to - tag (str): The tag to push - message (str): A commit message - author (str): The name of the author - changes (str): Dockerfile instructions to apply while committing - conf (dict): The configuration for the container. See the - `Engine API documentation - `_ - for full details. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - params = { - 'container': container, - 'repo': repository, - 'tag': tag, - 'comment': message, - 'author': author, - 'changes': changes - } - u = self._url("/commit") - return self._result( - self._post_json(u, data=conf, params=params), json=True - ) - - def containers(self, quiet=False, all=False, trunc=False, latest=False, - since=None, before=None, limit=-1, size=False, - filters=None): - """ - List containers. Similar to the ``docker ps`` command. - - Args: - quiet (bool): Only display numeric Ids - all (bool): Show all containers. Only running containers are shown - by default - trunc (bool): Truncate output - latest (bool): Show only the latest created container, include - non-running ones. 
- since (str): Show only containers created since Id or Name, include - non-running ones - before (str): Show only container created before Id or Name, - include non-running ones - limit (int): Show `limit` last created containers, include - non-running ones - size (bool): Display sizes - filters (dict): Filters to be processed on the image list. - Available filters: - - - `exited` (int): Only containers with specified exit code - - `status` (str): One of ``restarting``, ``running``, - ``paused``, ``exited`` - - `label` (str|list): format either ``"key"``, ``"key=value"`` - or a list of such. - - `id` (str): The id of the container. - - `name` (str): The name of the container. - - `ancestor` (str): Filter by container ancestor. Format of - ``[:tag]``, ````, or - ````. - - `before` (str): Only containers created before a particular - container. Give the container name or id. - - `since` (str): Only containers created after a particular - container. Give container name or id. - - A comprehensive list can be found in the documentation for - `docker ps - `_. - - Returns: - A list of dicts, one per container - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - params = { - 'limit': 1 if latest else limit, - 'all': 1 if all else 0, - 'size': 1 if size else 0, - 'trunc_cmd': 1 if trunc else 0, - 'since': since, - 'before': before - } - if filters: - params['filters'] = utils.convert_filters(filters) - u = self._url("/containers/json") - res = self._result(self._get(u, params=params), True) - - if quiet: - return [{'Id': x['Id']} for x in res] - if trunc: - for x in res: - x['Id'] = x['Id'][:12] - return res - - def create_container(self, image, command=None, hostname=None, user=None, - detach=False, stdin_open=False, tty=False, ports=None, - environment=None, volumes=None, - network_disabled=False, name=None, entrypoint=None, - working_dir=None, domainname=None, host_config=None, - mac_address=None, labels=None, stop_signal=None, - networking_config=None, healthcheck=None, - stop_timeout=None, runtime=None, - use_config_proxy=True, platform=None): - """ - Creates a container. Parameters are similar to those for the ``docker - run`` command except it doesn't support the attach options (``-a``). - - The arguments that are passed directly to this function are - host-independent configuration options. Host-specific configuration - is passed with the `host_config` argument. You'll normally want to - use this method in combination with the :py:meth:`create_host_config` - method to generate ``host_config``. - - **Port bindings** - - Port binding is done in two parts: first, provide a list of ports to - open inside the container with the ``ports`` parameter, then declare - bindings with the ``host_config`` parameter. For example: - - .. code-block:: python - - container_id = client.api.create_container( - 'busybox', 'ls', ports=[1111, 2222], - host_config=client.api.create_host_config(port_bindings={ - 1111: 4567, - 2222: None - }) - ) - - - You can limit the host address on which the port will be exposed like - such: - - .. code-block:: python - - client.api.create_host_config( - port_bindings={1111: ('127.0.0.1', 4567)} - ) - - Or without host port assignment: - - .. code-block:: python - - client.api.create_host_config(port_bindings={1111: ('127.0.0.1',)}) - - If you wish to use UDP instead of TCP (default), you need to declare - ports as such in both the config and host config: - - .. 
code-block:: python - - container_id = client.api.create_container( - 'busybox', 'ls', ports=[(1111, 'udp'), 2222], - host_config=client.api.create_host_config(port_bindings={ - '1111/udp': 4567, 2222: None - }) - ) - - To bind multiple host ports to a single container port, use the - following syntax: - - .. code-block:: python - - client.api.create_host_config(port_bindings={ - 1111: [1234, 4567] - }) - - You can also bind multiple IPs to a single container port: - - .. code-block:: python - - client.api.create_host_config(port_bindings={ - 1111: [ - ('192.168.0.100', 1234), - ('192.168.0.101', 1234) - ] - }) - - **Using volumes** - - Volume declaration is done in two parts. Provide a list of - paths to use as mountpoints inside the container with the - ``volumes`` parameter, and declare mappings from paths on the host - in the ``host_config`` section. - - .. code-block:: python - - container_id = client.api.create_container( - 'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2'], - host_config=client.api.create_host_config(binds={ - '/home/user1/': { - 'bind': '/mnt/vol2', - 'mode': 'rw', - }, - '/var/www': { - 'bind': '/mnt/vol1', - 'mode': 'ro', - } - }) - ) - - You can alternatively specify binds as a list. This code is equivalent - to the example above: - - .. code-block:: python - - container_id = client.api.create_container( - 'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2'], - host_config=client.api.create_host_config(binds=[ - '/home/user1/:/mnt/vol2', - '/var/www:/mnt/vol1:ro', - ]) - ) - - **Networking** - - You can specify networks to connect the container to by using the - ``networking_config`` parameter. At the time of creation, you can - only connect a container to a single networking, but you - can create more connections by using - :py:meth:`~connect_container_to_network`. - - For example: - - .. code-block:: python - - networking_config = client.api.create_networking_config({ - 'network1': client.api.create_endpoint_config( - ipv4_address='172.28.0.124', - aliases=['foo', 'bar'], - links=['container2'] - ) - }) - - ctnr = client.api.create_container( - img, command, networking_config=networking_config - ) - - Args: - image (str): The image to run - command (str or list): The command to be run in the container - hostname (str): Optional hostname for the container - user (str or int): Username or UID - detach (bool): Detached mode: run container in the background and - return container ID - stdin_open (bool): Keep STDIN open even if not attached - tty (bool): Allocate a pseudo-TTY - ports (list of ints): A list of port numbers - environment (dict or list): A dictionary or a list of strings in - the following format ``["PASSWORD=xxx"]`` or - ``{"PASSWORD": "xxx"}``. - volumes (str or list): List of paths inside the container to use - as volumes. - network_disabled (bool): Disable networking - name (str): A name for the container - entrypoint (str or list): An entrypoint - working_dir (str): Path to the working directory - domainname (str): The domain name to use for the container - host_config (dict): A dictionary created with - :py:meth:`create_host_config`. - mac_address (str): The Mac Address to assign the container - labels (dict or list): A dictionary of name-value labels (e.g. - ``{"label1": "value1", "label2": "value2"}``) or a list of - names of labels to set with empty values (e.g. - ``["label1", "label2"]``) - stop_signal (str): The stop signal to use to stop the container - (e.g. ``SIGINT``). - stop_timeout (int): Timeout to stop the container, in seconds. 
- Default: 10 - networking_config (dict): A networking configuration generated - by :py:meth:`create_networking_config`. - runtime (str): Runtime to use with this container. - healthcheck (dict): Specify a test to perform to check that the - container is healthy. - use_config_proxy (bool): If ``True``, and if the docker client - configuration file (``~/.docker/config.json`` by default) - contains a proxy configuration, the corresponding environment - variables will be set in the container being created. - platform (str): Platform in the format ``os[/arch[/variant]]``. - - Returns: - A dictionary with an image 'Id' key and a 'Warnings' key. - - Raises: - :py:class:`docker.errors.ImageNotFound` - If the specified image does not exist. - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - if isinstance(volumes, str): - volumes = [volumes, ] - - if isinstance(environment, dict): - environment = utils.utils.format_environment(environment) - - if use_config_proxy: - environment = self._proxy_configs.inject_proxy_environment( - environment - ) or None - - config = self.create_container_config( - image, command, hostname, user, detach, stdin_open, tty, - ports, environment, volumes, - network_disabled, entrypoint, working_dir, domainname, - host_config, mac_address, labels, - stop_signal, networking_config, healthcheck, - stop_timeout, runtime - ) - return self.create_container_from_config(config, name, platform) - - def create_container_config(self, *args, **kwargs): - return ContainerConfig(self._version, *args, **kwargs) - - def create_container_from_config(self, config, name=None, platform=None): - u = self._url("/containers/create") - params = { - 'name': name - } - if platform: - if utils.version_lt(self._version, '1.41'): - raise errors.InvalidVersion( - 'platform is not supported for API version < 1.41' - ) - params['platform'] = platform - res = self._post_json(u, data=config, params=params) - return self._result(res, True) - - def create_host_config(self, *args, **kwargs): - """ - Create a dictionary for the ``host_config`` argument to - :py:meth:`create_container`. - - Args: - auto_remove (bool): enable auto-removal of the container on daemon - side when the container's process exits. - binds (dict): Volumes to bind. See :py:meth:`create_container` - for more information. - blkio_weight_device: Block IO weight (relative device weight) in - the form of: ``[{"Path": "device_path", "Weight": weight}]``. - blkio_weight: Block IO weight (relative weight), accepts a weight - value between 10 and 1000. - cap_add (list of str): Add kernel capabilities. For example, - ``["SYS_ADMIN", "MKNOD"]``. - cap_drop (list of str): Drop kernel capabilities. - cpu_period (int): The length of a CPU period in microseconds. - cpu_quota (int): Microseconds of CPU time that the container can - get in a CPU period. - cpu_shares (int): CPU shares (relative weight). - cpuset_cpus (str): CPUs in which to allow execution (``0-3``, - ``0,1``). - cpuset_mems (str): Memory nodes (MEMs) in which to allow execution - (``0-3``, ``0,1``). Only effective on NUMA systems. - device_cgroup_rules (:py:class:`list`): A list of cgroup rules to - apply to the container. - device_read_bps: Limit read rate (bytes per second) from a device - in the form of: `[{"Path": "device_path", "Rate": rate}]` - device_read_iops: Limit read rate (IO per second) from a device. - device_write_bps: Limit write rate (bytes per second) from a - device. - device_write_iops: Limit write rate (IO per second) from a device. 
- devices (:py:class:`list`): Expose host devices to the container, - as a list of strings in the form - ``<path_on_host>:<path_in_container>:<cgroup_permissions>``. - - For example, ``/dev/sda:/dev/xvda:rwm`` allows the container - to have read-write access to the host's ``/dev/sda`` via a - node named ``/dev/xvda`` inside the container. - device_requests (:py:class:`list`): Expose host resources such as - GPUs to the container, as a list of - :py:class:`docker.types.DeviceRequest` instances. - dns (:py:class:`list`): Set custom DNS servers. - dns_opt (:py:class:`list`): Additional options to be added to the - container's ``resolv.conf`` file - dns_search (:py:class:`list`): DNS search domains. - extra_hosts (dict): Additional hostnames to resolve inside the - container, as a mapping of hostname to IP address. - group_add (:py:class:`list`): List of additional group names and/or - IDs that the container process will run as. - init (bool): Run an init inside the container that forwards - signals and reaps processes - ipc_mode (str): Set the IPC mode for the container. - isolation (str): Isolation technology to use. Default: ``None``. - links (dict): Mapping of links using the - ``{'container': 'alias'}`` format. The alias is optional. - Containers declared in this dict will be linked to the new - container using the provided alias. Default: ``None``. - log_config (LogConfig): Logging configuration - lxc_conf (dict): LXC config. - mem_limit (float or str): Memory limit. Accepts float values - (which represent the memory limit of the created container in - bytes) or a string with a units identification char - (``100000b``, ``1000k``, ``128m``, ``1g``). If a string is - specified without a units character, bytes are assumed. - mem_reservation (float or str): Memory soft limit. - mem_swappiness (int): Tune a container's memory swappiness - behavior. Accepts number between 0 and 100. - memswap_limit (str or int): Maximum amount of memory + swap a - container is allowed to consume. - mounts (:py:class:`list`): Specification for mounts to be added to - the container. More powerful alternative to ``binds``. Each - item in the list is expected to be a - :py:class:`docker.types.Mount` object. - network_mode (str): One of: - - - ``bridge`` Create a new network stack for the container on - the bridge network. - - ``none`` No networking for this container. - - ``container:<name|id>`` Reuse another container's network - stack. - - ``host`` Use the host network stack. - This mode is incompatible with ``port_bindings``. - - oom_kill_disable (bool): Whether to disable OOM killer. - oom_score_adj (int): An integer value containing the score given - to the container in order to tune OOM killer preferences. - pid_mode (str): If set to ``host``, use the host PID namespace - inside the container. - pids_limit (int): Tune a container's pids limit. Set ``-1`` for - unlimited. - port_bindings (dict): See :py:meth:`create_container` - for more information. - Incompatible with ``host`` in ``network_mode``. - privileged (bool): Give extended privileges to this container. - publish_all_ports (bool): Publish all ports to the host. - read_only (bool): Mount the container's root filesystem as read - only. - restart_policy (dict): Restart the container when it exits. - Configured as a dictionary with keys: - - - ``Name`` One of ``on-failure``, or ``always``. - - ``MaximumRetryCount`` Number of times to restart the - container on failure. - security_opt (:py:class:`list`): A list of string values to - customize labels for MLS systems, such as SELinux.
- shm_size (str or int): Size of /dev/shm (e.g. ``1G``). - storage_opt (dict): Storage driver options per container as a - key-value mapping. - sysctls (dict): Kernel parameters to set in the container. - tmpfs (dict): Temporary filesystems to mount, as a dictionary - mapping a path inside the container to options for that path. - - For example: - - .. code-block:: python - - { - '/mnt/vol2': '', - '/mnt/vol1': 'size=3G,uid=1000' - } - - ulimits (:py:class:`list`): Ulimits to set inside the container, - as a list of :py:class:`docker.types.Ulimit` instances. - userns_mode (str): Sets the user namespace mode for the container - when user namespace remapping option is enabled. Supported - values are: ``host`` - uts_mode (str): Sets the UTS namespace mode for the container. - Supported values are: ``host`` - volumes_from (:py:class:`list`): List of container names or IDs to - get volumes from. - runtime (str): Runtime to use with this container. - - - Returns: - (dict) A dictionary which can be passed to the ``host_config`` - argument to :py:meth:`create_container`. - - Example: - - >>> client.api.create_host_config( - ... privileged=True, - ... cap_drop=['MKNOD'], - ... volumes_from=['nostalgic_newton'], - ... ) - {'CapDrop': ['MKNOD'], 'LxcConf': None, 'Privileged': True, - 'VolumesFrom': ['nostalgic_newton'], 'PublishAllPorts': False} - -""" - if not kwargs: - kwargs = {} - if 'version' in kwargs: - raise TypeError( - "create_host_config() got an unexpected " - "keyword argument 'version'" - ) - kwargs['version'] = self._version - return HostConfig(*args, **kwargs) - - def create_networking_config(self, *args, **kwargs): - """ - Create a networking config dictionary to be used as the - ``networking_config`` parameter in :py:meth:`create_container`. - - Args: - endpoints_config (dict): A dictionary mapping network names to - endpoint configurations generated by - :py:meth:`create_endpoint_config`. - - Returns: - (dict) A networking config. - - Example: - - >>> client.api.create_network('network1') - >>> networking_config = client.api.create_networking_config({ - 'network1': client.api.create_endpoint_config() - }) - >>> container = client.api.create_container( - img, command, networking_config=networking_config - ) - - """ - return NetworkingConfig(*args, **kwargs) - - def create_endpoint_config(self, *args, **kwargs): - """ - Create an endpoint config dictionary to be used with - :py:meth:`create_networking_config`. - - Args: - aliases (:py:class:`list`): A list of aliases for this endpoint. - Names in that list can be used within the network to reach the - container. Defaults to ``None``. - links (dict): Mapping of links for this endpoint using the - ``{'container': 'alias'}`` format. The alias is optional. - Containers declared in this dict will be linked to this - container using the provided alias. Defaults to ``None``. - ipv4_address (str): The IP address of this container on the - network, using the IPv4 protocol. Defaults to ``None``. - ipv6_address (str): The IP address of this container on the - network, using the IPv6 protocol. Defaults to ``None``. - link_local_ips (:py:class:`list`): A list of link-local (IPv4/IPv6) - addresses. - driver_opt (dict): A dictionary of options to provide to the - network driver. Defaults to ``None``. - - Returns: - (dict) An endpoint config. 
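Pulling the host-config pieces above together, a minimal end-to-end sketch: it assumes a reachable local daemon and a ``busybox`` image, and the command, paths, and ``demo-web`` name are illustrative.

.. code-block:: python

    import docker

    client = docker.APIClient(base_url="unix://var/run/docker.sock")

    host_config = client.create_host_config(
        binds=["/var/www:/mnt/vol1:ro"],  # host path -> container path
        port_bindings={80: 8080},         # container port 80 -> host port 8080
        restart_policy={"Name": "on-failure", "MaximumRetryCount": 3},
    )
    container = client.create_container(
        "busybox", "httpd -f -p 80",
        ports=[80], volumes=["/mnt/vol1"],
        host_config=host_config, name="demo-web",
    )
    client.start(container["Id"])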
- - Example: - - >>> endpoint_config = client.api.create_endpoint_config( - aliases=['web', 'app'], - links={'app_db': 'db', 'another': None}, - ipv4_address='132.65.0.123' - ) - - """ - return EndpointConfig(self._version, *args, **kwargs) - - @utils.check_resource('container') - def diff(self, container): - """ - Inspect changes on a container's filesystem. - - Args: - container (str): The container to diff - - Returns: - (list) A list of dictionaries containing the attributes `Path` - and `Kind`. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self._result( - self._get(self._url("/containers/{0}/changes", container)), True - ) - - @utils.check_resource('container') - def export(self, container, chunk_size=DEFAULT_DATA_CHUNK_SIZE): - """ - Export the contents of a filesystem as a tar archive. - - Args: - container (str): The container to export - chunk_size (int): The number of bytes returned by each iteration - of the generator. If ``None``, data will be streamed as it is - received. Default: 2 MB - - Returns: - (generator): The archived filesystem data stream - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - res = self._get( - self._url("/containers/{0}/export", container), stream=True - ) - return self._stream_raw_result(res, chunk_size, False) - - @utils.check_resource('container') - def get_archive(self, container, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE, - encode_stream=False): - """ - Retrieve a file or folder from a container in the form of a tar - archive. - - Args: - container (str): The container where the file is located - path (str): Path to the file or folder to retrieve - chunk_size (int): The number of bytes returned by each iteration - of the generator. If ``None``, data will be streamed as it is - received. Default: 2 MB - encode_stream (bool): Determines if data should be encoded - (gzip-compressed) during transmission. Default: False - - Returns: - (tuple): First element is a raw tar data stream. Second element is - a dict containing ``stat`` information on the specified ``path``. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> c = docker.APIClient() - >>> f = open('./sh_bin.tar', 'wb') - >>> bits, stat = c.api.get_archive(container, '/bin/sh') - >>> print(stat) - {'name': 'sh', 'size': 1075464, 'mode': 493, - 'mtime': '2018-10-01T15:37:48-07:00', 'linkTarget': ''} - >>> for chunk in bits: - ... f.write(chunk) - >>> f.close() - """ - params = { - 'path': path - } - headers = { - "Accept-Encoding": "gzip, deflate" - } if encode_stream else { - "Accept-Encoding": "identity" - } - url = self._url('/containers/{0}/archive', container) - res = self._get(url, params=params, stream=True, headers=headers) - self._raise_for_status(res) - encoded_stat = res.headers.get('x-docker-container-path-stat') - return ( - self._stream_raw_result(res, chunk_size, False), - utils.decode_json_header(encoded_stat) if encoded_stat else None - ) - - @utils.check_resource('container') - def inspect_container(self, container): - """ - Identical to the `docker inspect` command, but only for containers. - - Args: - container (str): The container to inspect - - Returns: - (dict): Similar to the output of `docker inspect`, but as a - single dict - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
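As a usage sketch for the two methods above (``client`` is an ``APIClient`` as before, and ``cid`` stands in for any container ID), ``inspect_container`` gives structured metadata while ``export`` streams the whole filesystem in chunks:

.. code-block:: python

    tty_enabled = client.inspect_container(cid)["Config"]["Tty"]

    with open("rootfs.tar", "wb") as f:
        for chunk in client.export(cid, chunk_size=2 * 1024 * 1024):
            f.write(chunk)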
- """ - return self._result( - self._get(self._url("/containers/{0}/json", container)), True - ) - - @utils.check_resource('container') - def kill(self, container, signal=None): - """ - Kill a container or send a signal to a container. - - Args: - container (str): The container to kill - signal (str or int): The signal to send. Defaults to ``SIGKILL`` - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url("/containers/{0}/kill", container) - params = {} - if signal is not None: - if not isinstance(signal, str): - signal = int(signal) - params['signal'] = signal - res = self._post(url, params=params) - - self._raise_for_status(res) - - @utils.check_resource('container') - def logs(self, container, stdout=True, stderr=True, stream=False, - timestamps=False, tail='all', since=None, follow=None, - until=None): - """ - Get logs from a container. Similar to the ``docker logs`` command. - - The ``stream`` parameter makes the ``logs`` function return a blocking - generator you can iterate over to retrieve log output as it happens. - - Args: - container (str): The container to get logs from - stdout (bool): Get ``STDOUT``. Default ``True`` - stderr (bool): Get ``STDERR``. Default ``True`` - stream (bool): Stream the response. Default ``False`` - timestamps (bool): Show timestamps. Default ``False`` - tail (str or int): Output specified number of lines at the end of - logs. Either an integer of number of lines or the string - ``all``. Default ``all`` - since (datetime, int, or float): Show logs since a given datetime, - integer epoch (in seconds) or float (in fractional seconds) - follow (bool): Follow log output. Default ``False`` - until (datetime, int, or float): Show logs that occurred before - the given datetime, integer epoch (in seconds), or - float (in fractional seconds) - - Returns: - (generator or str) - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - if follow is None: - follow = stream - params = {'stderr': stderr and 1 or 0, - 'stdout': stdout and 1 or 0, - 'timestamps': timestamps and 1 or 0, - 'follow': follow and 1 or 0, - } - if tail != 'all' and (not isinstance(tail, int) or tail < 0): - tail = 'all' - params['tail'] = tail - - if since is not None: - if isinstance(since, datetime): - params['since'] = utils.datetime_to_timestamp(since) - elif (isinstance(since, int) and since > 0): - params['since'] = since - elif (isinstance(since, float) and since > 0.0): - params['since'] = since - else: - raise errors.InvalidArgument( - 'since value should be datetime or positive int/float, ' - 'not {}'.format(type(since)) - ) - - if until is not None: - if utils.version_lt(self._version, '1.35'): - raise errors.InvalidVersion( - 'until is not supported for API version < 1.35' - ) - if isinstance(until, datetime): - params['until'] = utils.datetime_to_timestamp(until) - elif (isinstance(until, int) and until > 0): - params['until'] = until - elif (isinstance(until, float) and until > 0.0): - params['until'] = until - else: - raise errors.InvalidArgument( - 'until value should be datetime or positive int/float, ' - 'not {}'.format(type(until)) - ) - - url = self._url("/containers/{0}/logs", container) - res = self._get(url, params=params, stream=stream) - output = self._get_result(container, stream, res) - - if stream: - return CancellableStream(output, res) - else: - return output - - @utils.check_resource('container') - def pause(self, container): - """ - Pauses all processes within a container. 
- - Args: - container (str): The container to pause - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url('/containers/{0}/pause', container) - res = self._post(url) - self._raise_for_status(res) - - @utils.check_resource('container') - def port(self, container, private_port): - """ - Lookup the public-facing port that is NAT-ed to ``private_port``. - Identical to the ``docker port`` command. - - Args: - container (str): The container to look up - private_port (int): The private port to inspect - - Returns: - (list of dict): The mapping for the host ports - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - .. code-block:: bash - - $ docker run -d -p 80:80 ubuntu:14.04 /bin/sleep 30 - 7174d6347063a83f412fad6124c99cffd25ffe1a0807eb4b7f9cec76ac8cb43b - - .. code-block:: python - - >>> client.api.port('7174d6347063', 80) - [{'HostIp': '0.0.0.0', 'HostPort': '80'}] - """ - res = self._get(self._url("/containers/{0}/json", container)) - self._raise_for_status(res) - json_ = res.json() - private_port = str(private_port) - h_ports = None - - # Port settings is None when the container is running with - # network_mode=host. - port_settings = json_.get('NetworkSettings', {}).get('Ports') - if port_settings is None: - return None - - if '/' in private_port: - return port_settings.get(private_port) - - for protocol in ['tcp', 'udp', 'sctp']: - h_ports = port_settings.get(private_port + '/' + protocol) - if h_ports: - break - - return h_ports - - @utils.check_resource('container') - def put_archive(self, container, path, data): - """ - Insert a file or folder in an existing container using a tar archive as - source. - - Args: - container (str): The container where the file(s) will be extracted - path (str): Path inside the container where the file(s) will be - extracted. Must exist. - data (bytes or stream): tar data to be extracted - - Returns: - (bool): True if the call succeeds. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - params = {'path': path} - url = self._url('/containers/{0}/archive', container) - res = self._put(url, params=params, data=data) - self._raise_for_status(res) - return res.status_code == 200 - - @utils.minimum_version('1.25') - def prune_containers(self, filters=None): - """ - Delete stopped containers - - Args: - filters (dict): Filters to process on the prune list. - - Returns: - (dict): A dict containing a list of deleted container IDs and - the amount of disk space reclaimed in bytes. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - params = {} - if filters: - params['filters'] = utils.convert_filters(filters) - url = self._url('/containers/prune') - return self._result(self._post(url, params=params), True) - - @utils.check_resource('container') - def remove_container(self, container, v=False, link=False, force=False): - """ - Remove a container. Similar to the ``docker rm`` command. - - Args: - container (str): The container to remove - v (bool): Remove the volumes associated with the container - link (bool): Remove the specified link and not the underlying - container - force (bool): Force the removal of a running container (uses - ``SIGKILL``) - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
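``put_archive`` above expects raw tar bytes, so building the archive in memory with the standard library is enough. A sketch (file name and contents are illustrative):

.. code-block:: python

    import io
    import tarfile

    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode="w") as tar:
        payload = b"hello\n"
        info = tarfile.TarInfo(name="hello.txt")
        info.size = len(payload)
        tar.addfile(info, io.BytesIO(payload))

    ok = client.put_archive(cid, "/tmp", buf.getvalue())  # True on success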
- """ - params = {'v': v, 'link': link, 'force': force} - res = self._delete( - self._url("/containers/{0}", container), params=params - ) - self._raise_for_status(res) - - @utils.check_resource('container') - def rename(self, container, name): - """ - Rename a container. Similar to the ``docker rename`` command. - - Args: - container (str): ID of the container to rename - name (str): New name for the container - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url("/containers/{0}/rename", container) - params = {'name': name} - res = self._post(url, params=params) - self._raise_for_status(res) - - @utils.check_resource('container') - def resize(self, container, height, width): - """ - Resize the tty session. - - Args: - container (str or dict): The container to resize - height (int): Height of tty session - width (int): Width of tty session - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - params = {'h': height, 'w': width} - url = self._url("/containers/{0}/resize", container) - res = self._post(url, params=params) - self._raise_for_status(res) - - @utils.check_resource('container') - def restart(self, container, timeout=10): - """ - Restart a container. Similar to the ``docker restart`` command. - - Args: - container (str or dict): The container to restart. If a dict, the - ``Id`` key is used. - timeout (int): Number of seconds to try to stop for before killing - the container. Once killed it will then be restarted. Default - is 10 seconds. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - params = {'t': timeout} - url = self._url("/containers/{0}/restart", container) - conn_timeout = self.timeout - if conn_timeout is not None: - conn_timeout += timeout - res = self._post(url, params=params, timeout=conn_timeout) - self._raise_for_status(res) - - @utils.check_resource('container') - def start(self, container, *args, **kwargs): - """ - Start a container. Similar to the ``docker start`` command, but - doesn't support attach options. - - **Deprecation warning:** Passing configuration options in ``start`` is - no longer supported. Users are expected to provide host config options - in the ``host_config`` parameter of - :py:meth:`~ContainerApiMixin.create_container`. - - - Args: - container (str): The container to start - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - :py:class:`docker.errors.DeprecatedMethod` - If any argument besides ``container`` are provided. - - Example: - - >>> container = client.api.create_container( - ... image='busybox:latest', - ... command='/bin/sleep 30') - >>> client.api.start(container=container.get('Id')) - """ - if args or kwargs: - raise errors.DeprecatedMethod( - 'Providing configuration in the start() method is no longer ' - 'supported. Use the host_config param in create_container ' - 'instead.' - ) - url = self._url("/containers/{0}/start", container) - res = self._post(url) - self._raise_for_status(res) - - @utils.check_resource('container') - def stats(self, container, decode=None, stream=True, one_shot=None): - """ - Stream statistics for a specific container. Similar to the - ``docker stats`` command. - - Args: - container (str): The container to stream statistics from - decode (bool): If set to true, stream will be decoded into dicts - on the fly. Only applicable if ``stream`` is True. - False by default. 
- stream (bool): If set to false, only the current stats will be - returned instead of a stream. True by default. - one_shot (bool): If set to true, Only get a single stat instead of - waiting for 2 cycles. Must be used with stream=false. False by - default. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - """ - url = self._url("/containers/{0}/stats", container) - params = { - 'stream': stream - } - if one_shot is not None: - if utils.version_lt(self._version, '1.41'): - raise errors.InvalidVersion( - 'one_shot is not supported for API version < 1.41' - ) - params['one-shot'] = one_shot - if stream: - if one_shot: - raise errors.InvalidArgument( - 'one_shot is only available in conjunction with ' - 'stream=False' - ) - return self._stream_helper( - self._get(url, stream=True, params=params), decode=decode - ) - else: - if decode: - raise errors.InvalidArgument( - "decode is only available in conjunction with stream=True" - ) - return self._result(self._get(url, params=params), json=True) - - @utils.check_resource('container') - def stop(self, container, timeout=None): - """ - Stops a container. Similar to the ``docker stop`` command. - - Args: - container (str): The container to stop - timeout (int): Timeout in seconds to wait for the container to - stop before sending a ``SIGKILL``. If None, then the - StopTimeout value of the container will be used. - Default: None - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - if timeout is None: - params = {} - timeout = 10 - else: - params = {'t': timeout} - url = self._url("/containers/{0}/stop", container) - conn_timeout = self.timeout - if conn_timeout is not None: - conn_timeout += timeout - res = self._post(url, params=params, timeout=conn_timeout) - self._raise_for_status(res) - - @utils.check_resource('container') - def top(self, container, ps_args=None): - """ - Display the running processes of a container. - - Args: - container (str): The container to inspect - ps_args (str): An optional arguments passed to ps (e.g. ``aux``) - - Returns: - (str): The output of the top - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - u = self._url("/containers/{0}/top", container) - params = {} - if ps_args is not None: - params['ps_args'] = ps_args - return self._result(self._get(u, params=params), True) - - @utils.check_resource('container') - def unpause(self, container): - """ - Unpause all processes within a container. - - Args: - container (str): The container to unpause - """ - url = self._url('/containers/{0}/unpause', container) - res = self._post(url) - self._raise_for_status(res) - - @utils.minimum_version('1.22') - @utils.check_resource('container') - def update_container( - self, container, blkio_weight=None, cpu_period=None, cpu_quota=None, - cpu_shares=None, cpuset_cpus=None, cpuset_mems=None, mem_limit=None, - mem_reservation=None, memswap_limit=None, kernel_memory=None, - restart_policy=None - ): - """ - Update resource configs of one or more containers. 
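The ``stream``/``one_shot``/``decode`` constraints documented for ``stats`` above combine as sketched here (``one_shot`` needs API >= 1.41; the ``read`` key is the sample timestamp in the daemon's stats JSON):

.. code-block:: python

    # Continuous stream, decoded into dicts on the fly:
    for stat in client.stats(cid, stream=True, decode=True):
        print(stat["read"])
        break

    # Or a single snapshot, without waiting for two sampling cycles:
    snapshot = client.stats(cid, stream=False, one_shot=True)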
- - Args: - container (str): The container to inspect - blkio_weight (int): Block IO (relative weight), between 10 and 1000 - cpu_period (int): Limit CPU CFS (Completely Fair Scheduler) period - cpu_quota (int): Limit CPU CFS (Completely Fair Scheduler) quota - cpu_shares (int): CPU shares (relative weight) - cpuset_cpus (str): CPUs in which to allow execution - cpuset_mems (str): MEMs in which to allow execution - mem_limit (float or str): Memory limit - mem_reservation (float or str): Memory soft limit - memswap_limit (int or str): Total memory (memory + swap), -1 to - disable swap - kernel_memory (int or str): Kernel memory limit - restart_policy (dict): Restart policy dictionary - - Returns: - (dict): Dictionary containing a ``Warnings`` key. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url('/containers/{0}/update', container) - data = {} - if blkio_weight: - data['BlkioWeight'] = blkio_weight - if cpu_period: - data['CpuPeriod'] = cpu_period - if cpu_shares: - data['CpuShares'] = cpu_shares - if cpu_quota: - data['CpuQuota'] = cpu_quota - if cpuset_cpus: - data['CpusetCpus'] = cpuset_cpus - if cpuset_mems: - data['CpusetMems'] = cpuset_mems - if mem_limit: - data['Memory'] = utils.parse_bytes(mem_limit) - if mem_reservation: - data['MemoryReservation'] = utils.parse_bytes(mem_reservation) - if memswap_limit: - data['MemorySwap'] = utils.parse_bytes(memswap_limit) - if kernel_memory: - data['KernelMemory'] = utils.parse_bytes(kernel_memory) - if restart_policy: - if utils.version_lt(self._version, '1.23'): - raise errors.InvalidVersion( - 'restart policy update is not supported ' - 'for API version < 1.23' - ) - data['RestartPolicy'] = restart_policy - - res = self._post_json(url, data=data) - return self._result(res, True) - - @utils.check_resource('container') - def wait(self, container, timeout=None, condition=None): - """ - Block until a container stops, then return its exit code. Similar to - the ``docker wait`` command. - - Args: - container (str or dict): The container to wait on. If a dict, the - ``Id`` key is used. - timeout (int): Request timeout - condition (str): Wait until a container state reaches the given - condition, either ``not-running`` (default), ``next-exit``, - or ``removed`` - - Returns: - (dict): The API's response as a Python dictionary, including - the container's exit code under the ``StatusCode`` attribute. - - Raises: - :py:class:`requests.exceptions.ReadTimeout` - If the timeout is exceeded. - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url("/containers/{0}/wait", container) - params = {} - if condition is not None: - if utils.version_lt(self._version, '1.30'): - raise errors.InvalidVersion( - 'wait condition is not supported for API version < 1.30' - ) - params['condition'] = condition - - res = self._post(url, timeout=timeout, params=params) - return self._result(res, True) diff --git a/server/venv/Lib/site-packages/docker/api/daemon.py b/server/venv/Lib/site-packages/docker/api/daemon.py deleted file mode 100644 index a857213..0000000 --- a/server/venv/Lib/site-packages/docker/api/daemon.py +++ /dev/null @@ -1,181 +0,0 @@ -import os -from datetime import datetime - -from .. import auth, types, utils - - -class DaemonApiMixin: - @utils.minimum_version('1.25') - def df(self): - """ - Get data usage information. - - Returns: - (dict): A dictionary representing different resource categories - and their respective data usage. 
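``wait`` above pairs naturally with ``start`` for run-to-completion workloads; a sketch reusing the ``container`` dict from the earlier example (``condition`` needs API >= 1.30):

.. code-block:: python

    client.start(container["Id"])
    result = client.wait(container["Id"], condition="not-running")
    print(result["StatusCode"])  # the process exit code, 0 on success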
- - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url('/system/df') - return self._result(self._get(url), True) - - def events(self, since=None, until=None, filters=None, decode=None): - """ - Get real-time events from the server. Similar to the ``docker events`` - command. - - Args: - since (UTC datetime or int): Get events from this point - until (UTC datetime or int): Get events until this point - filters (dict): Filter the events by event time, container or image - decode (bool): If set to true, stream will be decoded into dicts on - the fly. False by default. - - Returns: - A :py:class:`docker.types.daemon.CancellableStream` generator - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> for event in client.events(decode=True) - ... print(event) - {u'from': u'image/with:tag', - u'id': u'container-id', - u'status': u'start', - u'time': 1423339459} - ... - - or - - >>> events = client.events() - >>> for event in events: - ... print(event) - >>> # and cancel from another thread - >>> events.close() - """ - - if isinstance(since, datetime): - since = utils.datetime_to_timestamp(since) - - if isinstance(until, datetime): - until = utils.datetime_to_timestamp(until) - - if filters: - filters = utils.convert_filters(filters) - - params = { - 'since': since, - 'until': until, - 'filters': filters - } - url = self._url('/events') - - response = self._get(url, params=params, stream=True, timeout=None) - stream = self._stream_helper(response, decode=decode) - - return types.CancellableStream(stream, response) - - def info(self): - """ - Display system-wide information. Identical to the ``docker info`` - command. - - Returns: - (dict): The info as a dict - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self._result(self._get(self._url("/info")), True) - - def login(self, username, password=None, email=None, registry=None, - reauth=False, dockercfg_path=None): - """ - Authenticate with a registry. Similar to the ``docker login`` command. - - Args: - username (str): The registry username - password (str): The plaintext password - email (str): The email for the registry account - registry (str): URL to the registry. E.g. - ``https://index.docker.io/v1/`` - reauth (bool): Whether or not to refresh existing authentication on - the Docker server. - dockercfg_path (str): Use a custom path for the Docker config file - (default ``$HOME/.docker/config.json`` if present, - otherwise ``$HOME/.dockercfg``) - - Returns: - (dict): The response from the login request - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - - # If we don't have any auth data so far, try reloading the config file - # one more time in case anything showed up in there. - # If dockercfg_path is passed check to see if the config file exists, - # if so load that config. - if dockercfg_path and os.path.exists(dockercfg_path): - self._auth_configs = auth.load_config( - dockercfg_path, credstore_env=self.credstore_env - ) - elif not self._auth_configs or self._auth_configs.is_empty: - self._auth_configs = auth.load_config( - credstore_env=self.credstore_env - ) - - authcfg = self._auth_configs.resolve_authconfig(registry) - # If we found an existing auth config for this registry and username - # combination, we can return it immediately unless reauth is requested. 
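Beyond the decoding examples shown for ``events`` above, the ``filters`` mapping narrows the stream server-side. The filter keys below follow the Engine API's event filters and the values are illustrative:

.. code-block:: python

    events = client.events(
        decode=True,
        filters={"event": ["start", "die"], "image": "busybox"},
    )
    for ev in events:
        print(ev.get("status"), ev.get("id"))
        break
    events.close()  # CancellableStream: stop consuming at any point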
- if authcfg and authcfg.get('username', None) == username \ - and not reauth: - return authcfg - - req_data = { - 'username': username, - 'password': password, - 'email': email, - 'serveraddress': registry, - } - - response = self._post_json(self._url('/auth'), data=req_data) - if response.status_code == 200: - self._auth_configs.add_auth(registry or auth.INDEX_NAME, req_data) - return self._result(response, json=True) - - def ping(self): - """ - Checks the server is responsive. An exception will be raised if it - isn't responding. - - Returns: - (bool) The response from the server. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self._result(self._get(self._url('/_ping'))) == 'OK' - - def version(self, api_version=True): - """ - Returns version information from the server. Similar to the ``docker - version`` command. - - Returns: - (dict): The server version information - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url("/version", versioned_api=api_version) - return self._result(self._get(url), json=True) diff --git a/server/venv/Lib/site-packages/docker/api/exec_api.py b/server/venv/Lib/site-packages/docker/api/exec_api.py deleted file mode 100644 index 63df9e6..0000000 --- a/server/venv/Lib/site-packages/docker/api/exec_api.py +++ /dev/null @@ -1,177 +0,0 @@ -from .. import errors -from .. import utils -from ..types import CancellableStream - - -class ExecApiMixin: - @utils.check_resource('container') - def exec_create(self, container, cmd, stdout=True, stderr=True, - stdin=False, tty=False, privileged=False, user='', - environment=None, workdir=None, detach_keys=None): - """ - Sets up an exec instance in a running container. - - Args: - container (str): Target container where exec instance will be - created - cmd (str or list): Command to be executed - stdout (bool): Attach to stdout. Default: ``True`` - stderr (bool): Attach to stderr. Default: ``True`` - stdin (bool): Attach to stdin. Default: ``False`` - tty (bool): Allocate a pseudo-TTY. Default: False - privileged (bool): Run as privileged. - user (str): User to execute command as. Default: root - environment (dict or list): A dictionary or a list of strings in - the following format ``["PASSWORD=xxx"]`` or - ``{"PASSWORD": "xxx"}``. - workdir (str): Path to working directory for this exec session - detach_keys (str): Override the key sequence for detaching - a container. Format is a single character `[a-Z]` - or `ctrl-` where `` is one of: - `a-z`, `@`, `^`, `[`, `,` or `_`. - ~/.docker/config.json is used by default. - - Returns: - (dict): A dictionary with an exec ``Id`` key. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
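        Example:

            A minimal sketch; ``container`` is assumed to be a running
            container:

            >>> exec_ref = client.api.exec_create(
            ...     container, cmd=['sh', '-c', 'echo hello'], tty=False
            ... )
            >>> exec_ref
            {'Id': '8b455e2c8cd9...'}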
- """ - - if environment is not None and utils.version_lt(self._version, '1.25'): - raise errors.InvalidVersion( - 'Setting environment for exec is not supported in API < 1.25' - ) - - if isinstance(cmd, str): - cmd = utils.split_command(cmd) - - if isinstance(environment, dict): - environment = utils.utils.format_environment(environment) - - data = { - 'Container': container, - 'User': user, - 'Privileged': privileged, - 'Tty': tty, - 'AttachStdin': stdin, - 'AttachStdout': stdout, - 'AttachStderr': stderr, - 'Cmd': cmd, - 'Env': environment, - } - - if workdir is not None: - if utils.version_lt(self._version, '1.35'): - raise errors.InvalidVersion( - 'workdir is not supported for API version < 1.35' - ) - data['WorkingDir'] = workdir - - if detach_keys: - data['detachKeys'] = detach_keys - elif 'detachKeys' in self._general_configs: - data['detachKeys'] = self._general_configs['detachKeys'] - - url = self._url('/containers/{0}/exec', container) - res = self._post_json(url, data=data) - return self._result(res, True) - - def exec_inspect(self, exec_id): - """ - Return low-level information about an exec command. - - Args: - exec_id (str): ID of the exec instance - - Returns: - (dict): Dictionary of values returned by the endpoint. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - if isinstance(exec_id, dict): - exec_id = exec_id.get('Id') - res = self._get(self._url("/exec/{0}/json", exec_id)) - return self._result(res, True) - - def exec_resize(self, exec_id, height=None, width=None): - """ - Resize the tty session used by the specified exec command. - - Args: - exec_id (str): ID of the exec instance - height (int): Height of tty session - width (int): Width of tty session - """ - - if isinstance(exec_id, dict): - exec_id = exec_id.get('Id') - - params = {'h': height, 'w': width} - url = self._url("/exec/{0}/resize", exec_id) - res = self._post(url, params=params) - self._raise_for_status(res) - - @utils.check_resource('exec_id') - def exec_start(self, exec_id, detach=False, tty=False, stream=False, - socket=False, demux=False): - """ - Start a previously set up exec instance. - - Args: - exec_id (str): ID of the exec instance - detach (bool): If true, detach from the exec command. - Default: False - tty (bool): Allocate a pseudo-TTY. Default: False - stream (bool): Return response data progressively as an iterator - of strings, rather than a single string. - socket (bool): Return the connection socket to allow custom - read/write operations. Must be closed by the caller when done. - demux (bool): Return stdout and stderr separately - - Returns: - - (generator or str or tuple): If ``stream=True``, a generator - yielding response chunks. If ``socket=True``, a socket object for - the connection. A string containing response data otherwise. If - ``demux=True``, a tuple with two elements of type byte: stdout and - stderr. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
- """ - # we want opened socket if socket == True - - data = { - 'Tty': tty, - 'Detach': detach - } - - headers = {} if detach else { - 'Connection': 'Upgrade', - 'Upgrade': 'tcp' - } - - res = self._post_json( - self._url('/exec/{0}/start', exec_id), - headers=headers, - data=data, - stream=True - ) - if detach: - try: - return self._result(res) - finally: - res.close() - if socket: - return self._get_raw_response_socket(res) - - output = self._read_from_socket(res, stream, tty=tty, demux=demux) - if stream: - return CancellableStream(output, res) - else: - return output diff --git a/server/venv/Lib/site-packages/docker/api/image.py b/server/venv/Lib/site-packages/docker/api/image.py deleted file mode 100644 index 5e1466e..0000000 --- a/server/venv/Lib/site-packages/docker/api/image.py +++ /dev/null @@ -1,601 +0,0 @@ -import logging -import os - -from .. import auth, errors, utils -from ..constants import DEFAULT_DATA_CHUNK_SIZE - -log = logging.getLogger(__name__) - - -class ImageApiMixin: - - @utils.check_resource('image') - def get_image(self, image, chunk_size=DEFAULT_DATA_CHUNK_SIZE): - """ - Get a tarball of an image. Similar to the ``docker save`` command. - - Args: - image (str): Image name to get - chunk_size (int): The number of bytes returned by each iteration - of the generator. If ``None``, data will be streamed as it is - received. Default: 2 MB - - Returns: - (generator): A stream of raw archive data. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> image = client.api.get_image("busybox:latest") - >>> f = open('/tmp/busybox-latest.tar', 'wb') - >>> for chunk in image: - >>> f.write(chunk) - >>> f.close() - """ - res = self._get(self._url("/images/{0}/get", image), stream=True) - return self._stream_raw_result(res, chunk_size, False) - - @utils.check_resource('image') - def history(self, image): - """ - Show the history of an image. - - Args: - image (str): The image to show history for - - Returns: - (str): The history of the image - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - res = self._get(self._url("/images/{0}/history", image)) - return self._result(res, True) - - def images(self, name=None, quiet=False, all=False, filters=None): - """ - List images. Similar to the ``docker images`` command. - - Args: - name (str): Only show images belonging to the repository ``name`` - quiet (bool): Only return numeric IDs as a list. - all (bool): Show intermediate image layers. By default, these are - filtered out. - filters (dict): Filters to be processed on the image list. - Available filters: - - ``dangling`` (bool) - - `label` (str|list): format either ``"key"``, ``"key=value"`` - or a list of such. - - Returns: - (dict or list): A list if ``quiet=True``, otherwise a dict. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
- """ - params = { - 'only_ids': 1 if quiet else 0, - 'all': 1 if all else 0, - } - if name: - if utils.version_lt(self._version, '1.25'): - # only use "filter" on API 1.24 and under, as it is deprecated - params['filter'] = name - else: - if filters: - filters['reference'] = name - else: - filters = {'reference': name} - if filters: - params['filters'] = utils.convert_filters(filters) - res = self._result(self._get(self._url("/images/json"), params=params), - True) - if quiet: - return [x['Id'] for x in res] - return res - - def import_image(self, src=None, repository=None, tag=None, image=None, - changes=None, stream_src=False): - """ - Import an image. Similar to the ``docker import`` command. - - If ``src`` is a string or unicode string, it will first be treated as a - path to a tarball on the local system. If there is an error reading - from that file, ``src`` will be treated as a URL instead to fetch the - image from. You can also pass an open file handle as ``src``, in which - case the data will be read from that file. - - If ``src`` is unset but ``image`` is set, the ``image`` parameter will - be taken as the name of an existing image to import from. - - Args: - src (str or file): Path to tarfile, URL, or file-like object - repository (str): The repository to create - tag (str): The tag to apply - image (str): Use another image like the ``FROM`` Dockerfile - parameter - """ - if not (src or image): - raise errors.DockerException( - 'Must specify src or image to import from' - ) - u = self._url('/images/create') - - params = _import_image_params( - repository, tag, image, - src=(src if isinstance(src, str) else None), - changes=changes - ) - headers = {'Content-Type': 'application/tar'} - - if image or params.get('fromSrc') != '-': # from image or URL - return self._result( - self._post(u, data=None, params=params) - ) - elif isinstance(src, str): # from file path - with open(src, 'rb') as f: - return self._result( - self._post( - u, data=f, params=params, headers=headers, timeout=None - ) - ) - else: # from raw data - if stream_src: - headers['Transfer-Encoding'] = 'chunked' - return self._result( - self._post(u, data=src, params=params, headers=headers) - ) - - def import_image_from_data(self, data, repository=None, tag=None, - changes=None): - """ - Like :py:meth:`~docker.api.image.ImageApiMixin.import_image`, but - allows importing in-memory bytes data. - - Args: - data (bytes collection): Bytes collection containing valid tar data - repository (str): The repository to create - tag (str): The tag to apply - """ - - u = self._url('/images/create') - params = _import_image_params( - repository, tag, src='-', changes=changes - ) - headers = {'Content-Type': 'application/tar'} - return self._result( - self._post( - u, data=data, params=params, headers=headers, timeout=None - ) - ) - - def import_image_from_file(self, filename, repository=None, tag=None, - changes=None): - """ - Like :py:meth:`~docker.api.image.ImageApiMixin.import_image`, but only - supports importing from a tar file on disk. - - Args: - filename (str): Full path to a tar file. - repository (str): The repository to create - tag (str): The tag to apply - - Raises: - IOError: File does not exist. 
- """ - - return self.import_image( - src=filename, repository=repository, tag=tag, changes=changes - ) - - def import_image_from_stream(self, stream, repository=None, tag=None, - changes=None): - return self.import_image( - src=stream, stream_src=True, repository=repository, tag=tag, - changes=changes - ) - - def import_image_from_url(self, url, repository=None, tag=None, - changes=None): - """ - Like :py:meth:`~docker.api.image.ImageApiMixin.import_image`, but only - supports importing from a URL. - - Args: - url (str): A URL pointing to a tar file. - repository (str): The repository to create - tag (str): The tag to apply - """ - return self.import_image( - src=url, repository=repository, tag=tag, changes=changes - ) - - def import_image_from_image(self, image, repository=None, tag=None, - changes=None): - """ - Like :py:meth:`~docker.api.image.ImageApiMixin.import_image`, but only - supports importing from another image, like the ``FROM`` Dockerfile - parameter. - - Args: - image (str): Image name to import from - repository (str): The repository to create - tag (str): The tag to apply - """ - return self.import_image( - image=image, repository=repository, tag=tag, changes=changes - ) - - @utils.check_resource('image') - def inspect_image(self, image): - """ - Get detailed information about an image. Similar to the ``docker - inspect`` command, but only for images. - - Args: - image (str): The image to inspect - - Returns: - (dict): Similar to the output of ``docker inspect``, but as a - single dict - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self._result( - self._get(self._url("/images/{0}/json", image)), True - ) - - @utils.minimum_version('1.30') - @utils.check_resource('image') - def inspect_distribution(self, image, auth_config=None): - """ - Get image digest and platform information by contacting the registry. - - Args: - image (str): The image name to inspect - auth_config (dict): Override the credentials that are found in the - config for this request. ``auth_config`` should contain the - ``username`` and ``password`` keys to be valid. - - Returns: - (dict): A dict containing distribution data - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - registry, _ = auth.resolve_repository_name(image) - - headers = {} - if auth_config is None: - header = auth.get_config_header(self, registry) - if header: - headers['X-Registry-Auth'] = header - else: - log.debug('Sending supplied auth config') - headers['X-Registry-Auth'] = auth.encode_header(auth_config) - - url = self._url("/distribution/{0}/json", image) - - return self._result( - self._get(url, headers=headers), True - ) - - def load_image(self, data, quiet=None): - """ - Load an image that was previously saved using - :py:meth:`~docker.api.image.ImageApiMixin.get_image` (or ``docker - save``). Similar to ``docker load``. - - Args: - data (binary): Image data to be loaded. - quiet (boolean): Suppress progress details in response. - - Returns: - (generator): Progress output as JSON objects. Only available for - API version >= 1.23 - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
- """ - params = {} - - if quiet is not None: - if utils.version_lt(self._version, '1.23'): - raise errors.InvalidVersion( - 'quiet is not supported in API version < 1.23' - ) - params['quiet'] = quiet - - res = self._post( - self._url("/images/load"), data=data, params=params, stream=True - ) - if utils.version_gte(self._version, '1.23'): - return self._stream_helper(res, decode=True) - - self._raise_for_status(res) - - @utils.minimum_version('1.25') - def prune_images(self, filters=None): - """ - Delete unused images - - Args: - filters (dict): Filters to process on the prune list. - Available filters: - - dangling (bool): When set to true (or 1), prune only - unused and untagged images. - - Returns: - (dict): A dict containing a list of deleted image IDs and - the amount of disk space reclaimed in bytes. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url("/images/prune") - params = {} - if filters is not None: - params['filters'] = utils.convert_filters(filters) - return self._result(self._post(url, params=params), True) - - def pull(self, repository, tag=None, stream=False, auth_config=None, - decode=False, platform=None, all_tags=False): - """ - Pulls an image. Similar to the ``docker pull`` command. - - Args: - repository (str): The repository to pull - tag (str): The tag to pull. If ``tag`` is ``None`` or empty, it - is set to ``latest``. - stream (bool): Stream the output as a generator. Make sure to - consume the generator, otherwise pull might get cancelled. - auth_config (dict): Override the credentials that are found in the - config for this request. ``auth_config`` should contain the - ``username`` and ``password`` keys to be valid. - decode (bool): Decode the JSON data from the server into dicts. - Only applies with ``stream=True`` - platform (str): Platform in the format ``os[/arch[/variant]]`` - all_tags (bool): Pull all image tags, the ``tag`` parameter is - ignored. - - Returns: - (generator or str): The output - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> resp = client.api.pull('busybox', stream=True, decode=True) - ... for line in resp: - ... 
print(json.dumps(line, indent=4)) - { - "status": "Pulling image (latest) from busybox", - "progressDetail": {}, - "id": "e72ac664f4f0" - } - { - "status": "Pulling image (latest) from busybox, endpoint: ...", - "progressDetail": {}, - "id": "e72ac664f4f0" - } - - """ - repository, image_tag = utils.parse_repository_tag(repository) - tag = tag or image_tag or 'latest' - - if all_tags: - tag = None - - registry, repo_name = auth.resolve_repository_name(repository) - - params = { - 'tag': tag, - 'fromImage': repository - } - headers = {} - - if auth_config is None: - header = auth.get_config_header(self, registry) - if header: - headers['X-Registry-Auth'] = header - else: - log.debug('Sending supplied auth config') - headers['X-Registry-Auth'] = auth.encode_header(auth_config) - - if platform is not None: - if utils.version_lt(self._version, '1.32'): - raise errors.InvalidVersion( - 'platform was only introduced in API version 1.32' - ) - params['platform'] = platform - - response = self._post( - self._url('/images/create'), params=params, headers=headers, - stream=stream, timeout=None - ) - - self._raise_for_status(response) - - if stream: - return self._stream_helper(response, decode=decode) - - return self._result(response) - - def push(self, repository, tag=None, stream=False, auth_config=None, - decode=False): - """ - Push an image or a repository to the registry. Similar to the ``docker - push`` command. - - Args: - repository (str): The repository to push to - tag (str): An optional tag to push - stream (bool): Stream the output as a blocking generator - auth_config (dict): Override the credentials that are found in the - config for this request. ``auth_config`` should contain the - ``username`` and ``password`` keys to be valid. - decode (bool): Decode the JSON data from the server into dicts. - Only applies with ``stream=True`` - - Returns: - (generator or str): The output from the server. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - >>> resp = client.api.push( - ... 'yourname/app', - ... stream=True, - ... decode=True, - ... ) - ... for line in resp: - ... print(line) - {'status': 'Pushing repository yourname/app (1 tags)'} - {'status': 'Pushing','progressDetail': {}, 'id': '511136ea3c5a'} - {'status': 'Image already pushed, skipping', 'progressDetail':{}, - 'id': '511136ea3c5a'} - ... - - """ - if not tag: - repository, tag = utils.parse_repository_tag(repository) - registry, repo_name = auth.resolve_repository_name(repository) - u = self._url("/images/{0}/push", repository) - params = { - 'tag': tag - } - headers = {} - - if auth_config is None: - header = auth.get_config_header(self, registry) - if header: - headers['X-Registry-Auth'] = header - else: - log.debug('Sending supplied auth config') - headers['X-Registry-Auth'] = auth.encode_header(auth_config) - - response = self._post_json( - u, None, headers=headers, stream=stream, params=params - ) - - self._raise_for_status(response) - - if stream: - return self._stream_helper(response, decode=decode) - - return self._result(response) - - @utils.check_resource('image') - def remove_image(self, image, force=False, noprune=False): - """ - Remove an image. Similar to the ``docker rmi`` command. 
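        For instance, ``client.api.remove_image('busybox:latest', force=True)``
        untags the reference and deletes the underlying image if nothing
        else uses it.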
- - Args: - image (str): The image to remove - force (bool): Force removal of the image - noprune (bool): Do not delete untagged parents - """ - params = {'force': force, 'noprune': noprune} - res = self._delete(self._url("/images/{0}", image), params=params) - return self._result(res, True) - - def search(self, term, limit=None): - """ - Search for images on Docker Hub. Similar to the ``docker search`` - command. - - Args: - term (str): A term to search for. - limit (int): The maximum number of results to return. - - Returns: - (list of dicts): The response of the search. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - params = {'term': term} - if limit is not None: - params['limit'] = limit - - return self._result( - self._get(self._url("/images/search"), params=params), - True - ) - - @utils.check_resource('image') - def tag(self, image, repository, tag=None, force=False): - """ - Tag an image into a repository. Similar to the ``docker tag`` command. - - Args: - image (str): The image to tag - repository (str): The repository to set for the tag - tag (str): The tag name - force (bool): Force - - Returns: - (bool): ``True`` if successful - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> client.api.tag('ubuntu', 'localhost:5000/ubuntu', 'latest', - force=True) - """ - params = { - 'tag': tag, - 'repo': repository, - 'force': 1 if force else 0 - } - url = self._url("/images/{0}/tag", image) - res = self._post(url, params=params) - self._raise_for_status(res) - return res.status_code == 201 - - -def is_file(src): - try: - return ( - isinstance(src, str) and - os.path.isfile(src) - ) - except TypeError: # a data string will make isfile() raise a TypeError - return False - - -def _import_image_params(repo, tag, image=None, src=None, - changes=None): - params = { - 'repo': repo, - 'tag': tag, - } - if image: - params['fromImage'] = image - elif src and not is_file(src): - params['fromSrc'] = src - else: - params['fromSrc'] = '-' - - if changes: - params['changes'] = changes - - return params diff --git a/server/venv/Lib/site-packages/docker/api/network.py b/server/venv/Lib/site-packages/docker/api/network.py deleted file mode 100644 index dd4e376..0000000 --- a/server/venv/Lib/site-packages/docker/api/network.py +++ /dev/null @@ -1,278 +0,0 @@ -from ..errors import InvalidVersion -from ..utils import check_resource, minimum_version -from ..utils import version_lt -from .. import utils - - -class NetworkApiMixin: - def networks(self, names=None, ids=None, filters=None): - """ - List networks. Similar to the ``docker network ls`` command. - - Args: - names (:py:class:`list`): List of names to filter by - ids (:py:class:`list`): List of ids to filter by - filters (dict): Filters to be processed on the network list. - Available filters: - - ``driver=[]`` Matches a network's driver. - - ``label=[]``, ``label=[=]`` or a list of - such. - - ``type=["custom"|"builtin"]`` Filters networks by type. - - Returns: - (dict): List of network objects. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
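        Example:

            Listing only user-defined networks:

            >>> client.api.networks(filters={'type': 'custom'})
            [{'Name': 'appnet', 'Driver': 'bridge', ...}, ...]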
- """ - - if filters is None: - filters = {} - if names: - filters['name'] = names - if ids: - filters['id'] = ids - params = {'filters': utils.convert_filters(filters)} - url = self._url("/networks") - res = self._get(url, params=params) - return self._result(res, json=True) - - def create_network(self, name, driver=None, options=None, ipam=None, - check_duplicate=None, internal=False, labels=None, - enable_ipv6=False, attachable=None, scope=None, - ingress=None): - """ - Create a network. Similar to the ``docker network create``. - - Args: - name (str): Name of the network - driver (str): Name of the driver used to create the network - options (dict): Driver options as a key-value dictionary - ipam (IPAMConfig): Optional custom IP scheme for the network. - check_duplicate (bool): Request daemon to check for networks with - same name. Default: ``None``. - internal (bool): Restrict external access to the network. Default - ``False``. - labels (dict): Map of labels to set on the network. Default - ``None``. - enable_ipv6 (bool): Enable IPv6 on the network. Default ``False``. - attachable (bool): If enabled, and the network is in the global - scope, non-service containers on worker nodes will be able to - connect to the network. - scope (str): Specify the network's scope (``local``, ``global`` or - ``swarm``) - ingress (bool): If set, create an ingress network which provides - the routing-mesh in swarm mode. - - Returns: - (dict): The created network reference object - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - A network using the bridge driver: - - >>> client.api.create_network("network1", driver="bridge") - - You can also create more advanced networks with custom IPAM - configurations. For example, setting the subnet to - ``192.168.52.0/24`` and gateway address to ``192.168.52.254``. - - .. 
code-block:: python - - >>> ipam_pool = docker.types.IPAMPool( - subnet='192.168.52.0/24', - gateway='192.168.52.254' - ) - >>> ipam_config = docker.types.IPAMConfig( - pool_configs=[ipam_pool] - ) - >>> client.api.create_network("network1", driver="bridge", - ipam=ipam_config) - """ - if options is not None and not isinstance(options, dict): - raise TypeError('options must be a dictionary') - - data = { - 'Name': name, - 'Driver': driver, - 'Options': options, - 'IPAM': ipam, - 'CheckDuplicate': check_duplicate, - } - - if labels is not None: - if version_lt(self._version, '1.23'): - raise InvalidVersion( - 'network labels were introduced in API 1.23' - ) - if not isinstance(labels, dict): - raise TypeError('labels must be a dictionary') - data["Labels"] = labels - - if enable_ipv6: - if version_lt(self._version, '1.23'): - raise InvalidVersion( - 'enable_ipv6 was introduced in API 1.23' - ) - data['EnableIPv6'] = True - - if internal: - if version_lt(self._version, '1.22'): - raise InvalidVersion('Internal networks are not ' - 'supported in API version < 1.22') - data['Internal'] = True - - if attachable is not None: - if version_lt(self._version, '1.24'): - raise InvalidVersion( - 'attachable is not supported in API version < 1.24' - ) - data['Attachable'] = attachable - - if ingress is not None: - if version_lt(self._version, '1.29'): - raise InvalidVersion( - 'ingress is not supported in API version < 1.29' - ) - - data['Ingress'] = ingress - - if scope is not None: - if version_lt(self._version, '1.30'): - raise InvalidVersion( - 'scope is not supported in API version < 1.30' - ) - data['Scope'] = scope - - url = self._url("/networks/create") - res = self._post_json(url, data=data) - return self._result(res, json=True) - - @minimum_version('1.25') - def prune_networks(self, filters=None): - """ - Delete unused networks - - Args: - filters (dict): Filters to process on the prune list. - - Returns: - (dict): A dict containing a list of deleted network names and - the amount of disk space reclaimed in bytes. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - params = {} - if filters: - params['filters'] = utils.convert_filters(filters) - url = self._url('/networks/prune') - return self._result(self._post(url, params=params), True) - - @check_resource('net_id') - def remove_network(self, net_id): - """ - Remove a network. Similar to the ``docker network rm`` command. - - Args: - net_id (str): The network's id - """ - url = self._url("/networks/{0}", net_id) - res = self._delete(url) - self._raise_for_status(res) - - @check_resource('net_id') - def inspect_network(self, net_id, verbose=None, scope=None): - """ - Get detailed information about a network. - - Args: - net_id (str): ID of network - verbose (bool): Show the service details across the cluster in - swarm mode. - scope (str): Filter the network by scope (``swarm``, ``global`` - or ``local``). 
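        Example:

            A sketch with a hypothetical ``net_id``; ``verbose`` requires
            API 1.28+ and ``scope`` requires API 1.31+:

            >>> net = client.api.inspect_network(net_id, verbose=True)
            >>> net['Containers']
            {'8b455e2c8cd9...': {'Name': 'web', 'IPv4Address': '192.168.52.10/24', ...}}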
- """ - params = {} - if verbose is not None: - if version_lt(self._version, '1.28'): - raise InvalidVersion('verbose was introduced in API 1.28') - params['verbose'] = verbose - if scope is not None: - if version_lt(self._version, '1.31'): - raise InvalidVersion('scope was introduced in API 1.31') - params['scope'] = scope - - url = self._url("/networks/{0}", net_id) - res = self._get(url, params=params) - return self._result(res, json=True) - - @check_resource('container') - def connect_container_to_network(self, container, net_id, - ipv4_address=None, ipv6_address=None, - aliases=None, links=None, - link_local_ips=None, driver_opt=None, - mac_address=None): - """ - Connect a container to a network. - - Args: - container (str): container-id/name to be connected to the network - net_id (str): network id - aliases (:py:class:`list`): A list of aliases for this endpoint. - Names in that list can be used within the network to reach the - container. Defaults to ``None``. - links (:py:class:`list`): A list of links for this endpoint. - Containers declared in this list will be linked to this - container. Defaults to ``None``. - ipv4_address (str): The IP address of this container on the - network, using the IPv4 protocol. Defaults to ``None``. - ipv6_address (str): The IP address of this container on the - network, using the IPv6 protocol. Defaults to ``None``. - link_local_ips (:py:class:`list`): A list of link-local - (IPv4/IPv6) addresses. - mac_address (str): The MAC address of this container on the - network. Defaults to ``None``. - """ - data = { - "Container": container, - "EndpointConfig": self.create_endpoint_config( - aliases=aliases, links=links, ipv4_address=ipv4_address, - ipv6_address=ipv6_address, link_local_ips=link_local_ips, - driver_opt=driver_opt, - mac_address=mac_address - ), - } - - url = self._url("/networks/{0}/connect", net_id) - res = self._post_json(url, data=data) - self._raise_for_status(res) - - @check_resource('container') - def disconnect_container_from_network(self, container, net_id, - force=False): - """ - Disconnect a container from a network. - - Args: - container (str): container ID or name to be disconnected from the - network - net_id (str): network ID - force (bool): Force the container to disconnect from a network. - Default: ``False`` - """ - data = {"Container": container} - if force: - if version_lt(self._version, '1.22'): - raise InvalidVersion( - 'Forced disconnect was introduced in API 1.22' - ) - data['Force'] = force - url = self._url("/networks/{0}/disconnect", net_id) - res = self._post_json(url, data=data) - self._raise_for_status(res) diff --git a/server/venv/Lib/site-packages/docker/api/plugin.py b/server/venv/Lib/site-packages/docker/api/plugin.py deleted file mode 100644 index 10210c1..0000000 --- a/server/venv/Lib/site-packages/docker/api/plugin.py +++ /dev/null @@ -1,261 +0,0 @@ -from .. import auth, utils - - -class PluginApiMixin: - @utils.minimum_version('1.25') - @utils.check_resource('name') - def configure_plugin(self, name, options): - """ - Configure a plugin. - - Args: - name (string): The name of the plugin. The ``:latest`` tag is - optional, and is the default if omitted. 
- options (dict): A key-value mapping of options - - Returns: - ``True`` if successful - """ - url = self._url('/plugins/{0}/set', name) - data = options - if isinstance(data, dict): - data = [f'{k}={v}' for k, v in data.items()] - res = self._post_json(url, data=data) - self._raise_for_status(res) - return True - - @utils.minimum_version('1.25') - def create_plugin(self, name, plugin_data_dir, gzip=False): - """ - Create a new plugin. - - Args: - name (string): The name of the plugin. The ``:latest`` tag is - optional, and is the default if omitted. - plugin_data_dir (string): Path to the plugin data directory. - Plugin data directory must contain the ``config.json`` - manifest file and the ``rootfs`` directory. - gzip (bool): Compress the context using gzip. Default: False - - Returns: - ``True`` if successful - """ - url = self._url('/plugins/create') - - with utils.create_archive( - root=plugin_data_dir, gzip=gzip, - files=set(utils.build.walk(plugin_data_dir, [])) - ) as archv: - res = self._post(url, params={'name': name}, data=archv) - self._raise_for_status(res) - return True - - @utils.minimum_version('1.25') - def disable_plugin(self, name, force=False): - """ - Disable an installed plugin. - - Args: - name (string): The name of the plugin. The ``:latest`` tag is - optional, and is the default if omitted. - force (bool): To enable the force query parameter. - - Returns: - ``True`` if successful - """ - url = self._url('/plugins/{0}/disable', name) - res = self._post(url, params={'force': force}) - self._raise_for_status(res) - return True - - @utils.minimum_version('1.25') - def enable_plugin(self, name, timeout=0): - """ - Enable an installed plugin. - - Args: - name (string): The name of the plugin. The ``:latest`` tag is - optional, and is the default if omitted. - timeout (int): Operation timeout (in seconds). Default: 0 - - Returns: - ``True`` if successful - """ - url = self._url('/plugins/{0}/enable', name) - params = {'timeout': timeout} - res = self._post(url, params=params) - self._raise_for_status(res) - return True - - @utils.minimum_version('1.25') - def inspect_plugin(self, name): - """ - Retrieve plugin metadata. - - Args: - name (string): The name of the plugin. The ``:latest`` tag is - optional, and is the default if omitted. - - Returns: - A dict containing plugin info - """ - url = self._url('/plugins/{0}/json', name) - return self._result(self._get(url), True) - - @utils.minimum_version('1.25') - def pull_plugin(self, remote, privileges, name=None): - """ - Pull and install a plugin. After the plugin is installed, it can be - enabled using :py:meth:`~enable_plugin`. - - Args: - remote (string): Remote reference for the plugin to install. - The ``:latest`` tag is optional, and is the default if - omitted. - privileges (:py:class:`list`): A list of privileges the user - consents to grant to the plugin. Can be retrieved using - :py:meth:`~plugin_privileges`. - name (string): Local name for the pulled plugin. The - ``:latest`` tag is optional, and is the default if omitted. 
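        Example:

            The usual install flow, sketched with a well-known plugin name:

            >>> ref = 'vieux/sshfs:latest'
            >>> privileges = client.api.plugin_privileges(ref)
            >>> for status in client.api.pull_plugin(ref, privileges):
            ...     print(status)
            >>> client.api.enable_plugin(ref, timeout=30)
            True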
- - Returns: - An iterable object streaming the decoded API logs - """ - url = self._url('/plugins/pull') - params = { - 'remote': remote, - } - if name: - params['name'] = name - - headers = {} - registry, repo_name = auth.resolve_repository_name(remote) - header = auth.get_config_header(self, registry) - if header: - headers['X-Registry-Auth'] = header - response = self._post_json( - url, params=params, headers=headers, data=privileges, - stream=True - ) - self._raise_for_status(response) - return self._stream_helper(response, decode=True) - - @utils.minimum_version('1.25') - def plugins(self): - """ - Retrieve a list of installed plugins. - - Returns: - A list of dicts, one per plugin - """ - url = self._url('/plugins') - return self._result(self._get(url), True) - - @utils.minimum_version('1.25') - def plugin_privileges(self, name): - """ - Retrieve list of privileges to be granted to a plugin. - - Args: - name (string): Name of the remote plugin to examine. The - ``:latest`` tag is optional, and is the default if omitted. - - Returns: - A list of dictionaries representing the plugin's - permissions - - """ - params = { - 'remote': name, - } - - headers = {} - registry, repo_name = auth.resolve_repository_name(name) - header = auth.get_config_header(self, registry) - if header: - headers['X-Registry-Auth'] = header - - url = self._url('/plugins/privileges') - return self._result( - self._get(url, params=params, headers=headers), True - ) - - @utils.minimum_version('1.25') - @utils.check_resource('name') - def push_plugin(self, name): - """ - Push a plugin to the registry. - - Args: - name (string): Name of the plugin to upload. The ``:latest`` - tag is optional, and is the default if omitted. - - Returns: - ``True`` if successful - """ - url = self._url('/plugins/{0}/pull', name) - - headers = {} - registry, repo_name = auth.resolve_repository_name(name) - header = auth.get_config_header(self, registry) - if header: - headers['X-Registry-Auth'] = header - res = self._post(url, headers=headers) - self._raise_for_status(res) - return self._stream_helper(res, decode=True) - - @utils.minimum_version('1.25') - @utils.check_resource('name') - def remove_plugin(self, name, force=False): - """ - Remove an installed plugin. - - Args: - name (string): Name of the plugin to remove. The ``:latest`` - tag is optional, and is the default if omitted. - force (bool): Disable the plugin before removing. This may - result in issues if the plugin is in use by a container. - - Returns: - ``True`` if successful - """ - url = self._url('/plugins/{0}', name) - res = self._delete(url, params={'force': force}) - self._raise_for_status(res) - return True - - @utils.minimum_version('1.26') - @utils.check_resource('name') - def upgrade_plugin(self, name, remote, privileges): - """ - Upgrade an installed plugin. - - Args: - name (string): Name of the plugin to upgrade. The ``:latest`` - tag is optional and is the default if omitted. - remote (string): Remote reference to upgrade to. The - ``:latest`` tag is optional and is the default if omitted. - privileges (:py:class:`list`): A list of privileges the user - consents to grant to the plugin. Can be retrieved using - :py:meth:`~plugin_privileges`. 
- - Returns: - An iterable object streaming the decoded API logs - """ - - url = self._url('/plugins/{0}/upgrade', name) - params = { - 'remote': remote, - } - - headers = {} - registry, repo_name = auth.resolve_repository_name(remote) - header = auth.get_config_header(self, registry) - if header: - headers['X-Registry-Auth'] = header - response = self._post_json( - url, params=params, headers=headers, data=privileges, - stream=True - ) - self._raise_for_status(response) - return self._stream_helper(response, decode=True) diff --git a/server/venv/Lib/site-packages/docker/api/secret.py b/server/venv/Lib/site-packages/docker/api/secret.py deleted file mode 100644 index cd440b9..0000000 --- a/server/venv/Lib/site-packages/docker/api/secret.py +++ /dev/null @@ -1,99 +0,0 @@ -import base64 - -from .. import errors -from .. import utils - - -class SecretApiMixin: - @utils.minimum_version('1.25') - def create_secret(self, name, data, labels=None, driver=None): - """ - Create a secret - - Args: - name (string): Name of the secret - data (bytes): Secret data to be stored - labels (dict): A mapping of labels to assign to the secret - driver (DriverConfig): A custom driver configuration. If - unspecified, the default ``internal`` driver will be used - - Returns (dict): ID of the newly created secret - """ - if not isinstance(data, bytes): - data = data.encode('utf-8') - - data = base64.b64encode(data) - data = data.decode('ascii') - body = { - 'Data': data, - 'Name': name, - 'Labels': labels - } - - if driver is not None: - if utils.version_lt(self._version, '1.31'): - raise errors.InvalidVersion( - 'Secret driver is only available for API version > 1.31' - ) - - body['Driver'] = driver - - url = self._url('/secrets/create') - return self._result( - self._post_json(url, data=body), True - ) - - @utils.minimum_version('1.25') - @utils.check_resource('id') - def inspect_secret(self, id): - """ - Retrieve secret metadata - - Args: - id (string): Full ID of the secret to inspect - - Returns (dict): A dictionary of metadata - - Raises: - :py:class:`docker.errors.NotFound` - if no secret with that ID exists - """ - url = self._url('/secrets/{0}', id) - return self._result(self._get(url), True) - - @utils.minimum_version('1.25') - @utils.check_resource('id') - def remove_secret(self, id): - """ - Remove a secret - - Args: - id (string): Full ID of the secret to remove - - Returns (boolean): True if successful - - Raises: - :py:class:`docker.errors.NotFound` - if no secret with that ID exists - """ - url = self._url('/secrets/{0}', id) - res = self._delete(url) - self._raise_for_status(res) - return True - - @utils.minimum_version('1.25') - def secrets(self, filters=None): - """ - List secrets - - Args: - filters (dict): A map of filters to process on the secrets - list. Available filters: ``names`` - - Returns (list): A list of secrets - """ - url = self._url('/secrets') - params = {} - if filters: - params['filters'] = utils.convert_filters(filters) - return self._result(self._get(url, params=params), True) diff --git a/server/venv/Lib/site-packages/docker/api/service.py b/server/venv/Lib/site-packages/docker/api/service.py deleted file mode 100644 index 652b7c2..0000000 --- a/server/venv/Lib/site-packages/docker/api/service.py +++ /dev/null @@ -1,488 +0,0 @@ -from .. 
import auth, errors, utils -from ..types import ServiceMode - - -def _check_api_features(version, task_template, update_config, endpoint_spec, - rollback_config): - - def raise_version_error(param, min_version): - raise errors.InvalidVersion( - '{} is not supported in API version < {}'.format( - param, min_version - ) - ) - - if update_config is not None: - if utils.version_lt(version, '1.25'): - if 'MaxFailureRatio' in update_config: - raise_version_error('UpdateConfig.max_failure_ratio', '1.25') - if 'Monitor' in update_config: - raise_version_error('UpdateConfig.monitor', '1.25') - - if utils.version_lt(version, '1.28'): - if update_config.get('FailureAction') == 'rollback': - raise_version_error( - 'UpdateConfig.failure_action rollback', '1.28' - ) - - if utils.version_lt(version, '1.29'): - if 'Order' in update_config: - raise_version_error('UpdateConfig.order', '1.29') - - if rollback_config is not None: - if utils.version_lt(version, '1.28'): - raise_version_error('rollback_config', '1.28') - - if utils.version_lt(version, '1.29'): - if 'Order' in update_config: - raise_version_error('RollbackConfig.order', '1.29') - - if endpoint_spec is not None: - if utils.version_lt(version, '1.32') and 'Ports' in endpoint_spec: - if any(p.get('PublishMode') for p in endpoint_spec['Ports']): - raise_version_error('EndpointSpec.Ports[].mode', '1.32') - - if task_template is not None: - if 'ForceUpdate' in task_template and utils.version_lt( - version, '1.25'): - raise_version_error('force_update', '1.25') - - if task_template.get('Placement'): - if utils.version_lt(version, '1.30'): - if task_template['Placement'].get('Platforms'): - raise_version_error('Placement.platforms', '1.30') - if utils.version_lt(version, '1.27'): - if task_template['Placement'].get('Preferences'): - raise_version_error('Placement.preferences', '1.27') - - if task_template.get('ContainerSpec'): - container_spec = task_template.get('ContainerSpec') - - if utils.version_lt(version, '1.25'): - if container_spec.get('TTY'): - raise_version_error('ContainerSpec.tty', '1.25') - if container_spec.get('Hostname') is not None: - raise_version_error('ContainerSpec.hostname', '1.25') - if container_spec.get('Hosts') is not None: - raise_version_error('ContainerSpec.hosts', '1.25') - if container_spec.get('Groups') is not None: - raise_version_error('ContainerSpec.groups', '1.25') - if container_spec.get('DNSConfig') is not None: - raise_version_error('ContainerSpec.dns_config', '1.25') - if container_spec.get('Healthcheck') is not None: - raise_version_error('ContainerSpec.healthcheck', '1.25') - - if utils.version_lt(version, '1.28'): - if container_spec.get('ReadOnly') is not None: - raise_version_error('ContainerSpec.dns_config', '1.28') - if container_spec.get('StopSignal') is not None: - raise_version_error('ContainerSpec.stop_signal', '1.28') - - if utils.version_lt(version, '1.30'): - if container_spec.get('Configs') is not None: - raise_version_error('ContainerSpec.configs', '1.30') - if container_spec.get('Privileges') is not None: - raise_version_error('ContainerSpec.privileges', '1.30') - - if utils.version_lt(version, '1.35'): - if container_spec.get('Isolation') is not None: - raise_version_error('ContainerSpec.isolation', '1.35') - - if utils.version_lt(version, '1.38'): - if container_spec.get('Init') is not None: - raise_version_error('ContainerSpec.init', '1.38') - - if task_template.get('Resources'): - if utils.version_lt(version, '1.32'): - if task_template['Resources'].get('GenericResources'): - 
raise_version_error('Resources.generic_resources', '1.32') - - -def _merge_task_template(current, override): - merged = current.copy() - if override is not None: - for ts_key, ts_value in override.items(): - if ts_key == 'ContainerSpec': - if 'ContainerSpec' not in merged: - merged['ContainerSpec'] = {} - for cs_key, cs_value in override['ContainerSpec'].items(): - if cs_value is not None: - merged['ContainerSpec'][cs_key] = cs_value - elif ts_value is not None: - merged[ts_key] = ts_value - return merged - - -class ServiceApiMixin: - @utils.minimum_version('1.24') - def create_service( - self, task_template, name=None, labels=None, mode=None, - update_config=None, networks=None, endpoint_config=None, - endpoint_spec=None, rollback_config=None - ): - """ - Create a service. - - Args: - task_template (TaskTemplate): Specification of the task to start as - part of the new service. - name (string): User-defined name for the service. Optional. - labels (dict): A map of labels to associate with the service. - Optional. - mode (ServiceMode): Scheduling mode for the service (replicated - or global). Defaults to replicated. - update_config (UpdateConfig): Specification for the update strategy - of the service. Default: ``None`` - rollback_config (RollbackConfig): Specification for the rollback - strategy of the service. Default: ``None`` - networks (:py:class:`list`): List of network names or IDs or - :py:class:`~docker.types.NetworkAttachmentConfig` to attach the - service to. Default: ``None``. - endpoint_spec (EndpointSpec): Properties that can be configured to - access and load balance a service. Default: ``None``. - - Returns: - A dictionary containing an ``ID`` key for the newly created - service. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - - _check_api_features( - self._version, task_template, update_config, endpoint_spec, - rollback_config - ) - - url = self._url('/services/create') - headers = {} - image = task_template.get('ContainerSpec', {}).get('Image', None) - if image is None: - raise errors.DockerException( - 'Missing mandatory Image key in ContainerSpec' - ) - if mode and not isinstance(mode, dict): - mode = ServiceMode(mode) - - registry, repo_name = auth.resolve_repository_name(image) - auth_header = auth.get_config_header(self, registry) - if auth_header: - headers['X-Registry-Auth'] = auth_header - if utils.version_lt(self._version, '1.25'): - networks = networks or task_template.pop('Networks', None) - data = { - 'Name': name, - 'Labels': labels, - 'TaskTemplate': task_template, - 'Mode': mode, - 'Networks': utils.convert_service_networks(networks), - 'EndpointSpec': endpoint_spec - } - - if update_config is not None: - data['UpdateConfig'] = update_config - - if rollback_config is not None: - data['RollbackConfig'] = rollback_config - - return self._result( - self._post_json(url, data=data, headers=headers), True - ) - - @utils.minimum_version('1.24') - @utils.check_resource('service') - def inspect_service(self, service, insert_defaults=None): - """ - Return information about a service. - - Args: - service (str): Service name or ID. - insert_defaults (boolean): If true, default values will be merged - into the service inspect output. - - Returns: - (dict): A dictionary of the server-side representation of the - service, including all relevant properties. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
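        Example:

            A sketch that creates a minimal service and reads its spec
            back; requires an initialized swarm:

            >>> spec = docker.types.ContainerSpec(image='nginx:alpine')
            >>> task_tmpl = docker.types.TaskTemplate(container_spec=spec)
            >>> svc = client.api.create_service(task_tmpl, name='web')
            >>> client.api.inspect_service(svc['ID'])['Spec']['Name']
            'web'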
- """ - url = self._url('/services/{0}', service) - params = {} - if insert_defaults is not None: - if utils.version_lt(self._version, '1.29'): - raise errors.InvalidVersion( - 'insert_defaults is not supported in API version < 1.29' - ) - params['insertDefaults'] = insert_defaults - - return self._result(self._get(url, params=params), True) - - @utils.minimum_version('1.24') - @utils.check_resource('task') - def inspect_task(self, task): - """ - Retrieve information about a task. - - Args: - task (str): Task ID - - Returns: - (dict): Information about the task. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url('/tasks/{0}', task) - return self._result(self._get(url), True) - - @utils.minimum_version('1.24') - @utils.check_resource('service') - def remove_service(self, service): - """ - Stop and remove a service. - - Args: - service (str): Service name or ID - - Returns: - ``True`` if successful. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - - url = self._url('/services/{0}', service) - resp = self._delete(url) - self._raise_for_status(resp) - return True - - @utils.minimum_version('1.24') - def services(self, filters=None, status=None): - """ - List services. - - Args: - filters (dict): Filters to process on the nodes list. Valid - filters: ``id``, ``name`` , ``label`` and ``mode``. - Default: ``None``. - status (bool): Include the service task count of running and - desired tasks. Default: ``None``. - - Returns: - A list of dictionaries containing data about each service. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - params = { - 'filters': utils.convert_filters(filters) if filters else None - } - if status is not None: - if utils.version_lt(self._version, '1.41'): - raise errors.InvalidVersion( - 'status is not supported in API version < 1.41' - ) - params['status'] = status - url = self._url('/services') - return self._result(self._get(url, params=params), True) - - @utils.minimum_version('1.25') - @utils.check_resource('service') - def service_logs(self, service, details=False, follow=False, stdout=False, - stderr=False, since=0, timestamps=False, tail='all', - is_tty=None): - """ - Get log stream for a service. - Note: This endpoint works only for services with the ``json-file`` - or ``journald`` logging drivers. - - Args: - service (str): ID or name of the service - details (bool): Show extra details provided to logs. - Default: ``False`` - follow (bool): Keep connection open to read logs as they are - sent by the Engine. Default: ``False`` - stdout (bool): Return logs from ``stdout``. Default: ``False`` - stderr (bool): Return logs from ``stderr``. Default: ``False`` - since (int): UNIX timestamp for the logs staring point. - Default: 0 - timestamps (bool): Add timestamps to every log line. - tail (string or int): Number of log lines to be returned, - counting from the current end of the logs. Specify an - integer or ``'all'`` to output all log lines. - Default: ``all`` - is_tty (bool): Whether the service's :py:class:`ContainerSpec` - enables the TTY option. If omitted, the method will query - the Engine for the information, causing an additional - roundtrip. - - Returns (generator): Logs for the service. 
- """ - params = { - 'details': details, - 'follow': follow, - 'stdout': stdout, - 'stderr': stderr, - 'since': since, - 'timestamps': timestamps, - 'tail': tail - } - - url = self._url('/services/{0}/logs', service) - res = self._get(url, params=params, stream=True) - if is_tty is None: - is_tty = self.inspect_service( - service - )['Spec']['TaskTemplate']['ContainerSpec'].get('TTY', False) - return self._get_result_tty(True, res, is_tty) - - @utils.minimum_version('1.24') - def tasks(self, filters=None): - """ - Retrieve a list of tasks. - - Args: - filters (dict): A map of filters to process on the tasks list. - Valid filters: ``id``, ``name``, ``service``, ``node``, - ``label`` and ``desired-state``. - - Returns: - (:py:class:`list`): List of task dictionaries. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - - params = { - 'filters': utils.convert_filters(filters) if filters else None - } - url = self._url('/tasks') - return self._result(self._get(url, params=params), True) - - @utils.minimum_version('1.24') - @utils.check_resource('service') - def update_service(self, service, version, task_template=None, name=None, - labels=None, mode=None, update_config=None, - networks=None, endpoint_config=None, - endpoint_spec=None, fetch_current_spec=False, - rollback_config=None): - """ - Update a service. - - Args: - service (string): A service identifier (either its name or service - ID). - version (int): The version number of the service object being - updated. This is required to avoid conflicting writes. - task_template (TaskTemplate): Specification of the updated task to - start as part of the service. - name (string): New name for the service. Optional. - labels (dict): A map of labels to associate with the service. - Optional. - mode (ServiceMode): Scheduling mode for the service (replicated - or global). Defaults to replicated. - update_config (UpdateConfig): Specification for the update strategy - of the service. Default: ``None``. - rollback_config (RollbackConfig): Specification for the rollback - strategy of the service. Default: ``None`` - networks (:py:class:`list`): List of network names or IDs or - :py:class:`~docker.types.NetworkAttachmentConfig` to attach the - service to. Default: ``None``. - endpoint_spec (EndpointSpec): Properties that can be configured to - access and load balance a service. Default: ``None``. - fetch_current_spec (boolean): Use the undefined settings from the - current specification of the service. Default: ``False`` - - Returns: - A dictionary containing a ``Warnings`` key. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
- """ - - _check_api_features( - self._version, task_template, update_config, endpoint_spec, - rollback_config - ) - - if fetch_current_spec: - inspect_defaults = True - if utils.version_lt(self._version, '1.29'): - inspect_defaults = None - current = self.inspect_service( - service, insert_defaults=inspect_defaults - )['Spec'] - - else: - current = {} - - url = self._url('/services/{0}/update', service) - data = {} - headers = {} - - data['Name'] = current.get('Name') if name is None else name - - data['Labels'] = current.get('Labels') if labels is None else labels - - if mode is not None: - if not isinstance(mode, dict): - mode = ServiceMode(mode) - data['Mode'] = mode - else: - data['Mode'] = current.get('Mode') - - data['TaskTemplate'] = _merge_task_template( - current.get('TaskTemplate', {}), task_template - ) - - container_spec = data['TaskTemplate'].get('ContainerSpec', {}) - image = container_spec.get('Image', None) - if image is not None: - registry, repo_name = auth.resolve_repository_name(image) - auth_header = auth.get_config_header(self, registry) - if auth_header: - headers['X-Registry-Auth'] = auth_header - - if update_config is not None: - data['UpdateConfig'] = update_config - else: - data['UpdateConfig'] = current.get('UpdateConfig') - - if rollback_config is not None: - data['RollbackConfig'] = rollback_config - else: - data['RollbackConfig'] = current.get('RollbackConfig') - - if networks is not None: - converted_networks = utils.convert_service_networks(networks) - if utils.version_lt(self._version, '1.25'): - data['Networks'] = converted_networks - else: - data['TaskTemplate']['Networks'] = converted_networks - elif utils.version_lt(self._version, '1.25'): - data['Networks'] = current.get('Networks') - elif data['TaskTemplate'].get('Networks') is None: - current_task_template = current.get('TaskTemplate', {}) - current_networks = current_task_template.get('Networks') - if current_networks is None: - current_networks = current.get('Networks') - if current_networks is not None: - data['TaskTemplate']['Networks'] = current_networks - - if endpoint_spec is not None: - data['EndpointSpec'] = endpoint_spec - else: - data['EndpointSpec'] = current.get('EndpointSpec') - - resp = self._post_json( - url, data=data, params={'version': version}, headers=headers - ) - return self._result(resp, json=True) diff --git a/server/venv/Lib/site-packages/docker/api/swarm.py b/server/venv/Lib/site-packages/docker/api/swarm.py deleted file mode 100644 index d09dd08..0000000 --- a/server/venv/Lib/site-packages/docker/api/swarm.py +++ /dev/null @@ -1,463 +0,0 @@ -import logging -import http.client as http_client -from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE -from .. import errors -from .. import types -from .. import utils - -log = logging.getLogger(__name__) - - -class SwarmApiMixin: - - def create_swarm_spec(self, *args, **kwargs): - """ - Create a :py:class:`docker.types.SwarmSpec` instance that can be used - as the ``swarm_spec`` argument in - :py:meth:`~docker.api.swarm.SwarmApiMixin.init_swarm`. - - Args: - task_history_retention_limit (int): Maximum number of tasks - history stored. - snapshot_interval (int): Number of logs entries between snapshot. - keep_old_snapshots (int): Number of snapshots to keep beyond the - current snapshot. - log_entries_for_slow_followers (int): Number of log entries to - keep around to sync up slow followers after a snapshot is - created. - heartbeat_tick (int): Amount of ticks (in seconds) between each - heartbeat. 
- election_tick (int): Amount of ticks (in seconds) needed without a - leader to trigger a new election. - dispatcher_heartbeat_period (int): The delay for an agent to send - a heartbeat to the dispatcher. - node_cert_expiry (int): Automatic expiry for nodes certificates. - external_cas (:py:class:`list`): Configuration for forwarding - signing requests to an external certificate authority. Use - a list of :py:class:`docker.types.SwarmExternalCA`. - name (string): Swarm's name - labels (dict): User-defined key/value metadata. - signing_ca_cert (str): The desired signing CA certificate for all - swarm node TLS leaf certificates, in PEM format. - signing_ca_key (str): The desired signing CA key for all swarm - node TLS leaf certificates, in PEM format. - ca_force_rotate (int): An integer whose purpose is to force swarm - to generate a new signing CA certificate and key, if none have - been specified. - autolock_managers (boolean): If set, generate a key and use it to - lock data stored on the managers. - log_driver (DriverConfig): The default log driver to use for tasks - created in the orchestrator. - - Returns: - :py:class:`docker.types.SwarmSpec` - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> spec = client.api.create_swarm_spec( - snapshot_interval=5000, log_entries_for_slow_followers=1200 - ) - >>> client.api.init_swarm( - advertise_addr='eth0', listen_addr='0.0.0.0:5000', - force_new_cluster=False, swarm_spec=spec - ) - """ - ext_ca = kwargs.pop('external_ca', None) - if ext_ca: - kwargs['external_cas'] = [ext_ca] - return types.SwarmSpec(self._version, *args, **kwargs) - - @utils.minimum_version('1.24') - def get_unlock_key(self): - """ - Get the unlock key for this Swarm manager. - - Returns: - A ``dict`` containing an ``UnlockKey`` member - """ - return self._result(self._get(self._url('/swarm/unlockkey')), True) - - @utils.minimum_version('1.24') - def init_swarm(self, advertise_addr=None, listen_addr='0.0.0.0:2377', - force_new_cluster=False, swarm_spec=None, - default_addr_pool=None, subnet_size=None, - data_path_addr=None, data_path_port=None): - """ - Initialize a new Swarm using the current connected engine as the first - node. - - Args: - advertise_addr (string): Externally reachable address advertised - to other nodes. This can either be an address/port combination - in the form ``192.168.1.1:4567``, or an interface followed by a - port number, like ``eth0:4567``. If the port number is omitted, - the port number from the listen address is used. If - ``advertise_addr`` is not specified, it will be automatically - detected when possible. Default: None - listen_addr (string): Listen address used for inter-manager - communication, as well as determining the networking interface - used for the VXLAN Tunnel Endpoint (VTEP). This can either be - an address/port combination in the form ``192.168.1.1:4567``, - or an interface followed by a port number, like ``eth0:4567``. - If the port number is omitted, the default swarm listening port - is used. Default: '0.0.0.0:2377' - force_new_cluster (bool): Force creating a new Swarm, even if - already part of one. Default: False - swarm_spec (dict): Configuration settings of the new Swarm. Use - ``APIClient.create_swarm_spec`` to generate a valid - configuration. Default: None - default_addr_pool (list of strings): Default Address Pool specifies - default subnet pools for global scope networks. Each pool - should be specified as a CIDR block, like '10.0.0.0/8'. 
- Default: None - subnet_size (int): SubnetSize specifies the subnet size of the - networks created from the default subnet pool. Default: None - data_path_addr (string): Address or interface to use for data path - traffic. For example, 192.168.1.1, or an interface, like eth0. - data_path_port (int): Port number to use for data path traffic. - Acceptable port range is 1024 to 49151. If set to ``None`` or - 0, the default port 4789 will be used. Default: None - - Returns: - (str): The ID of the created node. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - - url = self._url('/swarm/init') - if swarm_spec is not None and not isinstance(swarm_spec, dict): - raise TypeError('swarm_spec must be a dictionary') - - if default_addr_pool is not None: - if utils.version_lt(self._version, '1.39'): - raise errors.InvalidVersion( - 'Address pool is only available for API version >= 1.39' - ) - # subnet_size becomes 0 if not set with default_addr_pool - if subnet_size is None: - subnet_size = DEFAULT_SWARM_SUBNET_SIZE - - if subnet_size is not None: - if utils.version_lt(self._version, '1.39'): - raise errors.InvalidVersion( - 'Subnet size is only available for API version >= 1.39' - ) - # subnet_size is ignored if set without default_addr_pool - if default_addr_pool is None: - default_addr_pool = DEFAULT_SWARM_ADDR_POOL - - data = { - 'AdvertiseAddr': advertise_addr, - 'ListenAddr': listen_addr, - 'DefaultAddrPool': default_addr_pool, - 'SubnetSize': subnet_size, - 'ForceNewCluster': force_new_cluster, - 'Spec': swarm_spec, - } - - if data_path_addr is not None: - if utils.version_lt(self._version, '1.30'): - raise errors.InvalidVersion( - 'Data address path is only available for ' - 'API version >= 1.30' - ) - data['DataPathAddr'] = data_path_addr - - if data_path_port is not None: - if utils.version_lt(self._version, '1.40'): - raise errors.InvalidVersion( - 'Data path port is only available for ' - 'API version >= 1.40' - ) - data['DataPathPort'] = data_path_port - - response = self._post_json(url, data=data) - return self._result(response, json=True) - - @utils.minimum_version('1.24') - def inspect_swarm(self): - """ - Retrieve low-level information about the current swarm. - - Returns: - A dictionary containing data about the swarm. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url('/swarm') - return self._result(self._get(url), True) - - @utils.check_resource('node_id') - @utils.minimum_version('1.24') - def inspect_node(self, node_id): - """ - Retrieve low-level information about a swarm node - - Args: - node_id (string): ID of the node to be inspected. - - Returns: - A dictionary containing data about this node. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url('/nodes/{0}', node_id) - return self._result(self._get(url), True) - - @utils.minimum_version('1.24') - def join_swarm(self, remote_addrs, join_token, listen_addr='0.0.0.0:2377', - advertise_addr=None, data_path_addr=None): - """ - Make this Engine join a swarm that has already been created. - - Args: - remote_addrs (:py:class:`list`): Addresses of one or more manager - nodes already participating in the Swarm to join. - join_token (string): Secret token for joining this Swarm. - listen_addr (string): Listen address used for inter-manager - communication if the node gets promoted to manager, as well as - determining the networking interface used for the VXLAN Tunnel - Endpoint (VTEP). 
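A short sketch of initialising a single-node swarm with the address-pool options described above; the interface name and pool are placeholders, and ``version='auto'`` negotiates the API version >= 1.39 they require:

.. code-block:: python

    import docker

    client = docker.APIClient(
        base_url='unix://var/run/docker.sock', version='auto'
    )
    node_id = client.init_swarm(
        advertise_addr='eth0',
        default_addr_pool=['10.20.0.0/16'],
        subnet_size=26,
    )
    print('first manager node:', node_id)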
Default: ``'0.0.0.0:2377'`` - advertise_addr (string): Externally reachable address advertised - to other nodes. This can either be an address/port combination - in the form ``192.168.1.1:4567``, or an interface followed by a - port number, like ``eth0:4567``. If the port number is omitted, - the port number from the listen address is used. If - AdvertiseAddr is not specified, it will be automatically - detected when possible. Default: ``None`` - data_path_addr (string): Address or interface to use for data path - traffic. For example, 192.168.1.1, or an interface, like eth0. - - Returns: - ``True`` if the request went through. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - data = { - 'RemoteAddrs': remote_addrs, - 'ListenAddr': listen_addr, - 'JoinToken': join_token, - 'AdvertiseAddr': advertise_addr, - } - - if data_path_addr is not None: - if utils.version_lt(self._version, '1.30'): - raise errors.InvalidVersion( - 'Data address path is only available for ' - 'API version >= 1.30' - ) - data['DataPathAddr'] = data_path_addr - - url = self._url('/swarm/join') - response = self._post_json(url, data=data) - self._raise_for_status(response) - return True - - @utils.minimum_version('1.24') - def leave_swarm(self, force=False): - """ - Leave a swarm. - - Args: - force (bool): Leave the swarm even if this node is a manager. - Default: ``False`` - - Returns: - ``True`` if the request went through. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url('/swarm/leave') - response = self._post(url, params={'force': force}) - # Ignore "this node is not part of a swarm" error - if force and response.status_code == http_client.NOT_ACCEPTABLE: - return True - # FIXME: Temporary workaround for 1.13.0-rc bug - # https://github.com/docker/docker/issues/29192 - if force and response.status_code == http_client.SERVICE_UNAVAILABLE: - return True - self._raise_for_status(response) - return True - - @utils.minimum_version('1.24') - def nodes(self, filters=None): - """ - List swarm nodes. - - Args: - filters (dict): Filters to process on the nodes list. Valid - filters: ``id``, ``name``, ``membership`` and ``role``. - Default: ``None`` - - Returns: - A list of dictionaries containing data about each swarm node. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url('/nodes') - params = {} - if filters: - params['filters'] = utils.convert_filters(filters) - - return self._result(self._get(url, params=params), True) - - @utils.check_resource('node_id') - @utils.minimum_version('1.24') - def remove_node(self, node_id, force=False): - """ - Remove a node from the swarm. - - Args: - node_id (string): ID of the node to be removed. - force (bool): Force remove an active node. Default: `False` - - Raises: - :py:class:`docker.errors.NotFound` - If the node referenced doesn't exist in the swarm. - - :py:class:`docker.errors.APIError` - If the server returns an error. - Returns: - `True` if the request was successful. - """ - url = self._url('/nodes/{0}', node_id) - params = { - 'force': force - } - res = self._delete(url, params=params) - self._raise_for_status(res) - return True - - @utils.minimum_version('1.24') - def unlock_swarm(self, key): - """ - Unlock a locked swarm.
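A sketch of the join/leave cycle, assuming a manager at a placeholder address; a real worker token comes from ``docker swarm join-token worker`` on a manager:

.. code-block:: python

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    client.join_swarm(
        remote_addrs=['192.168.1.10:2377'],
        join_token='SWMTKN-1-placeholder',
    )
    # Later: leave again, forcing in case the node was promoted.
    client.leave_swarm(force=True)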
- - Args: - key (string): The unlock key as provided by - :py:meth:`get_unlock_key` - - Raises: - :py:class:`docker.errors.InvalidArgument` - If the key argument is in an incompatible format - - :py:class:`docker.errors.APIError` - If the server returns an error. - - Returns: - `True` if the request was successful. - - Example: - - >>> key = client.api.get_unlock_key() - >>> client.unlock_swarm(key) - - """ - if isinstance(key, dict): - if 'UnlockKey' not in key: - raise errors.InvalidArgument('Invalid unlock key format') - else: - key = {'UnlockKey': key} - - url = self._url('/swarm/unlock') - res = self._post_json(url, data=key) - self._raise_for_status(res) - return True - - @utils.minimum_version('1.24') - def update_node(self, node_id, version, node_spec=None): - """ - Update the node's configuration - - Args: - - node_id (string): ID of the node to be updated. - version (int): The version number of the node object being - updated. This is required to avoid conflicting writes. - node_spec (dict): Configuration settings to update. Any values - not provided will be removed. Default: ``None`` - - Returns: - `True` if the request went through. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> node_spec = {'Availability': 'active', - 'Name': 'node-name', - 'Role': 'manager', - 'Labels': {'foo': 'bar'} - } - >>> client.api.update_node(node_id='24ifsmvkjbyhk', version=8, - node_spec=node_spec) - - """ - url = self._url('/nodes/{0}/update?version={1}', node_id, str(version)) - res = self._post_json(url, data=node_spec) - self._raise_for_status(res) - return True - - @utils.minimum_version('1.24') - def update_swarm(self, version, swarm_spec=None, - rotate_worker_token=False, - rotate_manager_token=False, - rotate_manager_unlock_key=False): - """ - Update the Swarm's configuration - - Args: - version (int): The version number of the swarm object being - updated. This is required to avoid conflicting writes. - swarm_spec (dict): Configuration settings to update. Use - :py:meth:`~docker.api.swarm.SwarmApiMixin.create_swarm_spec` to - generate a valid configuration. Default: ``None``. - rotate_worker_token (bool): Rotate the worker join token. Default: - ``False``. - rotate_manager_token (bool): Rotate the manager join token. - Default: ``False``. - rotate_manager_unlock_key (bool): Rotate the manager unlock key. - Default: ``False``. - - Returns: - ``True`` if the request went through. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - url = self._url('/swarm/update') - params = { - 'rotateWorkerToken': rotate_worker_token, - 'rotateManagerToken': rotate_manager_token, - 'version': version - } - if rotate_manager_unlock_key: - if utils.version_lt(self._version, '1.25'): - raise errors.InvalidVersion( - 'Rotate manager unlock key ' - 'is only available for API version >= 1.25' - ) - params['rotateManagerUnlockKey'] = rotate_manager_unlock_key - - response = self._post_json(url, data=swarm_spec, params=params) - self._raise_for_status(response) - return True diff --git a/server/venv/Lib/site-packages/docker/api/volume.py b/server/venv/Lib/site-packages/docker/api/volume.py deleted file mode 100644 index 98b42a1..0000000 --- a/server/venv/Lib/site-packages/docker/api/volume.py +++ /dev/null @@ -1,164 +0,0 @@ -from .. import errors -from .. import utils - - -class VolumeApiMixin: - def volumes(self, filters=None): - """ - List volumes currently registered by the docker daemon. 
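A sketch of rotating the worker join token with ``update_swarm``; as with ``update_node`` and ``update_service``, the current version index must be re-read first:

.. code-block:: python

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    swarm = client.inspect_swarm()
    client.update_swarm(
        version=swarm['Version']['Index'],
        rotate_worker_token=True,
    )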
Similar to the - ``docker volume ls`` command. - - Args: - filters (dict): Server-side list filtering options. - - Returns: - (dict): Dictionary with list of volume objects as value of the - ``Volumes`` key. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> client.api.volumes() - {u'Volumes': [{u'Driver': u'local', - u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data', - u'Name': u'foobar'}, - {u'Driver': u'local', - u'Mountpoint': u'/var/lib/docker/volumes/baz/_data', - u'Name': u'baz'}]} - """ - - params = { - 'filters': utils.convert_filters(filters) if filters else None - } - url = self._url('/volumes') - return self._result(self._get(url, params=params), True) - - def create_volume(self, name=None, driver=None, driver_opts=None, - labels=None): - """ - Create and register a named volume - - Args: - name (str): Name of the volume - driver (str): Name of the driver used to create the volume - driver_opts (dict): Driver options as a key-value dictionary - labels (dict): Labels to set on the volume - - Returns: - (dict): The created volume reference object - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> volume = client.api.create_volume( - ... name='foobar', - ... driver='local', - ... driver_opts={'foo': 'bar', 'baz': 'false'}, - ... labels={"key": "value"}, - ... ) - ... print(volume) - {u'Driver': u'local', - u'Labels': {u'key': u'value'}, - u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data', - u'Name': u'foobar', - u'Scope': u'local'} - - """ - url = self._url('/volumes/create') - if driver_opts is not None and not isinstance(driver_opts, dict): - raise TypeError('driver_opts must be a dictionary') - - data = { - 'Name': name, - 'Driver': driver, - 'DriverOpts': driver_opts, - } - - if labels is not None: - if utils.compare_version('1.23', self._version) < 0: - raise errors.InvalidVersion( - 'volume labels were introduced in API 1.23' - ) - if not isinstance(labels, dict): - raise TypeError('labels must be a dictionary') - data["Labels"] = labels - - return self._result(self._post_json(url, data=data), True) - - def inspect_volume(self, name): - """ - Retrieve volume info by name. - - Args: - name (str): volume name - - Returns: - (dict): Volume information dictionary - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> client.api.inspect_volume('foobar') - {u'Driver': u'local', - u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data', - u'Name': u'foobar'} - - """ - url = self._url('/volumes/{0}', name) - return self._result(self._get(url), True) - - @utils.minimum_version('1.25') - def prune_volumes(self, filters=None): - """ - Delete unused volumes - - Args: - filters (dict): Filters to process on the prune list. - - Returns: - (dict): A dict containing a list of deleted volume names and - the amount of disk space reclaimed in bytes. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - params = {} - if filters: - params['filters'] = utils.convert_filters(filters) - url = self._url('/volumes/prune') - return self._result(self._post(url, params=params), True) - - def remove_volume(self, name, force=False): - """ - Remove a volume. Similar to the ``docker volume rm`` command. - - Args: - name (str): The volume's name - force (bool): Force removal of volumes that were already removed - out of band by the volume driver plugin. 
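A sketch of the volume lifecycle these methods cover; the ``appdata`` name and the label are placeholders:

.. code-block:: python

    import docker

    client = docker.APIClient(base_url='unix://var/run/docker.sock')
    client.create_volume(name='appdata', driver='local',
                         labels={'app': 'demo'})
    print(client.inspect_volume('appdata')['Mountpoint'])
    print(client.volumes(filters={'label': 'app=demo'}))
    client.remove_volume('appdata')
    client.prune_volumes()  # reclaim any remaining unused volumes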
- - Raises: - :py:class:`docker.errors.APIError` - If volume failed to remove. - """ - params = {} - if force: - if utils.version_lt(self._version, '1.25'): - raise errors.InvalidVersion( - 'force removal was introduced in API 1.25' - ) - params = {'force': force} - - url = self._url('/volumes/{0}', name, params=params) - resp = self._delete(url) - self._raise_for_status(resp) diff --git a/server/venv/Lib/site-packages/docker/auth.py b/server/venv/Lib/site-packages/docker/auth.py deleted file mode 100644 index cb38855..0000000 --- a/server/venv/Lib/site-packages/docker/auth.py +++ /dev/null @@ -1,388 +0,0 @@ -import base64 -import json -import logging - -from . import credentials -from . import errors -from .utils import config - -INDEX_NAME = 'docker.io' -INDEX_URL = f'https://index.{INDEX_NAME}/v1/' -TOKEN_USERNAME = '' - -log = logging.getLogger(__name__) - - -def resolve_repository_name(repo_name): - if '://' in repo_name: - raise errors.InvalidRepository( - f'Repository name cannot contain a scheme ({repo_name})' - ) - - index_name, remote_name = split_repo_name(repo_name) - if index_name[0] == '-' or index_name[-1] == '-': - raise errors.InvalidRepository( - 'Invalid index name ({}). Cannot begin or end with a' - ' hyphen.'.format(index_name) - ) - return resolve_index_name(index_name), remote_name - - -def resolve_index_name(index_name): - index_name = convert_to_hostname(index_name) - if index_name == 'index.' + INDEX_NAME: - index_name = INDEX_NAME - return index_name - - -def get_config_header(client, registry): - log.debug('Looking for auth config') - if not client._auth_configs or client._auth_configs.is_empty: - log.debug( - "No auth config in memory - loading from filesystem" - ) - client._auth_configs = load_config(credstore_env=client.credstore_env) - authcfg = resolve_authconfig( - client._auth_configs, registry, credstore_env=client.credstore_env - ) - # Do not fail here if no authentication exists for this - # specific registry as we can have a readonly pull. Just - # put the header if we can. - if authcfg: - log.debug('Found auth config') - # auth_config needs to be a dict in the format used by - # auth.py username , password, serveraddress, email - return encode_header(authcfg) - log.debug('No auth config found') - return None - - -def split_repo_name(repo_name): - parts = repo_name.split('/', 1) - if len(parts) == 1 or ( - '.' not in parts[0] and ':' not in parts[0] and parts[0] != 'localhost' - ): - # This is a docker index repo (ex: username/foobar or ubuntu) - return INDEX_NAME, repo_name - return tuple(parts) - - -def get_credential_store(authconfig, registry): - if not isinstance(authconfig, AuthConfig): - authconfig = AuthConfig(authconfig) - return authconfig.get_credential_store(registry) - - -class AuthConfig(dict): - def __init__(self, dct, credstore_env=None): - if 'auths' not in dct: - dct['auths'] = {} - self.update(dct) - self._credstore_env = credstore_env - self._stores = {} - - @classmethod - def parse_auth(cls, entries, raise_on_error=False): - """ - Parses authentication entries - - Args: - entries: Dict of authentication entries. - raise_on_error: If set to true, an invalid format will raise - InvalidConfigFile - - Returns: - Authentication registry. - """ - - conf = {} - for registry, entry in entries.items(): - if not isinstance(entry, dict): - log.debug( - 'Config entry for key {} is not auth config'.format( - registry - ) - ) - # We sometimes fall back to parsing the whole config as if it - # was the auth config by itself, for legacy purposes. 
In that - # case, we fail silently and return an empty conf if any of the - # keys is not formatted properly. - if raise_on_error: - raise errors.InvalidConfigFile( - 'Invalid configuration for registry {}'.format( - registry - ) - ) - return {} - if 'identitytoken' in entry: - log.debug( - 'Found an IdentityToken entry for registry {}'.format( - registry - ) - ) - conf[registry] = { - 'IdentityToken': entry['identitytoken'] - } - continue # Other values are irrelevant if we have a token - - if 'auth' not in entry: - # Starting with engine v1.11 (API 1.23), an empty dictionary is - # a valid value in the auths config. - # https://github.com/docker/compose/issues/3265 - log.debug( - 'Auth data for {} is absent. Client might be using a ' - 'credentials store instead.'.format(registry) - ) - conf[registry] = {} - continue - - username, password = decode_auth(entry['auth']) - log.debug( - 'Found entry (registry={}, username={})' - .format(repr(registry), repr(username)) - ) - - conf[registry] = { - 'username': username, - 'password': password, - 'email': entry.get('email'), - 'serveraddress': registry, - } - return conf - - @classmethod - def load_config(cls, config_path, config_dict, credstore_env=None): - """ - Loads authentication data from a Docker configuration file in the given - root directory or if config_path is passed use given path. - Lookup priority: - explicit config_path parameter > DOCKER_CONFIG environment - variable > ~/.docker/config.json > ~/.dockercfg - """ - - if not config_dict: - config_file = config.find_config_file(config_path) - - if not config_file: - return cls({}, credstore_env) - try: - with open(config_file) as f: - config_dict = json.load(f) - except (OSError, KeyError, ValueError) as e: - # Likely missing new Docker config file or it's in an - # unknown format, continue to attempt to read old location - # and format. - log.debug(e) - return cls(_load_legacy_config(config_file), credstore_env) - - res = {} - if config_dict.get('auths'): - log.debug("Found 'auths' section") - res.update({ - 'auths': cls.parse_auth( - config_dict.pop('auths'), raise_on_error=True - ) - }) - if config_dict.get('credsStore'): - log.debug("Found 'credsStore' section") - res.update({'credsStore': config_dict.pop('credsStore')}) - if config_dict.get('credHelpers'): - log.debug("Found 'credHelpers' section") - res.update({'credHelpers': config_dict.pop('credHelpers')}) - if res: - return cls(res, credstore_env) - - log.debug( - "Couldn't find auth-related section ; attempting to interpret " - "as auth-only file" - ) - return cls({'auths': cls.parse_auth(config_dict)}, credstore_env) - - @property - def auths(self): - return self.get('auths', {}) - - @property - def creds_store(self): - return self.get('credsStore', None) - - @property - def cred_helpers(self): - return self.get('credHelpers', {}) - - @property - def is_empty(self): - return ( - not self.auths and not self.creds_store and not self.cred_helpers - ) - - def resolve_authconfig(self, registry=None): - """ - Returns the authentication data from the given auth configuration for a - specific registry. As with the Docker client, legacy entries in the - config with full URLs are stripped down to hostnames before checking - for a match. Returns None if no match was found. 
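A sketch of the name-resolution and decoding helpers above; the private registry host is a placeholder:

.. code-block:: python

    import base64

    from docker import auth

    # Bare names resolve to the public index; a dotted or
    # port-qualified prefix is treated as a private registry.
    print(auth.resolve_repository_name('ubuntu'))
    # ('docker.io', 'ubuntu')
    print(auth.resolve_repository_name('registry.example.com:5000/team/app'))
    # ('registry.example.com:5000', 'team/app')

    # 'auth' entries in config.json are base64 of 'username:password'.
    token = base64.b64encode(b'alice:s3cret')
    print(auth.decode_auth(token))
    # ('alice', 's3cret')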
- """ - - if self.creds_store or self.cred_helpers: - store_name = self.get_credential_store(registry) - if store_name is not None: - log.debug( - f'Using credentials store "{store_name}"' - ) - cfg = self._resolve_authconfig_credstore(registry, store_name) - if cfg is not None: - return cfg - log.debug('No entry in credstore - fetching from auth dict') - - # Default to the public index server - registry = resolve_index_name(registry) if registry else INDEX_NAME - log.debug(f"Looking for auth entry for {repr(registry)}") - - if registry in self.auths: - log.debug(f"Found {repr(registry)}") - return self.auths[registry] - - for key, conf in self.auths.items(): - if resolve_index_name(key) == registry: - log.debug(f"Found {repr(key)}") - return conf - - log.debug("No entry found") - return None - - def _resolve_authconfig_credstore(self, registry, credstore_name): - if not registry or registry == INDEX_NAME: - # The ecosystem is a little schizophrenic with index.docker.io VS - # docker.io - in that case, it seems the full URL is necessary. - registry = INDEX_URL - log.debug(f"Looking for auth entry for {repr(registry)}") - store = self._get_store_instance(credstore_name) - try: - data = store.get(registry) - res = { - 'ServerAddress': registry, - } - if data['Username'] == TOKEN_USERNAME: - res['IdentityToken'] = data['Secret'] - else: - res.update({ - 'Username': data['Username'], - 'Password': data['Secret'], - }) - return res - except credentials.CredentialsNotFound: - log.debug('No entry found') - return None - except credentials.StoreError as e: - raise errors.DockerException( - f'Credentials store error: {repr(e)}' - ) - - def _get_store_instance(self, name): - if name not in self._stores: - self._stores[name] = credentials.Store( - name, environment=self._credstore_env - ) - return self._stores[name] - - def get_credential_store(self, registry): - if not registry or registry == INDEX_NAME: - registry = INDEX_URL - - return self.cred_helpers.get(registry) or self.creds_store - - def get_all_credentials(self): - auth_data = self.auths.copy() - if self.creds_store: - # Retrieve all credentials from the default store - store = self._get_store_instance(self.creds_store) - for k in store.list().keys(): - auth_data[k] = self._resolve_authconfig_credstore( - k, self.creds_store - ) - auth_data[convert_to_hostname(k)] = auth_data[k] - - # credHelpers entries take priority over all others - for reg, store_name in self.cred_helpers.items(): - auth_data[reg] = self._resolve_authconfig_credstore( - reg, store_name - ) - auth_data[convert_to_hostname(reg)] = auth_data[reg] - - return auth_data - - def add_auth(self, reg, data): - self['auths'][reg] = data - - -def resolve_authconfig(authconfig, registry=None, credstore_env=None): - if not isinstance(authconfig, AuthConfig): - authconfig = AuthConfig(authconfig, credstore_env) - return authconfig.resolve_authconfig(registry) - - -def convert_to_hostname(url): - return url.replace('http://', '').replace('https://', '').split('/', 1)[0] - - -def decode_auth(auth): - if isinstance(auth, str): - auth = auth.encode('ascii') - s = base64.b64decode(auth) - login, pwd = s.split(b':', 1) - return login.decode('utf8'), pwd.decode('utf8') - - -def encode_header(auth): - auth_json = json.dumps(auth).encode('ascii') - return base64.urlsafe_b64encode(auth_json) - - -def parse_auth(entries, raise_on_error=False): - """ - Parses authentication entries - - Args: - entries: Dict of authentication entries. 
- raise_on_error: If set to true, an invalid format will raise - InvalidConfigFile - - Returns: - Authentication registry. - """ - - return AuthConfig.parse_auth(entries, raise_on_error) - - -def load_config(config_path=None, config_dict=None, credstore_env=None): - return AuthConfig.load_config(config_path, config_dict, credstore_env) - - -def _load_legacy_config(config_file): - log.debug("Attempting to parse legacy auth file format") - try: - data = [] - with open(config_file) as f: - for line in f.readlines(): - data.append(line.strip().split(' = ')[1]) - if len(data) < 2: - # Not enough data - raise errors.InvalidConfigFile( - 'Invalid or empty configuration file!' - ) - - username, password = decode_auth(data[0]) - return {'auths': { - INDEX_NAME: { - 'username': username, - 'password': password, - 'email': data[1], - 'serveraddress': INDEX_URL, - } - }} - except Exception as e: - log.debug(e) - - log.debug("All parsing attempts failed - returning empty config") - return {} diff --git a/server/venv/Lib/site-packages/docker/client.py b/server/venv/Lib/site-packages/docker/client.py deleted file mode 100644 index 4dbd846..0000000 --- a/server/venv/Lib/site-packages/docker/client.py +++ /dev/null @@ -1,224 +0,0 @@ -from .api.client import APIClient -from .constants import (DEFAULT_TIMEOUT_SECONDS, DEFAULT_MAX_POOL_SIZE) -from .models.configs import ConfigCollection -from .models.containers import ContainerCollection -from .models.images import ImageCollection -from .models.networks import NetworkCollection -from .models.nodes import NodeCollection -from .models.plugins import PluginCollection -from .models.secrets import SecretCollection -from .models.services import ServiceCollection -from .models.swarm import Swarm -from .models.volumes import VolumeCollection -from .utils import kwargs_from_env - - -class DockerClient: - """ - A client for communicating with a Docker server. - - Example: - - >>> import docker - >>> client = docker.DockerClient(base_url='unix://var/run/docker.sock') - - Args: - base_url (str): URL to the Docker server. For example, - ``unix:///var/run/docker.sock`` or ``tcp://127.0.0.1:1234``. - version (str): The version of the API to use. Set to ``auto`` to - automatically detect the server's version. Default: ``1.35`` - timeout (int): Default timeout for API calls, in seconds. - tls (bool or :py:class:`~docker.tls.TLSConfig`): Enable TLS. Pass - ``True`` to enable it with default options, or pass a - :py:class:`~docker.tls.TLSConfig` object to use custom - configuration. - user_agent (str): Set a custom user agent for requests to the server. - credstore_env (dict): Override environment variables when calling the - credential store process. - use_ssh_client (bool): If set to `True`, an ssh connection is made - via shelling out to the ssh client. Ensure the ssh client is - installed and configured on the host. - max_pool_size (int): The maximum number of connections - to save in the pool. - """ - def __init__(self, *args, **kwargs): - self.api = APIClient(*args, **kwargs) - - @classmethod - def from_env(cls, **kwargs): - """ - Return a client configured from environment variables. - - The environment variables used are the same as those used by the - Docker command-line client. They are: - - .. envvar:: DOCKER_HOST - - The URL to the Docker host. - - .. envvar:: DOCKER_TLS_VERIFY - - Verify the host against a CA certificate. - - .. envvar:: DOCKER_CERT_PATH - - A path to a directory containing TLS certificates to use when - connecting to the Docker host. 
- - Args: - version (str): The version of the API to use. Set to ``auto`` to - automatically detect the server's version. Default: ``auto`` - timeout (int): Default timeout for API calls, in seconds. - max_pool_size (int): The maximum number of connections - to save in the pool. - ssl_version (int): A valid `SSL version`_. - assert_hostname (bool): Verify the hostname of the server. - environment (dict): The environment to read environment variables - from. Default: the value of ``os.environ`` - credstore_env (dict): Override environment variables when calling - the credential store process. - use_ssh_client (bool): If set to `True`, an ssh connection is - made via shelling out to the ssh client. Ensure the ssh - client is installed and configured on the host. - - Example: - - >>> import docker - >>> client = docker.from_env() - - .. _`SSL version`: - https://docs.python.org/3.5/library/ssl.html#ssl.PROTOCOL_TLSv1 - """ - timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT_SECONDS) - max_pool_size = kwargs.pop('max_pool_size', DEFAULT_MAX_POOL_SIZE) - version = kwargs.pop('version', None) - use_ssh_client = kwargs.pop('use_ssh_client', False) - return cls( - timeout=timeout, - max_pool_size=max_pool_size, - version=version, - use_ssh_client=use_ssh_client, - **kwargs_from_env(**kwargs) - ) - - # Resources - @property - def configs(self): - """ - An object for managing configs on the server. See the - :doc:`configs documentation ` for full details. - """ - return ConfigCollection(client=self) - - @property - def containers(self): - """ - An object for managing containers on the server. See the - :doc:`containers documentation ` for full details. - """ - return ContainerCollection(client=self) - - @property - def images(self): - """ - An object for managing images on the server. See the - :doc:`images documentation ` for full details. - """ - return ImageCollection(client=self) - - @property - def networks(self): - """ - An object for managing networks on the server. See the - :doc:`networks documentation ` for full details. - """ - return NetworkCollection(client=self) - - @property - def nodes(self): - """ - An object for managing nodes on the server. See the - :doc:`nodes documentation ` for full details. - """ - return NodeCollection(client=self) - - @property - def plugins(self): - """ - An object for managing plugins on the server. See the - :doc:`plugins documentation ` for full details. - """ - return PluginCollection(client=self) - - @property - def secrets(self): - """ - An object for managing secrets on the server. See the - :doc:`secrets documentation ` for full details. - """ - return SecretCollection(client=self) - - @property - def services(self): - """ - An object for managing services on the server. See the - :doc:`services documentation ` for full details. - """ - return ServiceCollection(client=self) - - @property - def swarm(self): - """ - An object for managing a swarm on the server. See the - :doc:`swarm documentation ` for full details. - """ - return Swarm(client=self) - - @property - def volumes(self): - """ - An object for managing volumes on the server. See the - :doc:`volumes documentation ` for full details. 
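A sketch of the usual entry point built on these resource collections; it assumes the standard environment variables point at a reachable daemon:

.. code-block:: python

    import docker

    client = docker.from_env(timeout=30)
    print(client.version()['ApiVersion'])
    for container in client.containers.list():
        print(container.name)
    client.close()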
- """ - return VolumeCollection(client=self) - - # Top-level methods - def events(self, *args, **kwargs): - return self.api.events(*args, **kwargs) - events.__doc__ = APIClient.events.__doc__ - - def df(self): - return self.api.df() - df.__doc__ = APIClient.df.__doc__ - - def info(self, *args, **kwargs): - return self.api.info(*args, **kwargs) - info.__doc__ = APIClient.info.__doc__ - - def login(self, *args, **kwargs): - return self.api.login(*args, **kwargs) - login.__doc__ = APIClient.login.__doc__ - - def ping(self, *args, **kwargs): - return self.api.ping(*args, **kwargs) - ping.__doc__ = APIClient.ping.__doc__ - - def version(self, *args, **kwargs): - return self.api.version(*args, **kwargs) - version.__doc__ = APIClient.version.__doc__ - - def close(self): - return self.api.close() - close.__doc__ = APIClient.close.__doc__ - - def __getattr__(self, name): - s = [f"'DockerClient' object has no attribute '{name}'"] - # If a user calls a method on APIClient, they - if hasattr(APIClient, name): - s.append("In Docker SDK for Python 2.0, this method is now on the " - "object APIClient. See the low-level API section of the " - "documentation for more details.") - raise AttributeError(' '.join(s)) - - -from_env = DockerClient.from_env diff --git a/server/venv/Lib/site-packages/docker/constants.py b/server/venv/Lib/site-packages/docker/constants.py deleted file mode 100644 index ed341a9..0000000 --- a/server/venv/Lib/site-packages/docker/constants.py +++ /dev/null @@ -1,44 +0,0 @@ -import sys -from .version import __version__ - -DEFAULT_DOCKER_API_VERSION = '1.41' -MINIMUM_DOCKER_API_VERSION = '1.21' -DEFAULT_TIMEOUT_SECONDS = 60 -STREAM_HEADER_SIZE_BYTES = 8 -CONTAINER_LIMITS_KEYS = [ - 'memory', 'memswap', 'cpushares', 'cpusetcpus' -] - -DEFAULT_HTTP_HOST = "127.0.0.1" -DEFAULT_UNIX_SOCKET = "http+unix:///var/run/docker.sock" -DEFAULT_NPIPE = 'npipe:////./pipe/docker_engine' - -BYTE_UNITS = { - 'b': 1, - 'k': 1024, - 'm': 1024 * 1024, - 'g': 1024 * 1024 * 1024 -} - - -INSECURE_REGISTRY_DEPRECATION_WARNING = \ - 'The `insecure_registry` argument to {} ' \ - 'is deprecated and non-functional. Please remove it.' - -IS_WINDOWS_PLATFORM = (sys.platform == 'win32') -WINDOWS_LONGPATH_PREFIX = '\\\\?\\' - -DEFAULT_USER_AGENT = f"docker-sdk-python/{__version__}" -DEFAULT_NUM_POOLS = 25 - -# The OpenSSH server default value for MaxSessions is 10 which means we can -# use up to 9, leaving the final session for the underlying SSH connection. 
-# For more details see: https://github.com/docker/docker-py/issues/2246 -DEFAULT_NUM_POOLS_SSH = 9 - -DEFAULT_MAX_POOL_SIZE = 10 - -DEFAULT_DATA_CHUNK_SIZE = 1024 * 2048 - -DEFAULT_SWARM_ADDR_POOL = ['10.0.0.0/8'] -DEFAULT_SWARM_SUBNET_SIZE = 24 diff --git a/server/venv/Lib/site-packages/docker/context/__init__.py b/server/venv/Lib/site-packages/docker/context/__init__.py deleted file mode 100644 index 0a6707f..0000000 --- a/server/venv/Lib/site-packages/docker/context/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# flake8: noqa -from .context import Context -from .api import ContextAPI diff --git a/server/venv/Lib/site-packages/docker/context/api.py b/server/venv/Lib/site-packages/docker/context/api.py deleted file mode 100644 index 380e8c4..0000000 --- a/server/venv/Lib/site-packages/docker/context/api.py +++ /dev/null @@ -1,203 +0,0 @@ -import json -import os - -from docker import errors -from docker.context.config import get_meta_dir -from docker.context.config import METAFILE -from docker.context.config import get_current_context_name -from docker.context.config import write_context_name_to_docker_config -from docker.context import Context - - -class ContextAPI: - """Context API. - Contains methods for context management: - create, list, remove, get, inspect. - """ - DEFAULT_CONTEXT = Context("default", "swarm") - - @classmethod - def create_context( - cls, name, orchestrator=None, host=None, tls_cfg=None, - default_namespace=None, skip_tls_verify=False): - """Creates a new context. - Returns: - (Context): a Context object. - Raises: - :py:class:`docker.errors.MissingContextParameter` - If a context name is not provided. - :py:class:`docker.errors.ContextAlreadyExists` - If a context with the name already exists. - :py:class:`docker.errors.ContextException` - If name is default. - - Example: - - >>> from docker.context import ContextAPI - >>> ctx = ContextAPI.create_context(name='test') - >>> print(ctx.Metadata) - { - "Name": "test", - "Metadata": {}, - "Endpoints": { - "docker": { - "Host": "unix:///var/run/docker.sock", - "SkipTLSVerify": false - } - } - } - """ - if not name: - raise errors.MissingContextParameter("name") - if name == "default": - raise errors.ContextException( - '"default" is a reserved context name') - ctx = Context.load_context(name) - if ctx: - raise errors.ContextAlreadyExists(name) - endpoint = "docker" - if orchestrator and orchestrator != "swarm": - endpoint = orchestrator - ctx = Context(name, orchestrator) - ctx.set_endpoint( - endpoint, host, tls_cfg, - skip_tls_verify=skip_tls_verify, - def_namespace=default_namespace) - ctx.save() - return ctx - - @classmethod - def get_context(cls, name=None): - """Retrieves a context object. - Args: - name (str): The name of the context - - Example: - - >>> from docker.context import ContextAPI - >>> ctx = ContextAPI.get_context(name='test') - >>> print(ctx.Metadata) - { - "Name": "test", - "Metadata": {}, - "Endpoints": { - "docker": { - "Host": "unix:///var/run/docker.sock", - "SkipTLSVerify": false - } - } - } - """ - if not name: - name = get_current_context_name() - if name == "default": - return cls.DEFAULT_CONTEXT - return Context.load_context(name) - - @classmethod - def contexts(cls): - """Context list. - Returns: - (Context): List of context objects. - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
- """ - names = [] - for dirname, dirnames, fnames in os.walk(get_meta_dir()): - for filename in fnames + dirnames: - if filename == METAFILE: - try: - data = json.load( - open(os.path.join(dirname, filename))) - names.append(data["Name"]) - except Exception as e: - raise errors.ContextException( - "Failed to load metafile {}: {}".format( - filename, e)) - - contexts = [cls.DEFAULT_CONTEXT] - for name in names: - contexts.append(Context.load_context(name)) - return contexts - - @classmethod - def get_current_context(cls): - """Get current context. - Returns: - (Context): current context object. - """ - return cls.get_context() - - @classmethod - def set_current_context(cls, name="default"): - ctx = cls.get_context(name) - if not ctx: - raise errors.ContextNotFound(name) - - err = write_context_name_to_docker_config(name) - if err: - raise errors.ContextException( - f'Failed to set current context: {err}') - - @classmethod - def remove_context(cls, name): - """Remove a context. Similar to the ``docker context rm`` command. - - Args: - name (str): The name of the context - - Raises: - :py:class:`docker.errors.MissingContextParameter` - If a context name is not provided. - :py:class:`docker.errors.ContextNotFound` - If a context with the name does not exist. - :py:class:`docker.errors.ContextException` - If name is default. - - Example: - - >>> from docker.context import ContextAPI - >>> ContextAPI.remove_context(name='test') - >>> - """ - if not name: - raise errors.MissingContextParameter("name") - if name == "default": - raise errors.ContextException( - 'context "default" cannot be removed') - ctx = Context.load_context(name) - if not ctx: - raise errors.ContextNotFound(name) - if name == get_current_context_name(): - write_context_name_to_docker_config(None) - ctx.remove() - - @classmethod - def inspect_context(cls, name="default"): - """Remove a context. Similar to the ``docker context inspect`` command. - - Args: - name (str): The name of the context - - Raises: - :py:class:`docker.errors.MissingContextParameter` - If a context name is not provided. - :py:class:`docker.errors.ContextNotFound` - If a context with the name does not exist. 
- - Example: - - >>> from docker.context import ContextAPI - >>> ContextAPI.inspect_context(name='test') - >>> - """ - if not name: - raise errors.MissingContextParameter("name") - if name == "default": - return cls.DEFAULT_CONTEXT() - ctx = Context.load_context(name) - if not ctx: - raise errors.ContextNotFound(name) - - return ctx() diff --git a/server/venv/Lib/site-packages/docker/context/config.py b/server/venv/Lib/site-packages/docker/context/config.py deleted file mode 100644 index d761aef..0000000 --- a/server/venv/Lib/site-packages/docker/context/config.py +++ /dev/null @@ -1,81 +0,0 @@ -import os -import json -import hashlib - -from docker import utils -from docker.constants import IS_WINDOWS_PLATFORM -from docker.constants import DEFAULT_UNIX_SOCKET -from docker.utils.config import find_config_file - -METAFILE = "meta.json" - - -def get_current_context_name(): - name = "default" - docker_cfg_path = find_config_file() - if docker_cfg_path: - try: - with open(docker_cfg_path) as f: - name = json.load(f).get("currentContext", "default") - except Exception: - return "default" - return name - - -def write_context_name_to_docker_config(name=None): - if name == 'default': - name = None - docker_cfg_path = find_config_file() - config = {} - if docker_cfg_path: - try: - with open(docker_cfg_path) as f: - config = json.load(f) - except Exception as e: - return e - current_context = config.get("currentContext", None) - if current_context and not name: - del config["currentContext"] - elif name: - config["currentContext"] = name - else: - return - try: - with open(docker_cfg_path, "w") as f: - json.dump(config, f, indent=4) - except Exception as e: - return e - - -def get_context_id(name): - return hashlib.sha256(name.encode('utf-8')).hexdigest() - - -def get_context_dir(): - return os.path.join(os.path.dirname(find_config_file() or ""), "contexts") - - -def get_meta_dir(name=None): - meta_dir = os.path.join(get_context_dir(), "meta") - if name: - return os.path.join(meta_dir, get_context_id(name)) - return meta_dir - - -def get_meta_file(name): - return os.path.join(get_meta_dir(name), METAFILE) - - -def get_tls_dir(name=None, endpoint=""): - context_dir = get_context_dir() - if name: - return os.path.join(context_dir, "tls", get_context_id(name), endpoint) - return os.path.join(context_dir, "tls") - - -def get_context_host(path=None, tls=False): - host = utils.parse_host(path, IS_WINDOWS_PLATFORM, tls) - if host == DEFAULT_UNIX_SOCKET: - # remove http+ from default docker socket url - return host.strip("http+") - return host diff --git a/server/venv/Lib/site-packages/docker/context/context.py b/server/venv/Lib/site-packages/docker/context/context.py deleted file mode 100644 index dbaa01c..0000000 --- a/server/venv/Lib/site-packages/docker/context/context.py +++ /dev/null @@ -1,243 +0,0 @@ -import os -import json -from shutil import copyfile, rmtree -from docker.tls import TLSConfig -from docker.errors import ContextException -from docker.context.config import get_meta_dir -from docker.context.config import get_meta_file -from docker.context.config import get_tls_dir -from docker.context.config import get_context_host - - -class Context: - """A context.""" - - def __init__(self, name, orchestrator=None, host=None, endpoints=None, - tls=False): - if not name: - raise Exception("Name not provided") - self.name = name - self.context_type = None - self.orchestrator = orchestrator - self.endpoints = {} - self.tls_cfg = {} - self.meta_path = "IN MEMORY" - self.tls_path = "IN MEMORY" - - if not 
endpoints: - # set default docker endpoint if no endpoint is set - default_endpoint = "docker" if ( - not orchestrator or orchestrator == "swarm" - ) else orchestrator - - self.endpoints = { - default_endpoint: { - "Host": get_context_host(host, tls), - "SkipTLSVerify": not tls - } - } - return - - # check docker endpoints - for k, v in endpoints.items(): - if not isinstance(v, dict): - # unknown format - raise ContextException("""Unknown endpoint format for - context {}: {}""".format(name, v)) - - self.endpoints[k] = v - if k != "docker": - continue - - self.endpoints[k]["Host"] = v.get("Host", get_context_host( - host, tls)) - self.endpoints[k]["SkipTLSVerify"] = bool(v.get( - "SkipTLSVerify", not tls)) - - def set_endpoint( - self, name="docker", host=None, tls_cfg=None, - skip_tls_verify=False, def_namespace=None): - self.endpoints[name] = { - "Host": get_context_host(host, not skip_tls_verify), - "SkipTLSVerify": skip_tls_verify - } - if def_namespace: - self.endpoints[name]["DefaultNamespace"] = def_namespace - - if tls_cfg: - self.tls_cfg[name] = tls_cfg - - def inspect(self): - return self.__call__() - - @classmethod - def load_context(cls, name): - meta = Context._load_meta(name) - if meta: - instance = cls( - meta["Name"], - orchestrator=meta["Metadata"].get("StackOrchestrator", None), - endpoints=meta.get("Endpoints", None)) - instance.context_type = meta["Metadata"].get("Type", None) - instance._load_certs() - instance.meta_path = get_meta_dir(name) - return instance - return None - - @classmethod - def _load_meta(cls, name): - meta_file = get_meta_file(name) - if not os.path.isfile(meta_file): - return None - - metadata = {} - try: - with open(meta_file) as f: - metadata = json.load(f) - except (OSError, KeyError, ValueError) as e: - # unknown format - raise Exception("""Detected corrupted meta file for - context {} : {}""".format(name, e)) - - # for docker endpoints, set defaults for - # Host and SkipTLSVerify fields - for k, v in metadata["Endpoints"].items(): - if k != "docker": - continue - metadata["Endpoints"][k]["Host"] = v.get( - "Host", get_context_host(None, False)) - metadata["Endpoints"][k]["SkipTLSVerify"] = bool( - v.get("SkipTLSVerify", True)) - - return metadata - - def _load_certs(self): - certs = {} - tls_dir = get_tls_dir(self.name) - for endpoint in self.endpoints.keys(): - if not os.path.isdir(os.path.join(tls_dir, endpoint)): - continue - ca_cert = None - cert = None - key = None - for filename in os.listdir(os.path.join(tls_dir, endpoint)): - if filename.startswith("ca"): - ca_cert = os.path.join(tls_dir, endpoint, filename) - elif filename.startswith("cert"): - cert = os.path.join(tls_dir, endpoint, filename) - elif filename.startswith("key"): - key = os.path.join(tls_dir, endpoint, filename) - if all([ca_cert, cert, key]): - verify = None - if endpoint == "docker" and not self.endpoints["docker"].get( - "SkipTLSVerify", False): - verify = True - certs[endpoint] = TLSConfig( - client_cert=(cert, key), ca_cert=ca_cert, verify=verify) - self.tls_cfg = certs - self.tls_path = tls_dir - - def save(self): - meta_dir = get_meta_dir(self.name) - if not os.path.isdir(meta_dir): - os.makedirs(meta_dir) - with open(get_meta_file(self.name), "w") as f: - f.write(json.dumps(self.Metadata)) - - tls_dir = get_tls_dir(self.name) - for endpoint, tls in self.tls_cfg.items(): - if not os.path.isdir(os.path.join(tls_dir, endpoint)): - os.makedirs(os.path.join(tls_dir, endpoint)) - - ca_file = tls.ca_cert - if ca_file: - copyfile(ca_file, os.path.join( - tls_dir, endpoint, 
os.path.basename(ca_file))) - - if tls.cert: - cert_file, key_file = tls.cert - copyfile(cert_file, os.path.join( - tls_dir, endpoint, os.path.basename(cert_file))) - copyfile(key_file, os.path.join( - tls_dir, endpoint, os.path.basename(key_file))) - - self.meta_path = get_meta_dir(self.name) - self.tls_path = get_tls_dir(self.name) - - def remove(self): - if os.path.isdir(self.meta_path): - rmtree(self.meta_path) - if os.path.isdir(self.tls_path): - rmtree(self.tls_path) - - def __repr__(self): - return f"<{self.__class__.__name__}: '{self.name}'>" - - def __str__(self): - return json.dumps(self.__call__(), indent=2) - - def __call__(self): - result = self.Metadata - result.update(self.TLSMaterial) - result.update(self.Storage) - return result - - def is_docker_host(self): - return self.context_type is None - - @property - def Name(self): - return self.name - - @property - def Host(self): - if not self.orchestrator or self.orchestrator == "swarm": - endpoint = self.endpoints.get("docker", None) - if endpoint: - return endpoint.get("Host", None) - return None - - return self.endpoints[self.orchestrator].get("Host", None) - - @property - def Orchestrator(self): - return self.orchestrator - - @property - def Metadata(self): - meta = {} - if self.orchestrator: - meta = {"StackOrchestrator": self.orchestrator} - return { - "Name": self.name, - "Metadata": meta, - "Endpoints": self.endpoints - } - - @property - def TLSConfig(self): - key = self.orchestrator - if not key or key == "swarm": - key = "docker" - if key in self.tls_cfg.keys(): - return self.tls_cfg[key] - return None - - @property - def TLSMaterial(self): - certs = {} - for endpoint, tls in self.tls_cfg.items(): - cert, key = tls.cert - certs[endpoint] = list( - map(os.path.basename, [tls.ca_cert, cert, key])) - return { - "TLSMaterial": certs - } - - @property - def Storage(self): - return { - "Storage": { - "MetadataPath": self.meta_path, - "TLSPath": self.tls_path - }} diff --git a/server/venv/Lib/site-packages/docker/credentials/__init__.py b/server/venv/Lib/site-packages/docker/credentials/__init__.py deleted file mode 100644 index 31ad28e..0000000 --- a/server/venv/Lib/site-packages/docker/credentials/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# flake8: noqa -from .store import Store -from .errors import StoreError, CredentialsNotFound -from .constants import * diff --git a/server/venv/Lib/site-packages/docker/credentials/constants.py b/server/venv/Lib/site-packages/docker/credentials/constants.py deleted file mode 100644 index 6a82d8d..0000000 --- a/server/venv/Lib/site-packages/docker/credentials/constants.py +++ /dev/null @@ -1,4 +0,0 @@ -PROGRAM_PREFIX = 'docker-credential-' -DEFAULT_LINUX_STORE = 'secretservice' -DEFAULT_OSX_STORE = 'osxkeychain' -DEFAULT_WIN32_STORE = 'wincred' diff --git a/server/venv/Lib/site-packages/docker/credentials/errors.py b/server/venv/Lib/site-packages/docker/credentials/errors.py deleted file mode 100644 index 42a1bc1..0000000 --- a/server/venv/Lib/site-packages/docker/credentials/errors.py +++ /dev/null @@ -1,25 +0,0 @@ -class StoreError(RuntimeError): - pass - - -class CredentialsNotFound(StoreError): - pass - - -class InitializationError(StoreError): - pass - - -def process_store_error(cpe, program): - message = cpe.output.decode('utf-8') - if 'credentials not found in native keychain' in message: - return CredentialsNotFound( - 'No matching credentials in {}'.format( - program - ) - ) - return StoreError( - 'Credentials store {} exited with "{}".'.format( - program, 
cpe.output.decode('utf-8').strip() - ) - ) diff --git a/server/venv/Lib/site-packages/docker/credentials/store.py b/server/venv/Lib/site-packages/docker/credentials/store.py deleted file mode 100644 index b7ab53f..0000000 --- a/server/venv/Lib/site-packages/docker/credentials/store.py +++ /dev/null @@ -1,101 +0,0 @@ -import errno -import json -import shutil -import subprocess -import warnings - -from . import constants -from . import errors -from .utils import create_environment_dict - - -class Store: - def __init__(self, program, environment=None): - """ Create a store object that acts as an interface to - perform the basic operations for storing, retrieving - and erasing credentials using `program`. - """ - self.program = constants.PROGRAM_PREFIX + program - self.exe = shutil.which(self.program) - self.environment = environment - if self.exe is None: - warnings.warn( - '{} not installed or not available in PATH'.format( - self.program - ) - ) - - def get(self, server): - """ Retrieve credentials for `server`. If no credentials are found, - a `StoreError` will be raised. - """ - if not isinstance(server, bytes): - server = server.encode('utf-8') - data = self._execute('get', server) - result = json.loads(data.decode('utf-8')) - - # docker-credential-pass will return an object for inexistent servers - # whereas other helpers will exit with returncode != 0. For - # consistency, if no significant data is returned, - # raise CredentialsNotFound - if result['Username'] == '' and result['Secret'] == '': - raise errors.CredentialsNotFound( - f'No matching credentials in {self.program}' - ) - - return result - - def store(self, server, username, secret): - """ Store credentials for `server`. Raises a `StoreError` if an error - occurs. - """ - data_input = json.dumps({ - 'ServerURL': server, - 'Username': username, - 'Secret': secret - }).encode('utf-8') - return self._execute('store', data_input) - - def erase(self, server): - """ Erase credentials for `server`. Raises a `StoreError` if an error - occurs. - """ - if not isinstance(server, bytes): - server = server.encode('utf-8') - self._execute('erase', server) - - def list(self): - """ List stored credentials. Requires v0.4.0+ of the helper. 
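A sketch of driving a credential helper directly; ``desktop`` stands in for whichever ``docker-credential-*`` binary is actually installed:

.. code-block:: python

    from docker.credentials import CredentialsNotFound, Store

    store = Store('desktop')  # resolves to docker-credential-desktop
    try:
        creds = store.get('https://index.docker.io/v1/')
        print(creds['Username'])
    except CredentialsNotFound:
        print('no stored credentials for that registry')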
- """ - data = self._execute('list', None) - return json.loads(data.decode('utf-8')) - - def _execute(self, subcmd, data_input): - if self.exe is None: - raise errors.StoreError( - '{} not installed or not available in PATH'.format( - self.program - ) - ) - output = None - env = create_environment_dict(self.environment) - try: - output = subprocess.check_output( - [self.exe, subcmd], input=data_input, env=env, - ) - except subprocess.CalledProcessError as e: - raise errors.process_store_error(e, self.program) - except OSError as e: - if e.errno == errno.ENOENT: - raise errors.StoreError( - '{} not installed or not available in PATH'.format( - self.program - ) - ) - else: - raise errors.StoreError( - 'Unexpected OS error "{}", errno={}'.format( - e.strerror, e.errno - ) - ) - return output diff --git a/server/venv/Lib/site-packages/docker/credentials/utils.py b/server/venv/Lib/site-packages/docker/credentials/utils.py deleted file mode 100644 index 5c83d05..0000000 --- a/server/venv/Lib/site-packages/docker/credentials/utils.py +++ /dev/null @@ -1,10 +0,0 @@ -import os - - -def create_environment_dict(overrides): - """ - Create and return a copy of os.environ with the specified overrides - """ - result = os.environ.copy() - result.update(overrides or {}) - return result diff --git a/server/venv/Lib/site-packages/docker/errors.py b/server/venv/Lib/site-packages/docker/errors.py deleted file mode 100644 index 8cf8670..0000000 --- a/server/venv/Lib/site-packages/docker/errors.py +++ /dev/null @@ -1,207 +0,0 @@ -import requests - -_image_not_found_explanation_fragments = frozenset( - fragment.lower() for fragment in [ - 'no such image', - 'not found: does not exist or no pull access', - 'repository does not exist', - 'was found but does not match the specified platform', - ] -) - - -class DockerException(Exception): - """ - A base class from which all other exceptions inherit. - - If you want to catch all errors that the Docker SDK might raise, - catch this base exception. - """ - - -def create_api_error_from_http_exception(e): - """ - Create a suitable APIError from requests.exceptions.HTTPError. - """ - response = e.response - try: - explanation = response.json()['message'] - except ValueError: - explanation = (response.content or '').strip() - cls = APIError - if response.status_code == 404: - explanation_msg = (explanation or '').lower() - if any(fragment in explanation_msg - for fragment in _image_not_found_explanation_fragments): - cls = ImageNotFound - else: - cls = NotFound - raise cls(e, response=response, explanation=explanation) from e - - -class APIError(requests.exceptions.HTTPError, DockerException): - """ - An HTTP error from the API. 
- """ - def __init__(self, message, response=None, explanation=None): - # requests 1.2 supports response as a keyword argument, but - # requests 1.1 doesn't - super().__init__(message) - self.response = response - self.explanation = explanation - - def __str__(self): - message = super().__str__() - - if self.is_client_error(): - message = '{} Client Error for {}: {}'.format( - self.response.status_code, self.response.url, - self.response.reason) - - elif self.is_server_error(): - message = '{} Server Error for {}: {}'.format( - self.response.status_code, self.response.url, - self.response.reason) - - if self.explanation: - message = f'{message} ("{self.explanation}")' - - return message - - @property - def status_code(self): - if self.response is not None: - return self.response.status_code - - def is_error(self): - return self.is_client_error() or self.is_server_error() - - def is_client_error(self): - if self.status_code is None: - return False - return 400 <= self.status_code < 500 - - def is_server_error(self): - if self.status_code is None: - return False - return 500 <= self.status_code < 600 - - -class NotFound(APIError): - pass - - -class ImageNotFound(NotFound): - pass - - -class InvalidVersion(DockerException): - pass - - -class InvalidRepository(DockerException): - pass - - -class InvalidConfigFile(DockerException): - pass - - -class InvalidArgument(DockerException): - pass - - -class DeprecatedMethod(DockerException): - pass - - -class TLSParameterError(DockerException): - def __init__(self, msg): - self.msg = msg - - def __str__(self): - return self.msg + (". TLS configurations should map the Docker CLI " - "client configurations. See " - "https://docs.docker.com/engine/articles/https/ " - "for API details.") - - -class NullResource(DockerException, ValueError): - pass - - -class ContainerError(DockerException): - """ - Represents a container that has exited with a non-zero exit code. 
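A sketch of how these predicates are typically used when calling through the SDK (the container name is made up; any API call that can return a 404 behaves the same way):

.. code-block:: python

    import docker
    from docker.errors import APIError, NotFound

    client = docker.from_env()
    try:
        client.containers.get('no-such-container')  # hypothetical name
    except NotFound as e:
        print(e.status_code)        # 404
        print(e.is_client_error())  # True
    except APIError as e:
        # 5xx responses and other API failures end up here.
        if e.is_server_error():
            raise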
- """ - def __init__(self, container, exit_status, command, image, stderr): - self.container = container - self.exit_status = exit_status - self.command = command - self.image = image - self.stderr = stderr - - err = f": {stderr}" if stderr is not None else "" - msg = ("Command '{}' in image '{}' returned non-zero exit " - "status {}{}").format(command, image, exit_status, err) - - super().__init__(msg) - - -class StreamParseError(RuntimeError): - def __init__(self, reason): - self.msg = reason - - -class BuildError(DockerException): - def __init__(self, reason, build_log): - super().__init__(reason) - self.msg = reason - self.build_log = build_log - - -class ImageLoadError(DockerException): - pass - - -def create_unexpected_kwargs_error(name, kwargs): - quoted_kwargs = [f"'{k}'" for k in sorted(kwargs)] - text = [f"{name}() "] - if len(quoted_kwargs) == 1: - text.append("got an unexpected keyword argument ") - else: - text.append("got unexpected keyword arguments ") - text.append(', '.join(quoted_kwargs)) - return TypeError(''.join(text)) - - -class MissingContextParameter(DockerException): - def __init__(self, param): - self.param = param - - def __str__(self): - return (f"missing parameter: {self.param}") - - -class ContextAlreadyExists(DockerException): - def __init__(self, name): - self.name = name - - def __str__(self): - return (f"context {self.name} already exists") - - -class ContextException(DockerException): - def __init__(self, msg): - self.msg = msg - - def __str__(self): - return (self.msg) - - -class ContextNotFound(DockerException): - def __init__(self, name): - self.name = name - - def __str__(self): - return (f"context '{self.name}' not found") diff --git a/server/venv/Lib/site-packages/docker/models/__init__.py b/server/venv/Lib/site-packages/docker/models/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/docker/models/configs.py b/server/venv/Lib/site-packages/docker/models/configs.py deleted file mode 100644 index 3588c8b..0000000 --- a/server/venv/Lib/site-packages/docker/models/configs.py +++ /dev/null @@ -1,69 +0,0 @@ -from ..api import APIClient -from .resource import Model, Collection - - -class Config(Model): - """A config.""" - id_attribute = 'ID' - - def __repr__(self): - return f"<{self.__class__.__name__}: '{self.name}'>" - - @property - def name(self): - return self.attrs['Spec']['Name'] - - def remove(self): - """ - Remove this config. - - Raises: - :py:class:`docker.errors.APIError` - If config failed to remove. - """ - return self.client.api.remove_config(self.id) - - -class ConfigCollection(Collection): - """Configs on the Docker server.""" - model = Config - - def create(self, **kwargs): - obj = self.client.api.create_config(**kwargs) - return self.prepare_model(obj) - create.__doc__ = APIClient.create_config.__doc__ - - def get(self, config_id): - """ - Get a config. - - Args: - config_id (str): Config ID. - - Returns: - (:py:class:`Config`): The config. - - Raises: - :py:class:`docker.errors.NotFound` - If the config does not exist. - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.prepare_model(self.client.api.inspect_config(config_id)) - - def list(self, **kwargs): - """ - List configs. Similar to the ``docker config ls`` command. - - Args: - filters (dict): Server-side list filtering options. - - Returns: - (list of :py:class:`Config`): The configs. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
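``ContainerError`` above carries everything needed to report a failed, non-detached run. A small sketch of catching it (image and command are arbitrary):

.. code-block:: python

    import docker
    from docker.errors import ContainerError

    client = docker.from_env()
    try:
        client.containers.run('alpine', 'sh -c "echo boom >&2; exit 3"')
    except ContainerError as e:
        print(e.exit_status)   # 3
        print(e.stderr)        # b'boom\n', the STDERR captured by run()
        print(e.image, e.command)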
- """ - resp = self.client.api.configs(**kwargs) - return [self.prepare_model(obj) for obj in resp] diff --git a/server/venv/Lib/site-packages/docker/models/containers.py b/server/venv/Lib/site-packages/docker/models/containers.py deleted file mode 100644 index 2eeefda..0000000 --- a/server/venv/Lib/site-packages/docker/models/containers.py +++ /dev/null @@ -1,1173 +0,0 @@ -import copy -import ntpath -from collections import namedtuple - -from ..api import APIClient -from ..constants import DEFAULT_DATA_CHUNK_SIZE -from ..errors import ( - ContainerError, DockerException, ImageNotFound, - NotFound, create_unexpected_kwargs_error -) -from ..types import HostConfig -from ..utils import version_gte -from .images import Image -from .resource import Collection, Model - - -class Container(Model): - """ Local representation of a container object. Detailed configuration may - be accessed through the :py:attr:`attrs` attribute. Note that local - attributes are cached; users may call :py:meth:`reload` to - query the Docker daemon for the current properties, causing - :py:attr:`attrs` to be refreshed. - """ - @property - def name(self): - """ - The name of the container. - """ - if self.attrs.get('Name') is not None: - return self.attrs['Name'].lstrip('/') - - @property - def image(self): - """ - The image of the container. - """ - image_id = self.attrs.get('ImageID', self.attrs['Image']) - if image_id is None: - return None - return self.client.images.get(image_id.split(':')[1]) - - @property - def labels(self): - """ - The labels of a container as dictionary. - """ - try: - result = self.attrs['Config'].get('Labels') - return result or {} - except KeyError: - raise DockerException( - 'Label data is not available for sparse objects. Call reload()' - ' to retrieve all information' - ) - - @property - def status(self): - """ - The status of the container. For example, ``running``, or ``exited``. - """ - if isinstance(self.attrs['State'], dict): - return self.attrs['State']['Status'] - return self.attrs['State'] - - @property - def ports(self): - """ - The ports that the container exposes as a dictionary. - """ - return self.attrs.get('NetworkSettings', {}).get('Ports', {}) - - def attach(self, **kwargs): - """ - Attach to this container. - - :py:meth:`logs` is a wrapper around this method, which you can - use instead if you want to fetch/stream container output without first - retrieving the entire backlog. - - Args: - stdout (bool): Include stdout. - stderr (bool): Include stderr. - stream (bool): Return container output progressively as an iterator - of strings, rather than a single string. - logs (bool): Include the container's previous output. - - Returns: - By default, the container's output as a single string. - - If ``stream=True``, an iterator of output strings. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.attach(self.id, **kwargs) - - def attach_socket(self, **kwargs): - """ - Like :py:meth:`attach`, but returns the underlying socket-like object - for the HTTP request. - - Args: - params (dict): Dictionary of request parameters (e.g. ``stdout``, - ``stderr``, ``stream``). - ws (bool): Use websockets instead of raw HTTP. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.attach_socket(self.id, **kwargs) - - def commit(self, repository=None, tag=None, **kwargs): - """ - Commit a container to an image. Similar to the ``docker commit`` - command. 
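The cached-attribute behaviour described above matters in practice: after starting a container, call ``reload()`` before trusting properties such as ``status`` or ``ports``. A sketch (image and port choices are arbitrary):

.. code-block:: python

    import docker

    client = docker.from_env()
    container = client.containers.run('nginx', detach=True,
                                      ports={'80/tcp': None})
    container.reload()       # refresh attrs from the daemon
    print(container.status)  # e.g. 'running'
    print(container.ports)   # host bindings chosen by the daemon
    container.remove(force=True)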
- - Args: - repository (str): The repository to push the image to - tag (str): The tag to push - message (str): A commit message - author (str): The name of the author - changes (str): Dockerfile instructions to apply while committing - conf (dict): The configuration for the container. See the - `Engine API documentation - `_ - for full details. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - - resp = self.client.api.commit(self.id, repository=repository, tag=tag, - **kwargs) - return self.client.images.get(resp['Id']) - - def diff(self): - """ - Inspect changes on a container's filesystem. - - Returns: - (list) A list of dictionaries containing the attributes `Path` - and `Kind`. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.diff(self.id) - - def exec_run(self, cmd, stdout=True, stderr=True, stdin=False, tty=False, - privileged=False, user='', detach=False, stream=False, - socket=False, environment=None, workdir=None, demux=False): - """ - Run a command inside this container. Similar to - ``docker exec``. - - Args: - cmd (str or list): Command to be executed - stdout (bool): Attach to stdout. Default: ``True`` - stderr (bool): Attach to stderr. Default: ``True`` - stdin (bool): Attach to stdin. Default: ``False`` - tty (bool): Allocate a pseudo-TTY. Default: False - privileged (bool): Run as privileged. - user (str): User to execute command as. Default: root - detach (bool): If true, detach from the exec command. - Default: False - stream (bool): Stream response data. Default: False - socket (bool): Return the connection socket to allow custom - read/write operations. Default: False - environment (dict or list): A dictionary or a list of strings in - the following format ``["PASSWORD=xxx"]`` or - ``{"PASSWORD": "xxx"}``. - workdir (str): Path to working directory for this exec session - demux (bool): Return stdout and stderr separately - - Returns: - (ExecResult): A tuple of (exit_code, output) - exit_code: (int): - Exit code for the executed command or ``None`` if - either ``stream`` or ``socket`` is ``True``. - output: (generator, bytes, or tuple): - If ``stream=True``, a generator yielding response chunks. - If ``socket=True``, a socket object for the connection. - If ``demux=True``, a tuple of two bytes: stdout and stderr. - A bytestring containing response data otherwise. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - resp = self.client.api.exec_create( - self.id, cmd, stdout=stdout, stderr=stderr, stdin=stdin, tty=tty, - privileged=privileged, user=user, environment=environment, - workdir=workdir, - ) - exec_output = self.client.api.exec_start( - resp['Id'], detach=detach, tty=tty, stream=stream, socket=socket, - demux=demux - ) - if socket or stream: - return ExecResult(None, exec_output) - - return ExecResult( - self.client.api.exec_inspect(resp['Id'])['ExitCode'], - exec_output - ) - - def export(self, chunk_size=DEFAULT_DATA_CHUNK_SIZE): - """ - Export the contents of the container's filesystem as a tar archive. - - Args: - chunk_size (int): The number of bytes returned by each iteration - of the generator. If ``None``, data will be streamed as it is - received. Default: 2 MB - - Returns: - (str): The filesystem tar archive - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
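A sketch of ``exec_run`` with ``demux=True``, which separates the two output streams; either element of the tuple may be ``None`` if that stream produced no data (image and command are arbitrary):

.. code-block:: python

    import docker

    client = docker.from_env()
    c = client.containers.run('alpine', 'sleep 60', detach=True)

    exit_code, (out, err) = c.exec_run(
        'sh -c "echo out; echo err >&2"', demux=True
    )
    print(exit_code)  # 0
    print(out)        # b'out\n'
    print(err)        # b'err\n'
    c.remove(force=True)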
- """ - return self.client.api.export(self.id, chunk_size) - - def get_archive(self, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE, - encode_stream=False): - """ - Retrieve a file or folder from the container in the form of a tar - archive. - - Args: - path (str): Path to the file or folder to retrieve - chunk_size (int): The number of bytes returned by each iteration - of the generator. If ``None``, data will be streamed as it is - received. Default: 2 MB - encode_stream (bool): Determines if data should be encoded - (gzip-compressed) during transmission. Default: False - - Returns: - (tuple): First element is a raw tar data stream. Second element is - a dict containing ``stat`` information on the specified ``path``. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> f = open('./sh_bin.tar', 'wb') - >>> bits, stat = container.get_archive('/bin/sh') - >>> print(stat) - {'name': 'sh', 'size': 1075464, 'mode': 493, - 'mtime': '2018-10-01T15:37:48-07:00', 'linkTarget': ''} - >>> for chunk in bits: - ... f.write(chunk) - >>> f.close() - """ - return self.client.api.get_archive(self.id, path, - chunk_size, encode_stream) - - def kill(self, signal=None): - """ - Kill or send a signal to the container. - - Args: - signal (str or int): The signal to send. Defaults to ``SIGKILL`` - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - - return self.client.api.kill(self.id, signal=signal) - - def logs(self, **kwargs): - """ - Get logs from this container. Similar to the ``docker logs`` command. - - The ``stream`` parameter makes the ``logs`` function return a blocking - generator you can iterate over to retrieve log output as it happens. - - Args: - stdout (bool): Get ``STDOUT``. Default ``True`` - stderr (bool): Get ``STDERR``. Default ``True`` - stream (bool): Stream the response. Default ``False`` - timestamps (bool): Show timestamps. Default ``False`` - tail (str or int): Output specified number of lines at the end of - logs. Either an integer of number of lines or the string - ``all``. Default ``all`` - since (datetime, int, or float): Show logs since a given datetime, - integer epoch (in seconds) or float (in nanoseconds) - follow (bool): Follow log output. Default ``False`` - until (datetime, int, or float): Show logs that occurred before - the given datetime, integer epoch (in seconds), or - float (in nanoseconds) - - Returns: - (generator or str): Logs from the container. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.logs(self.id, **kwargs) - - def pause(self): - """ - Pauses all processes within this container. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.pause(self.id) - - def put_archive(self, path, data): - """ - Insert a file or folder in this container using a tar archive as - source. - - Args: - path (str): Path inside the container where the file(s) will be - extracted. Must exist. - data (bytes or stream): tar data to be extracted - - Returns: - (bool): True if the call succeeds. - - Raises: - :py:class:`~docker.errors.APIError` If an error occurs. - """ - return self.client.api.put_archive(self.id, path, data) - - def remove(self, **kwargs): - """ - Remove this container. Similar to the ``docker rm`` command. 
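``put_archive`` is the mirror image of the ``get_archive`` example above: the payload must already be a tar archive. A sketch that builds one in memory (paths and contents are made up):

.. code-block:: python

    import io
    import tarfile
    import docker

    client = docker.from_env()
    c = client.containers.create('alpine', 'cat /tmp/hello.txt')

    # Wrap a single file in an in-memory tar archive.
    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode='w') as tar:
        data = b'hello world\n'
        info = tarfile.TarInfo(name='hello.txt')
        info.size = len(data)
        tar.addfile(info, io.BytesIO(data))
    buf.seek(0)

    c.put_archive('/tmp', buf.read())  # returns True on success
    c.start()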
- - Args: - v (bool): Remove the volumes associated with the container - link (bool): Remove the specified link and not the underlying - container - force (bool): Force the removal of a running container (uses - ``SIGKILL``) - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.remove_container(self.id, **kwargs) - - def rename(self, name): - """ - Rename this container. Similar to the ``docker rename`` command. - - Args: - name (str): New name for the container - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.rename(self.id, name) - - def resize(self, height, width): - """ - Resize the tty session. - - Args: - height (int): Height of tty session - width (int): Width of tty session - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.resize(self.id, height, width) - - def restart(self, **kwargs): - """ - Restart this container. Similar to the ``docker restart`` command. - - Args: - timeout (int): Number of seconds to try to stop for before killing - the container. Once killed it will then be restarted. Default - is 10 seconds. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.restart(self.id, **kwargs) - - def start(self, **kwargs): - """ - Start this container. Similar to the ``docker start`` command, but - doesn't support attach options. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.start(self.id, **kwargs) - - def stats(self, **kwargs): - """ - Stream statistics for this container. Similar to the - ``docker stats`` command. - - Args: - decode (bool): If set to true, stream will be decoded into dicts - on the fly. Only applicable if ``stream`` is True. - False by default. - stream (bool): If set to false, only the current stats will be - returned instead of a stream. True by default. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.stats(self.id, **kwargs) - - def stop(self, **kwargs): - """ - Stop this container. Similar to the ``docker stop`` command. - - Args: - timeout (int): Timeout in seconds to wait for the container to - stop before sending a ``SIGKILL``. Default: 10 - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.stop(self.id, **kwargs) - - def top(self, **kwargs): - """ - Display the running processes of the container. - - Args: - ps_args (str): Optional arguments to pass to ``ps`` (e.g. ``aux``) - - Returns: - (str): The output of the ``top`` command - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.top(self.id, **kwargs) - - def unpause(self): - """ - Unpause all processes within the container. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.unpause(self.id) - - def update(self, **kwargs): - """ - Update resource configuration of this container. 
- - Args: - blkio_weight (int): Block IO (relative weight), between 10 and 1000 - cpu_period (int): Limit CPU CFS (Completely Fair Scheduler) period - cpu_quota (int): Limit CPU CFS (Completely Fair Scheduler) quota - cpu_shares (int): CPU shares (relative weight) - cpuset_cpus (str): CPUs in which to allow execution - cpuset_mems (str): MEMs in which to allow execution - mem_limit (int or str): Memory limit - mem_reservation (int or str): Memory soft limit - memswap_limit (int or str): Total memory (memory + swap), -1 to - disable swap - kernel_memory (int or str): Kernel memory limit - restart_policy (dict): Restart policy dictionary - - Returns: - (dict): Dictionary containing a ``Warnings`` key. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.update_container(self.id, **kwargs) - - def wait(self, **kwargs): - """ - Block until the container stops, then return its exit code. Similar to - the ``docker wait`` command. - - Args: - timeout (int): Request timeout - condition (str): Wait until a container state reaches the given - condition, either ``not-running`` (default), ``next-exit``, - or ``removed`` - - Returns: - (dict): The API's response as a Python dictionary, including - the container's exit code under the ``StatusCode`` attribute. - - Raises: - :py:class:`requests.exceptions.ReadTimeout` - If the timeout is exceeded. - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.wait(self.id, **kwargs) - - -class ContainerCollection(Collection): - model = Container - - def run(self, image, command=None, stdout=True, stderr=False, - remove=False, **kwargs): - """ - Run a container. By default, it will wait for the container to finish - and return its logs, similar to ``docker run``. - - If the ``detach`` argument is ``True``, it will start the container - and immediately return a :py:class:`Container` object, similar to - ``docker run -d``. - - Example: - Run a container and get its output: - - >>> import docker - >>> client = docker.from_env() - >>> client.containers.run('alpine', 'echo hello world') - b'hello world\\n' - - Run a container and detach: - - >>> container = client.containers.run('bfirsh/reticulate-splines', - detach=True) - >>> container.logs() - 'Reticulating spline 1...\\nReticulating spline 2...\\n' - - Args: - image (str): The image to run. - command (str or list): The command to run in the container. - auto_remove (bool): enable auto-removal of the container on daemon - side when the container's process exits. - blkio_weight_device: Block IO weight (relative device weight) in - the form of: ``[{"Path": "device_path", "Weight": weight}]``. - blkio_weight: Block IO weight (relative weight), accepts a weight - value between 10 and 1000. - cap_add (list of str): Add kernel capabilities. For example, - ``["SYS_ADMIN", "MKNOD"]``. - cap_drop (list of str): Drop kernel capabilities. - cgroup_parent (str): Override the default parent cgroup. - cgroupns (str): Override the default cgroup namespace mode for the - container. One of: - - ``private`` the container runs in its own private cgroup - namespace. - - ``host`` use the host system's cgroup namespace. - cpu_count (int): Number of usable CPUs (Windows only). - cpu_percent (int): Usable percentage of the available CPUs - (Windows only). - cpu_period (int): The length of a CPU period in microseconds. - cpu_quota (int): Microseconds of CPU time that the container can - get in a CPU period. 
- cpu_rt_period (int): Limit CPU real-time period in microseconds. - cpu_rt_runtime (int): Limit CPU real-time runtime in microseconds. - cpu_shares (int): CPU shares (relative weight). - cpuset_cpus (str): CPUs in which to allow execution (``0-3``, - ``0,1``). - cpuset_mems (str): Memory nodes (MEMs) in which to allow execution - (``0-3``, ``0,1``). Only effective on NUMA systems. - detach (bool): Run container in the background and return a - :py:class:`Container` object. - device_cgroup_rules (:py:class:`list`): A list of cgroup rules to - apply to the container. - device_read_bps: Limit read rate (bytes per second) from a device - in the form of: `[{"Path": "device_path", "Rate": rate}]` - device_read_iops: Limit read rate (IO per second) from a device. - device_write_bps: Limit write rate (bytes per second) from a - device. - device_write_iops: Limit write rate (IO per second) from a device. - devices (:py:class:`list`): Expose host devices to the container, - as a list of strings in the form - ``<path_on_host>:<path_in_container>:<cgroup_permissions>``. - - For example, ``/dev/sda:/dev/xvda:rwm`` allows the container - to have read-write access to the host's ``/dev/sda`` via a - node named ``/dev/xvda`` inside the container. - device_requests (:py:class:`list`): Expose host resources such as - GPUs to the container, as a list of - :py:class:`docker.types.DeviceRequest` instances. - dns (:py:class:`list`): Set custom DNS servers. - dns_opt (:py:class:`list`): Additional options to be added to the - container's ``resolv.conf`` file. - dns_search (:py:class:`list`): DNS search domains. - domainname (str or list): Set custom DNS search domains. - entrypoint (str or list): The entrypoint for the container. - environment (dict or list): Environment variables to set inside - the container, as a dictionary or a list of strings in the - format ``["SOMEVARIABLE=xxx"]``. - extra_hosts (dict): Additional hostnames to resolve inside the - container, as a mapping of hostname to IP address. - group_add (:py:class:`list`): List of additional group names and/or - IDs that the container process will run as. - healthcheck (dict): Specify a test to perform to check that the - container is healthy. The dict takes the following keys: - - - test (:py:class:`list` or str): Test to perform to determine - container health. Possible values: - - - Empty list: Inherit healthcheck from parent image - - ``["NONE"]``: Disable healthcheck - - ``["CMD", args...]``: exec arguments directly. - - ``["CMD-SHELL", command]``: Run command in the system's - default shell. - - If a string is provided, it will be used as a ``CMD-SHELL`` - command. - - interval (int): The time to wait between checks in - nanoseconds. It should be 0 or at least 1000000 (1 ms). - - timeout (int): The time to wait before considering the check - to have hung. It should be 0 or at least 1000000 (1 ms). - - retries (int): The number of consecutive failures needed to - consider a container as unhealthy. - - start_period (int): Start period for the container to - initialize before starting health-retries countdown in - nanoseconds. It should be 0 or at least 1000000 (1 ms). - hostname (str): Optional hostname for the container. - init (bool): Run an init inside the container that forwards - signals and reaps processes - init_path (str): Path to the docker-init binary - ipc_mode (str): Set the IPC mode for the container. - isolation (str): Isolation technology to use. Default: `None`. - kernel_memory (int or str): Kernel memory limit - labels (dict or list): A dictionary of name-value labels (e.g. 
- ``{"label1": "value1", "label2": "value2"}``) or a list of - names of labels to set with empty values (e.g. - ``["label1", "label2"]``) - links (dict): Mapping of links using the - ``{'container': 'alias'}`` format. The alias is optional. - Containers declared in this dict will be linked to the new - container using the provided alias. Default: ``None``. - log_config (LogConfig): Logging configuration. - lxc_conf (dict): LXC config. - mac_address (str): MAC address to assign to the container. - mem_limit (int or str): Memory limit. Accepts float values - (which represent the memory limit of the created container in - bytes) or a string with a units identification char - (``100000b``, ``1000k``, ``128m``, ``1g``). If a string is - specified without a units character, bytes are assumed as an - intended unit. - mem_reservation (int or str): Memory soft limit. - mem_swappiness (int): Tune a container's memory swappiness - behavior. Accepts number between 0 and 100. - memswap_limit (str or int): Maximum amount of memory + swap a - container is allowed to consume. - mounts (:py:class:`list`): Specification for mounts to be added to - the container. More powerful alternative to ``volumes``. Each - item in the list is expected to be a - :py:class:`docker.types.Mount` object. - name (str): The name for this container. - nano_cpus (int): CPU quota in units of 1e-9 CPUs. - network (str): Name of the network this container will be connected - to at creation time. You can connect to additional networks - using :py:meth:`Network.connect`. Incompatible with - ``network_mode``. - network_disabled (bool): Disable networking. - network_mode (str): One of: - - - ``bridge`` Create a new network stack for the container on - the bridge network. - - ``none`` No networking for this container. - - ``container:<name|id>`` Reuse another container's network - stack. - - ``host`` Use the host network stack. - This mode is incompatible with ``ports``. - - Incompatible with ``network``. - network_driver_opt (dict): A dictionary of options to provide - to the network driver. Defaults to ``None``. Used in - conjunction with ``network``. Incompatible - with ``network_mode``. - oom_kill_disable (bool): Whether to disable OOM killer. - oom_score_adj (int): An integer value containing the score given - to the container in order to tune OOM killer preferences. - pid_mode (str): If set to ``host``, use the host PID namespace - inside the container. - pids_limit (int): Tune a container's pids limit. Set ``-1`` for - unlimited. - platform (str): Platform in the format ``os[/arch[/variant]]``. - Only used if the method needs to pull the requested image. - ports (dict): Ports to bind inside the container. - - The keys of the dictionary are the ports to bind inside the - container, either as an integer or a string in the form - ``port/protocol``, where the protocol is either ``tcp``, - ``udp``, or ``sctp``. - - The values of the dictionary are the corresponding ports to - open on the host, which can be either: - - - The port number, as an integer. For example, - ``{'2222/tcp': 3333}`` will expose port 2222 inside the - container as port 3333 on the host. - - ``None``, to assign a random host port. For example, - ``{'2222/tcp': None}``. - - A tuple of ``(address, port)`` if you want to specify the - host interface. For example, - ``{'1111/tcp': ('127.0.0.1', 1111)}``. - - A list of integers, if you want to bind multiple host ports - to a single container port. For example, - ``{'1111/tcp': [1234, 4567]}``. 
- - Incompatible with ``host`` network mode. - privileged (bool): Give extended privileges to this container. - publish_all_ports (bool): Publish all ports to the host. - read_only (bool): Mount the container's root filesystem as read - only. - remove (bool): Remove the container when it has finished running. - Default: ``False``. - restart_policy (dict): Restart the container when it exits. - Configured as a dictionary with keys: - - - ``Name`` One of ``on-failure``, or ``always``. - - ``MaximumRetryCount`` Number of times to restart the - container on failure. - - For example: - ``{"Name": "on-failure", "MaximumRetryCount": 5}`` - - runtime (str): Runtime to use with this container. - security_opt (:py:class:`list`): A list of string values to - customize labels for MLS systems, such as SELinux. - shm_size (str or int): Size of /dev/shm (e.g. ``1G``). - stdin_open (bool): Keep ``STDIN`` open even if not attached. - stdout (bool): Return logs from ``STDOUT`` when ``detach=False``. - Default: ``True``. - stderr (bool): Return logs from ``STDERR`` when ``detach=False``. - Default: ``False``. - stop_signal (str): The stop signal to use to stop the container - (e.g. ``SIGINT``). - storage_opt (dict): Storage driver options per container as a - key-value mapping. - stream (bool): If true and ``detach`` is false, return a log - generator instead of a string. Ignored if ``detach`` is true. - Default: ``False``. - sysctls (dict): Kernel parameters to set in the container. - tmpfs (dict): Temporary filesystems to mount, as a dictionary - mapping a path inside the container to options for that path. - - For example: - - .. code-block:: python - - { - '/mnt/vol2': '', - '/mnt/vol1': 'size=3G,uid=1000' - } - - tty (bool): Allocate a pseudo-TTY. - ulimits (:py:class:`list`): Ulimits to set inside the container, - as a list of :py:class:`docker.types.Ulimit` instances. - use_config_proxy (bool): If ``True``, and if the docker client - configuration file (``~/.docker/config.json`` by default) - contains a proxy configuration, the corresponding environment - variables will be set in the container being built. - user (str or int): Username or UID to run commands as inside the - container. - userns_mode (str): Sets the user namespace mode for the container - when user namespace remapping option is enabled. Supported - values are: ``host`` - uts_mode (str): Sets the UTS namespace mode for the container. - Supported values are: ``host`` - version (str): The version of the API to use. Set to ``auto`` to - automatically detect the server's version. Default: ``1.35`` - volume_driver (str): The name of a volume driver/plugin. - volumes (dict or list): A dictionary to configure volumes mounted - inside the container. The key is either the host path or a - volume name, and the value is a dictionary with the keys: - - - ``bind`` The path to mount the volume inside the container - - ``mode`` Either ``rw`` to mount the volume read/write, or - ``ro`` to mount it read-only. - - For example: - - .. code-block:: python - - {'/home/user1/': {'bind': '/mnt/vol2', 'mode': 'rw'}, - '/var/www': {'bind': '/mnt/vol1', 'mode': 'ro'}} - - Or a list of strings which each one of its elements specifies a - mount volume. - - For example: - - .. code-block:: python - - ['/home/user1/:/mnt/vol2','/var/www:/mnt/vol1'] - - volumes_from (:py:class:`list`): List of container names or IDs to - get volumes from. - working_dir (str): Path to the working directory. 
- - Returns: - The container logs, either ``STDOUT``, ``STDERR``, or both, - depending on the value of the ``stdout`` and ``stderr`` arguments. - - ``STDOUT`` and ``STDERR`` may be read only if either ``json-file`` - or ``journald`` logging driver used. Thus, if you are using none of - these drivers, a ``None`` object is returned instead. See the - `Engine API documentation - `_ - for full details. - - If ``detach`` is ``True``, a :py:class:`Container` object is - returned instead. - - Raises: - :py:class:`docker.errors.ContainerError` - If the container exits with a non-zero exit code and - ``detach`` is ``False``. - :py:class:`docker.errors.ImageNotFound` - If the specified image does not exist. - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - if isinstance(image, Image): - image = image.id - stream = kwargs.pop('stream', False) - detach = kwargs.pop('detach', False) - platform = kwargs.get('platform', None) - - if detach and remove: - if version_gte(self.client.api._version, '1.25'): - kwargs["auto_remove"] = True - else: - raise RuntimeError("The options 'detach' and 'remove' cannot " - "be used together in api versions < 1.25.") - - if kwargs.get('network') and kwargs.get('network_mode'): - raise RuntimeError( - 'The options "network" and "network_mode" can not be used ' - 'together.' - ) - - if kwargs.get('network_driver_opt') and not kwargs.get('network'): - raise RuntimeError( - 'The options "network_driver_opt" can not be used ' - 'without "network".' - ) - - try: - container = self.create(image=image, command=command, - detach=detach, **kwargs) - except ImageNotFound: - self.client.images.pull(image, platform=platform) - container = self.create(image=image, command=command, - detach=detach, **kwargs) - - container.start() - - if detach: - return container - - logging_driver = container.attrs['HostConfig']['LogConfig']['Type'] - - out = None - if logging_driver == 'json-file' or logging_driver == 'journald': - out = container.logs( - stdout=stdout, stderr=stderr, stream=True, follow=True - ) - - exit_status = container.wait()['StatusCode'] - if exit_status != 0: - out = None - if not kwargs.get('auto_remove'): - out = container.logs(stdout=False, stderr=True) - - if remove: - container.remove() - if exit_status != 0: - raise ContainerError( - container, exit_status, command, image, out - ) - - return out if stream or out is None else b''.join( - [line for line in out] - ) - - def create(self, image, command=None, **kwargs): - """ - Create a container without starting it. Similar to ``docker create``. - - Takes the same arguments as :py:meth:`run`, except for ``stdout``, - ``stderr``, and ``remove``. - - Returns: - A :py:class:`Container` object. - - Raises: - :py:class:`docker.errors.ImageNotFound` - If the specified image does not exist. - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - if isinstance(image, Image): - image = image.id - kwargs['image'] = image - kwargs['command'] = command - kwargs['version'] = self.client.api._version - create_kwargs = _create_container_args(kwargs) - resp = self.client.api.create_container(**create_kwargs) - return self.get(resp['Id']) - - def get(self, container_id): - """ - Get a container by name or ID. - - Args: - container_id (str): Container name or ID. - - Returns: - A :py:class:`Container` object. - - Raises: - :py:class:`docker.errors.NotFound` - If the container does not exist. - :py:class:`docker.errors.APIError` - If the server returns an error. 
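As the ``run()`` body above shows, ``stream`` only takes effect with the ``json-file`` or ``journald`` log drivers. Under that assumption, a sketch of streaming output from a short-lived container (image and command are arbitrary):

.. code-block:: python

    import docker

    client = docker.from_env()

    # stream=True (with detach=False) yields a log generator instead of bytes.
    out = client.containers.run('alpine', 'sh -c "seq 1 5"', stream=True)
    for line in out:
        print(line.decode().rstrip())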
- """ - resp = self.client.api.inspect_container(container_id) - return self.prepare_model(resp) - - def list(self, all=False, before=None, filters=None, limit=-1, since=None, - sparse=False, ignore_removed=False): - """ - List containers. Similar to the ``docker ps`` command. - - Args: - all (bool): Show all containers. Only running containers are shown - by default - since (str): Show only containers created since Id or Name, include - non-running ones - before (str): Show only containers created before Id or Name, - include non-running ones - limit (int): Show `limit` last created containers, include - non-running ones - filters (dict): Filters to be processed on the container list. - Available filters: - - - `exited` (int): Only containers with specified exit code - - `status` (str): One of ``restarting``, ``running``, - ``paused``, ``exited`` - - `label` (str|list): format either ``"key"``, ``"key=value"`` - or a list of such. - - `id` (str): The id of the container. - - `name` (str): The name of the container. - - `ancestor` (str): Filter by container ancestor. Format of - ``<image-name>[:tag]``, ``<image-id>``, or - ``<image@digest>``. - - `before` (str): Only containers created before a particular - container. Give the container name or id. - - `since` (str): Only containers created after a particular - container. Give container name or id. - - A comprehensive list can be found in the documentation for - `docker ps - `_. - - sparse (bool): Do not inspect containers. Returns partial - information, but guaranteed not to block. Use - :py:meth:`Container.reload` on resulting objects to retrieve - all attributes. Default: ``False`` - ignore_removed (bool): Ignore failures due to missing containers - when attempting to inspect containers from the original list. - Set to ``True`` if race conditions are likely. Has no effect - if ``sparse=True``. Default: ``False`` - - Returns: - (list of :py:class:`Container`) - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
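A sketch of ``list`` with a server-side filter and the ``sparse`` fast path (the filter value is arbitrary):

.. code-block:: python

    import docker

    client = docker.from_env()

    exited = client.containers.list(all=True, filters={'status': 'exited'})
    print([c.name for c in exited])

    # sparse=True skips the per-container inspect round-trips; reload()
    # fills in the missing attributes on demand.
    for c in client.containers.list(sparse=True):
        c.reload()
        print(c.name, c.status)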
- """ - resp = self.client.api.containers(all=all, before=before, - filters=filters, limit=limit, - since=since) - if sparse: - return [self.prepare_model(r) for r in resp] - else: - containers = [] - for r in resp: - try: - containers.append(self.get(r['Id'])) - # a container may have been removed while iterating - except NotFound: - if not ignore_removed: - raise - return containers - - def prune(self, filters=None): - return self.client.api.prune_containers(filters=filters) - prune.__doc__ = APIClient.prune_containers.__doc__ - - -# kwargs to copy straight from run to create -RUN_CREATE_KWARGS = [ - 'command', - 'detach', - 'domainname', - 'entrypoint', - 'environment', - 'healthcheck', - 'hostname', - 'image', - 'labels', - 'mac_address', - 'name', - 'network_disabled', - 'platform', - 'stdin_open', - 'stop_signal', - 'tty', - 'use_config_proxy', - 'user', - 'working_dir', -] - -# kwargs to copy straight from run to host_config -RUN_HOST_CONFIG_KWARGS = [ - 'auto_remove', - 'blkio_weight_device', - 'blkio_weight', - 'cap_add', - 'cap_drop', - 'cgroup_parent', - 'cgroupns', - 'cpu_count', - 'cpu_percent', - 'cpu_period', - 'cpu_quota', - 'cpu_shares', - 'cpuset_cpus', - 'cpuset_mems', - 'cpu_rt_period', - 'cpu_rt_runtime', - 'device_cgroup_rules', - 'device_read_bps', - 'device_read_iops', - 'device_write_bps', - 'device_write_iops', - 'devices', - 'device_requests', - 'dns_opt', - 'dns_search', - 'dns', - 'extra_hosts', - 'group_add', - 'init', - 'init_path', - 'ipc_mode', - 'isolation', - 'kernel_memory', - 'links', - 'log_config', - 'lxc_conf', - 'mem_limit', - 'mem_reservation', - 'mem_swappiness', - 'memswap_limit', - 'mounts', - 'nano_cpus', - 'network_mode', - 'oom_kill_disable', - 'oom_score_adj', - 'pid_mode', - 'pids_limit', - 'privileged', - 'publish_all_ports', - 'read_only', - 'restart_policy', - 'security_opt', - 'shm_size', - 'storage_opt', - 'sysctls', - 'tmpfs', - 'ulimits', - 'userns_mode', - 'uts_mode', - 'version', - 'volume_driver', - 'volumes_from', - 'runtime' -] - - -def _create_container_args(kwargs): - """ - Convert arguments to create() to arguments to create_container(). 
- """ - # Copy over kwargs which can be copied directly - create_kwargs = {} - for key in copy.copy(kwargs): - if key in RUN_CREATE_KWARGS: - create_kwargs[key] = kwargs.pop(key) - host_config_kwargs = {} - for key in copy.copy(kwargs): - if key in RUN_HOST_CONFIG_KWARGS: - host_config_kwargs[key] = kwargs.pop(key) - - # Process kwargs which are split over both create and host_config - ports = kwargs.pop('ports', {}) - if ports: - host_config_kwargs['port_bindings'] = ports - - volumes = kwargs.pop('volumes', {}) - if volumes: - host_config_kwargs['binds'] = volumes - - network = kwargs.pop('network', None) - network_driver_opt = kwargs.pop('network_driver_opt', None) - if network: - network_configuration = {'driver_opt': network_driver_opt} \ - if network_driver_opt else None - - create_kwargs['networking_config'] = {network: network_configuration} - host_config_kwargs['network_mode'] = network - - # All kwargs should have been consumed by this point, so raise - # error if any are left - if kwargs: - raise create_unexpected_kwargs_error('run', kwargs) - - create_kwargs['host_config'] = HostConfig(**host_config_kwargs) - - # Fill in any kwargs which need processing by create_host_config first - port_bindings = create_kwargs['host_config'].get('PortBindings') - if port_bindings: - # sort to make consistent for tests - create_kwargs['ports'] = [tuple(p.split('/', 1)) - for p in sorted(port_bindings.keys())] - if volumes: - if isinstance(volumes, dict): - create_kwargs['volumes'] = [ - v.get('bind') for v in volumes.values() - ] - else: - create_kwargs['volumes'] = [ - _host_volume_from_bind(v) for v in volumes - ] - return create_kwargs - - -def _host_volume_from_bind(bind): - drive, rest = ntpath.splitdrive(bind) - bits = rest.split(':', 1) - if len(bits) == 1 or bits[1] in ('ro', 'rw'): - return drive + bits[0] - elif bits[1].endswith(':ro') or bits[1].endswith(':rw'): - return bits[1][:-3] - else: - return bits[1] - - -ExecResult = namedtuple('ExecResult', 'exit_code,output') -""" A result of Container.exec_run with the properties ``exit_code`` and - ``output``. """ diff --git a/server/venv/Lib/site-packages/docker/models/images.py b/server/venv/Lib/site-packages/docker/models/images.py deleted file mode 100644 index e3ec39d..0000000 --- a/server/venv/Lib/site-packages/docker/models/images.py +++ /dev/null @@ -1,507 +0,0 @@ -import itertools -import re -import warnings - -from ..api import APIClient -from ..constants import DEFAULT_DATA_CHUNK_SIZE -from ..errors import BuildError, ImageLoadError, InvalidArgument -from ..utils import parse_repository_tag -from ..utils.json_stream import json_stream -from .resource import Collection, Model - - -class Image(Model): - """ - An image on the server. - """ - def __repr__(self): - return "<{}: '{}'>".format( - self.__class__.__name__, - "', '".join(self.tags), - ) - - @property - def labels(self): - """ - The labels of an image as dictionary. - """ - result = self.attrs['Config'].get('Labels') - return result or {} - - @property - def short_id(self): - """ - The ID of the image truncated to 12 characters, plus the ``sha256:`` - prefix. - """ - if self.id.startswith('sha256:'): - return self.id[:19] - return self.id[:12] - - @property - def tags(self): - """ - The image's tags. - """ - tags = self.attrs.get('RepoTags') - if tags is None: - tags = [] - return [tag for tag in tags if tag != ':'] - - def history(self): - """ - Show the history of an image. - - Returns: - (str): The history of the image. 
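To see the split described above in action, the helper can be called directly. It is a private API, so this is a sketch for illustration only; the exact keys and values depend on the API version and may change:

.. code-block:: python

    from docker.models.containers import _create_container_args

    args = _create_container_args({
        'image': 'alpine',
        'command': 'true',
        'version': '1.35',          # normally filled in by create()
        'mem_limit': '128m',        # routed into host_config
        'ports': {'80/tcp': 8080},  # split across ports/port_bindings
    })
    print(sorted(args))                   # 'command', 'host_config', 'image', 'ports'
    print(args['ports'])                  # e.g. [('80', 'tcp')]
    print(args['host_config']['Memory'])  # e.g. 134217728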
- - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.history(self.id) - - def remove(self, force=False, noprune=False): - """ - Remove this image. - - Args: - force (bool): Force removal of the image - noprune (bool): Do not delete untagged parents - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.remove_image( - self.id, - force=force, - noprune=noprune, - ) - - def save(self, chunk_size=DEFAULT_DATA_CHUNK_SIZE, named=False): - """ - Get a tarball of an image. Similar to the ``docker save`` command. - - Args: - chunk_size (int): The generator will return up to that much data - per iteration, but may return less. If ``None``, data will be - streamed as it is received. Default: 2 MB - named (str or bool): If ``False`` (default), the tarball will not - retain repository and tag information for this image. If set - to ``True``, the first tag in the :py:attr:`~tags` list will - be used to identify the image. Alternatively, any element of - the :py:attr:`~tags` list can be used as an argument to use - that specific tag as the saved identifier. - - Returns: - (generator): A stream of raw archive data. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> image = cli.images.get("busybox:latest") - >>> f = open('/tmp/busybox-latest.tar', 'wb') - >>> for chunk in image.save(): - >>> f.write(chunk) - >>> f.close() - """ - img = self.id - if named: - img = self.tags[0] if self.tags else img - if isinstance(named, str): - if named not in self.tags: - raise InvalidArgument( - f"{named} is not a valid tag for this image" - ) - img = named - - return self.client.api.get_image(img, chunk_size) - - def tag(self, repository, tag=None, **kwargs): - """ - Tag this image into a repository. Similar to the ``docker tag`` - command. - - Args: - repository (str): The repository to set for the tag - tag (str): The tag name - force (bool): Force - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Returns: - (bool): ``True`` if successful - """ - return self.client.api.tag(self.id, repository, tag=tag, **kwargs) - - -class RegistryData(Model): - """ - Image metadata stored on the registry, including available platforms. - """ - def __init__(self, image_name, *args, **kwargs): - super().__init__(*args, **kwargs) - self.image_name = image_name - - @property - def id(self): - """ - The ID of the object. - """ - return self.attrs['Descriptor']['digest'] - - @property - def short_id(self): - """ - The ID of the image truncated to 12 characters, plus the ``sha256:`` - prefix. - """ - return self.id[:19] - - def pull(self, platform=None): - """ - Pull the image digest. - - Args: - platform (str): The platform to pull the image for. - Default: ``None`` - - Returns: - (:py:class:`Image`): A reference to the pulled image. - """ - repository, _ = parse_repository_tag(self.image_name) - return self.collection.pull(repository, tag=self.id, platform=platform) - - def has_platform(self, platform): - """ - Check whether the given platform identifier is available for this - digest. - - Args: - platform (str or dict): A string using the ``os[/arch[/variant]]`` - format, or a platform dictionary. - - Returns: - (bool): ``True`` if the platform is recognized as available, - ``False`` otherwise. - - Raises: - :py:class:`docker.errors.InvalidArgument` - If the platform argument is not a valid descriptor. 
- """ - if platform and not isinstance(platform, dict): - parts = platform.split('/') - if len(parts) > 3 or len(parts) < 1: - raise InvalidArgument( - f'"{platform}" is not a valid platform descriptor' - ) - platform = {'os': parts[0]} - if len(parts) > 2: - platform['variant'] = parts[2] - if len(parts) > 1: - platform['architecture'] = parts[1] - return normalize_platform( - platform, self.client.version() - ) in self.attrs['Platforms'] - - def reload(self): - self.attrs = self.client.api.inspect_distribution(self.image_name) - - reload.__doc__ = Model.reload.__doc__ - - -class ImageCollection(Collection): - model = Image - - def build(self, **kwargs): - """ - Build an image and return it. Similar to the ``docker build`` - command. Either ``path`` or ``fileobj`` must be set. - - If you already have a tar file for the Docker build context (including - a Dockerfile), pass a readable file-like object to ``fileobj`` - and also pass ``custom_context=True``. If the stream is also - compressed, set ``encoding`` to the correct value (e.g ``gzip``). - - If you want to get the raw output of the build, use the - :py:meth:`~docker.api.build.BuildApiMixin.build` method in the - low-level API. - - Args: - path (str): Path to the directory containing the Dockerfile - fileobj: A file object to use as the Dockerfile. (Or a file-like - object) - tag (str): A tag to add to the final image - quiet (bool): Whether to return the status - nocache (bool): Don't use the cache when set to ``True`` - rm (bool): Remove intermediate containers. The ``docker build`` - command now defaults to ``--rm=true``, but we have kept the old - default of `False` to preserve backward compatibility - timeout (int): HTTP timeout - custom_context (bool): Optional if using ``fileobj`` - encoding (str): The encoding for a stream. Set to ``gzip`` for - compressing - pull (bool): Downloads any updates to the FROM image in Dockerfiles - forcerm (bool): Always remove intermediate containers, even after - unsuccessful builds - dockerfile (str): path within the build context to the Dockerfile - buildargs (dict): A dictionary of build arguments - container_limits (dict): A dictionary of limits applied to each - container created by the build process. Valid keys: - - - memory (int): set memory limit for build - - memswap (int): Total memory (memory + swap), -1 to disable - swap - - cpushares (int): CPU shares (relative weight) - - cpusetcpus (str): CPUs in which to allow execution, e.g., - ``"0-3"``, ``"0,1"`` - shmsize (int): Size of `/dev/shm` in bytes. The size must be - greater than 0. If omitted the system uses 64MB - labels (dict): A dictionary of labels to set on the image - cache_from (list): A list of images used for build cache - resolution - target (str): Name of the build-stage to build in a multi-stage - Dockerfile - network_mode (str): networking mode for the run commands during - build - squash (bool): Squash the resulting images layers into a - single layer. - extra_hosts (dict): Extra hosts to add to /etc/hosts in building - containers, as a mapping of hostname to IP address. - platform (str): Platform in the format ``os[/arch[/variant]]``. - isolation (str): Isolation technology used during build. - Default: `None`. - use_config_proxy (bool): If ``True``, and if the docker client - configuration file (``~/.docker/config.json`` by default) - contains a proxy configuration, the corresponding environment - variables will be set in the container being built. 
- - Returns: - (tuple): The first item is the :py:class:`Image` object for the - image that was built. The second item is a generator of the - build logs as JSON-decoded objects. - - Raises: - :py:class:`docker.errors.BuildError` - If there is an error during the build. - :py:class:`docker.errors.APIError` - If the server returns any other error. - ``TypeError`` - If neither ``path`` nor ``fileobj`` is specified. - """ - resp = self.client.api.build(**kwargs) - if isinstance(resp, str): - return self.get(resp) - last_event = None - image_id = None - result_stream, internal_stream = itertools.tee(json_stream(resp)) - for chunk in internal_stream: - if 'error' in chunk: - raise BuildError(chunk['error'], result_stream) - if 'stream' in chunk: - match = re.search( - r'(^Successfully built |sha256:)([0-9a-f]+)$', - chunk['stream'] - ) - if match: - image_id = match.group(2) - last_event = chunk - if image_id: - return (self.get(image_id), result_stream) - raise BuildError(last_event or 'Unknown', result_stream) - - def get(self, name): - """ - Gets an image. - - Args: - name (str): The name of the image. - - Returns: - (:py:class:`Image`): The image. - - Raises: - :py:class:`docker.errors.ImageNotFound` - If the image does not exist. - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.prepare_model(self.client.api.inspect_image(name)) - - def get_registry_data(self, name, auth_config=None): - """ - Gets the registry data for an image. - - Args: - name (str): The name of the image. - auth_config (dict): Override the credentials that are found in the - config for this request. ``auth_config`` should contain the - ``username`` and ``password`` keys to be valid. - - Returns: - (:py:class:`RegistryData`): The data object. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return RegistryData( - image_name=name, - attrs=self.client.api.inspect_distribution(name, auth_config), - client=self.client, - collection=self, - ) - - def list(self, name=None, all=False, filters=None): - """ - List images on the server. - - Args: - name (str): Only show images belonging to the repository ``name`` - all (bool): Show intermediate image layers. By default, these are - filtered out. - filters (dict): Filters to be processed on the image list. - Available filters: - - ``dangling`` (bool) - - `label` (str|list): format either ``"key"``, ``"key=value"`` - or a list of such. - - Returns: - (list of :py:class:`Image`): The images. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - resp = self.client.api.images(name=name, all=all, filters=filters) - return [self.get(r["Id"]) for r in resp] - - def load(self, data): - """ - Load an image that was previously saved using - :py:meth:`~docker.models.images.Image.save` (or ``docker save``). - Similar to ``docker load``. - - Args: - data (binary): Image data to be loaded. - - Returns: - (list of :py:class:`Image`): The images. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
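A sketch of the high-level build flow just shown, including how ``BuildError`` exposes the same log stream the happy path returns (path and tag are made up):

.. code-block:: python

    import docker
    from docker.errors import BuildError

    client = docker.from_env()
    try:
        image, logs = client.images.build(path='.', tag='myapp:dev', rm=True)
    except BuildError as e:
        # e.build_log is the JSON-decoded stream up to the failure.
        for chunk in e.build_log:
            if 'stream' in chunk:
                print(chunk['stream'], end='')
        raise
    for chunk in logs:
        if 'stream' in chunk:
            print(chunk['stream'], end='')
    print(image.id)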
- """ - resp = self.client.api.load_image(data) - images = [] - for chunk in resp: - if 'stream' in chunk: - match = re.search( - r'(^Loaded image ID: |^Loaded image: )(.+)$', - chunk['stream'] - ) - if match: - image_id = match.group(2) - images.append(image_id) - if 'error' in chunk: - raise ImageLoadError(chunk['error']) - - return [self.get(i) for i in images] - - def pull(self, repository, tag=None, all_tags=False, **kwargs): - """ - Pull an image of the given name and return it. Similar to the - ``docker pull`` command. - If ``tag`` is ``None`` or empty, it is set to ``latest``. - If ``all_tags`` is set, the ``tag`` parameter is ignored and all image - tags will be pulled. - - If you want to get the raw pull output, use the - :py:meth:`~docker.api.image.ImageApiMixin.pull` method in the - low-level API. - - Args: - repository (str): The repository to pull - tag (str): The tag to pull - auth_config (dict): Override the credentials that are found in the - config for this request. ``auth_config`` should contain the - ``username`` and ``password`` keys to be valid. - platform (str): Platform in the format ``os[/arch[/variant]]`` - all_tags (bool): Pull all image tags - - Returns: - (:py:class:`Image` or list): The image that has been pulled. - If ``all_tags`` is True, the method will return a list - of :py:class:`Image` objects belonging to this repository. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> # Pull the image tagged `latest` in the busybox repo - >>> image = client.images.pull('busybox') - - >>> # Pull all tags in the busybox repo - >>> images = client.images.pull('busybox', all_tags=True) - """ - repository, image_tag = parse_repository_tag(repository) - tag = tag or image_tag or 'latest' - - if 'stream' in kwargs: - warnings.warn( - '`stream` is not a valid parameter for this method' - ' and will be overridden' - ) - del kwargs['stream'] - - pull_log = self.client.api.pull( - repository, tag=tag, stream=True, all_tags=all_tags, **kwargs - ) - for _ in pull_log: - # We don't do anything with the logs, but we need - # to keep the connection alive and wait for the image - # to be pulled. 
- pass - if not all_tags: - return self.get('{0}{2}{1}'.format( - repository, tag, '@' if tag.startswith('sha256:') else ':' - )) - return self.list(repository) - - def push(self, repository, tag=None, **kwargs): - return self.client.api.push(repository, tag=tag, **kwargs) - push.__doc__ = APIClient.push.__doc__ - - def remove(self, *args, **kwargs): - self.client.api.remove_image(*args, **kwargs) - remove.__doc__ = APIClient.remove_image.__doc__ - - def search(self, *args, **kwargs): - return self.client.api.search(*args, **kwargs) - search.__doc__ = APIClient.search.__doc__ - - def prune(self, filters=None): - return self.client.api.prune_images(filters=filters) - prune.__doc__ = APIClient.prune_images.__doc__ - - def prune_builds(self, *args, **kwargs): - return self.client.api.prune_builds(*args, **kwargs) - prune_builds.__doc__ = APIClient.prune_builds.__doc__ - - -def normalize_platform(platform, engine_info): - if platform is None: - platform = {} - if 'os' not in platform: - platform['os'] = engine_info['Os'] - if 'architecture' not in platform: - platform['architecture'] = engine_info['Arch'] - return platform diff --git a/server/venv/Lib/site-packages/docker/models/networks.py b/server/venv/Lib/site-packages/docker/models/networks.py deleted file mode 100644 index f502879..0000000 --- a/server/venv/Lib/site-packages/docker/models/networks.py +++ /dev/null @@ -1,218 +0,0 @@ -from ..api import APIClient -from ..utils import version_gte -from .containers import Container -from .resource import Model, Collection - - -class Network(Model): - """ - A Docker network. - """ - @property - def name(self): - """ - The name of the network. - """ - return self.attrs.get('Name') - - @property - def containers(self): - """ - The containers that are connected to the network, as a list of - :py:class:`~docker.models.containers.Container` objects. - """ - return [ - self.client.containers.get(cid) for cid in - (self.attrs.get('Containers') or {}).keys() - ] - - def connect(self, container, *args, **kwargs): - """ - Connect a container to this network. - - Args: - container (str): Container to connect to this network, as either - an ID, name, or :py:class:`~docker.models.containers.Container` - object. - aliases (:py:class:`list`): A list of aliases for this endpoint. - Names in that list can be used within the network to reach the - container. Defaults to ``None``. - links (:py:class:`list`): A list of links for this endpoint. - Containers declared in this list will be linked to this - container. Defaults to ``None``. - ipv4_address (str): The IP address of this container on the - network, using the IPv4 protocol. Defaults to ``None``. - ipv6_address (str): The IP address of this container on the - network, using the IPv6 protocol. Defaults to ``None``. - link_local_ips (:py:class:`list`): A list of link-local (IPv4/IPv6) - addresses. - driver_opt (dict): A dictionary of options to provide to the - network driver. Defaults to ``None``. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - if isinstance(container, Container): - container = container.id - return self.client.api.connect_container_to_network( - container, self.id, *args, **kwargs - ) - - def disconnect(self, container, *args, **kwargs): - """ - Disconnect a container from this network. - - Args: - container (str): Container to disconnect from this network, as - either an ID, name, or - :py:class:`~docker.models.containers.Container` object. 
- force (bool): Force the container to disconnect from a network. - Default: ``False`` - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - if isinstance(container, Container): - container = container.id - return self.client.api.disconnect_container_from_network( - container, self.id, *args, **kwargs - ) - - def remove(self): - """ - Remove this network. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.remove_network(self.id) - - -class NetworkCollection(Collection): - """ - Networks on the Docker server. - """ - model = Network - - def create(self, name, *args, **kwargs): - """ - Create a network. Similar to the ``docker network create``. - - Args: - name (str): Name of the network - driver (str): Name of the driver used to create the network - options (dict): Driver options as a key-value dictionary - ipam (IPAMConfig): Optional custom IP scheme for the network. - check_duplicate (bool): Request daemon to check for networks with - same name. Default: ``None``. - internal (bool): Restrict external access to the network. Default - ``False``. - labels (dict): Map of labels to set on the network. Default - ``None``. - enable_ipv6 (bool): Enable IPv6 on the network. Default ``False``. - attachable (bool): If enabled, and the network is in the global - scope, non-service containers on worker nodes will be able to - connect to the network. - scope (str): Specify the network's scope (``local``, ``global`` or - ``swarm``) - ingress (bool): If set, create an ingress network which provides - the routing-mesh in swarm mode. - - Returns: - (:py:class:`Network`): The network that was created. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - A network using the bridge driver: - - >>> client.networks.create("network1", driver="bridge") - - You can also create more advanced networks with custom IPAM - configurations. For example, setting the subnet to - ``192.168.52.0/24`` and gateway address to ``192.168.52.254``. - - .. code-block:: python - - >>> ipam_pool = docker.types.IPAMPool( - subnet='192.168.52.0/24', - gateway='192.168.52.254' - ) - >>> ipam_config = docker.types.IPAMConfig( - pool_configs=[ipam_pool] - ) - >>> client.networks.create( - "network1", - driver="bridge", - ipam=ipam_config - ) - - """ - resp = self.client.api.create_network(name, *args, **kwargs) - return self.get(resp['Id']) - - def get(self, network_id, *args, **kwargs): - """ - Get a network by its ID. - - Args: - network_id (str): The ID of the network. - verbose (bool): Retrieve the service details across the cluster in - swarm mode. - scope (str): Filter the network by scope (``swarm``, ``global`` - or ``local``). - - Returns: - (:py:class:`Network`) The network. - - Raises: - :py:class:`docker.errors.NotFound` - If the network does not exist. - - :py:class:`docker.errors.APIError` - If the server returns an error. - - """ - return self.prepare_model( - self.client.api.inspect_network(network_id, *args, **kwargs) - ) - - def list(self, *args, **kwargs): - """ - List networks. Similar to the ``docker network ls`` command. - - Args: - names (:py:class:`list`): List of names to filter by. - ids (:py:class:`list`): List of ids to filter by. - filters (dict): Filters to be processed on the network list. - Available filters: - - ``driver=[]`` Matches a network's driver. - - `label` (str|list): format either ``"key"``, ``"key=value"`` - or a list of such. 
- - ``type=["custom"|"builtin"]`` Filters networks by type. - greedy (bool): Fetch more details for each network individually. - You might want this to get the containers attached to them. - - Returns: - (list of :py:class:`Network`) The networks on the server. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - greedy = kwargs.pop('greedy', False) - resp = self.client.api.networks(*args, **kwargs) - networks = [self.prepare_model(item) for item in resp] - if greedy and version_gte(self.client.api._version, '1.28'): - for net in networks: - net.reload() - return networks - - def prune(self, filters=None): - return self.client.api.prune_networks(filters=filters) - prune.__doc__ = APIClient.prune_networks.__doc__ diff --git a/server/venv/Lib/site-packages/docker/models/nodes.py b/server/venv/Lib/site-packages/docker/models/nodes.py deleted file mode 100644 index 8dd9350..0000000 --- a/server/venv/Lib/site-packages/docker/models/nodes.py +++ /dev/null @@ -1,107 +0,0 @@ -from .resource import Model, Collection - - -class Node(Model): - """A node in a swarm.""" - id_attribute = 'ID' - - @property - def version(self): - """ - The version number of the service. If this is not the same as the - server, the :py:meth:`update` function will not work and you will - need to call :py:meth:`reload` before calling it again. - """ - return self.attrs.get('Version').get('Index') - - def update(self, node_spec): - """ - Update the node's configuration. - - Args: - node_spec (dict): Configuration settings to update. Any values - not provided will be removed. Default: ``None`` - - Returns: - `True` if the request went through. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> node_spec = {'Availability': 'active', - 'Name': 'node-name', - 'Role': 'manager', - 'Labels': {'foo': 'bar'} - } - >>> node.update(node_spec) - - """ - return self.client.api.update_node(self.id, self.version, node_spec) - - def remove(self, force=False): - """ - Remove this node from the swarm. - - Args: - force (bool): Force remove an active node. Default: `False` - - Returns: - `True` if the request was successful. - - Raises: - :py:class:`docker.errors.NotFound` - If the node doesn't exist in the swarm. - - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.remove_node(self.id, force=force) - - -class NodeCollection(Collection): - """Nodes on the Docker server.""" - model = Node - - def get(self, node_id): - """ - Get a node. - - Args: - node_id (string): ID of the node to be inspected. - - Returns: - A :py:class:`Node` object. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.prepare_model(self.client.api.inspect_node(node_id)) - - def list(self, *args, **kwargs): - """ - List swarm nodes. - - Args: - filters (dict): Filters to process on the nodes list. Valid - filters: ``id``, ``name``, ``membership`` and ``role``. - Default: ``None`` - - Returns: - A list of :py:class:`Node` objects. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
- - Example: - - >>> client.nodes.list(filters={'role': 'manager'}) - """ - return [ - self.prepare_model(n) - for n in self.client.api.nodes(*args, **kwargs) - ] diff --git a/server/venv/Lib/site-packages/docker/models/plugins.py b/server/venv/Lib/site-packages/docker/models/plugins.py deleted file mode 100644 index 16f5245..0000000 --- a/server/venv/Lib/site-packages/docker/models/plugins.py +++ /dev/null @@ -1,206 +0,0 @@ -from .. import errors -from .resource import Collection, Model - - -class Plugin(Model): - """ - A plugin on the server. - """ - def __repr__(self): - return f"<{self.__class__.__name__}: '{self.name}'>" - - @property - def name(self): - """ - The plugin's name. - """ - return self.attrs.get('Name') - - @property - def enabled(self): - """ - Whether the plugin is enabled. - """ - return self.attrs.get('Enabled') - - @property - def settings(self): - """ - A dictionary representing the plugin's configuration. - """ - return self.attrs.get('Settings') - - def configure(self, options): - """ - Update the plugin's settings. - - Args: - options (dict): A key-value mapping of options. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - self.client.api.configure_plugin(self.name, options) - self.reload() - - def disable(self, force=False): - """ - Disable the plugin. - - Args: - force (bool): Force disable. Default: False - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - - self.client.api.disable_plugin(self.name, force) - self.reload() - - def enable(self, timeout=0): - """ - Enable the plugin. - - Args: - timeout (int): Timeout in seconds. Default: 0 - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - self.client.api.enable_plugin(self.name, timeout) - self.reload() - - def push(self): - """ - Push the plugin to a remote registry. - - Returns: - A dict iterator streaming the status of the upload. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.push_plugin(self.name) - - def remove(self, force=False): - """ - Remove the plugin from the server. - - Args: - force (bool): Remove even if the plugin is enabled. - Default: False - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.client.api.remove_plugin(self.name, force=force) - - def upgrade(self, remote=None): - """ - Upgrade the plugin. - - Args: - remote (string): Remote reference to upgrade to. The - ``:latest`` tag is optional and is the default if omitted. - Default: this plugin's name. - - Returns: - A generator streaming the decoded API logs - """ - if self.enabled: - raise errors.DockerError( - 'Plugin must be disabled before upgrading.' - ) - - if remote is None: - remote = self.name - privileges = self.client.api.plugin_privileges(remote) - yield from self.client.api.upgrade_plugin( - self.name, - remote, - privileges, - ) - self.reload() - - -class PluginCollection(Collection): - model = Plugin - - def create(self, name, plugin_data_dir, gzip=False): - """ - Create a new plugin. - - Args: - name (string): The name of the plugin. The ``:latest`` tag is - optional, and is the default if omitted. - plugin_data_dir (string): Path to the plugin data directory. - Plugin data directory must contain the ``config.json`` - manifest file and the ``rootfs`` directory. - gzip (bool): Compress the context using gzip. Default: False - - Returns: - (:py:class:`Plugin`): The newly created plugin. 
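-
-        Example (a sketch; the plugin name and data directory below are
-        placeholders for illustration, not part of this module):
-
-            >>> plugin = client.plugins.create(
-            ...     'my-plugin:latest', '/path/to/plugin/data'
-            ... )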
- """ - self.client.api.create_plugin(name, plugin_data_dir, gzip) - return self.get(name) - - def get(self, name): - """ - Gets a plugin. - - Args: - name (str): The name of the plugin. - - Returns: - (:py:class:`Plugin`): The plugin. - - Raises: - :py:class:`docker.errors.NotFound` If the plugin does not - exist. - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.prepare_model(self.client.api.inspect_plugin(name)) - - def install(self, remote_name, local_name=None): - """ - Pull and install a plugin. - - Args: - remote_name (string): Remote reference for the plugin to - install. The ``:latest`` tag is optional, and is the - default if omitted. - local_name (string): Local name for the pulled plugin. - The ``:latest`` tag is optional, and is the default if - omitted. Optional. - - Returns: - (:py:class:`Plugin`): The installed plugin - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - privileges = self.client.api.plugin_privileges(remote_name) - it = self.client.api.pull_plugin(remote_name, privileges, local_name) - for data in it: - pass - return self.get(local_name or remote_name) - - def list(self): - """ - List plugins installed on the server. - - Returns: - (list of :py:class:`Plugin`): The plugins. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - resp = self.client.api.plugins() - return [self.prepare_model(r) for r in resp] diff --git a/server/venv/Lib/site-packages/docker/models/resource.py b/server/venv/Lib/site-packages/docker/models/resource.py deleted file mode 100644 index 89030e5..0000000 --- a/server/venv/Lib/site-packages/docker/models/resource.py +++ /dev/null @@ -1,92 +0,0 @@ -class Model: - """ - A base class for representing a single object on the server. - """ - id_attribute = 'Id' - - def __init__(self, attrs=None, client=None, collection=None): - #: A client pointing at the server that this object is on. - self.client = client - - #: The collection that this model is part of. - self.collection = collection - - #: The raw representation of this object from the API - self.attrs = attrs - if self.attrs is None: - self.attrs = {} - - def __repr__(self): - return f"<{self.__class__.__name__}: {self.short_id}>" - - def __eq__(self, other): - return isinstance(other, self.__class__) and self.id == other.id - - def __hash__(self): - return hash(f"{self.__class__.__name__}:{self.id}") - - @property - def id(self): - """ - The ID of the object. - """ - return self.attrs.get(self.id_attribute) - - @property - def short_id(self): - """ - The ID of the object, truncated to 12 characters. - """ - return self.id[:12] - - def reload(self): - """ - Load this object from the server again and update ``attrs`` with the - new data. - """ - new_model = self.collection.get(self.id) - self.attrs = new_model.attrs - - -class Collection: - """ - A base class for representing all objects of a particular type on the - server. - """ - - #: The type of object this collection represents, set by subclasses - model = None - - def __init__(self, client=None): - #: The client pointing at the server that this collection of objects - #: is on. - self.client = client - - def __call__(self, *args, **kwargs): - raise TypeError( - "'{}' object is not callable. You might be trying to use the old " - "(pre-2.0) API - use docker.APIClient if so." 
- .format(self.__class__.__name__)) - - def list(self): - raise NotImplementedError - - def get(self, key): - raise NotImplementedError - - def create(self, attrs=None): - raise NotImplementedError - - def prepare_model(self, attrs): - """ - Create a model from a set of attributes. - """ - if isinstance(attrs, Model): - attrs.client = self.client - attrs.collection = self - return attrs - elif isinstance(attrs, dict): - return self.model(attrs=attrs, client=self.client, collection=self) - else: - raise Exception("Can't create %s from %s" % - (self.model.__name__, attrs)) diff --git a/server/venv/Lib/site-packages/docker/models/secrets.py b/server/venv/Lib/site-packages/docker/models/secrets.py deleted file mode 100644 index da01d44..0000000 --- a/server/venv/Lib/site-packages/docker/models/secrets.py +++ /dev/null @@ -1,70 +0,0 @@ -from ..api import APIClient -from .resource import Model, Collection - - -class Secret(Model): - """A secret.""" - id_attribute = 'ID' - - def __repr__(self): - return f"<{self.__class__.__name__}: '{self.name}'>" - - @property - def name(self): - return self.attrs['Spec']['Name'] - - def remove(self): - """ - Remove this secret. - - Raises: - :py:class:`docker.errors.APIError` - If secret failed to remove. - """ - return self.client.api.remove_secret(self.id) - - -class SecretCollection(Collection): - """Secrets on the Docker server.""" - model = Secret - - def create(self, **kwargs): - obj = self.client.api.create_secret(**kwargs) - obj.setdefault("Spec", {})["Name"] = kwargs.get("name") - return self.prepare_model(obj) - create.__doc__ = APIClient.create_secret.__doc__ - - def get(self, secret_id): - """ - Get a secret. - - Args: - secret_id (str): Secret ID. - - Returns: - (:py:class:`Secret`): The secret. - - Raises: - :py:class:`docker.errors.NotFound` - If the secret does not exist. - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.prepare_model(self.client.api.inspect_secret(secret_id)) - - def list(self, **kwargs): - """ - List secrets. Similar to the ``docker secret ls`` command. - - Args: - filters (dict): Server-side list filtering options. - - Returns: - (list of :py:class:`Secret`): The secrets. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - resp = self.client.api.secrets(**kwargs) - return [self.prepare_model(obj) for obj in resp] diff --git a/server/venv/Lib/site-packages/docker/models/services.py b/server/venv/Lib/site-packages/docker/models/services.py deleted file mode 100644 index 7003704..0000000 --- a/server/venv/Lib/site-packages/docker/models/services.py +++ /dev/null @@ -1,388 +0,0 @@ -import copy -from docker.errors import create_unexpected_kwargs_error, InvalidArgument -from docker.types import TaskTemplate, ContainerSpec, Placement, ServiceMode -from .resource import Model, Collection - - -class Service(Model): - """A service.""" - id_attribute = 'ID' - - @property - def name(self): - """The service's name.""" - return self.attrs['Spec']['Name'] - - @property - def version(self): - """ - The version number of the service. If this is not the same as the - server, the :py:meth:`update` function will not work and you will - need to call :py:meth:`reload` before calling it again. - """ - return self.attrs.get('Version').get('Index') - - def remove(self): - """ - Stop and remove the service. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
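-
-        Example (sketch; the service name is a placeholder):
-
-            >>> service = client.services.get('my-service')
-            >>> service.remove()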
- """ - return self.client.api.remove_service(self.id) - - def tasks(self, filters=None): - """ - List the tasks in this service. - - Args: - filters (dict): A map of filters to process on the tasks list. - Valid filters: ``id``, ``name``, ``node``, - ``label``, and ``desired-state``. - - Returns: - :py:class:`list`: List of task dictionaries. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - if filters is None: - filters = {} - filters['service'] = self.id - return self.client.api.tasks(filters=filters) - - def update(self, **kwargs): - """ - Update a service's configuration. Similar to the ``docker service - update`` command. - - Takes the same parameters as :py:meth:`~ServiceCollection.create`. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - # Image is required, so if it hasn't been set, use current image - if 'image' not in kwargs: - spec = self.attrs['Spec']['TaskTemplate']['ContainerSpec'] - kwargs['image'] = spec['Image'] - - if kwargs.get('force_update') is True: - task_template = self.attrs['Spec']['TaskTemplate'] - current_value = int(task_template.get('ForceUpdate', 0)) - kwargs['force_update'] = current_value + 1 - - create_kwargs = _get_create_service_kwargs('update', kwargs) - - return self.client.api.update_service( - self.id, - self.version, - **create_kwargs - ) - - def logs(self, **kwargs): - """ - Get log stream for the service. - Note: This method works only for services with the ``json-file`` - or ``journald`` logging drivers. - - Args: - details (bool): Show extra details provided to logs. - Default: ``False`` - follow (bool): Keep connection open to read logs as they are - sent by the Engine. Default: ``False`` - stdout (bool): Return logs from ``stdout``. Default: ``False`` - stderr (bool): Return logs from ``stderr``. Default: ``False`` - since (int): UNIX timestamp for the logs staring point. - Default: 0 - timestamps (bool): Add timestamps to every log line. - tail (string or int): Number of log lines to be returned, - counting from the current end of the logs. Specify an - integer or ``'all'`` to output all log lines. - Default: ``all`` - - Returns: - generator: Logs for the service. - """ - is_tty = self.attrs['Spec']['TaskTemplate']['ContainerSpec'].get( - 'TTY', False - ) - return self.client.api.service_logs(self.id, is_tty=is_tty, **kwargs) - - def scale(self, replicas): - """ - Scale service container. - - Args: - replicas (int): The number of containers that should be running. - - Returns: - bool: ``True`` if successful. - """ - - if 'Global' in self.attrs['Spec']['Mode'].keys(): - raise InvalidArgument('Cannot scale a global container') - - service_mode = ServiceMode('replicated', replicas) - return self.client.api.update_service(self.id, self.version, - mode=service_mode, - fetch_current_spec=True) - - def force_update(self): - """ - Force update the service even if no changes require it. - - Returns: - bool: ``True`` if successful. - """ - - return self.update(force_update=True, fetch_current_spec=True) - - -class ServiceCollection(Collection): - """Services on the Docker server.""" - model = Service - - def create(self, image, command=None, **kwargs): - """ - Create a service. Similar to the ``docker service create`` command. - - Args: - image (str): The image name to use for the containers. - command (list of str or str): Command to run. - args (list of str): Arguments to the command. - constraints (list of str): :py:class:`~docker.types.Placement` - constraints. 
- preferences (list of tuple): :py:class:`~docker.types.Placement` - preferences. - maxreplicas (int): :py:class:`~docker.types.Placement` maxreplicas - or (int) representing maximum number of replicas per node. - platforms (list of tuple): A list of platform constraints - expressed as ``(arch, os)`` tuples. - container_labels (dict): Labels to apply to the container. - endpoint_spec (EndpointSpec): Properties that can be configured to - access and load balance a service. Default: ``None``. - env (list of str): Environment variables, in the form - ``KEY=val``. - hostname (string): Hostname to set on the container. - init (boolean): Run an init inside the container that forwards - signals and reaps processes - isolation (string): Isolation technology used by the service's - containers. Only used for Windows containers. - labels (dict): Labels to apply to the service. - log_driver (str): Log driver to use for containers. - log_driver_options (dict): Log driver options. - mode (ServiceMode): Scheduling mode for the service. - Default:``None`` - mounts (list of str): Mounts for the containers, in the form - ``source:target:options``, where options is either - ``ro`` or ``rw``. - name (str): Name to give to the service. - networks (:py:class:`list`): List of network names or IDs or - :py:class:`~docker.types.NetworkAttachmentConfig` to attach the - service to. Default: ``None``. - resources (Resources): Resource limits and reservations. - restart_policy (RestartPolicy): Restart policy for containers. - secrets (list of :py:class:`~docker.types.SecretReference`): List - of secrets accessible to containers for this service. - stop_grace_period (int): Amount of time to wait for - containers to terminate before forcefully killing them. - update_config (UpdateConfig): Specification for the update strategy - of the service. Default: ``None`` - rollback_config (RollbackConfig): Specification for the rollback - strategy of the service. Default: ``None`` - user (str): User to run commands as. - workdir (str): Working directory for commands to run. - tty (boolean): Whether a pseudo-TTY should be allocated. - groups (:py:class:`list`): A list of additional groups that the - container process will run as. - open_stdin (boolean): Open ``stdin`` - read_only (boolean): Mount the container's root filesystem as read - only. - stop_signal (string): Set signal to stop the service's containers - healthcheck (Healthcheck): Healthcheck - configuration for this service. - hosts (:py:class:`dict`): A set of host to IP mappings to add to - the container's `hosts` file. - dns_config (DNSConfig): Specification for DNS - related configurations in resolver configuration file. - configs (:py:class:`list`): List of - :py:class:`~docker.types.ConfigReference` that will be exposed - to the service. - privileges (Privileges): Security options for the service's - containers. - cap_add (:py:class:`list`): A list of kernel capabilities to add to - the default set for the container. - cap_drop (:py:class:`list`): A list of kernel capabilities to drop - from the default set for the container. - sysctls (:py:class:`dict`): A dict of sysctl values to add to the - container - - Returns: - :py:class:`Service`: The created service. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. 
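-
-        Example (a minimal sketch; the image, service name and replica count
-        below are illustrative, not defaults of this method):
-
-            >>> service = client.services.create(
-            ...     'nginx:alpine',
-            ...     name='web',
-            ...     mode=docker.types.ServiceMode('replicated', replicas=3)
-            ... )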
- """ - kwargs['image'] = image - kwargs['command'] = command - create_kwargs = _get_create_service_kwargs('create', kwargs) - service_id = self.client.api.create_service(**create_kwargs) - return self.get(service_id) - - def get(self, service_id, insert_defaults=None): - """ - Get a service. - - Args: - service_id (str): The ID of the service. - insert_defaults (boolean): If true, default values will be merged - into the output. - - Returns: - :py:class:`Service`: The service. - - Raises: - :py:class:`docker.errors.NotFound` - If the service does not exist. - :py:class:`docker.errors.APIError` - If the server returns an error. - :py:class:`docker.errors.InvalidVersion` - If one of the arguments is not supported with the current - API version. - """ - return self.prepare_model( - self.client.api.inspect_service(service_id, insert_defaults) - ) - - def list(self, **kwargs): - """ - List services. - - Args: - filters (dict): Filters to process on the nodes list. Valid - filters: ``id``, ``name`` , ``label`` and ``mode``. - Default: ``None``. - status (bool): Include the service task count of running and - desired tasks. Default: ``None``. - - Returns: - list of :py:class:`Service`: The services. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return [ - self.prepare_model(s) - for s in self.client.api.services(**kwargs) - ] - - -# kwargs to copy straight over to ContainerSpec -CONTAINER_SPEC_KWARGS = [ - 'args', - 'cap_add', - 'cap_drop', - 'command', - 'configs', - 'dns_config', - 'env', - 'groups', - 'healthcheck', - 'hostname', - 'hosts', - 'image', - 'init', - 'isolation', - 'labels', - 'mounts', - 'open_stdin', - 'privileges', - 'read_only', - 'secrets', - 'stop_grace_period', - 'stop_signal', - 'tty', - 'user', - 'workdir', - 'sysctls', -] - -# kwargs to copy straight over to TaskTemplate -TASK_TEMPLATE_KWARGS = [ - 'networks', - 'resources', - 'restart_policy', -] - -# kwargs to copy straight over to create_service -CREATE_SERVICE_KWARGS = [ - 'name', - 'labels', - 'mode', - 'update_config', - 'rollback_config', - 'endpoint_spec', -] - -PLACEMENT_KWARGS = [ - 'constraints', - 'preferences', - 'platforms', - 'maxreplicas', -] - - -def _get_create_service_kwargs(func_name, kwargs): - # Copy over things which can be copied directly - create_kwargs = {} - for key in copy.copy(kwargs): - if key in CREATE_SERVICE_KWARGS: - create_kwargs[key] = kwargs.pop(key) - container_spec_kwargs = {} - for key in copy.copy(kwargs): - if key in CONTAINER_SPEC_KWARGS: - container_spec_kwargs[key] = kwargs.pop(key) - task_template_kwargs = {} - for key in copy.copy(kwargs): - if key in TASK_TEMPLATE_KWARGS: - task_template_kwargs[key] = kwargs.pop(key) - - if 'container_labels' in kwargs: - container_spec_kwargs['labels'] = kwargs.pop('container_labels') - - placement = {} - for key in copy.copy(kwargs): - if key in PLACEMENT_KWARGS: - placement[key] = kwargs.pop(key) - placement = Placement(**placement) - task_template_kwargs['placement'] = placement - - if 'log_driver' in kwargs: - task_template_kwargs['log_driver'] = { - 'Name': kwargs.pop('log_driver'), - 'Options': kwargs.pop('log_driver_options', {}) - } - - if func_name == 'update': - if 'force_update' in kwargs: - task_template_kwargs['force_update'] = kwargs.pop('force_update') - - # fetch the current spec by default if updating the service - # through the model - fetch_current_spec = kwargs.pop('fetch_current_spec', True) - create_kwargs['fetch_current_spec'] = fetch_current_spec - - # All kwargs should 
-    # an error if any are left
-    if kwargs:
-        raise create_unexpected_kwargs_error(func_name, kwargs)
-
-    container_spec = ContainerSpec(**container_spec_kwargs)
-    task_template_kwargs['container_spec'] = container_spec
-    create_kwargs['task_template'] = TaskTemplate(**task_template_kwargs)
-    return create_kwargs
diff --git a/server/venv/Lib/site-packages/docker/models/swarm.py b/server/venv/Lib/site-packages/docker/models/swarm.py
deleted file mode 100644
index 1e39f3f..0000000
--- a/server/venv/Lib/site-packages/docker/models/swarm.py
+++ /dev/null
@@ -1,189 +0,0 @@
-from docker.api import APIClient
-from docker.errors import APIError
-from .resource import Model
-
-
-class Swarm(Model):
-    """
-    The server's Swarm state. This is a singleton that must be reloaded to
-    get the current state of the Swarm.
-    """
-    id_attribute = 'ID'
-
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        if self.client:
-            try:
-                self.reload()
-            except APIError as e:
-                # FIXME: https://github.com/docker/docker/issues/29192
-                if e.response.status_code not in (406, 503):
-                    raise
-
-    @property
-    def version(self):
-        """
-        The version number of the swarm. If this is not the same as the
-        server, the :py:meth:`update` function will not work and you will
-        need to call :py:meth:`reload` before calling it again.
-        """
-        return self.attrs.get('Version').get('Index')
-
-    def get_unlock_key(self):
-        return self.client.api.get_unlock_key()
-    get_unlock_key.__doc__ = APIClient.get_unlock_key.__doc__
-
-    def init(self, advertise_addr=None, listen_addr='0.0.0.0:2377',
-             force_new_cluster=False, default_addr_pool=None,
-             subnet_size=None, data_path_addr=None, data_path_port=None,
-             **kwargs):
-        """
-        Initialize a new swarm on this Engine.
-
-        Args:
-            advertise_addr (str): Externally reachable address advertised to
-                other nodes. This can either be an address/port combination in
-                the form ``192.168.1.1:4567``, or an interface followed by a
-                port number, like ``eth0:4567``. If the port number is omitted,
-                the port number from the listen address is used.
-
-                If not specified, it will be automatically detected when
-                possible.
-            listen_addr (str): Listen address used for inter-manager
-                communication, as well as determining the networking interface
-                used for the VXLAN Tunnel Endpoint (VTEP). This can either be
-                an address/port combination in the form ``192.168.1.1:4567``,
-                or an interface followed by a port number, like ``eth0:4567``.
-                If the port number is omitted, the default swarm listening port
-                is used. Default: ``0.0.0.0:2377``
-            force_new_cluster (bool): Force creating a new Swarm, even if
-                already part of one. Default: False
-            default_addr_pool (list of str): Default Address Pool specifies
-                default subnet pools for global scope networks. Each pool
-                should be specified as a CIDR block, like '10.0.0.0/8'.
-                Default: None
-            subnet_size (int): SubnetSize specifies the subnet size of the
-                networks created from the default subnet pool. Default: None
-            data_path_addr (string): Address or interface to use for data path
-                traffic. For example, 192.168.1.1, or an interface, like eth0.
-            data_path_port (int): Port number to use for data path traffic.
-                Acceptable port range is 1024 to 49151. If set to ``None`` or
-                0, the default port 4789 will be used. Default: None
-            task_history_retention_limit (int): Maximum number of task
-                history entries stored.
-            snapshot_interval (int): Number of log entries between snapshots.
-            keep_old_snapshots (int): Number of snapshots to keep beyond the
-                current snapshot.
-            log_entries_for_slow_followers (int): Number of log entries to
-                keep around to sync up slow followers after a snapshot is
-                created.
-            heartbeat_tick (int): Amount of ticks (in seconds) between each
-                heartbeat.
-            election_tick (int): Amount of ticks (in seconds) needed without a
-                leader to trigger a new election.
-            dispatcher_heartbeat_period (int): The delay for an agent to send
-                a heartbeat to the dispatcher.
-            node_cert_expiry (int): Automatic expiry for nodes certificates.
-            external_ca (dict): Configuration for forwarding signing requests
-                to an external certificate authority. Use
-                ``docker.types.SwarmExternalCA``.
-            name (string): Swarm's name
-            labels (dict): User-defined key/value metadata.
-            signing_ca_cert (str): The desired signing CA certificate for all
-                swarm node TLS leaf certificates, in PEM format.
-            signing_ca_key (str): The desired signing CA key for all swarm
-                node TLS leaf certificates, in PEM format.
-            ca_force_rotate (int): An integer whose purpose is to force swarm
-                to generate a new signing CA certificate and key, if none have
-                been specified.
-            autolock_managers (boolean): If set, generate a key and use it to
-                lock data stored on the managers.
-            log_driver (DriverConfig): The default log driver to use for tasks
-                created in the orchestrator.
-
-        Returns:
-            (str): The ID of the created node.
-
-        Raises:
-            :py:class:`docker.errors.APIError`
-                If the server returns an error.
-
-        Example:
-
-            >>> client.swarm.init(
-                advertise_addr='eth0', listen_addr='0.0.0.0:5000',
-                force_new_cluster=False, default_addr_pool=['10.20.0.0/16'],
-                subnet_size=24, snapshot_interval=5000,
-                log_entries_for_slow_followers=1200
-            )
-
-        """
-        init_kwargs = {
-            'advertise_addr': advertise_addr,
-            'listen_addr': listen_addr,
-            'force_new_cluster': force_new_cluster,
-            'default_addr_pool': default_addr_pool,
-            'subnet_size': subnet_size,
-            'data_path_addr': data_path_addr,
-            'data_path_port': data_path_port,
-        }
-        init_kwargs['swarm_spec'] = self.client.api.create_swarm_spec(**kwargs)
-        node_id = self.client.api.init_swarm(**init_kwargs)
-        self.reload()
-        return node_id
-
-    def join(self, *args, **kwargs):
-        return self.client.api.join_swarm(*args, **kwargs)
-    join.__doc__ = APIClient.join_swarm.__doc__
-
-    def leave(self, *args, **kwargs):
-        return self.client.api.leave_swarm(*args, **kwargs)
-    leave.__doc__ = APIClient.leave_swarm.__doc__
-
-    def reload(self):
-        """
-        Inspect the swarm on the server and store the response in
-        :py:attr:`attrs`.
-
-        Raises:
-            :py:class:`docker.errors.APIError`
-                If the server returns an error.
-        """
-        self.attrs = self.client.api.inspect_swarm()
-
-    def unlock(self, key):
-        return self.client.api.unlock_swarm(key)
-    unlock.__doc__ = APIClient.unlock_swarm.__doc__
-
-    def update(self, rotate_worker_token=False, rotate_manager_token=False,
-               rotate_manager_unlock_key=False, **kwargs):
-        """
-        Update the swarm's configuration.
-
-        It takes the same arguments as :py:meth:`init`, except
-        ``advertise_addr``, ``listen_addr``, and ``force_new_cluster``. In
-        addition, it takes these arguments:
-
-        Args:
-            rotate_worker_token (bool): Rotate the worker join token. Default:
-                ``False``.
-            rotate_manager_token (bool): Rotate the manager join token.
-                Default: ``False``.
-            rotate_manager_unlock_key (bool): Rotate the manager unlock key.
-                Default: ``False``.
-        Raises:
-            :py:class:`docker.errors.APIError`
-                If the server returns an error.
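-
-        Example (sketch; rotates only the worker join token):
-
-            >>> client.swarm.update(rotate_worker_token=True)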
- - """ - # this seems to have to be set - if kwargs.get('node_cert_expiry') is None: - kwargs['node_cert_expiry'] = 7776000000000000 - - return self.client.api.update_swarm( - version=self.version, - swarm_spec=self.client.api.create_swarm_spec(**kwargs), - rotate_worker_token=rotate_worker_token, - rotate_manager_token=rotate_manager_token, - rotate_manager_unlock_key=rotate_manager_unlock_key - ) diff --git a/server/venv/Lib/site-packages/docker/models/volumes.py b/server/venv/Lib/site-packages/docker/models/volumes.py deleted file mode 100644 index 3c2e837..0000000 --- a/server/venv/Lib/site-packages/docker/models/volumes.py +++ /dev/null @@ -1,99 +0,0 @@ -from ..api import APIClient -from .resource import Model, Collection - - -class Volume(Model): - """A volume.""" - id_attribute = 'Name' - - @property - def name(self): - """The name of the volume.""" - return self.attrs['Name'] - - def remove(self, force=False): - """ - Remove this volume. - - Args: - force (bool): Force removal of volumes that were already removed - out of band by the volume driver plugin. - Raises: - :py:class:`docker.errors.APIError` - If volume failed to remove. - """ - return self.client.api.remove_volume(self.id, force=force) - - -class VolumeCollection(Collection): - """Volumes on the Docker server.""" - model = Volume - - def create(self, name=None, **kwargs): - """ - Create a volume. - - Args: - name (str): Name of the volume. If not specified, the engine - generates a name. - driver (str): Name of the driver used to create the volume - driver_opts (dict): Driver options as a key-value dictionary - labels (dict): Labels to set on the volume - - Returns: - (:py:class:`Volume`): The volume created. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - - Example: - - >>> volume = client.volumes.create(name='foobar', driver='local', - driver_opts={'foo': 'bar', 'baz': 'false'}, - labels={"key": "value"}) - - """ - obj = self.client.api.create_volume(name, **kwargs) - return self.prepare_model(obj) - - def get(self, volume_id): - """ - Get a volume. - - Args: - volume_id (str): Volume name. - - Returns: - (:py:class:`Volume`): The volume. - - Raises: - :py:class:`docker.errors.NotFound` - If the volume does not exist. - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - return self.prepare_model(self.client.api.inspect_volume(volume_id)) - - def list(self, **kwargs): - """ - List volumes. Similar to the ``docker volume ls`` command. - - Args: - filters (dict): Server-side list filtering options. - - Returns: - (list of :py:class:`Volume`): The volumes. - - Raises: - :py:class:`docker.errors.APIError` - If the server returns an error. - """ - resp = self.client.api.volumes(**kwargs) - if not resp.get('Volumes'): - return [] - return [self.prepare_model(obj) for obj in resp['Volumes']] - - def prune(self, filters=None): - return self.client.api.prune_volumes(filters=filters) - prune.__doc__ = APIClient.prune_volumes.__doc__ diff --git a/server/venv/Lib/site-packages/docker/tls.py b/server/venv/Lib/site-packages/docker/tls.py deleted file mode 100644 index f4dffb2..0000000 --- a/server/venv/Lib/site-packages/docker/tls.py +++ /dev/null @@ -1,94 +0,0 @@ -import os -import ssl - -from . import errors -from .transport import SSLHTTPAdapter - - -class TLSConfig: - """ - TLS configuration. - - Args: - client_cert (tuple of str): Path to client cert, path to client key. - ca_cert (str): Path to CA cert file. 
-        verify (bool or str): This can be a bool or a path to a CA cert
-            file to verify against. If ``True``, verify using ca_cert;
-            if ``False`` or not specified, do not verify.
-        ssl_version (int): A valid `SSL version`_.
-        assert_hostname (bool): Verify the hostname of the server.
-
-    .. _`SSL version`:
-        https://docs.python.org/3.5/library/ssl.html#ssl.PROTOCOL_TLSv1
-    """
-    cert = None
-    ca_cert = None
-    verify = None
-    ssl_version = None
-
-    def __init__(self, client_cert=None, ca_cert=None, verify=None,
-                 ssl_version=None, assert_hostname=None,
-                 assert_fingerprint=None):
-        # Argument compatibility/mapping with
-        # https://docs.docker.com/engine/articles/https/
-        # This diverges from the Docker CLI in that users can specify 'tls'
-        # here, but also disable any public/default CA pool verification by
-        # leaving verify=False
-
-        self.assert_hostname = assert_hostname
-        self.assert_fingerprint = assert_fingerprint
-
-        # If the user provides an SSL version, we should use their preference
-        if ssl_version:
-            self.ssl_version = ssl_version
-        else:
-            self.ssl_version = ssl.PROTOCOL_TLS_CLIENT
-
-        # "client_cert" must have both or neither of the cert/key files.
-        # Alert the user when both are expected but either one is missing.
-
-        if client_cert:
-            try:
-                tls_cert, tls_key = client_cert
-            except ValueError:
-                raise errors.TLSParameterError(
-                    'client_cert must be a tuple of'
-                    ' (client certificate, key file)'
-                )
-
-            if not (tls_cert and tls_key) or (not os.path.isfile(tls_cert) or
-                                              not os.path.isfile(tls_key)):
-                raise errors.TLSParameterError(
-                    'Paths to the certificate and key files must be provided'
-                    ' through the client_cert param'
                )
-            self.cert = (tls_cert, tls_key)
-
-        # If verify is set, make sure the cert exists
-        self.verify = verify
-        self.ca_cert = ca_cert
-        if self.verify and self.ca_cert and not os.path.isfile(self.ca_cert):
-            raise errors.TLSParameterError(
-                'Invalid CA certificate provided for `ca_cert`.'
-            )
-
-    def configure_client(self, client):
-        """
-        Configure a client with these TLS options.
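-
-        Example (a sketch; this is normally invoked for you when a client is
-        constructed with ``tls=TLSConfig(...)``, and the certificate paths
-        below are placeholders):
-
-            >>> tls_config = TLSConfig(
-            ...     client_cert=('/certs/cert.pem', '/certs/key.pem'),
-            ...     ca_cert='/certs/ca.pem', verify=True
-            ... )
-            >>> tls_config.configure_client(client)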
- """ - client.ssl_version = self.ssl_version - - if self.verify and self.ca_cert: - client.verify = self.ca_cert - else: - client.verify = self.verify - - if self.cert: - client.cert = self.cert - - client.mount('https://', SSLHTTPAdapter( - ssl_version=self.ssl_version, - assert_hostname=self.assert_hostname, - assert_fingerprint=self.assert_fingerprint, - )) diff --git a/server/venv/Lib/site-packages/docker/transport/__init__.py b/server/venv/Lib/site-packages/docker/transport/__init__.py deleted file mode 100644 index e37fc3b..0000000 --- a/server/venv/Lib/site-packages/docker/transport/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# flake8: noqa -from .unixconn import UnixHTTPAdapter -from .ssladapter import SSLHTTPAdapter -try: - from .npipeconn import NpipeHTTPAdapter - from .npipesocket import NpipeSocket -except ImportError: - pass - -try: - from .sshconn import SSHHTTPAdapter -except ImportError: - pass diff --git a/server/venv/Lib/site-packages/docker/transport/basehttpadapter.py b/server/venv/Lib/site-packages/docker/transport/basehttpadapter.py deleted file mode 100644 index dfbb193..0000000 --- a/server/venv/Lib/site-packages/docker/transport/basehttpadapter.py +++ /dev/null @@ -1,8 +0,0 @@ -import requests.adapters - - -class BaseHTTPAdapter(requests.adapters.HTTPAdapter): - def close(self): - super().close() - if hasattr(self, 'pools'): - self.pools.clear() diff --git a/server/venv/Lib/site-packages/docker/transport/npipeconn.py b/server/venv/Lib/site-packages/docker/transport/npipeconn.py deleted file mode 100644 index 45988b2..0000000 --- a/server/venv/Lib/site-packages/docker/transport/npipeconn.py +++ /dev/null @@ -1,103 +0,0 @@ -import queue -import requests.adapters - -from docker.transport.basehttpadapter import BaseHTTPAdapter -from .. import constants -from .npipesocket import NpipeSocket - -import urllib3 -import urllib3.connection - -RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer - - -class NpipeHTTPConnection(urllib3.connection.HTTPConnection): - def __init__(self, npipe_path, timeout=60): - super().__init__( - 'localhost', timeout=timeout - ) - self.npipe_path = npipe_path - self.timeout = timeout - - def connect(self): - sock = NpipeSocket() - sock.settimeout(self.timeout) - sock.connect(self.npipe_path) - self.sock = sock - - -class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): - def __init__(self, npipe_path, timeout=60, maxsize=10): - super().__init__( - 'localhost', timeout=timeout, maxsize=maxsize - ) - self.npipe_path = npipe_path - self.timeout = timeout - - def _new_conn(self): - return NpipeHTTPConnection( - self.npipe_path, self.timeout - ) - - # When re-using connections, urllib3 tries to call select() on our - # NpipeSocket instance, causing a crash. To circumvent this, we override - # _get_conn, where that check happens. - def _get_conn(self, timeout): - conn = None - try: - conn = self.pool.get(block=self.block, timeout=timeout) - - except AttributeError: # self.pool is None - raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") - - except queue.Empty: - if self.block: - raise urllib3.exceptions.EmptyPoolError( - self, - "Pool reached maximum size and no more " - "connections are allowed." 
- ) - # Oh well, we'll create a new connection then - - return conn or self._new_conn() - - -class NpipeHTTPAdapter(BaseHTTPAdapter): - - __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['npipe_path', - 'pools', - 'timeout', - 'max_pool_size'] - - def __init__(self, base_url, timeout=60, - pool_connections=constants.DEFAULT_NUM_POOLS, - max_pool_size=constants.DEFAULT_MAX_POOL_SIZE): - self.npipe_path = base_url.replace('npipe://', '') - self.timeout = timeout - self.max_pool_size = max_pool_size - self.pools = RecentlyUsedContainer( - pool_connections, dispose_func=lambda p: p.close() - ) - super().__init__() - - def get_connection(self, url, proxies=None): - with self.pools.lock: - pool = self.pools.get(url) - if pool: - return pool - - pool = NpipeHTTPConnectionPool( - self.npipe_path, self.timeout, - maxsize=self.max_pool_size - ) - self.pools[url] = pool - - return pool - - def request_url(self, request, proxies): - # The select_proxy utility in requests errors out when the provided URL - # doesn't have a hostname, like is the case when using a UNIX socket. - # Since proxies are an irrelevant notion in the case of UNIX sockets - # anyway, we simply return the path URL directly. - # See also: https://github.com/docker/docker-sdk-python/issues/811 - return request.path_url diff --git a/server/venv/Lib/site-packages/docker/transport/npipesocket.py b/server/venv/Lib/site-packages/docker/transport/npipesocket.py deleted file mode 100644 index 9cbe40c..0000000 --- a/server/venv/Lib/site-packages/docker/transport/npipesocket.py +++ /dev/null @@ -1,230 +0,0 @@ -import functools -import time -import io - -import win32file -import win32pipe -import pywintypes -import win32event -import win32api - -cERROR_PIPE_BUSY = 0xe7 -cSECURITY_SQOS_PRESENT = 0x100000 -cSECURITY_ANONYMOUS = 0 - -MAXIMUM_RETRY_COUNT = 10 - - -def check_closed(f): - @functools.wraps(f) - def wrapped(self, *args, **kwargs): - if self._closed: - raise RuntimeError( - 'Can not reuse socket after connection was closed.' - ) - return f(self, *args, **kwargs) - return wrapped - - -class NpipeSocket: - """ Partial implementation of the socket API over windows named pipes. - This implementation is only designed to be used as a client socket, - and server-specific methods (bind, listen, accept...) are not - implemented. - """ - - def __init__(self, handle=None): - self._timeout = win32pipe.NMPWAIT_USE_DEFAULT_WAIT - self._handle = handle - self._closed = False - - def accept(self): - raise NotImplementedError() - - def bind(self, address): - raise NotImplementedError() - - def close(self): - self._handle.Close() - self._closed = True - - @check_closed - def connect(self, address, retry_count=0): - try: - handle = win32file.CreateFile( - address, - win32file.GENERIC_READ | win32file.GENERIC_WRITE, - 0, - None, - win32file.OPEN_EXISTING, - (cSECURITY_ANONYMOUS - | cSECURITY_SQOS_PRESENT - | win32file.FILE_FLAG_OVERLAPPED), - 0 - ) - except win32pipe.error as e: - # See Remarks: - # https://msdn.microsoft.com/en-us/library/aa365800.aspx - if e.winerror == cERROR_PIPE_BUSY: - # Another program or thread has grabbed our pipe instance - # before we got to it. Wait for availability and attempt to - # connect again. 
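-                # Retries are capped at MAXIMUM_RETRY_COUNT, sleeping one
-                # second between attempts before re-raising the error.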
- retry_count = retry_count + 1 - if (retry_count < MAXIMUM_RETRY_COUNT): - time.sleep(1) - return self.connect(address, retry_count) - raise e - - self.flags = win32pipe.GetNamedPipeInfo(handle)[0] - - self._handle = handle - self._address = address - - @check_closed - def connect_ex(self, address): - return self.connect(address) - - @check_closed - def detach(self): - self._closed = True - return self._handle - - @check_closed - def dup(self): - return NpipeSocket(self._handle) - - def getpeername(self): - return self._address - - def getsockname(self): - return self._address - - def getsockopt(self, level, optname, buflen=None): - raise NotImplementedError() - - def ioctl(self, control, option): - raise NotImplementedError() - - def listen(self, backlog): - raise NotImplementedError() - - def makefile(self, mode=None, bufsize=None): - if mode.strip('b') != 'r': - raise NotImplementedError() - rawio = NpipeFileIOBase(self) - if bufsize is None or bufsize <= 0: - bufsize = io.DEFAULT_BUFFER_SIZE - return io.BufferedReader(rawio, buffer_size=bufsize) - - @check_closed - def recv(self, bufsize, flags=0): - err, data = win32file.ReadFile(self._handle, bufsize) - return data - - @check_closed - def recvfrom(self, bufsize, flags=0): - data = self.recv(bufsize, flags) - return (data, self._address) - - @check_closed - def recvfrom_into(self, buf, nbytes=0, flags=0): - return self.recv_into(buf, nbytes, flags), self._address - - @check_closed - def recv_into(self, buf, nbytes=0): - readbuf = buf - if not isinstance(buf, memoryview): - readbuf = memoryview(buf) - - event = win32event.CreateEvent(None, True, True, None) - try: - overlapped = pywintypes.OVERLAPPED() - overlapped.hEvent = event - err, data = win32file.ReadFile( - self._handle, - readbuf[:nbytes] if nbytes else readbuf, - overlapped - ) - wait_result = win32event.WaitForSingleObject(event, self._timeout) - if wait_result == win32event.WAIT_TIMEOUT: - win32file.CancelIo(self._handle) - raise TimeoutError - return win32file.GetOverlappedResult(self._handle, overlapped, 0) - finally: - win32api.CloseHandle(event) - - @check_closed - def send(self, string, flags=0): - event = win32event.CreateEvent(None, True, True, None) - try: - overlapped = pywintypes.OVERLAPPED() - overlapped.hEvent = event - win32file.WriteFile(self._handle, string, overlapped) - wait_result = win32event.WaitForSingleObject(event, self._timeout) - if wait_result == win32event.WAIT_TIMEOUT: - win32file.CancelIo(self._handle) - raise TimeoutError - return win32file.GetOverlappedResult(self._handle, overlapped, 0) - finally: - win32api.CloseHandle(event) - - @check_closed - def sendall(self, string, flags=0): - return self.send(string, flags) - - @check_closed - def sendto(self, string, address): - self.connect(address) - return self.send(string) - - def setblocking(self, flag): - if flag: - return self.settimeout(None) - return self.settimeout(0) - - def settimeout(self, value): - if value is None: - # Blocking mode - self._timeout = win32event.INFINITE - elif not isinstance(value, (float, int)) or value < 0: - raise ValueError('Timeout value out of range') - else: - # Timeout mode - Value converted to milliseconds - self._timeout = int(value * 1000) - - def gettimeout(self): - return self._timeout - - def setsockopt(self, level, optname, value): - raise NotImplementedError() - - @check_closed - def shutdown(self, how): - return self.close() - - -class NpipeFileIOBase(io.RawIOBase): - def __init__(self, npipe_socket): - self.sock = npipe_socket - - def close(self): - 
super().close()
-        self.sock = None
-
-    def fileno(self):
-        return self.sock.fileno()
-
-    def isatty(self):
-        return False
-
-    def readable(self):
-        return True
-
-    def readinto(self, buf):
-        return self.sock.recv_into(buf)
-
-    def seekable(self):
-        return False
-
-    def writable(self):
-        return False
diff --git a/server/venv/Lib/site-packages/docker/transport/sshconn.py b/server/venv/Lib/site-packages/docker/transport/sshconn.py
deleted file mode 100644
index a92beb6..0000000
--- a/server/venv/Lib/site-packages/docker/transport/sshconn.py
+++ /dev/null
@@ -1,250 +0,0 @@
-import paramiko
-import queue
-import urllib.parse
-import requests.adapters
-import logging
-import os
-import signal
-import socket
-import subprocess
-
-from docker.transport.basehttpadapter import BaseHTTPAdapter
-from .. import constants
-
-import urllib3
-import urllib3.connection
-
-RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
-
-
-class SSHSocket(socket.socket):
-    def __init__(self, host):
-        super().__init__(
-            socket.AF_INET, socket.SOCK_STREAM)
-        self.host = host
-        self.port = None
-        self.user = None
-        if ':' in self.host:
-            self.host, self.port = self.host.split(':')
-        if '@' in self.host:
-            self.user, self.host = self.host.split('@')
-
-        self.proc = None
-
-    def connect(self, **kwargs):
-        args = ['ssh']
-        if self.user:
-            args = args + ['-l', self.user]
-
-        if self.port:
-            args = args + ['-p', self.port]
-
-        args = args + ['--', self.host, 'docker system dial-stdio']
-
-        preexec_func = None
-        if not constants.IS_WINDOWS_PLATFORM:
-            def f():
-                signal.signal(signal.SIGINT, signal.SIG_IGN)
-            preexec_func = f
-
-        env = dict(os.environ)
-
-        # drop LD_LIBRARY_PATH and SSL_CERT_FILE
-        env.pop('LD_LIBRARY_PATH', None)
-        env.pop('SSL_CERT_FILE', None)
-
-        self.proc = subprocess.Popen(
-            args,
-            env=env,
-            stdout=subprocess.PIPE,
-            stdin=subprocess.PIPE,
-            preexec_fn=preexec_func)
-
-    def _write(self, data):
-        if not self.proc or self.proc.stdin.closed:
-            raise Exception('SSH subprocess not initiated. '
-                            'connect() must be called first.')
-        written = self.proc.stdin.write(data)
-        self.proc.stdin.flush()
-        return written
-
-    def sendall(self, data):
-        self._write(data)
-
-    def send(self, data):
-        return self._write(data)
-
-    def recv(self, n):
-        if not self.proc:
-            raise Exception('SSH subprocess not initiated. '
- 'connect() must be called first.') - return self.proc.stdout.read(n) - - def makefile(self, mode): - if not self.proc: - self.connect() - self.proc.stdout.channel = self - - return self.proc.stdout - - def close(self): - if not self.proc or self.proc.stdin.closed: - return - self.proc.stdin.write(b'\n\n') - self.proc.stdin.flush() - self.proc.terminate() - - -class SSHConnection(urllib3.connection.HTTPConnection): - def __init__(self, ssh_transport=None, timeout=60, host=None): - super().__init__( - 'localhost', timeout=timeout - ) - self.ssh_transport = ssh_transport - self.timeout = timeout - self.ssh_host = host - - def connect(self): - if self.ssh_transport: - sock = self.ssh_transport.open_session() - sock.settimeout(self.timeout) - sock.exec_command('docker system dial-stdio') - else: - sock = SSHSocket(self.ssh_host) - sock.settimeout(self.timeout) - sock.connect() - - self.sock = sock - - -class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): - scheme = 'ssh' - - def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None): - super().__init__( - 'localhost', timeout=timeout, maxsize=maxsize - ) - self.ssh_transport = None - self.timeout = timeout - if ssh_client: - self.ssh_transport = ssh_client.get_transport() - self.ssh_host = host - - def _new_conn(self): - return SSHConnection(self.ssh_transport, self.timeout, self.ssh_host) - - # When re-using connections, urllib3 calls fileno() on our - # SSH channel instance, quickly overloading our fd limit. To avoid this, - # we override _get_conn - def _get_conn(self, timeout): - conn = None - try: - conn = self.pool.get(block=self.block, timeout=timeout) - - except AttributeError: # self.pool is None - raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") - - except queue.Empty: - if self.block: - raise urllib3.exceptions.EmptyPoolError( - self, - "Pool reached maximum size and no more " - "connections are allowed." 
- ) - # Oh well, we'll create a new connection then - - return conn or self._new_conn() - - -class SSHHTTPAdapter(BaseHTTPAdapter): - - __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + [ - 'pools', 'timeout', 'ssh_client', 'ssh_params', 'max_pool_size' - ] - - def __init__(self, base_url, timeout=60, - pool_connections=constants.DEFAULT_NUM_POOLS, - max_pool_size=constants.DEFAULT_MAX_POOL_SIZE, - shell_out=False): - self.ssh_client = None - if not shell_out: - self._create_paramiko_client(base_url) - self._connect() - - self.ssh_host = base_url - if base_url.startswith('ssh://'): - self.ssh_host = base_url[len('ssh://'):] - - self.timeout = timeout - self.max_pool_size = max_pool_size - self.pools = RecentlyUsedContainer( - pool_connections, dispose_func=lambda p: p.close() - ) - super().__init__() - - def _create_paramiko_client(self, base_url): - logging.getLogger("paramiko").setLevel(logging.WARNING) - self.ssh_client = paramiko.SSHClient() - base_url = urllib.parse.urlparse(base_url) - self.ssh_params = { - "hostname": base_url.hostname, - "port": base_url.port, - "username": base_url.username - } - ssh_config_file = os.path.expanduser("~/.ssh/config") - if os.path.exists(ssh_config_file): - conf = paramiko.SSHConfig() - with open(ssh_config_file) as f: - conf.parse(f) - host_config = conf.lookup(base_url.hostname) - if 'proxycommand' in host_config: - self.ssh_params["sock"] = paramiko.ProxyCommand( - host_config['proxycommand'] - ) - if 'hostname' in host_config: - self.ssh_params['hostname'] = host_config['hostname'] - if base_url.port is None and 'port' in host_config: - self.ssh_params['port'] = host_config['port'] - if base_url.username is None and 'user' in host_config: - self.ssh_params['username'] = host_config['user'] - if 'identityfile' in host_config: - self.ssh_params['key_filename'] = host_config['identityfile'] - - self.ssh_client.load_system_host_keys() - self.ssh_client.set_missing_host_key_policy(paramiko.RejectPolicy()) - - def _connect(self): - if self.ssh_client: - self.ssh_client.connect(**self.ssh_params) - - def get_connection(self, url, proxies=None): - if not self.ssh_client: - return SSHConnectionPool( - ssh_client=self.ssh_client, - timeout=self.timeout, - maxsize=self.max_pool_size, - host=self.ssh_host - ) - with self.pools.lock: - pool = self.pools.get(url) - if pool: - return pool - - # Connection is closed try a reconnect - if self.ssh_client and not self.ssh_client.get_transport(): - self._connect() - - pool = SSHConnectionPool( - ssh_client=self.ssh_client, - timeout=self.timeout, - maxsize=self.max_pool_size, - host=self.ssh_host - ) - self.pools[url] = pool - - return pool - - def close(self): - super().close() - if self.ssh_client: - self.ssh_client.close() diff --git a/server/venv/Lib/site-packages/docker/transport/ssladapter.py b/server/venv/Lib/site-packages/docker/transport/ssladapter.py deleted file mode 100644 index 69274bd..0000000 --- a/server/venv/Lib/site-packages/docker/transport/ssladapter.py +++ /dev/null @@ -1,62 +0,0 @@ -""" Resolves OpenSSL issues in some servers: - https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/ - https://github.com/kennethreitz/requests/pull/799 -""" -from packaging.version import Version -from requests.adapters import HTTPAdapter - -from docker.transport.basehttpadapter import BaseHTTPAdapter - -import urllib3 - - -PoolManager = urllib3.poolmanager.PoolManager - - -class SSLHTTPAdapter(BaseHTTPAdapter): - '''An HTTPS Transport Adapter that uses an arbitrary SSL version.''' - - __attrs__ = 
HTTPAdapter.__attrs__ + ['assert_fingerprint', - 'assert_hostname', - 'ssl_version'] - - def __init__(self, ssl_version=None, assert_hostname=None, - assert_fingerprint=None, **kwargs): - self.ssl_version = ssl_version - self.assert_hostname = assert_hostname - self.assert_fingerprint = assert_fingerprint - super().__init__(**kwargs) - - def init_poolmanager(self, connections, maxsize, block=False): - kwargs = { - 'num_pools': connections, - 'maxsize': maxsize, - 'block': block, - 'assert_hostname': self.assert_hostname, - 'assert_fingerprint': self.assert_fingerprint, - } - if self.ssl_version and self.can_override_ssl_version(): - kwargs['ssl_version'] = self.ssl_version - - self.poolmanager = PoolManager(**kwargs) - - def get_connection(self, *args, **kwargs): - """ - Ensure assert_hostname is set correctly on our pool - - We already take care of a normal poolmanager via init_poolmanager - - But we still need to take care of when there is a proxy poolmanager - """ - conn = super().get_connection(*args, **kwargs) - if conn.assert_hostname != self.assert_hostname: - conn.assert_hostname = self.assert_hostname - return conn - - def can_override_ssl_version(self): - urllib_ver = urllib3.__version__.split('-')[0] - if urllib_ver is None: - return False - if urllib_ver == 'dev': - return True - return Version(urllib_ver) > Version('1.5') diff --git a/server/venv/Lib/site-packages/docker/transport/unixconn.py b/server/venv/Lib/site-packages/docker/transport/unixconn.py deleted file mode 100644 index fae10f2..0000000 --- a/server/venv/Lib/site-packages/docker/transport/unixconn.py +++ /dev/null @@ -1,87 +0,0 @@ -import requests.adapters -import socket - -from docker.transport.basehttpadapter import BaseHTTPAdapter -from .. import constants - -import urllib3 -import urllib3.connection - - -RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer - - -class UnixHTTPConnection(urllib3.connection.HTTPConnection): - - def __init__(self, base_url, unix_socket, timeout=60): - super().__init__( - 'localhost', timeout=timeout - ) - self.base_url = base_url - self.unix_socket = unix_socket - self.timeout = timeout - - def connect(self): - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - sock.settimeout(self.timeout) - sock.connect(self.unix_socket) - self.sock = sock - - -class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): - def __init__(self, base_url, socket_path, timeout=60, maxsize=10): - super().__init__( - 'localhost', timeout=timeout, maxsize=maxsize - ) - self.base_url = base_url - self.socket_path = socket_path - self.timeout = timeout - - def _new_conn(self): - return UnixHTTPConnection( - self.base_url, self.socket_path, self.timeout - ) - - -class UnixHTTPAdapter(BaseHTTPAdapter): - - __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['pools', - 'socket_path', - 'timeout', - 'max_pool_size'] - - def __init__(self, socket_url, timeout=60, - pool_connections=constants.DEFAULT_NUM_POOLS, - max_pool_size=constants.DEFAULT_MAX_POOL_SIZE): - socket_path = socket_url.replace('http+unix://', '') - if not socket_path.startswith('/'): - socket_path = '/' + socket_path - self.socket_path = socket_path - self.timeout = timeout - self.max_pool_size = max_pool_size - self.pools = RecentlyUsedContainer( - pool_connections, dispose_func=lambda p: p.close() - ) - super().__init__() - - def get_connection(self, url, proxies=None): - with self.pools.lock: - pool = self.pools.get(url) - if pool: - return pool - - pool = UnixHTTPConnectionPool( - url, self.socket_path, 
self.timeout, - maxsize=self.max_pool_size - ) - self.pools[url] = pool - - return pool - - def request_url(self, request, proxies): - # The select_proxy utility in requests errors out when the provided URL - # doesn't have a hostname, like is the case when using a UNIX socket. - # Since proxies are an irrelevant notion in the case of UNIX sockets - # anyway, we simply return the path URL directly. - # See also: https://github.com/docker/docker-py/issues/811 - return request.path_url diff --git a/server/venv/Lib/site-packages/docker/types/__init__.py b/server/venv/Lib/site-packages/docker/types/__init__.py deleted file mode 100644 index b425746..0000000 --- a/server/venv/Lib/site-packages/docker/types/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# flake8: noqa -from .containers import ( - ContainerConfig, HostConfig, LogConfig, Ulimit, DeviceRequest -) -from .daemon import CancellableStream -from .healthcheck import Healthcheck -from .networks import EndpointConfig, IPAMConfig, IPAMPool, NetworkingConfig -from .services import ( - ConfigReference, ContainerSpec, DNSConfig, DriverConfig, EndpointSpec, - Mount, Placement, PlacementPreference, Privileges, Resources, - RestartPolicy, RollbackConfig, SecretReference, ServiceMode, TaskTemplate, - UpdateConfig, NetworkAttachmentConfig -) -from .swarm import SwarmSpec, SwarmExternalCA diff --git a/server/venv/Lib/site-packages/docker/types/base.py b/server/venv/Lib/site-packages/docker/types/base.py deleted file mode 100644 index 8851f1e..0000000 --- a/server/venv/Lib/site-packages/docker/types/base.py +++ /dev/null @@ -1,4 +0,0 @@ -class DictType(dict): - def __init__(self, init): - for k, v in init.items(): - self[k] = v diff --git a/server/venv/Lib/site-packages/docker/types/containers.py b/server/venv/Lib/site-packages/docker/types/containers.py deleted file mode 100644 index 84df0f7..0000000 --- a/server/venv/Lib/site-packages/docker/types/containers.py +++ /dev/null @@ -1,779 +0,0 @@ -from .. import errors -from ..utils.utils import ( - convert_port_bindings, convert_tmpfs_mounts, convert_volume_binds, - format_environment, format_extra_hosts, normalize_links, parse_bytes, - parse_devices, split_command, version_gte, version_lt, -) -from .base import DictType -from .healthcheck import Healthcheck - - -class LogConfigTypesEnum: - _values = ( - 'json-file', - 'syslog', - 'journald', - 'gelf', - 'fluentd', - 'none' - ) - JSON, SYSLOG, JOURNALD, GELF, FLUENTD, NONE = _values - - -class LogConfig(DictType): - """ - Configure logging for a container, when provided as an argument to - :py:meth:`~docker.api.container.ContainerApiMixin.create_host_config`. - You may refer to the - `official logging driver documentation <https://docs.docker.com/config/containers/logging/configure/>`_ - for more information. - - Args: - type (str): Indicate which log driver to use. A set of valid drivers - is provided as part of the :py:attr:`LogConfig.types` - enum. Other values may be accepted depending on the engine version - and available logging plugins. - config (dict): A driver-dependent configuration dictionary. Please - refer to the driver's documentation for a list of valid config - keys. - - Example: - - >>> from docker.types import LogConfig - >>> lc = LogConfig(type=LogConfig.types.JSON, config={ - ... 'max-size': '1g', - ... 'labels': 'production_status,geo' - ... }) - >>> hc = client.create_host_config(log_config=lc) - >>> container = client.create_container('busybox', 'true', - ...
host_config=hc) - >>> client.inspect_container(container)['HostConfig']['LogConfig'] - {'Type': 'json-file', 'Config': {'labels': 'production_status,geo', 'max-size': '1g'}} - """ # noqa: E501 - types = LogConfigTypesEnum - - def __init__(self, **kwargs): - log_driver_type = kwargs.get('type', kwargs.get('Type')) - config = kwargs.get('config', kwargs.get('Config')) or {} - - if config and not isinstance(config, dict): - raise ValueError("LogConfig.config must be a dictionary") - - super().__init__({ - 'Type': log_driver_type, - 'Config': config - }) - - @property - def type(self): - return self['Type'] - - @type.setter - def type(self, value): - self['Type'] = value - - @property - def config(self): - return self['Config'] - - def set_config_value(self, key, value): - """ Set the value for ``key`` to ``value`` inside the ``config`` - dict. - """ - self.config[key] = value - - def unset_config(self, key): - """ Remove the ``key`` property from the ``config`` dict. """ - if key in self.config: - del self.config[key] - - -class Ulimit(DictType): - """ - Create a ulimit declaration to be used with - :py:meth:`~docker.api.container.ContainerApiMixin.create_host_config`. - - Args: - - name (str): Which ulimit will this apply to. The valid names can be - found in '/etc/security/limits.conf' on a gnu/linux system. - soft (int): The soft limit for this ulimit. Optional. - hard (int): The hard limit for this ulimit. Optional. - - Example: - - >>> nproc_limit = docker.types.Ulimit(name='nproc', soft=1024) - >>> hc = client.create_host_config(ulimits=[nproc_limit]) - >>> container = client.create_container( - 'busybox', 'true', host_config=hc - ) - >>> client.inspect_container(container)['HostConfig']['Ulimits'] - [{'Name': 'nproc', 'Hard': 0, 'Soft': 1024}] - - """ - def __init__(self, **kwargs): - name = kwargs.get('name', kwargs.get('Name')) - soft = kwargs.get('soft', kwargs.get('Soft')) - hard = kwargs.get('hard', kwargs.get('Hard')) - if not isinstance(name, str): - raise ValueError("Ulimit.name must be a string") - if soft and not isinstance(soft, int): - raise ValueError("Ulimit.soft must be an integer") - if hard and not isinstance(hard, int): - raise ValueError("Ulimit.hard must be an integer") - super().__init__({ - 'Name': name, - 'Soft': soft, - 'Hard': hard - }) - - @property - def name(self): - return self['Name'] - - @name.setter - def name(self, value): - self['Name'] = value - - @property - def soft(self): - return self.get('Soft') - - @soft.setter - def soft(self, value): - self['Soft'] = value - - @property - def hard(self): - return self.get('Hard') - - @hard.setter - def hard(self, value): - self['Hard'] = value - - -class DeviceRequest(DictType): - """ - Create a device request to be used with - :py:meth:`~docker.api.container.ContainerApiMixin.create_host_config`. - - Args: - - driver (str): Which driver to use for this device. Optional. - count (int): Number of devices to request. Optional. - Set to -1 to request all available devices. - device_ids (list): List of strings for device IDs. Optional. - Set either ``count`` or ``device_ids``. - capabilities (list): List of lists of strings to request - capabilities. Optional. The global list acts like an OR, - and the sub-lists are AND. The driver will try to satisfy - one of the sub-lists. - Available capabilities for the ``nvidia`` driver can be found - `here <https://github.com/NVIDIA/nvidia-container-runtime>`_. - options (dict): Driver-specific options. Optional.
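- - Example (a minimal sketch; assumes a connected ``docker.APIClient`` named ``client`` and an NVIDIA runtime on the host): - - >>> gpu_request = docker.types.DeviceRequest( - ... driver='nvidia', count=-1, capabilities=[['gpu']] - ... ) - >>> hc = client.create_host_config(device_requests=[gpu_request])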
- """ - - def __init__(self, **kwargs): - driver = kwargs.get('driver', kwargs.get('Driver')) - count = kwargs.get('count', kwargs.get('Count')) - device_ids = kwargs.get('device_ids', kwargs.get('DeviceIDs')) - capabilities = kwargs.get('capabilities', kwargs.get('Capabilities')) - options = kwargs.get('options', kwargs.get('Options')) - - if driver is None: - driver = '' - elif not isinstance(driver, str): - raise ValueError('DeviceRequest.driver must be a string') - if count is None: - count = 0 - elif not isinstance(count, int): - raise ValueError('DeviceRequest.count must be an integer') - if device_ids is None: - device_ids = [] - elif not isinstance(device_ids, list): - raise ValueError('DeviceRequest.device_ids must be a list') - if capabilities is None: - capabilities = [] - elif not isinstance(capabilities, list): - raise ValueError('DeviceRequest.capabilities must be a list') - if options is None: - options = {} - elif not isinstance(options, dict): - raise ValueError('DeviceRequest.options must be a dict') - - super().__init__({ - 'Driver': driver, - 'Count': count, - 'DeviceIDs': device_ids, - 'Capabilities': capabilities, - 'Options': options - }) - - @property - def driver(self): - return self['Driver'] - - @driver.setter - def driver(self, value): - self['Driver'] = value - - @property - def count(self): - return self['Count'] - - @count.setter - def count(self, value): - self['Count'] = value - - @property - def device_ids(self): - return self['DeviceIDs'] - - @device_ids.setter - def device_ids(self, value): - self['DeviceIDs'] = value - - @property - def capabilities(self): - return self['Capabilities'] - - @capabilities.setter - def capabilities(self, value): - self['Capabilities'] = value - - @property - def options(self): - return self['Options'] - - @options.setter - def options(self, value): - self['Options'] = value - - -class HostConfig(dict): - def __init__(self, version, binds=None, port_bindings=None, - lxc_conf=None, publish_all_ports=False, links=None, - privileged=False, dns=None, dns_search=None, - volumes_from=None, network_mode=None, restart_policy=None, - cap_add=None, cap_drop=None, devices=None, extra_hosts=None, - read_only=None, pid_mode=None, ipc_mode=None, - security_opt=None, ulimits=None, log_config=None, - mem_limit=None, memswap_limit=None, mem_reservation=None, - kernel_memory=None, mem_swappiness=None, cgroup_parent=None, - group_add=None, cpu_quota=None, cpu_period=None, - blkio_weight=None, blkio_weight_device=None, - device_read_bps=None, device_write_bps=None, - device_read_iops=None, device_write_iops=None, - oom_kill_disable=False, shm_size=None, sysctls=None, - tmpfs=None, oom_score_adj=None, dns_opt=None, cpu_shares=None, - cpuset_cpus=None, userns_mode=None, uts_mode=None, - pids_limit=None, isolation=None, auto_remove=False, - storage_opt=None, init=None, init_path=None, - volume_driver=None, cpu_count=None, cpu_percent=None, - nano_cpus=None, cpuset_mems=None, runtime=None, mounts=None, - cpu_rt_period=None, cpu_rt_runtime=None, - device_cgroup_rules=None, device_requests=None, - cgroupns=None): - - if mem_limit is not None: - self['Memory'] = parse_bytes(mem_limit) - - if memswap_limit is not None: - self['MemorySwap'] = parse_bytes(memswap_limit) - - if mem_reservation: - self['MemoryReservation'] = parse_bytes(mem_reservation) - - if kernel_memory: - self['KernelMemory'] = parse_bytes(kernel_memory) - - if mem_swappiness is not None: - if not isinstance(mem_swappiness, int): - raise host_config_type_error( - 'mem_swappiness', 
mem_swappiness, 'int' - ) - - self['MemorySwappiness'] = mem_swappiness - - if shm_size is not None: - if isinstance(shm_size, str): - shm_size = parse_bytes(shm_size) - - self['ShmSize'] = shm_size - - if pid_mode: - if version_lt(version, '1.24') and pid_mode != 'host': - raise host_config_value_error('pid_mode', pid_mode) - self['PidMode'] = pid_mode - - if ipc_mode: - self['IpcMode'] = ipc_mode - - if privileged: - self['Privileged'] = privileged - - if oom_kill_disable: - self['OomKillDisable'] = oom_kill_disable - - if oom_score_adj: - if version_lt(version, '1.22'): - raise host_config_version_error('oom_score_adj', '1.22') - if not isinstance(oom_score_adj, int): - raise host_config_type_error( - 'oom_score_adj', oom_score_adj, 'int' - ) - self['OomScoreAdj'] = oom_score_adj - - if publish_all_ports: - self['PublishAllPorts'] = publish_all_ports - - if read_only is not None: - self['ReadonlyRootfs'] = read_only - - if dns_search: - self['DnsSearch'] = dns_search - - if network_mode == 'host' and port_bindings: - raise host_config_incompatible_error( - 'network_mode', 'host', 'port_bindings' - ) - self['NetworkMode'] = network_mode or 'default' - - if restart_policy: - if not isinstance(restart_policy, dict): - raise host_config_type_error( - 'restart_policy', restart_policy, 'dict' - ) - - self['RestartPolicy'] = restart_policy - - if cap_add: - self['CapAdd'] = cap_add - - if cap_drop: - self['CapDrop'] = cap_drop - - if devices: - self['Devices'] = parse_devices(devices) - - if group_add: - self['GroupAdd'] = [str(grp) for grp in group_add] - - if dns is not None: - self['Dns'] = dns - - if dns_opt is not None: - self['DnsOptions'] = dns_opt - - if security_opt is not None: - if not isinstance(security_opt, list): - raise host_config_type_error( - 'security_opt', security_opt, 'list' - ) - - self['SecurityOpt'] = security_opt - - if sysctls: - if not isinstance(sysctls, dict): - raise host_config_type_error('sysctls', sysctls, 'dict') - self['Sysctls'] = {} - for k, v in sysctls.items(): - self['Sysctls'][k] = str(v) - - if volumes_from is not None: - if isinstance(volumes_from, str): - volumes_from = volumes_from.split(',') - - self['VolumesFrom'] = volumes_from - - if binds is not None: - self['Binds'] = convert_volume_binds(binds) - - if port_bindings is not None: - self['PortBindings'] = convert_port_bindings(port_bindings) - - if extra_hosts is not None: - if isinstance(extra_hosts, dict): - extra_hosts = format_extra_hosts(extra_hosts) - - self['ExtraHosts'] = extra_hosts - - if links is not None: - self['Links'] = normalize_links(links) - - if isinstance(lxc_conf, dict): - formatted = [] - for k, v in lxc_conf.items(): - formatted.append({'Key': k, 'Value': str(v)}) - lxc_conf = formatted - - if lxc_conf is not None: - self['LxcConf'] = lxc_conf - - if cgroup_parent is not None: - self['CgroupParent'] = cgroup_parent - - if ulimits is not None: - if not isinstance(ulimits, list): - raise host_config_type_error('ulimits', ulimits, 'list') - self['Ulimits'] = [] - for lmt in ulimits: - if not isinstance(lmt, Ulimit): - lmt = Ulimit(**lmt) - self['Ulimits'].append(lmt) - - if log_config is not None: - if not isinstance(log_config, LogConfig): - if not isinstance(log_config, dict): - raise host_config_type_error( - 'log_config', log_config, 'LogConfig' - ) - log_config = LogConfig(**log_config) - - self['LogConfig'] = log_config - - if cpu_quota: - if not isinstance(cpu_quota, int): - raise host_config_type_error('cpu_quota', cpu_quota, 'int') - self['CpuQuota'] = cpu_quota - 
- if cpu_period: - if not isinstance(cpu_period, int): - raise host_config_type_error('cpu_period', cpu_period, 'int') - self['CpuPeriod'] = cpu_period - - if cpu_shares: - if not isinstance(cpu_shares, int): - raise host_config_type_error('cpu_shares', cpu_shares, 'int') - - self['CpuShares'] = cpu_shares - - if cpuset_cpus: - self['CpusetCpus'] = cpuset_cpus - - if cpuset_mems: - if not isinstance(cpuset_mems, str): - raise host_config_type_error( - 'cpuset_mems', cpuset_mems, 'str' - ) - self['CpusetMems'] = cpuset_mems - - if cpu_rt_period: - if version_lt(version, '1.25'): - raise host_config_version_error('cpu_rt_period', '1.25') - - if not isinstance(cpu_rt_period, int): - raise host_config_type_error( - 'cpu_rt_period', cpu_rt_period, 'int' - ) - self['CPURealtimePeriod'] = cpu_rt_period - - if cpu_rt_runtime: - if version_lt(version, '1.25'): - raise host_config_version_error('cpu_rt_runtime', '1.25') - - if not isinstance(cpu_rt_runtime, int): - raise host_config_type_error( - 'cpu_rt_runtime', cpu_rt_runtime, 'int' - ) - self['CPURealtimeRuntime'] = cpu_rt_runtime - - if blkio_weight: - if not isinstance(blkio_weight, int): - raise host_config_type_error( - 'blkio_weight', blkio_weight, 'int' - ) - if version_lt(version, '1.22'): - raise host_config_version_error('blkio_weight', '1.22') - self["BlkioWeight"] = blkio_weight - - if blkio_weight_device: - if not isinstance(blkio_weight_device, list): - raise host_config_type_error( - 'blkio_weight_device', blkio_weight_device, 'list' - ) - if version_lt(version, '1.22'): - raise host_config_version_error('blkio_weight_device', '1.22') - self["BlkioWeightDevice"] = blkio_weight_device - - if device_read_bps: - if not isinstance(device_read_bps, list): - raise host_config_type_error( - 'device_read_bps', device_read_bps, 'list' - ) - if version_lt(version, '1.22'): - raise host_config_version_error('device_read_bps', '1.22') - self["BlkioDeviceReadBps"] = device_read_bps - - if device_write_bps: - if not isinstance(device_write_bps, list): - raise host_config_type_error( - 'device_write_bps', device_write_bps, 'list' - ) - if version_lt(version, '1.22'): - raise host_config_version_error('device_write_bps', '1.22') - self["BlkioDeviceWriteBps"] = device_write_bps - - if device_read_iops: - if not isinstance(device_read_iops, list): - raise host_config_type_error( - 'device_read_iops', device_read_iops, 'list' - ) - if version_lt(version, '1.22'): - raise host_config_version_error('device_read_iops', '1.22') - self["BlkioDeviceReadIOps"] = device_read_iops - - if device_write_iops: - if not isinstance(device_write_iops, list): - raise host_config_type_error( - 'device_write_iops', device_write_iops, 'list' - ) - if version_lt(version, '1.22'): - raise host_config_version_error('device_write_iops', '1.22') - self["BlkioDeviceWriteIOps"] = device_write_iops - - if tmpfs: - if version_lt(version, '1.22'): - raise host_config_version_error('tmpfs', '1.22') - self["Tmpfs"] = convert_tmpfs_mounts(tmpfs) - - if userns_mode: - if version_lt(version, '1.23'): - raise host_config_version_error('userns_mode', '1.23') - - if userns_mode != "host": - raise host_config_value_error("userns_mode", userns_mode) - self['UsernsMode'] = userns_mode - - if uts_mode: - if uts_mode != "host": - raise host_config_value_error("uts_mode", uts_mode) - self['UTSMode'] = uts_mode - - if pids_limit: - if not isinstance(pids_limit, int): - raise host_config_type_error('pids_limit', pids_limit, 'int') - if version_lt(version, '1.23'): - raise 
host_config_version_error('pids_limit', '1.23') - self["PidsLimit"] = pids_limit - - if isolation: - if not isinstance(isolation, str): - raise host_config_type_error('isolation', isolation, 'string') - if version_lt(version, '1.24'): - raise host_config_version_error('isolation', '1.24') - self['Isolation'] = isolation - - if auto_remove: - if version_lt(version, '1.25'): - raise host_config_version_error('auto_remove', '1.25') - self['AutoRemove'] = auto_remove - - if storage_opt is not None: - if version_lt(version, '1.24'): - raise host_config_version_error('storage_opt', '1.24') - self['StorageOpt'] = storage_opt - - if init is not None: - if version_lt(version, '1.25'): - raise host_config_version_error('init', '1.25') - self['Init'] = init - - if init_path is not None: - if version_lt(version, '1.25'): - raise host_config_version_error('init_path', '1.25') - - if version_gte(version, '1.29'): - # https://github.com/moby/moby/pull/32470 - raise host_config_version_error('init_path', '1.29', False) - self['InitPath'] = init_path - - if volume_driver is not None: - self['VolumeDriver'] = volume_driver - - if cpu_count: - if not isinstance(cpu_count, int): - raise host_config_type_error('cpu_count', cpu_count, 'int') - if version_lt(version, '1.25'): - raise host_config_version_error('cpu_count', '1.25') - - self['CpuCount'] = cpu_count - - if cpu_percent: - if not isinstance(cpu_percent, int): - raise host_config_type_error('cpu_percent', cpu_percent, 'int') - if version_lt(version, '1.25'): - raise host_config_version_error('cpu_percent', '1.25') - - self['CpuPercent'] = cpu_percent - - if nano_cpus: - if not isinstance(nano_cpus, int): - raise host_config_type_error('nano_cpus', nano_cpus, 'int') - if version_lt(version, '1.25'): - raise host_config_version_error('nano_cpus', '1.25') - - self['NanoCpus'] = nano_cpus - - if runtime: - if version_lt(version, '1.25'): - raise host_config_version_error('runtime', '1.25') - self['Runtime'] = runtime - - if mounts is not None: - if version_lt(version, '1.30'): - raise host_config_version_error('mounts', '1.30') - self['Mounts'] = mounts - - if device_cgroup_rules is not None: - if version_lt(version, '1.28'): - raise host_config_version_error('device_cgroup_rules', '1.28') - if not isinstance(device_cgroup_rules, list): - raise host_config_type_error( - 'device_cgroup_rules', device_cgroup_rules, 'list' - ) - self['DeviceCgroupRules'] = device_cgroup_rules - - if device_requests is not None: - if version_lt(version, '1.40'): - raise host_config_version_error('device_requests', '1.40') - if not isinstance(device_requests, list): - raise host_config_type_error( - 'device_requests', device_requests, 'list' - ) - self['DeviceRequests'] = [] - for req in device_requests: - if not isinstance(req, DeviceRequest): - req = DeviceRequest(**req) - self['DeviceRequests'].append(req) - - if cgroupns: - self['CgroupnsMode'] = cgroupns - - -def host_config_type_error(param, param_value, expected): - error_msg = 'Invalid type for {0} param: expected {1} but found {2}' - return TypeError(error_msg.format(param, expected, type(param_value))) - - -def host_config_version_error(param, version, less_than=True): - operator = '<' if less_than else '>' - error_msg = '{0} param is not supported in API versions {1} {2}' - return errors.InvalidVersion(error_msg.format(param, operator, version)) - - -def host_config_value_error(param, param_value): - error_msg = 'Invalid value for {0} param: {1}' - return ValueError(error_msg.format(param, param_value)) - - -def 
host_config_incompatible_error(param, param_value, incompatible_param): - error_msg = '\"{1}\" {0} is incompatible with {2}' - return errors.InvalidArgument( - error_msg.format(param, param_value, incompatible_param) - ) - - -class ContainerConfig(dict): - def __init__( - self, version, image, command, hostname=None, user=None, detach=False, - stdin_open=False, tty=False, ports=None, environment=None, - volumes=None, network_disabled=False, entrypoint=None, - working_dir=None, domainname=None, host_config=None, mac_address=None, - labels=None, stop_signal=None, networking_config=None, - healthcheck=None, stop_timeout=None, runtime=None - ): - - if stop_timeout is not None and version_lt(version, '1.25'): - raise errors.InvalidVersion( - 'stop_timeout was only introduced in API version 1.25' - ) - - if healthcheck is not None: - if version_lt(version, '1.24'): - raise errors.InvalidVersion( - 'Health options were only introduced in API version 1.24' - ) - - if version_lt(version, '1.29') and 'StartPeriod' in healthcheck: - raise errors.InvalidVersion( - 'healthcheck start period was introduced in API ' - 'version 1.29' - ) - - if isinstance(command, str): - command = split_command(command) - - if isinstance(entrypoint, str): - entrypoint = split_command(entrypoint) - - if isinstance(environment, dict): - environment = format_environment(environment) - - if isinstance(labels, list): - labels = {lbl: '' for lbl in labels} - - if isinstance(ports, list): - exposed_ports = {} - for port_definition in ports: - port = port_definition - proto = 'tcp' - if isinstance(port_definition, tuple): - if len(port_definition) == 2: - proto = port_definition[1] - port = port_definition[0] - exposed_ports[f'{port}/{proto}'] = {} - ports = exposed_ports - - if isinstance(volumes, str): - volumes = [volumes, ] - - if isinstance(volumes, list): - volumes_dict = {} - for vol in volumes: - volumes_dict[vol] = {} - volumes = volumes_dict - - if healthcheck and isinstance(healthcheck, dict): - healthcheck = Healthcheck(**healthcheck) - - attach_stdin = False - attach_stdout = False - attach_stderr = False - stdin_once = False - - if not detach: - attach_stdout = True - attach_stderr = True - - if stdin_open: - attach_stdin = True - stdin_once = True - - self.update({ - 'Hostname': hostname, - 'Domainname': domainname, - 'ExposedPorts': ports, - 'User': str(user) if user is not None else None, - 'Tty': tty, - 'OpenStdin': stdin_open, - 'StdinOnce': stdin_once, - 'AttachStdin': attach_stdin, - 'AttachStdout': attach_stdout, - 'AttachStderr': attach_stderr, - 'Env': environment, - 'Cmd': command, - 'Image': image, - 'Volumes': volumes, - 'NetworkDisabled': network_disabled, - 'Entrypoint': entrypoint, - 'WorkingDir': working_dir, - 'HostConfig': host_config, - 'NetworkingConfig': networking_config, - 'MacAddress': mac_address, - 'Labels': labels, - 'StopSignal': stop_signal, - 'Healthcheck': healthcheck, - 'StopTimeout': stop_timeout, - 'Runtime': runtime - }) diff --git a/server/venv/Lib/site-packages/docker/types/daemon.py b/server/venv/Lib/site-packages/docker/types/daemon.py deleted file mode 100644 index 096b2cc..0000000 --- a/server/venv/Lib/site-packages/docker/types/daemon.py +++ /dev/null @@ -1,71 +0,0 @@ -import socket - -import urllib3 - -from ..errors import DockerException - - -class CancellableStream: - """ - Stream wrapper for real-time events, logs, etc. from the server. - - Example: - >>> events = client.events() - >>> for event in events: - ... 
print(event) - >>> # and cancel from another thread - >>> events.close() - """ - - def __init__(self, stream, response): - self._stream = stream - self._response = response - - def __iter__(self): - return self - - def __next__(self): - try: - return next(self._stream) - except urllib3.exceptions.ProtocolError: - raise StopIteration - except OSError: - raise StopIteration - - next = __next__ - - def close(self): - """ - Closes the event streaming. - """ - - if not self._response.raw.closed: - # find the underlying socket object - # based on api.client._get_raw_response_socket - - sock_fp = self._response.raw._fp.fp - - if hasattr(sock_fp, 'raw'): - sock_raw = sock_fp.raw - - if hasattr(sock_raw, 'sock'): - sock = sock_raw.sock - - elif hasattr(sock_raw, '_sock'): - sock = sock_raw._sock - - elif hasattr(sock_fp, 'channel'): - # We're working with a paramiko (SSH) channel, which doesn't - # support cancelable streams with the current implementation - raise DockerException( - 'Cancellable streams not supported for the SSH protocol' - ) - else: - sock = sock_fp._sock - - if hasattr(urllib3.contrib, 'pyopenssl') and isinstance( - sock, urllib3.contrib.pyopenssl.WrappedSocket): - sock = sock.socket - - sock.shutdown(socket.SHUT_RDWR) - sock.close() diff --git a/server/venv/Lib/site-packages/docker/types/healthcheck.py b/server/venv/Lib/site-packages/docker/types/healthcheck.py deleted file mode 100644 index dfc88a9..0000000 --- a/server/venv/Lib/site-packages/docker/types/healthcheck.py +++ /dev/null @@ -1,88 +0,0 @@ -from .base import DictType - - -class Healthcheck(DictType): - """ - Defines a healthcheck configuration for a container or service. - - Args: - test (:py:class:`list` or str): Test to perform to determine - container health. Possible values: - - - Empty list: Inherit healthcheck from parent image - - ``["NONE"]``: Disable healthcheck - - ``["CMD", args...]``: exec arguments directly. - - ``["CMD-SHELL", command]``: Run command in the system's - default shell. - - If a string is provided, it will be used as a ``CMD-SHELL`` - command. - interval (int): The time to wait between checks in nanoseconds. It - should be 0 or at least 1000000 (1 ms). - timeout (int): The time to wait before considering the check to - have hung. It should be 0 or at least 1000000 (1 ms). - retries (int): The number of consecutive failures needed to - consider a container as unhealthy. - start_period (int): Start period for the container to - initialize before starting health-retries countdown in - nanoseconds. It should be 0 or at least 1000000 (1 ms). 
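- - Example (a minimal sketch; the shell command and the nanosecond values below are illustrative, and the string ``test`` is converted to a ``CMD-SHELL`` test as described above): - - >>> healthcheck = docker.types.Healthcheck( - ... test="curl -f http://localhost/ || exit 1", - ... interval=30 * 1000000000, # 30 s, in nanoseconds - ... timeout=5 * 1000000000, # 5 s, in nanoseconds - ... retries=3 - ... )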
- """ - def __init__(self, **kwargs): - test = kwargs.get('test', kwargs.get('Test')) - if isinstance(test, str): - test = ["CMD-SHELL", test] - - interval = kwargs.get('interval', kwargs.get('Interval')) - timeout = kwargs.get('timeout', kwargs.get('Timeout')) - retries = kwargs.get('retries', kwargs.get('Retries')) - start_period = kwargs.get('start_period', kwargs.get('StartPeriod')) - - super().__init__({ - 'Test': test, - 'Interval': interval, - 'Timeout': timeout, - 'Retries': retries, - 'StartPeriod': start_period - }) - - @property - def test(self): - return self['Test'] - - @test.setter - def test(self, value): - if isinstance(value, str): - value = ["CMD-SHELL", value] - self['Test'] = value - - @property - def interval(self): - return self['Interval'] - - @interval.setter - def interval(self, value): - self['Interval'] = value - - @property - def timeout(self): - return self['Timeout'] - - @timeout.setter - def timeout(self, value): - self['Timeout'] = value - - @property - def retries(self): - return self['Retries'] - - @retries.setter - def retries(self, value): - self['Retries'] = value - - @property - def start_period(self): - return self['StartPeriod'] - - @start_period.setter - def start_period(self, value): - self['StartPeriod'] = value diff --git a/server/venv/Lib/site-packages/docker/types/networks.py b/server/venv/Lib/site-packages/docker/types/networks.py deleted file mode 100644 index ed1ced1..0000000 --- a/server/venv/Lib/site-packages/docker/types/networks.py +++ /dev/null @@ -1,128 +0,0 @@ -from .. import errors -from ..utils import normalize_links, version_lt - - -class EndpointConfig(dict): - def __init__(self, version, aliases=None, links=None, ipv4_address=None, - ipv6_address=None, link_local_ips=None, driver_opt=None, - mac_address=None): - if version_lt(version, '1.22'): - raise errors.InvalidVersion( - 'Endpoint config is not supported for API version < 1.22' - ) - - if aliases: - self["Aliases"] = aliases - - if links: - self["Links"] = normalize_links(links) - - ipam_config = {} - if ipv4_address: - ipam_config['IPv4Address'] = ipv4_address - - if ipv6_address: - ipam_config['IPv6Address'] = ipv6_address - - if mac_address: - if version_lt(version, '1.25'): - raise errors.InvalidVersion( - 'mac_address is not supported for API version < 1.25' - ) - self['MacAddress'] = mac_address - - if link_local_ips is not None: - if version_lt(version, '1.24'): - raise errors.InvalidVersion( - 'link_local_ips is not supported for API version < 1.24' - ) - ipam_config['LinkLocalIPs'] = link_local_ips - - if ipam_config: - self['IPAMConfig'] = ipam_config - - if driver_opt: - if version_lt(version, '1.32'): - raise errors.InvalidVersion( - 'DriverOpts is not supported for API version < 1.32' - ) - if not isinstance(driver_opt, dict): - raise TypeError('driver_opt must be a dictionary') - self['DriverOpts'] = driver_opt - - -class NetworkingConfig(dict): - def __init__(self, endpoints_config=None): - if endpoints_config: - self["EndpointsConfig"] = endpoints_config - - -class IPAMConfig(dict): - """ - Create an IPAM (IP Address Management) config dictionary to be used with - :py:meth:`~docker.api.network.NetworkApiMixin.create_network`. - - Args: - - driver (str): The IPAM driver to use. Defaults to ``default``. - pool_configs (:py:class:`list`): A list of pool configurations - (:py:class:`~docker.types.IPAMPool`). Defaults to empty list. - options (dict): Driver options as a key-value dictionary. - Defaults to `None`. 
- - Example: - - >>> ipam_config = docker.types.IPAMConfig(driver='default') - >>> network = client.create_network('network1', ipam=ipam_config) - - """ - def __init__(self, driver='default', pool_configs=None, options=None): - self.update({ - 'Driver': driver, - 'Config': pool_configs or [] - }) - - if options: - if not isinstance(options, dict): - raise TypeError('IPAMConfig options must be a dictionary') - self['Options'] = options - - -class IPAMPool(dict): - """ - Create an IPAM pool config dictionary to be added to the - ``pool_configs`` parameter of - :py:class:`~docker.types.IPAMConfig`. - - Args: - - subnet (str): Custom subnet for this IPAM pool using the CIDR - notation. Defaults to ``None``. - iprange (str): Custom IP range for endpoints in this IPAM pool using - the CIDR notation. Defaults to ``None``. - gateway (str): Custom IP address for the pool's gateway. - aux_addresses (dict): A dictionary of ``key -> ip_address`` - relationships specifying auxiliary addresses that need to be - allocated by the IPAM driver. - - Example: - - >>> ipam_pool = docker.types.IPAMPool( - subnet='124.42.0.0/16', - iprange='124.42.0.0/24', - gateway='124.42.0.254', - aux_addresses={ - 'reserved1': '124.42.1.1' - } - ) - >>> ipam_config = docker.types.IPAMConfig( - pool_configs=[ipam_pool]) - """ - def __init__(self, subnet=None, iprange=None, gateway=None, - aux_addresses=None): - self.update({ - 'Subnet': subnet, - 'IPRange': iprange, - 'Gateway': gateway, - 'AuxiliaryAddresses': aux_addresses - }) diff --git a/server/venv/Lib/site-packages/docker/types/services.py b/server/venv/Lib/site-packages/docker/types/services.py deleted file mode 100644 index a3383ef..0000000 --- a/server/venv/Lib/site-packages/docker/types/services.py +++ /dev/null @@ -1,863 +0,0 @@ -from .. import errors -from ..constants import IS_WINDOWS_PLATFORM -from ..utils import ( - check_resource, format_environment, format_extra_hosts, parse_bytes, - split_command, convert_service_networks, -) - - -class TaskTemplate(dict): - """ - Describe the task specification to be used when creating or updating a - service. - - Args: - - container_spec (ContainerSpec): Container settings for containers - started as part of this task. - log_driver (DriverConfig): Log configuration for containers created as - part of the service. - resources (Resources): Resource requirements which apply to each - individual container created as part of the service. - restart_policy (RestartPolicy): Specification for the restart policy - which applies to containers created as part of this service. - placement (Placement): Placement instructions for the scheduler. - If a list is passed instead, it is assumed to be a list of - constraints as part of a :py:class:`Placement` object. - networks (:py:class:`list`): List of network names or IDs or - :py:class:`NetworkAttachmentConfig` to attach the service to. - force_update (int): A counter that triggers an update even if no - relevant parameters have been changed. 
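- - Example (a minimal sketch; the image, command and constraint are illustrative, and the list passed as ``placement`` is converted to a :py:class:`Placement` of constraints as noted above): - - >>> spec = docker.types.ContainerSpec(image='busybox', command='true') - >>> task_template = docker.types.TaskTemplate( - ... container_spec=spec, - ... restart_policy=docker.types.RestartPolicy(condition='on-failure'), - ... placement=['node.role == worker'] - ... )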
- """ - - def __init__(self, container_spec, resources=None, restart_policy=None, - placement=None, log_driver=None, networks=None, - force_update=None): - self['ContainerSpec'] = container_spec - if resources: - self['Resources'] = resources - if restart_policy: - self['RestartPolicy'] = restart_policy - if placement: - if isinstance(placement, list): - placement = Placement(constraints=placement) - self['Placement'] = placement - if log_driver: - self['LogDriver'] = log_driver - if networks: - self['Networks'] = convert_service_networks(networks) - - if force_update is not None: - if not isinstance(force_update, int): - raise TypeError('force_update must be an integer') - self['ForceUpdate'] = force_update - - @property - def container_spec(self): - return self.get('ContainerSpec') - - @property - def resources(self): - return self.get('Resources') - - @property - def restart_policy(self): - return self.get('RestartPolicy') - - @property - def placement(self): - return self.get('Placement') - - -class ContainerSpec(dict): - """ - Describes the behavior of containers that are part of a task, and is used - when declaring a :py:class:`~docker.types.TaskTemplate`. - - Args: - - image (string): The image name to use for the container. - command (string or list): The command to be run in the image. - args (:py:class:`list`): Arguments to the command. - hostname (string): The hostname to set on the container. - env (dict): Environment variables. - workdir (string): The working directory for commands to run in. - user (string): The user inside the container. - labels (dict): A map of labels to associate with the service. - mounts (:py:class:`list`): A list of specifications for mounts to be - added to containers created as part of the service. See the - :py:class:`~docker.types.Mount` class for details. - stop_grace_period (int): Amount of time to wait for the container to - terminate before forcefully killing it. - secrets (:py:class:`list`): List of :py:class:`SecretReference` to be - made available inside the containers. - tty (boolean): Whether a pseudo-TTY should be allocated. - groups (:py:class:`list`): A list of additional groups that the - container process will run as. - open_stdin (boolean): Open ``stdin`` - read_only (boolean): Mount the container's root filesystem as read - only. - stop_signal (string): Set signal to stop the service's containers - healthcheck (Healthcheck): Healthcheck - configuration for this service. - hosts (:py:class:`dict`): A set of host to IP mappings to add to - the container's ``hosts`` file. - dns_config (DNSConfig): Specification for DNS - related configurations in resolver configuration file. - configs (:py:class:`list`): List of :py:class:`ConfigReference` that - will be exposed to the service. - privileges (Privileges): Security options for the service's containers. - isolation (string): Isolation technology used by the service's - containers. Only used for Windows containers. - init (boolean): Run an init inside the container that forwards signals - and reaps processes. - cap_add (:py:class:`list`): A list of kernel capabilities to add to the - default set for the container. - cap_drop (:py:class:`list`): A list of kernel capabilities to drop from - the default set for the container. 
- sysctls (:py:class:`dict`): A dict of sysctl values to add to - the container - """ - - def __init__(self, image, command=None, args=None, hostname=None, env=None, - workdir=None, user=None, labels=None, mounts=None, - stop_grace_period=None, secrets=None, tty=None, groups=None, - open_stdin=None, read_only=None, stop_signal=None, - healthcheck=None, hosts=None, dns_config=None, configs=None, - privileges=None, isolation=None, init=None, cap_add=None, - cap_drop=None, sysctls=None): - self['Image'] = image - - if isinstance(command, str): - command = split_command(command) - self['Command'] = command - self['Args'] = args - - if hostname is not None: - self['Hostname'] = hostname - if env is not None: - if isinstance(env, dict): - self['Env'] = format_environment(env) - else: - self['Env'] = env - if workdir is not None: - self['Dir'] = workdir - if user is not None: - self['User'] = user - if groups is not None: - self['Groups'] = groups - if stop_signal is not None: - self['StopSignal'] = stop_signal - if stop_grace_period is not None: - self['StopGracePeriod'] = stop_grace_period - if labels is not None: - self['Labels'] = labels - if hosts is not None: - self['Hosts'] = format_extra_hosts(hosts, task=True) - - if mounts is not None: - parsed_mounts = [] - for mount in mounts: - if isinstance(mount, str): - parsed_mounts.append(Mount.parse_mount_string(mount)) - else: - # If mount already parsed - parsed_mounts.append(mount) - self['Mounts'] = parsed_mounts - - if secrets is not None: - if not isinstance(secrets, list): - raise TypeError('secrets must be a list') - self['Secrets'] = secrets - - if configs is not None: - if not isinstance(configs, list): - raise TypeError('configs must be a list') - self['Configs'] = configs - - if dns_config is not None: - self['DNSConfig'] = dns_config - if privileges is not None: - self['Privileges'] = privileges - if healthcheck is not None: - self['Healthcheck'] = healthcheck - - if tty is not None: - self['TTY'] = tty - if open_stdin is not None: - self['OpenStdin'] = open_stdin - if read_only is not None: - self['ReadOnly'] = read_only - - if isolation is not None: - self['Isolation'] = isolation - - if init is not None: - self['Init'] = init - - if cap_add is not None: - if not isinstance(cap_add, list): - raise TypeError('cap_add must be a list') - - self['CapabilityAdd'] = cap_add - - if cap_drop is not None: - if not isinstance(cap_drop, list): - raise TypeError('cap_drop must be a list') - - self['CapabilityDrop'] = cap_drop - - if sysctls is not None: - if not isinstance(sysctls, dict): - raise TypeError('sysctls must be a dict') - - self['Sysctls'] = sysctls - - -class Mount(dict): - """ - Describes a mounted folder's configuration inside a container. A list of - :py:class:`Mount` would be used as part of a - :py:class:`~docker.types.ContainerSpec`. - - Args: - - target (string): Container path. - source (string): Mount source (e.g. a volume name or a host path). - type (string): The mount type (``bind`` / ``volume`` / ``tmpfs`` / - ``npipe``). Default: ``volume``. - read_only (bool): Whether the mount should be read-only. - consistency (string): The consistency requirement for the mount. One of - ``default``, ``consistent``, ``cached``, ``delegated``. - propagation (string): A propagation mode with the value ``[r]private``, - ``[r]shared``, or ``[r]slave``. Only valid for the ``bind`` type. - no_copy (bool): Set to ``True`` to disable populating the volume with - data from the target. Default: ``False``. Only valid for the ``volume`` - type.
- labels (dict): User-defined name and labels for the volume. Only valid - for the ``volume`` type. - driver_config (DriverConfig): Volume driver configuration. Only valid - for the ``volume`` type. - tmpfs_size (int or string): The size for the tmpfs mount in bytes. - tmpfs_mode (int): The permission mode for the tmpfs mount. - """ - - def __init__(self, target, source, type='volume', read_only=False, - consistency=None, propagation=None, no_copy=False, - labels=None, driver_config=None, tmpfs_size=None, - tmpfs_mode=None): - self['Target'] = target - self['Source'] = source - if type not in ('bind', 'volume', 'tmpfs', 'npipe'): - raise errors.InvalidArgument( - f'Unsupported mount type: "{type}"' - ) - self['Type'] = type - self['ReadOnly'] = read_only - - if consistency: - self['Consistency'] = consistency - - if type == 'bind': - if propagation is not None: - self['BindOptions'] = { - 'Propagation': propagation - } - if any([labels, driver_config, no_copy, tmpfs_size, tmpfs_mode]): - raise errors.InvalidArgument( - 'Incompatible options have been provided for the bind ' - 'type mount.' - ) - elif type == 'volume': - volume_opts = {} - if no_copy: - volume_opts['NoCopy'] = True - if labels: - volume_opts['Labels'] = labels - if driver_config: - volume_opts['DriverConfig'] = driver_config - if volume_opts: - self['VolumeOptions'] = volume_opts - if any([propagation, tmpfs_size, tmpfs_mode]): - raise errors.InvalidArgument( - 'Incompatible options have been provided for the volume ' - 'type mount.' - ) - elif type == 'tmpfs': - tmpfs_opts = {} - if tmpfs_mode: - if not isinstance(tmpfs_mode, int): - raise errors.InvalidArgument( - 'tmpfs_mode must be an integer' - ) - tmpfs_opts['Mode'] = tmpfs_mode - if tmpfs_size: - tmpfs_opts['SizeBytes'] = parse_bytes(tmpfs_size) - if tmpfs_opts: - self['TmpfsOptions'] = tmpfs_opts - if any([propagation, labels, driver_config, no_copy]): - raise errors.InvalidArgument( - 'Incompatible options have been provided for the tmpfs ' - 'type mount.' - ) - - @classmethod - def parse_mount_string(cls, string): - parts = string.split(':') - if len(parts) > 3: - raise errors.InvalidArgument( - f'Invalid mount format "{string}"' - ) - if len(parts) == 1: - return cls(target=parts[0], source=None) - else: - target = parts[1] - source = parts[0] - mount_type = 'volume' - if source.startswith('/') or ( - IS_WINDOWS_PLATFORM and source[0].isalpha() and - source[1] == ':' - ): - # FIXME: That windows condition will fail earlier since we - # split on ':'. We should look into doing a smarter split - # if we detect we are on Windows. - mount_type = 'bind' - read_only = not (len(parts) == 2 or parts[2] == 'rw') - return cls(target, source, read_only=read_only, type=mount_type) - - -class Resources(dict): - """ - Configures resource allocation for containers when made part of a - :py:class:`~docker.types.ContainerSpec`. - - Args: - - cpu_limit (int): CPU limit in units of 10^-9 CPU shares. - mem_limit (int): Memory limit in Bytes. - cpu_reservation (int): CPU reservation in units of 10^-9 CPU shares. - mem_reservation (int): Memory reservation in Bytes. - generic_resources (dict or :py:class:`list`): Node level generic - resources, for example a GPU, using the following format: - ``{ resource_name: resource_value }``. Alternatively, a list - of resource specifications as defined by the Engine API.
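- - Example (a minimal sketch; the values are illustrative and use the raw engine units described above): - - >>> resources = docker.types.Resources( - ... cpu_limit=500000000, # 0.5 CPU, in units of 10^-9 CPU shares - ... mem_limit=536870912, # 512 MiB, in bytes - ... generic_resources={'GPU': 2} - ... )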
- """ - - def __init__(self, cpu_limit=None, mem_limit=None, cpu_reservation=None, - mem_reservation=None, generic_resources=None): - limits = {} - reservation = {} - if cpu_limit is not None: - limits['NanoCPUs'] = cpu_limit - if mem_limit is not None: - limits['MemoryBytes'] = mem_limit - if cpu_reservation is not None: - reservation['NanoCPUs'] = cpu_reservation - if mem_reservation is not None: - reservation['MemoryBytes'] = mem_reservation - if generic_resources is not None: - reservation['GenericResources'] = ( - _convert_generic_resources_dict(generic_resources) - ) - if limits: - self['Limits'] = limits - if reservation: - self['Reservations'] = reservation - - -def _convert_generic_resources_dict(generic_resources): - if isinstance(generic_resources, list): - return generic_resources - if not isinstance(generic_resources, dict): - raise errors.InvalidArgument( - 'generic_resources must be a dict or a list' - ' (found {})'.format(type(generic_resources)) - ) - resources = [] - for kind, value in generic_resources.items(): - resource_type = None - if isinstance(value, int): - resource_type = 'DiscreteResourceSpec' - elif isinstance(value, str): - resource_type = 'NamedResourceSpec' - else: - raise errors.InvalidArgument( - 'Unsupported generic resource reservation ' - 'type: {}'.format({kind: value}) - ) - resources.append({ - resource_type: {'Kind': kind, 'Value': value} - }) - return resources - - -class UpdateConfig(dict): - """ - - Used to specify the way container updates should be performed by a service. - - Args: - - parallelism (int): Maximum number of tasks to be updated in one - iteration (0 means unlimited parallelism). Default: 0. - delay (int): Amount of time between updates, in nanoseconds. - failure_action (string): Action to take if an updated task fails to - run, or stops running during the update. Acceptable values are - ``continue``, ``pause``, as well as ``rollback`` since API v1.28. - Default: ``continue`` - monitor (int): Amount of time to monitor each updated task for - failures, in nanoseconds. - max_failure_ratio (float): The fraction of tasks that may fail during - an update before the failure action is invoked, specified as a - floating point number between 0 and 1. Default: 0 - order (string): Specifies the order of operations when rolling out an - updated task. Either ``start-first`` or ``stop-first`` are accepted. 
- """ - - def __init__(self, parallelism=0, delay=None, failure_action='continue', - monitor=None, max_failure_ratio=None, order=None): - self['Parallelism'] = parallelism - if delay is not None: - self['Delay'] = delay - if failure_action not in ('pause', 'continue', 'rollback'): - raise errors.InvalidArgument( - 'failure_action must be one of `pause`, `continue`, `rollback`' - ) - self['FailureAction'] = failure_action - - if monitor is not None: - if not isinstance(monitor, int): - raise TypeError('monitor must be an integer') - self['Monitor'] = monitor - - if max_failure_ratio is not None: - if not isinstance(max_failure_ratio, (float, int)): - raise TypeError('max_failure_ratio must be a float') - if max_failure_ratio > 1 or max_failure_ratio < 0: - raise errors.InvalidArgument( - 'max_failure_ratio must be a number between 0 and 1' - ) - self['MaxFailureRatio'] = max_failure_ratio - - if order is not None: - if order not in ('start-first', 'stop-first'): - raise errors.InvalidArgument( - 'order must be either `start-first` or `stop-first`' - ) - self['Order'] = order - - -class RollbackConfig(UpdateConfig): - """ - Used to specify the way container rollbacks should be performed by a - service - - Args: - parallelism (int): Maximum number of tasks to be rolled back in one - iteration (0 means unlimited parallelism). Default: 0 - delay (int): Amount of time between rollbacks, in nanoseconds. - failure_action (string): Action to take if a rolled back task fails to - run, or stops running during the rollback. Acceptable values are - ``continue``, ``pause`` or ``rollback``. - Default: ``continue`` - monitor (int): Amount of time to monitor each rolled back task for - failures, in nanoseconds. - max_failure_ratio (float): The fraction of tasks that may fail during - a rollback before the failure action is invoked, specified as a - floating point number between 0 and 1. Default: 0 - order (string): Specifies the order of operations when rolling out a - rolled back task. Either ``start-first`` or ``stop-first`` are - accepted. - """ - pass - - -class RestartConditionTypesEnum: - _values = ( - 'none', - 'on-failure', - 'any', - ) - NONE, ON_FAILURE, ANY = _values - - -class RestartPolicy(dict): - """ - Used when creating a :py:class:`~docker.types.ContainerSpec`, - dictates whether a container should restart after stopping or failing. - - Args: - - condition (string): Condition for restart (``none``, ``on-failure``, - or ``any``). Default: `none`. - delay (int): Delay between restart attempts. Default: 0 - max_attempts (int): Maximum attempts to restart a given container - before giving up. Default value is 0, which is ignored. - window (int): Time window used to evaluate the restart policy. Default - value is 0, which is unbounded. - """ - - condition_types = RestartConditionTypesEnum - - def __init__(self, condition=RestartConditionTypesEnum.NONE, delay=0, - max_attempts=0, window=0): - if condition not in self.condition_types._values: - raise TypeError( - f'Invalid RestartPolicy condition {condition}' - ) - - self['Condition'] = condition - self['Delay'] = delay - self['MaxAttempts'] = max_attempts - self['Window'] = window - - -class DriverConfig(dict): - """ - Indicates which driver to use, as well as its configuration. Can be used - as ``log_driver`` in a :py:class:`~docker.types.ContainerSpec`, - for the `driver_config` in a volume :py:class:`~docker.types.Mount`, or - as the driver object in - :py:meth:`create_secret`. - - Args: - - name (string): Name of the driver to use. 
- options (dict): Driver-specific options. Default: ``None``. - """ - - def __init__(self, name, options=None): - self['Name'] = name - if options: - self['Options'] = options - - -class EndpointSpec(dict): - """ - Describes properties to access and load-balance a service. - - Args: - - mode (string): The mode of resolution to use for internal load - balancing between tasks (``'vip'`` or ``'dnsrr'``). Defaults to - ``'vip'`` if not provided. - ports (dict): Exposed ports that this service is accessible on from the - outside, in the form of ``{ published_port: target_port }`` or - ``{ published_port: <port_config_tuple> }``. Port config tuple format - is ``(target_port [, protocol [, publish_mode]])``. - Ports can only be provided if the ``vip`` resolution mode is used. - """ - - def __init__(self, mode=None, ports=None): - if ports: - self['Ports'] = convert_service_ports(ports) - if mode: - self['Mode'] = mode - - -def convert_service_ports(ports): - if isinstance(ports, list): - return ports - if not isinstance(ports, dict): - raise TypeError( - 'Invalid type for ports, expected dict or list' - ) - - result = [] - for k, v in ports.items(): - port_spec = { - 'Protocol': 'tcp', - 'PublishedPort': k - } - - if isinstance(v, tuple): - port_spec['TargetPort'] = v[0] - if len(v) >= 2 and v[1] is not None: - port_spec['Protocol'] = v[1] - if len(v) == 3: - port_spec['PublishMode'] = v[2] - if len(v) > 3: - raise ValueError( - 'Service port configuration can have at most 3 elements: ' - '(target_port, protocol, mode)' - ) - else: - port_spec['TargetPort'] = v - - result.append(port_spec) - return result - - -class ServiceMode(dict): - """ - Indicate whether a service or a job should be deployed as a replicated - or global service, and associated parameters - - Args: - mode (string): Can be either ``replicated``, ``global``, - ``replicated-job`` or ``global-job`` - replicas (int): Number of replicas. For replicated services only. - concurrency (int): Number of concurrent jobs. For replicated job - services only. - """ - - def __init__(self, mode, replicas=None, concurrency=None): - replicated_modes = ('replicated', 'replicated-job') - supported_modes = replicated_modes + ('global', 'global-job') - - if mode not in supported_modes: - raise errors.InvalidArgument( - 'mode must be either "replicated", "global", "replicated-job"' - ' or "global-job"' - ) - - if mode not in replicated_modes: - if replicas is not None: - raise errors.InvalidArgument( - 'replicas can only be used for "replicated" or' - ' "replicated-job" mode' - ) - - if concurrency is not None: - raise errors.InvalidArgument( - 'concurrency can only be used for "replicated-job" mode' - ) - - service_mode = self._convert_mode(mode) - self.mode = service_mode - self[service_mode] = {} - - if replicas is not None: - if mode == 'replicated': - self[service_mode]['Replicas'] = replicas - - if mode == 'replicated-job': - self[service_mode]['MaxConcurrent'] = concurrency or 1 - self[service_mode]['TotalCompletions'] = replicas - - @staticmethod - def _convert_mode(original_mode): - if original_mode == 'global-job': - return 'GlobalJob' - - if original_mode == 'replicated-job': - return 'ReplicatedJob' - - return original_mode - - @property - def replicas(self): - if 'replicated' in self: - return self['replicated'].get('Replicas') - - if 'ReplicatedJob' in self: - return self['ReplicatedJob'].get('TotalCompletions') - - return None - - -class SecretReference(dict): - """ - Secret reference to be used as part of a :py:class:`ContainerSpec`.
- Describes how a secret is made accessible inside the service's - containers. - - Args: - secret_id (string): Secret's ID - secret_name (string): Secret's name as defined at its creation. - filename (string): Name of the file containing the secret. Defaults - to the secret's name if not specified. - uid (string): UID of the secret file's owner. Default: 0 - gid (string): GID of the secret file's group. Default: 0 - mode (int): File access mode inside the container. Default: 0o444 - """ - @check_resource('secret_id') - def __init__(self, secret_id, secret_name, filename=None, uid=None, - gid=None, mode=0o444): - self['SecretName'] = secret_name - self['SecretID'] = secret_id - self['File'] = { - 'Name': filename or secret_name, - 'UID': uid or '0', - 'GID': gid or '0', - 'Mode': mode - } - - -class ConfigReference(dict): - """ - Config reference to be used as part of a :py:class:`ContainerSpec`. - Describes how a config is made accessible inside the service's - containers. - - Args: - config_id (string): Config's ID - config_name (string): Config's name as defined at its creation. - filename (string): Name of the file containing the config. Defaults - to the config's name if not specified. - uid (string): UID of the config file's owner. Default: 0 - gid (string): GID of the config file's group. Default: 0 - mode (int): File access mode inside the container. Default: 0o444 - """ - @check_resource('config_id') - def __init__(self, config_id, config_name, filename=None, uid=None, - gid=None, mode=0o444): - self['ConfigName'] = config_name - self['ConfigID'] = config_id - self['File'] = { - 'Name': filename or config_name, - 'UID': uid or '0', - 'GID': gid or '0', - 'Mode': mode - } - - -class Placement(dict): - """ - Placement constraints to be used as part of a :py:class:`TaskTemplate` - - Args: - constraints (:py:class:`list` of str): A list of constraints - preferences (:py:class:`list` of tuple): Preferences provide a way - to make the scheduler aware of factors such as topology. They - are provided in order from highest to lowest precedence and - are expressed as ``(strategy, descriptor)`` tuples. See - :py:class:`PlacementPreference` for details. - maxreplicas (int): Maximum number of replicas per node - platforms (:py:class:`list` of tuple): A list of platforms - expressed as ``(arch, os)`` tuples - """ - - def __init__(self, constraints=None, preferences=None, platforms=None, - maxreplicas=None): - if constraints is not None: - self['Constraints'] = constraints - if preferences is not None: - self['Preferences'] = [] - for pref in preferences: - if isinstance(pref, tuple): - pref = PlacementPreference(*pref) - self['Preferences'].append(pref) - if maxreplicas is not None: - self['MaxReplicas'] = maxreplicas - if platforms: - self['Platforms'] = [] - for plat in platforms: - self['Platforms'].append({ - 'Architecture': plat[0], 'OS': plat[1] - }) - - -class PlacementPreference(dict): - """ - Placement preference to be used as an element in the list of - preferences for :py:class:`Placement` objects. - - Args: - strategy (string): The placement strategy to implement. Currently, - the only supported strategy is ``spread``. - descriptor (string): A label descriptor. For the spread strategy, - the scheduler will try to spread tasks evenly over groups of - nodes identified by this label. 
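- - Example (a minimal sketch; the node label is hypothetical): - - >>> preference = docker.types.PlacementPreference( - ... 'spread', 'node.labels.datacenter' - ... ) - >>> placement = docker.types.Placement(preferences=[preference])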
- """ - - def __init__(self, strategy, descriptor): - if strategy != 'spread': - raise errors.InvalidArgument( - 'PlacementPreference strategy value is invalid ({}):' - ' must be "spread".'.format(strategy) - ) - self['Spread'] = {'SpreadDescriptor': descriptor} - - -class DNSConfig(dict): - """ - Specification for DNS related configurations in resolver configuration - file (``resolv.conf``). Part of a :py:class:`ContainerSpec` definition. - - Args: - nameservers (:py:class:`list`): The IP addresses of the name - servers. - search (:py:class:`list`): A search list for host-name lookup. - options (:py:class:`list`): A list of internal resolver variables - to be modified (e.g., ``debug``, ``ndots:3``, etc.). - """ - - def __init__(self, nameservers=None, search=None, options=None): - self['Nameservers'] = nameservers - self['Search'] = search - self['Options'] = options - - -class Privileges(dict): - r""" - Security options for a service's containers. - Part of a :py:class:`ContainerSpec` definition. - - Args: - credentialspec_file (str): Load credential spec from this file. - The file is read by the daemon, and must be present in the - CredentialSpecs subdirectory in the docker data directory, - which defaults to ``C:\ProgramData\Docker\`` on Windows. - Can not be combined with credentialspec_registry. - - credentialspec_registry (str): Load credential spec from this value - in the Windows registry. The specified registry value must be - located in: ``HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion - \Virtualization\Containers\CredentialSpecs``. - Can not be combined with credentialspec_file. - - selinux_disable (boolean): Disable SELinux - selinux_user (string): SELinux user label - selinux_role (string): SELinux role label - selinux_type (string): SELinux type label - selinux_level (string): SELinux level label - """ - - def __init__(self, credentialspec_file=None, credentialspec_registry=None, - selinux_disable=None, selinux_user=None, selinux_role=None, - selinux_type=None, selinux_level=None): - credential_spec = {} - if credentialspec_registry is not None: - credential_spec['Registry'] = credentialspec_registry - if credentialspec_file is not None: - credential_spec['File'] = credentialspec_file - - if len(credential_spec) > 1: - raise errors.InvalidArgument( - 'credentialspec_file and credentialspec_registry are mutually' - ' exclusive' - ) - - selinux_context = { - 'Disable': selinux_disable, - 'User': selinux_user, - 'Role': selinux_role, - 'Type': selinux_type, - 'Level': selinux_level, - } - - if len(credential_spec) > 0: - self['CredentialSpec'] = credential_spec - - if len(selinux_context) > 0: - self['SELinuxContext'] = selinux_context - - -class NetworkAttachmentConfig(dict): - """ - Network attachment options for a service. - - Args: - target (str): The target network for attachment. - Can be a network name or ID. - aliases (:py:class:`list`): A list of discoverable alternate names - for the service. - options (:py:class:`dict`): Driver attachment options for the - network target. 
- """ - - def __init__(self, target, aliases=None, options=None): - self['Target'] = target - self['Aliases'] = aliases - self['DriverOpts'] = options diff --git a/server/venv/Lib/site-packages/docker/types/swarm.py b/server/venv/Lib/site-packages/docker/types/swarm.py deleted file mode 100644 index 9687a82..0000000 --- a/server/venv/Lib/site-packages/docker/types/swarm.py +++ /dev/null @@ -1,119 +0,0 @@ -from ..errors import InvalidVersion -from ..utils import version_lt - - -class SwarmSpec(dict): - """ - Describe a Swarm's configuration and options. Use - :py:meth:`~docker.api.swarm.SwarmApiMixin.create_swarm_spec` - to instantiate. - """ - def __init__(self, version, task_history_retention_limit=None, - snapshot_interval=None, keep_old_snapshots=None, - log_entries_for_slow_followers=None, heartbeat_tick=None, - election_tick=None, dispatcher_heartbeat_period=None, - node_cert_expiry=None, external_cas=None, name=None, - labels=None, signing_ca_cert=None, signing_ca_key=None, - ca_force_rotate=None, autolock_managers=None, - log_driver=None): - if task_history_retention_limit is not None: - self['Orchestration'] = { - 'TaskHistoryRetentionLimit': task_history_retention_limit - } - if any([snapshot_interval, - keep_old_snapshots, - log_entries_for_slow_followers, - heartbeat_tick, - election_tick]): - self['Raft'] = { - 'SnapshotInterval': snapshot_interval, - 'KeepOldSnapshots': keep_old_snapshots, - 'LogEntriesForSlowFollowers': log_entries_for_slow_followers, - 'HeartbeatTick': heartbeat_tick, - 'ElectionTick': election_tick - } - - if dispatcher_heartbeat_period: - self['Dispatcher'] = { - 'HeartbeatPeriod': dispatcher_heartbeat_period - } - - ca_config = {} - if node_cert_expiry is not None: - ca_config['NodeCertExpiry'] = node_cert_expiry - if external_cas: - if version_lt(version, '1.25'): - if len(external_cas) > 1: - raise InvalidVersion( - 'Support for multiple external CAs is not available ' - 'for API version < 1.25' - ) - ca_config['ExternalCA'] = external_cas[0] - else: - ca_config['ExternalCAs'] = external_cas - if signing_ca_key: - if version_lt(version, '1.30'): - raise InvalidVersion( - 'signing_ca_key is not supported in API version < 1.30' - ) - ca_config['SigningCAKey'] = signing_ca_key - if signing_ca_cert: - if version_lt(version, '1.30'): - raise InvalidVersion( - 'signing_ca_cert is not supported in API version < 1.30' - ) - ca_config['SigningCACert'] = signing_ca_cert - if ca_force_rotate is not None: - if version_lt(version, '1.30'): - raise InvalidVersion( - 'force_rotate is not supported in API version < 1.30' - ) - ca_config['ForceRotate'] = ca_force_rotate - if ca_config: - self['CAConfig'] = ca_config - - if autolock_managers is not None: - if version_lt(version, '1.25'): - raise InvalidVersion( - 'autolock_managers is not supported in API version < 1.25' - ) - - self['EncryptionConfig'] = {'AutoLockManagers': autolock_managers} - - if log_driver is not None: - if version_lt(version, '1.25'): - raise InvalidVersion( - 'log_driver is not supported in API version < 1.25' - ) - - self['TaskDefaults'] = {'LogDriver': log_driver} - - if name is not None: - self['Name'] = name - if labels is not None: - self['Labels'] = labels - - -class SwarmExternalCA(dict): - """ - Configuration for forwarding signing requests to an external - certificate authority. - - Args: - url (string): URL where certificate signing requests should be - sent. - protocol (string): Protocol for communication with the external CA. 
- options (dict): An object with key/value pairs that are interpreted - as protocol-specific options for the external CA driver. - ca_cert (string): The root CA certificate (in PEM format) this - external CA uses to issue TLS certificates (assumed to be to - the current swarm root CA certificate if not provided). - - - - """ - def __init__(self, url, protocol=None, options=None, ca_cert=None): - self['URL'] = url - self['Protocol'] = protocol - self['Options'] = options - self['CACert'] = ca_cert diff --git a/server/venv/Lib/site-packages/docker/utils/__init__.py b/server/venv/Lib/site-packages/docker/utils/__init__.py deleted file mode 100644 index 81c8186..0000000 --- a/server/venv/Lib/site-packages/docker/utils/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# flake8: noqa -from .build import create_archive, exclude_paths, mkbuildcontext, tar -from .decorators import check_resource, minimum_version, update_headers -from .utils import ( - compare_version, convert_port_bindings, convert_volume_binds, - parse_repository_tag, parse_host, - kwargs_from_env, convert_filters, datetime_to_timestamp, - create_host_config, parse_bytes, parse_env_file, version_lt, - version_gte, decode_json_header, split_command, create_ipam_config, - create_ipam_pool, parse_devices, normalize_links, convert_service_networks, - format_environment, format_extra_hosts -) - diff --git a/server/venv/Lib/site-packages/docker/utils/build.py b/server/venv/Lib/site-packages/docker/utils/build.py deleted file mode 100644 index 59564c4..0000000 --- a/server/venv/Lib/site-packages/docker/utils/build.py +++ /dev/null @@ -1,252 +0,0 @@ -import io -import os -import re -import tarfile -import tempfile - -from .fnmatch import fnmatch -from ..constants import IS_WINDOWS_PLATFORM - - -_SEP = re.compile('/|\\\\') if IS_WINDOWS_PLATFORM else re.compile('/') - - -def tar(path, exclude=None, dockerfile=None, fileobj=None, gzip=False): - root = os.path.abspath(path) - exclude = exclude or [] - dockerfile = dockerfile or (None, None) - extra_files = [] - if dockerfile[1] is not None: - dockerignore_contents = '\n'.join( - (exclude or ['.dockerignore']) + [dockerfile[0]] - ) - extra_files = [ - ('.dockerignore', dockerignore_contents), - dockerfile, - ] - return create_archive( - files=sorted(exclude_paths(root, exclude, dockerfile=dockerfile[0])), - root=root, fileobj=fileobj, gzip=gzip, extra_files=extra_files - ) - - -def exclude_paths(root, patterns, dockerfile=None): - """ - Given a root directory path and a list of .dockerignore patterns, return - an iterator of all paths (both regular files and directories) in the root - directory that do *not* match any of the patterns. - - All paths returned are relative to the root. - """ - - if dockerfile is None: - dockerfile = 'Dockerfile' - - patterns.append('!' 
+ dockerfile) - pm = PatternMatcher(patterns) - return set(pm.walk(root)) - - -def build_file_list(root): - files = [] - for dirname, dirnames, fnames in os.walk(root): - for filename in fnames + dirnames: - longpath = os.path.join(dirname, filename) - files.append( - longpath.replace(root, '', 1).lstrip('/') - ) - - return files - - -def create_archive(root, files=None, fileobj=None, gzip=False, - extra_files=None): - extra_files = extra_files or [] - if not fileobj: - fileobj = tempfile.NamedTemporaryFile() - t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj) - if files is None: - files = build_file_list(root) - extra_names = {e[0] for e in extra_files} - for path in files: - if path in extra_names: - # Extra files override context files with the same name - continue - full_path = os.path.join(root, path) - - i = t.gettarinfo(full_path, arcname=path) - if i is None: - # This happens when we encounter a socket file. We can safely - # ignore it and proceed. - continue - - # Workaround https://bugs.python.org/issue32713 - if i.mtime < 0 or i.mtime > 8**11 - 1: - i.mtime = int(i.mtime) - - if IS_WINDOWS_PLATFORM: - # Windows doesn't keep track of the execute bit, so we make files - # and directories executable by default. - i.mode = i.mode & 0o755 | 0o111 - - if i.isfile(): - try: - with open(full_path, 'rb') as f: - t.addfile(i, f) - except OSError: - raise OSError( - f'Can not read file in context: {full_path}' - ) - else: - # Directories, FIFOs, symlinks... don't need to be read. - t.addfile(i, None) - - for name, contents in extra_files: - info = tarfile.TarInfo(name) - contents_encoded = contents.encode('utf-8') - info.size = len(contents_encoded) - t.addfile(info, io.BytesIO(contents_encoded)) - - t.close() - fileobj.seek(0) - return fileobj - - -def mkbuildcontext(dockerfile): - f = tempfile.NamedTemporaryFile() - t = tarfile.open(mode='w', fileobj=f) - if isinstance(dockerfile, io.StringIO): - dfinfo = tarfile.TarInfo('Dockerfile') - raise TypeError('Please use io.BytesIO to create in-memory ' - 'Dockerfiles with Python 3') - elif isinstance(dockerfile, io.BytesIO): - dfinfo = tarfile.TarInfo('Dockerfile') - dfinfo.size = len(dockerfile.getvalue()) - dockerfile.seek(0) - else: - dfinfo = t.gettarinfo(fileobj=dockerfile, arcname='Dockerfile') - t.addfile(dfinfo, dockerfile) - t.close() - f.seek(0) - return f - - -def split_path(p): - return [pt for pt in re.split(_SEP, p) if pt and pt != '.'] - - -def normalize_slashes(p): - if IS_WINDOWS_PLATFORM: - return '/'.join(split_path(p)) - return p - - -def walk(root, patterns, default=True): - pm = PatternMatcher(patterns) - return pm.walk(root) - - -# Heavily based on -# https://github.com/moby/moby/blob/master/pkg/fileutils/fileutils.go -class PatternMatcher: - def __init__(self, patterns): - self.patterns = list(filter( - lambda p: p.dirs, [Pattern(p) for p in patterns] - )) - self.patterns.append(Pattern('!.dockerignore')) - - def matches(self, filepath): - matched = False - parent_path = os.path.dirname(filepath) - parent_path_dirs = split_path(parent_path) - - for pattern in self.patterns: - negative = pattern.exclusion - match = pattern.match(filepath) - if not match and parent_path != '': - if len(pattern.dirs) <= len(parent_path_dirs): - match = pattern.match( - os.path.sep.join(parent_path_dirs[:len(pattern.dirs)]) - ) - - if match: - matched = not negative - - return matched - - def walk(self, root): - def rec_walk(current_dir): - for f in os.listdir(current_dir): - fpath = os.path.join( - 
os.path.relpath(current_dir, root), f - ) - if fpath.startswith('.' + os.path.sep): - fpath = fpath[2:] - match = self.matches(fpath) - if not match: - yield fpath - - cur = os.path.join(root, fpath) - if not os.path.isdir(cur) or os.path.islink(cur): - continue - - if match: - # If we want to skip this file and it's a directory - # then we should first check to see if there's an - # excludes pattern (e.g. !dir/file) that starts with this - # dir. If so then we can't skip this dir. - skip = True - - for pat in self.patterns: - if not pat.exclusion: - continue - if pat.cleaned_pattern.startswith( - normalize_slashes(fpath)): - skip = False - break - if skip: - continue - yield from rec_walk(cur) - - return rec_walk(root) - - -class Pattern: - def __init__(self, pattern_str): - self.exclusion = False - if pattern_str.startswith('!'): - self.exclusion = True - pattern_str = pattern_str[1:] - - self.dirs = self.normalize(pattern_str) - self.cleaned_pattern = '/'.join(self.dirs) - - @classmethod - def normalize(cls, p): - - # Remove trailing spaces - p = p.strip() - - # Leading and trailing slashes are not relevant. Yes, - # "foo.py/" must exclude the "foo.py" regular file. "." - # components are not relevant either, even if the whole - # pattern is only ".", as the Docker reference states: "For - # historical reasons, the pattern . is ignored." - # ".." component must be cleared with the potential previous - # component, regardless of whether it exists: "A preprocessing - # step [...] eliminates . and .. elements using Go's - # filepath.". - i = 0 - split = split_path(p) - while i < len(split): - if split[i] == '..': - del split[i] - if i > 0: - del split[i - 1] - i -= 1 - else: - i += 1 - return split - - def match(self, filepath): - return fnmatch(normalize_slashes(filepath), self.cleaned_pattern) diff --git a/server/venv/Lib/site-packages/docker/utils/config.py b/server/venv/Lib/site-packages/docker/utils/config.py deleted file mode 100644 index 8e24959..0000000 --- a/server/venv/Lib/site-packages/docker/utils/config.py +++ /dev/null @@ -1,66 +0,0 @@ -import json -import logging -import os - -from ..constants import IS_WINDOWS_PLATFORM - -DOCKER_CONFIG_FILENAME = os.path.join('.docker', 'config.json') -LEGACY_DOCKER_CONFIG_FILENAME = '.dockercfg' - -log = logging.getLogger(__name__) - - -def find_config_file(config_path=None): - paths = list(filter(None, [ - config_path, # 1 - config_path_from_environment(), # 2 - os.path.join(home_dir(), DOCKER_CONFIG_FILENAME), # 3 - os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4 - ])) - - log.debug(f"Trying paths: {repr(paths)}") - - for path in paths: - if os.path.exists(path): - log.debug(f"Found file at path: {path}") - return path - - log.debug("No config file found") - - return None - - -def config_path_from_environment(): - config_dir = os.environ.get('DOCKER_CONFIG') - if not config_dir: - return None - return os.path.join(config_dir, os.path.basename(DOCKER_CONFIG_FILENAME)) - - -def home_dir(): - """ - Get the user's home directory, using the same logic as the Docker Engine - client - use %USERPROFILE% on Windows, $HOME/getuid on POSIX. 
- """ - if IS_WINDOWS_PLATFORM: - return os.environ.get('USERPROFILE', '') - else: - return os.path.expanduser('~') - - -def load_general_config(config_path=None): - config_file = find_config_file(config_path) - - if not config_file: - return {} - - try: - with open(config_file) as f: - return json.load(f) - except (OSError, ValueError) as e: - # In the case of a legacy `.dockercfg` file, we won't - # be able to load any JSON data. - log.debug(e) - - log.debug("All parsing attempts failed - returning empty config") - return {} diff --git a/server/venv/Lib/site-packages/docker/utils/decorators.py b/server/venv/Lib/site-packages/docker/utils/decorators.py deleted file mode 100644 index cf1baf4..0000000 --- a/server/venv/Lib/site-packages/docker/utils/decorators.py +++ /dev/null @@ -1,47 +0,0 @@ -import functools - -from .. import errors -from . import utils - - -def check_resource(resource_name): - def decorator(f): - @functools.wraps(f) - def wrapped(self, resource_id=None, *args, **kwargs): - if resource_id is None and kwargs.get(resource_name): - resource_id = kwargs.pop(resource_name) - if isinstance(resource_id, dict): - resource_id = resource_id.get('Id', resource_id.get('ID')) - if not resource_id: - raise errors.NullResource( - 'Resource ID was not provided' - ) - return f(self, resource_id, *args, **kwargs) - return wrapped - return decorator - - -def minimum_version(version): - def decorator(f): - @functools.wraps(f) - def wrapper(self, *args, **kwargs): - if utils.version_lt(self._version, version): - raise errors.InvalidVersion( - '{} is not available for version < {}'.format( - f.__name__, version - ) - ) - return f(self, *args, **kwargs) - return wrapper - return decorator - - -def update_headers(f): - def inner(self, *args, **kwargs): - if 'HttpHeaders' in self._general_configs: - if not kwargs.get('headers'): - kwargs['headers'] = self._general_configs['HttpHeaders'] - else: - kwargs['headers'].update(self._general_configs['HttpHeaders']) - return f(self, *args, **kwargs) - return inner diff --git a/server/venv/Lib/site-packages/docker/utils/fnmatch.py b/server/venv/Lib/site-packages/docker/utils/fnmatch.py deleted file mode 100644 index 90e9f60..0000000 --- a/server/venv/Lib/site-packages/docker/utils/fnmatch.py +++ /dev/null @@ -1,115 +0,0 @@ -"""Filename matching with shell patterns. - -fnmatch(FILENAME, PATTERN) matches according to the local convention. -fnmatchcase(FILENAME, PATTERN) always takes case in account. - -The functions operate by translating the pattern into a regular -expression. They cache the compiled regular expressions for speed. - -The function translate(PATTERN) returns a regular expression -corresponding to PATTERN. (It does not compile it.) -""" - -import re - -__all__ = ["fnmatch", "fnmatchcase", "translate"] - -_cache = {} -_MAXCACHE = 100 - - -def _purge(): - """Clear the pattern cache""" - _cache.clear() - - -def fnmatch(name, pat): - """Test whether FILENAME matches PATTERN. - - Patterns are Unix shell style: - - * matches everything - ? matches any single character - [seq] matches any character in seq - [!seq] matches any char not in seq - - An initial period in FILENAME is not special. - Both FILENAME and PATTERN are first case-normalized - if the operating system requires it. - If you don't want this, use fnmatchcase(FILENAME, PATTERN). - """ - - name = name.lower() - pat = pat.lower() - return fnmatchcase(name, pat) - - -def fnmatchcase(name, pat): - """Test whether FILENAME matches PATTERN, including case. 
- This is a version of fnmatch() which doesn't case-normalize - its arguments. - """ - - try: - re_pat = _cache[pat] - except KeyError: - res = translate(pat) - if len(_cache) >= _MAXCACHE: - _cache.clear() - _cache[pat] = re_pat = re.compile(res) - return re_pat.match(name) is not None - - -def translate(pat): - """Translate a shell PATTERN to a regular expression. - - There is no way to quote meta-characters. - """ - i, n = 0, len(pat) - res = '^' - while i < n: - c = pat[i] - i = i + 1 - if c == '*': - if i < n and pat[i] == '*': - # is some flavor of "**" - i = i + 1 - # Treat **/ as ** so eat the "/" - if i < n and pat[i] == '/': - i = i + 1 - if i >= n: - # is "**EOF" - to align with .gitignore just accept all - res = res + '.*' - else: - # is "**" - # Note that this allows for any # of /'s (even 0) because - # the .* will eat everything, even /'s - res = res + '(.*/)?' - else: - # is "*" so map it to anything but "/" - res = res + '[^/]*' - elif c == '?': - # "?" is any char except "/" - res = res + '[^/]' - elif c == '[': - j = i - if j < n and pat[j] == '!': - j = j + 1 - if j < n and pat[j] == ']': - j = j + 1 - while j < n and pat[j] != ']': - j = j + 1 - if j >= n: - res = res + '\\[' - else: - stuff = pat[i:j].replace('\\', '\\\\') - i = j + 1 - if stuff[0] == '!': - stuff = '^' + stuff[1:] - elif stuff[0] == '^': - stuff = '\\' + stuff - res = f'{res}[{stuff}]' - else: - res = res + re.escape(c) - - return res + '$' diff --git a/server/venv/Lib/site-packages/docker/utils/json_stream.py b/server/venv/Lib/site-packages/docker/utils/json_stream.py deleted file mode 100644 index f384175..0000000 --- a/server/venv/Lib/site-packages/docker/utils/json_stream.py +++ /dev/null @@ -1,75 +0,0 @@ -import json -import json.decoder - -from ..errors import StreamParseError - - -json_decoder = json.JSONDecoder() - - -def stream_as_text(stream): - """ - Given a stream of bytes or text, if any of the items in the stream - are bytes convert them to text. - This function can be removed once we return text streams - instead of byte streams. - """ - for data in stream: - if not isinstance(data, str): - data = data.decode('utf-8', 'replace') - yield data - - -def json_splitter(buffer): - """Attempt to parse a json object from a buffer. If there is at least one - object, return it and the rest of the buffer, otherwise return None. - """ - buffer = buffer.strip() - try: - obj, index = json_decoder.raw_decode(buffer) - rest = buffer[json.decoder.WHITESPACE.match(buffer, index).end():] - return obj, rest - except ValueError: - return None - - -def json_stream(stream): - """Given a stream of text, return a stream of json objects. - This handles streams which are inconsistently buffered (some entries may - be newline delimited, and others are not). - """ - return split_buffer(stream, json_splitter, json_decoder.decode) - - -def line_splitter(buffer, separator='\n'): - index = buffer.find(str(separator)) - if index == -1: - return None - return buffer[:index + 1], buffer[index + 1:] - - -def split_buffer(stream, splitter=None, decoder=lambda a: a): - """Given a generator which yields strings and a splitter function, - joins all input, splits on the separator and yields each chunk. - Unlike string.split(), each chunk includes the trailing - separator, except for the last one if none was found on the end - of the input. 
- """ - splitter = splitter or line_splitter - buffered = '' - - for data in stream_as_text(stream): - buffered += data - while True: - buffer_split = splitter(buffered) - if buffer_split is None: - break - - item, buffered = buffer_split - yield item - - if buffered: - try: - yield decoder(buffered) - except Exception as e: - raise StreamParseError(e) diff --git a/server/venv/Lib/site-packages/docker/utils/ports.py b/server/venv/Lib/site-packages/docker/utils/ports.py deleted file mode 100644 index e813936..0000000 --- a/server/venv/Lib/site-packages/docker/utils/ports.py +++ /dev/null @@ -1,83 +0,0 @@ -import re - -PORT_SPEC = re.compile( - "^" # Match full string - "(" # External part - r"(\[?(?P[a-fA-F\d.:]+)\]?:)?" # Address - r"(?P[\d]*)(-(?P[\d]+))?:" # External range - ")?" - r"(?P[\d]+)(-(?P[\d]+))?" # Internal range - "(?P/(udp|tcp|sctp))?" # Protocol - "$" # Match full string -) - - -def add_port_mapping(port_bindings, internal_port, external): - if internal_port in port_bindings: - port_bindings[internal_port].append(external) - else: - port_bindings[internal_port] = [external] - - -def add_port(port_bindings, internal_port_range, external_range): - if external_range is None: - for internal_port in internal_port_range: - add_port_mapping(port_bindings, internal_port, None) - else: - ports = zip(internal_port_range, external_range) - for internal_port, external_port in ports: - add_port_mapping(port_bindings, internal_port, external_port) - - -def build_port_bindings(ports): - port_bindings = {} - for port in ports: - internal_port_range, external_range = split_port(port) - add_port(port_bindings, internal_port_range, external_range) - return port_bindings - - -def _raise_invalid_port(port): - raise ValueError('Invalid port "%s", should be ' - '[[remote_ip:]remote_port[-remote_port]:]' - 'port[/protocol]' % port) - - -def port_range(start, end, proto, randomly_available_port=False): - if not start: - return start - if not end: - return [start + proto] - if randomly_available_port: - return [f'{start}-{end}' + proto] - return [str(port) + proto for port in range(int(start), int(end) + 1)] - - -def split_port(port): - if hasattr(port, 'legacy_repr'): - # This is the worst hack, but it prevents a bug in Compose 1.14.0 - # https://github.com/docker/docker-py/issues/1668 - # TODO: remove once fixed in Compose stable - port = port.legacy_repr() - port = str(port) - match = PORT_SPEC.match(port) - if match is None: - _raise_invalid_port(port) - parts = match.groupdict() - - host = parts['host'] - proto = parts['proto'] or '' - internal = port_range(parts['int'], parts['int_end'], proto) - external = port_range( - parts['ext'], parts['ext_end'], '', len(internal) == 1) - - if host is None: - if external is not None and len(internal) != len(external): - raise ValueError('Port ranges don\'t match in length') - return internal, external - else: - if not external: - external = [None] * len(internal) - elif len(internal) != len(external): - raise ValueError('Port ranges don\'t match in length') - return internal, [(host, ext_port) for ext_port in external] diff --git a/server/venv/Lib/site-packages/docker/utils/proxy.py b/server/venv/Lib/site-packages/docker/utils/proxy.py deleted file mode 100644 index 49e98ed..0000000 --- a/server/venv/Lib/site-packages/docker/utils/proxy.py +++ /dev/null @@ -1,73 +0,0 @@ -from .utils import format_environment - - -class ProxyConfig(dict): - ''' - Hold the client's proxy configuration - ''' - @property - def http(self): - return self.get('http') - - 
@property - def https(self): - return self.get('https') - - @property - def ftp(self): - return self.get('ftp') - - @property - def no_proxy(self): - return self.get('no_proxy') - - @staticmethod - def from_dict(config): - ''' - Instantiate a new ProxyConfig from a dictionary that represents a - client configuration, as described in `the documentation`_. - - .. _the documentation: - https://docs.docker.com/network/proxy/#configure-the-docker-client - ''' - return ProxyConfig( - http=config.get('httpProxy'), - https=config.get('httpsProxy'), - ftp=config.get('ftpProxy'), - no_proxy=config.get('noProxy'), - ) - - def get_environment(self): - ''' - Return a dictionary representing the environment variables used to - set the proxy settings. - ''' - env = {} - if self.http: - env['http_proxy'] = env['HTTP_PROXY'] = self.http - if self.https: - env['https_proxy'] = env['HTTPS_PROXY'] = self.https - if self.ftp: - env['ftp_proxy'] = env['FTP_PROXY'] = self.ftp - if self.no_proxy: - env['no_proxy'] = env['NO_PROXY'] = self.no_proxy - return env - - def inject_proxy_environment(self, environment): - ''' - Given a list of strings representing environment variables, prepend the - environment variables corresponding to the proxy settings. - ''' - if not self: - return environment - - proxy_env = format_environment(self.get_environment()) - if not environment: - return proxy_env - # It is important to prepend our variables, because we want the - # variables defined in "environment" to take precedence. - return proxy_env + environment - - def __str__(self): - return 'ProxyConfig(http={}, https={}, ftp={}, no_proxy={})'.format( - self.http, self.https, self.ftp, self.no_proxy) diff --git a/server/venv/Lib/site-packages/docker/utils/socket.py b/server/venv/Lib/site-packages/docker/utils/socket.py deleted file mode 100644 index cdc485e..0000000 --- a/server/venv/Lib/site-packages/docker/utils/socket.py +++ /dev/null @@ -1,187 +0,0 @@ -import errno -import os -import select -import socket as pysocket -import struct - -try: - from ..transport import NpipeSocket -except ImportError: - NpipeSocket = type(None) - - -STDOUT = 1 -STDERR = 2 - - -class SocketError(Exception): - pass - - -# NpipeSockets have their own error types -# pywintypes.error: (109, 'ReadFile', 'The pipe has been ended.') -NPIPE_ENDED = 109 - - -def read(socket, n=4096): - """ - Reads at most n bytes from socket - """ - - recoverable_errors = (errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK) - - if not isinstance(socket, NpipeSocket): - if not hasattr(select, "poll"): - # Limited to 1024 - select.select([socket], [], []) - else: - poll = select.poll() - poll.register(socket, select.POLLIN | select.POLLPRI) - poll.poll() - - try: - if hasattr(socket, 'recv'): - return socket.recv(n) - if isinstance(socket, getattr(pysocket, 'SocketIO')): - return socket.read(n) - return os.read(socket.fileno(), n) - except OSError as e: - if e.errno not in recoverable_errors: - raise - except Exception as e: - is_pipe_ended = (isinstance(socket, NpipeSocket) and - len(e.args) > 0 and - e.args[0] == NPIPE_ENDED) - if is_pipe_ended: - # npipes don't support duplex sockets, so we interpret - # a PIPE_ENDED error as a close operation (0-length read). 
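The PORT_SPEC grammar and split_port() from the ports module above translate port strings like so:

from docker.utils.ports import build_port_bindings, split_port

assert split_port('8080:80') == (['80'], ['8080'])
assert split_port('127.0.0.1:8080:80/udp') == (['80/udp'], [('127.0.0.1', '8080')])
assert split_port('80') == (['80'], None)  # no external part

assert build_port_bindings(['1000:1000', '2000:2000']) == {
    '1000': ['1000'], '2000': ['2000']
}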
- return '' - raise - - -def read_exactly(socket, n): - """ - Reads exactly n bytes from socket - Raises SocketError if there isn't enough data - """ - data = bytes() - while len(data) < n: - next_data = read(socket, n - len(data)) - if not next_data: - raise SocketError("Unexpected EOF") - data += next_data - return data - - -def next_frame_header(socket): - """ - Returns the stream and size of the next frame of data waiting to be read - from socket, according to the protocol defined here: - - https://docs.docker.com/engine/api/v1.24/#attach-to-a-container - """ - try: - data = read_exactly(socket, 8) - except SocketError: - return (-1, -1) - - stream, actual = struct.unpack('>BxxxL', data) - return (stream, actual) - - -def frames_iter(socket, tty): - """ - Return a generator of frames read from socket. A frame is a tuple where - the first item is the stream number and the second item is a chunk of data. - - If the tty setting is enabled, the streams are multiplexed into the stdout - stream. - """ - if tty: - return ((STDOUT, frame) for frame in frames_iter_tty(socket)) - else: - return frames_iter_no_tty(socket) - - -def frames_iter_no_tty(socket): - """ - Returns a generator of data read from the socket when the tty setting is - not enabled. - """ - while True: - (stream, n) = next_frame_header(socket) - if n < 0: - break - while n > 0: - result = read(socket, n) - if result is None: - continue - data_length = len(result) - if data_length == 0: - # We have reached EOF - return - n -= data_length - yield (stream, result) - - -def frames_iter_tty(socket): - """ - Return a generator of data read from the socket when the tty setting is - enabled. - """ - while True: - result = read(socket) - if len(result) == 0: - # We have reached EOF - return - yield result - - -def consume_socket_output(frames, demux=False): - """ - Iterate through frames read from the socket and return the result. - - Args: - - demux (bool): - If False, stdout and stderr are multiplexed, and the result is the - concatenation of all the frames. If True, the streams are - demultiplexed, and the result is a 2-tuple where each item is the - concatenation of frames belonging to the same stream. - """ - if demux is False: - # If the streams are multiplexed, the generator returns strings, that - # we just need to concatenate. - return bytes().join(frames) - - # If the streams are demultiplexed, the generator yields tuples - # (stdout, stderr) - out = [None, None] - for frame in frames: - # It is guaranteed that for each frame, one and only one stream - # is not None. - assert frame != (None, None) - if frame[0] is not None: - if out[0] is None: - out[0] = frame[0] - else: - out[0] += frame[0] - else: - if out[1] is None: - out[1] = frame[1] - else: - out[1] += frame[1] - return tuple(out) - - -def demux_adaptor(stream_id, data): - """ - Utility to demultiplex stdout and stderr when reading frames from the - socket. - """ - if stream_id == STDOUT: - return (data, None) - elif stream_id == STDERR: - return (None, data) - else: - raise ValueError(f'{stream_id} is not a valid stream') diff --git a/server/venv/Lib/site-packages/docker/utils/utils.py b/server/venv/Lib/site-packages/docker/utils/utils.py deleted file mode 100644 index 7b2bbf4..0000000 --- a/server/venv/Lib/site-packages/docker/utils/utils.py +++ /dev/null @@ -1,505 +0,0 @@ -import base64 -import collections -import json -import os -import os.path -import shlex -import string -from datetime import datetime -from packaging.version import Version - -from .. 
import errors -from ..constants import DEFAULT_HTTP_HOST -from ..constants import DEFAULT_UNIX_SOCKET -from ..constants import DEFAULT_NPIPE -from ..constants import BYTE_UNITS -from ..tls import TLSConfig - -from urllib.parse import urlparse, urlunparse - - -URLComponents = collections.namedtuple( - 'URLComponents', - 'scheme netloc url params query fragment', -) - - -def create_ipam_pool(*args, **kwargs): - raise errors.DeprecatedMethod( - 'utils.create_ipam_pool has been removed. Please use a ' - 'docker.types.IPAMPool object instead.' - ) - - -def create_ipam_config(*args, **kwargs): - raise errors.DeprecatedMethod( - 'utils.create_ipam_config has been removed. Please use a ' - 'docker.types.IPAMConfig object instead.' - ) - - -def decode_json_header(header): - data = base64.b64decode(header) - data = data.decode('utf-8') - return json.loads(data) - - -def compare_version(v1, v2): - """Compare docker versions - - >>> v1 = '1.9' - >>> v2 = '1.10' - >>> compare_version(v1, v2) - 1 - >>> compare_version(v2, v1) - -1 - >>> compare_version(v2, v2) - 0 - """ - s1 = Version(v1) - s2 = Version(v2) - if s1 == s2: - return 0 - elif s1 > s2: - return -1 - else: - return 1 - - -def version_lt(v1, v2): - return compare_version(v1, v2) > 0 - - -def version_gte(v1, v2): - return not version_lt(v1, v2) - - -def _convert_port_binding(binding): - result = {'HostIp': '', 'HostPort': ''} - if isinstance(binding, tuple): - if len(binding) == 2: - result['HostPort'] = binding[1] - result['HostIp'] = binding[0] - elif isinstance(binding[0], str): - result['HostIp'] = binding[0] - else: - result['HostPort'] = binding[0] - elif isinstance(binding, dict): - if 'HostPort' in binding: - result['HostPort'] = binding['HostPort'] - if 'HostIp' in binding: - result['HostIp'] = binding['HostIp'] - else: - raise ValueError(binding) - else: - result['HostPort'] = binding - - if result['HostPort'] is None: - result['HostPort'] = '' - else: - result['HostPort'] = str(result['HostPort']) - - return result - - -def convert_port_bindings(port_bindings): - result = {} - for k, v in iter(port_bindings.items()): - key = str(k) - if '/' not in key: - key += '/tcp' - if isinstance(v, list): - result[key] = [_convert_port_binding(binding) for binding in v] - else: - result[key] = [_convert_port_binding(v)] - return result - - -def convert_volume_binds(binds): - if isinstance(binds, list): - return binds - - result = [] - for k, v in binds.items(): - if isinstance(k, bytes): - k = k.decode('utf-8') - - if isinstance(v, dict): - if 'ro' in v and 'mode' in v: - raise ValueError( - 'Binding cannot contain both "ro" and "mode": {}' - .format(repr(v)) - ) - - bind = v['bind'] - if isinstance(bind, bytes): - bind = bind.decode('utf-8') - - if 'ro' in v: - mode = 'ro' if v['ro'] else 'rw' - elif 'mode' in v: - mode = v['mode'] - else: - mode = 'rw' - - result.append( - f'{k}:{bind}:{mode}' - ) - else: - if isinstance(v, bytes): - v = v.decode('utf-8') - result.append( - f'{k}:{v}:rw' - ) - return result - - -def convert_tmpfs_mounts(tmpfs): - if isinstance(tmpfs, dict): - return tmpfs - - if not isinstance(tmpfs, list): - raise ValueError( - 'Expected tmpfs value to be either a list or a dict, found: {}' - .format(type(tmpfs).__name__) - ) - - result = {} - for mount in tmpfs: - if isinstance(mount, str): - if ":" in mount: - name, options = mount.split(":", 1) - else: - name = mount - options = "" - - else: - raise ValueError( - "Expected item in tmpfs list to be a string, found: {}" - .format(type(mount).__name__) - ) - - result[name] = 
options - return result - - -def convert_service_networks(networks): - if not networks: - return networks - if not isinstance(networks, list): - raise TypeError('networks parameter must be a list.') - - result = [] - for n in networks: - if isinstance(n, str): - n = {'Target': n} - result.append(n) - return result - - -def parse_repository_tag(repo_name): - parts = repo_name.rsplit('@', 1) - if len(parts) == 2: - return tuple(parts) - parts = repo_name.rsplit(':', 1) - if len(parts) == 2 and '/' not in parts[1]: - return tuple(parts) - return repo_name, None - - -def parse_host(addr, is_win32=False, tls=False): - # Sensible defaults - if not addr and is_win32: - return DEFAULT_NPIPE - if not addr or addr.strip() == 'unix://': - return DEFAULT_UNIX_SOCKET - - addr = addr.strip() - - parsed_url = urlparse(addr) - proto = parsed_url.scheme - if not proto or any([x not in string.ascii_letters + '+' for x in proto]): - # https://bugs.python.org/issue754016 - parsed_url = urlparse('//' + addr, 'tcp') - proto = 'tcp' - - if proto == 'fd': - raise errors.DockerException('fd protocol is not implemented') - - # These protos are valid aliases for our library but not for the - # official spec - if proto == 'http' or proto == 'https': - tls = proto == 'https' - proto = 'tcp' - elif proto == 'http+unix': - proto = 'unix' - - if proto not in ('tcp', 'unix', 'npipe', 'ssh'): - raise errors.DockerException( - f"Invalid bind address protocol: {addr}" - ) - - if proto == 'tcp' and not parsed_url.netloc: - # "tcp://" is exceptionally disallowed by convention; - # omitting a hostname for other protocols is fine - raise errors.DockerException( - f'Invalid bind address format: {addr}' - ) - - if any([ - parsed_url.params, parsed_url.query, parsed_url.fragment, - parsed_url.password - ]): - raise errors.DockerException( - f'Invalid bind address format: {addr}' - ) - - if parsed_url.path and proto == 'ssh': - raise errors.DockerException( - 'Invalid bind address format: no path allowed for this protocol:' - ' {}'.format(addr) - ) - else: - path = parsed_url.path - if proto == 'unix' and parsed_url.hostname is not None: - # For legacy reasons, we consider unix://path - # to be valid and equivalent to unix:///path - path = '/'.join((parsed_url.hostname, path)) - - netloc = parsed_url.netloc - if proto in ('tcp', 'ssh'): - port = parsed_url.port or 0 - if port <= 0: - if proto != 'ssh': - raise errors.DockerException( - 'Invalid bind address format: port is required:' - ' {}'.format(addr) - ) - port = 22 - netloc = f'{parsed_url.netloc}:{port}' - - if not parsed_url.hostname: - netloc = f'{DEFAULT_HTTP_HOST}:{port}' - - # Rewrite schemes to fit library internals (requests adapters) - if proto == 'tcp': - proto = 'http{}'.format('s' if tls else '') - elif proto == 'unix': - proto = 'http+unix' - - if proto in ('http+unix', 'npipe'): - return f"{proto}://{path}".rstrip('/') - - return urlunparse(URLComponents( - scheme=proto, - netloc=netloc, - url=path, - params='', - query='', - fragment='', - )).rstrip('/') - - -def parse_devices(devices): - device_list = [] - for device in devices: - if isinstance(device, dict): - device_list.append(device) - continue - if not isinstance(device, str): - raise errors.DockerException( - f'Invalid device type {type(device)}' - ) - device_mapping = device.split(':') - if device_mapping: - path_on_host = device_mapping[0] - if len(device_mapping) > 1: - path_in_container = device_mapping[1] - else: - path_in_container = path_on_host - if len(device_mapping) > 2: - permissions = 
device_mapping[2] - else: - permissions = 'rwm' - device_list.append({ - 'PathOnHost': path_on_host, - 'PathInContainer': path_in_container, - 'CgroupPermissions': permissions - }) - return device_list - - -def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None): - if not environment: - environment = os.environ - host = environment.get('DOCKER_HOST') - - # empty string for cert path is the same as unset. - cert_path = environment.get('DOCKER_CERT_PATH') or None - - # empty string for tls verify counts as "false". - # Any value or 'unset' counts as true. - tls_verify = environment.get('DOCKER_TLS_VERIFY') - if tls_verify == '': - tls_verify = False - else: - tls_verify = tls_verify is not None - enable_tls = cert_path or tls_verify - - params = {} - - if host: - params['base_url'] = host - - if not enable_tls: - return params - - if not cert_path: - cert_path = os.path.join(os.path.expanduser('~'), '.docker') - - if not tls_verify and assert_hostname is None: - # assert_hostname is a subset of TLS verification, - # so if it's not set already then set it to false. - assert_hostname = False - - params['tls'] = TLSConfig( - client_cert=(os.path.join(cert_path, 'cert.pem'), - os.path.join(cert_path, 'key.pem')), - ca_cert=os.path.join(cert_path, 'ca.pem'), - verify=tls_verify, - ssl_version=ssl_version, - assert_hostname=assert_hostname, - ) - - return params - - -def convert_filters(filters): - result = {} - for k, v in iter(filters.items()): - if isinstance(v, bool): - v = 'true' if v else 'false' - if not isinstance(v, list): - v = [v, ] - result[k] = [ - str(item) if not isinstance(item, str) else item - for item in v - ] - return json.dumps(result) - - -def datetime_to_timestamp(dt): - """Convert a UTC datetime to a Unix timestamp""" - delta = dt - datetime.utcfromtimestamp(0) - return delta.seconds + delta.days * 24 * 3600 - - -def parse_bytes(s): - if isinstance(s, (int, float,)): - return s - if len(s) == 0: - return 0 - - if s[-2:-1].isalpha() and s[-1].isalpha(): - if s[-1] == "b" or s[-1] == "B": - s = s[:-1] - units = BYTE_UNITS - suffix = s[-1].lower() - - # Check if the variable is a string representation of an int - # without a units part. Assuming that the units are bytes. - if suffix.isdigit(): - digits_part = s - suffix = 'b' - else: - digits_part = s[:-1] - - if suffix in units.keys() or suffix.isdigit(): - try: - digits = float(digits_part) - except ValueError: - raise errors.DockerException( - 'Failed converting the string value for memory ({}) to' - ' an integer.'.format(digits_part) - ) - - # Reconvert to long for the final result - s = int(digits * units[suffix]) - else: - raise errors.DockerException( - 'The specified value for memory ({}) should specify the' - ' units. The postfix should be one of the `b` `k` `m` `g`' - ' characters'.format(s) - ) - - return s - - -def normalize_links(links): - if isinstance(links, dict): - links = iter(links.items()) - - return [f'{k}:{v}' if v else k for k, v in sorted(links)] - - -def parse_env_file(env_file): - """ - Reads a line-separated environment file. - The format of each line should be "key=value". 
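A sketch of parse_bytes() and convert_filters() from the hunk above; the expected value for '512m' assumes the usual 1024-based unit table in docker.constants.BYTE_UNITS:

from docker.utils import convert_filters, parse_bytes

assert parse_bytes('512m') == 512 * 1024 * 1024
assert parse_bytes('1024') == 1024  # plain digits are taken as bytes

# Filters become the JSON map-of-lists the Engine API expects:
assert convert_filters({'dangling': True}) == '{"dangling": ["true"]}'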
- """ - environment = {} - - with open(env_file) as f: - for line in f: - - if line[0] == '#': - continue - - line = line.strip() - if not line: - continue - - parse_line = line.split('=', 1) - if len(parse_line) == 2: - k, v = parse_line - environment[k] = v - else: - raise errors.DockerException( - 'Invalid line in environment file {}:\n{}'.format( - env_file, line)) - - return environment - - -def split_command(command): - return shlex.split(command) - - -def format_environment(environment): - def format_env(key, value): - if value is None: - return key - if isinstance(value, bytes): - value = value.decode('utf-8') - - return f'{key}={value}' - return [format_env(*var) for var in iter(environment.items())] - - -def format_extra_hosts(extra_hosts, task=False): - # Use format dictated by Swarm API if container is part of a task - if task: - return [ - f'{v} {k}' for k, v in sorted(iter(extra_hosts.items())) - ] - - return [ - f'{k}:{v}' for k, v in sorted(iter(extra_hosts.items())) - ] - - -def create_host_config(self, *args, **kwargs): - raise errors.DeprecatedMethod( - 'utils.create_host_config has been removed. Please use a ' - 'docker.types.HostConfig object instead.' - ) diff --git a/server/venv/Lib/site-packages/docker/version.py b/server/venv/Lib/site-packages/docker/version.py deleted file mode 100644 index 44eac8c..0000000 --- a/server/venv/Lib/site-packages/docker/version.py +++ /dev/null @@ -1,14 +0,0 @@ -try: - from ._version import __version__ -except ImportError: - try: - # importlib.metadata available in Python 3.8+, the fallback (0.0.0) - # is fine because release builds use _version (above) rather than - # this code path, so it only impacts developing w/ 3.7 - from importlib.metadata import version, PackageNotFoundError - try: - __version__ = version('docker') - except PackageNotFoundError: - __version__ = '0.0.0' - except ImportError: - __version__ = '0.0.0' diff --git a/server/venv/Lib/site-packages/dotenv/__init__.py b/server/venv/Lib/site-packages/dotenv/__init__.py deleted file mode 100644 index 7f4c631..0000000 --- a/server/venv/Lib/site-packages/dotenv/__init__.py +++ /dev/null @@ -1,49 +0,0 @@ -from typing import Any, Optional - -from .main import (dotenv_values, find_dotenv, get_key, load_dotenv, set_key, - unset_key) - - -def load_ipython_extension(ipython: Any) -> None: - from .ipython import load_ipython_extension - load_ipython_extension(ipython) - - -def get_cli_string( - path: Optional[str] = None, - action: Optional[str] = None, - key: Optional[str] = None, - value: Optional[str] = None, - quote: Optional[str] = None, -): - """Returns a string suitable for running as a shell script. - - Useful for converting a arguments passed to a fabric task - to be passed to a `local` or `run` command. 
- """ - command = ['dotenv'] - if quote: - command.append(f'-q {quote}') - if path: - command.append(f'-f {path}') - if action: - command.append(action) - if key: - command.append(key) - if value: - if ' ' in value: - command.append(f'"{value}"') - else: - command.append(value) - - return ' '.join(command).strip() - - -__all__ = ['get_cli_string', - 'load_dotenv', - 'dotenv_values', - 'get_key', - 'set_key', - 'unset_key', - 'find_dotenv', - 'load_ipython_extension'] diff --git a/server/venv/Lib/site-packages/dotenv/__main__.py b/server/venv/Lib/site-packages/dotenv/__main__.py deleted file mode 100644 index 3977f55..0000000 --- a/server/venv/Lib/site-packages/dotenv/__main__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Entry point for cli, enables execution with `python -m dotenv`""" - -from .cli import cli - -if __name__ == "__main__": - cli() diff --git a/server/venv/Lib/site-packages/dotenv/cli.py b/server/venv/Lib/site-packages/dotenv/cli.py deleted file mode 100644 index 65ead46..0000000 --- a/server/venv/Lib/site-packages/dotenv/cli.py +++ /dev/null @@ -1,199 +0,0 @@ -import json -import os -import shlex -import sys -from contextlib import contextmanager -from subprocess import Popen -from typing import Any, Dict, IO, Iterator, List - -try: - import click -except ImportError: - sys.stderr.write('It seems python-dotenv is not installed with cli option. \n' - 'Run pip install "python-dotenv[cli]" to fix this.') - sys.exit(1) - -from .main import dotenv_values, set_key, unset_key -from .version import __version__ - - -def enumerate_env(): - """ - Return a path for the ${pwd}/.env file. - - If pwd does not exist, return None. - """ - try: - cwd = os.getcwd() - except FileNotFoundError: - return None - path = os.path.join(cwd, '.env') - return path - - -@click.group() -@click.option('-f', '--file', default=enumerate_env(), - type=click.Path(file_okay=True), - help="Location of the .env file, defaults to .env file in current working directory.") -@click.option('-q', '--quote', default='always', - type=click.Choice(['always', 'never', 'auto']), - help="Whether to quote or not the variable values. Default mode is always. This does not affect parsing.") -@click.option('-e', '--export', default=False, - type=click.BOOL, - help="Whether to write the dot file as an executable bash script.") -@click.version_option(version=__version__) -@click.pass_context -def cli(ctx: click.Context, file: Any, quote: Any, export: Any) -> None: - """This script is used to set, get or unset values from a .env file.""" - ctx.obj = {'QUOTE': quote, 'EXPORT': export, 'FILE': file} - - -@contextmanager -def stream_file(path: os.PathLike) -> Iterator[IO[str]]: - """ - Open a file and yield the corresponding (decoded) stream. - - Exits with error code 2 if the file cannot be opened. - """ - - try: - with open(path) as stream: - yield stream - except OSError as exc: - print(f"Error opening env file: {exc}", file=sys.stderr) - exit(2) - - -@cli.command() -@click.pass_context -@click.option('--format', default='simple', - type=click.Choice(['simple', 'json', 'shell', 'export']), - help="The format in which to display the list. 
Default format is simple, " - "which displays name=value without quotes.") -def list(ctx: click.Context, format: bool) -> None: - """Display all the stored key/value.""" - file = ctx.obj['FILE'] - - with stream_file(file) as stream: - values = dotenv_values(stream=stream) - - if format == 'json': - click.echo(json.dumps(values, indent=2, sort_keys=True)) - else: - prefix = 'export ' if format == 'export' else '' - for k in sorted(values): - v = values[k] - if v is not None: - if format in ('export', 'shell'): - v = shlex.quote(v) - click.echo(f'{prefix}{k}={v}') - - -@cli.command() -@click.pass_context -@click.argument('key', required=True) -@click.argument('value', required=True) -def set(ctx: click.Context, key: Any, value: Any) -> None: - """Store the given key/value.""" - file = ctx.obj['FILE'] - quote = ctx.obj['QUOTE'] - export = ctx.obj['EXPORT'] - success, key, value = set_key(file, key, value, quote, export) - if success: - click.echo(f'{key}={value}') - else: - exit(1) - - -@cli.command() -@click.pass_context -@click.argument('key', required=True) -def get(ctx: click.Context, key: Any) -> None: - """Retrieve the value for the given key.""" - file = ctx.obj['FILE'] - - with stream_file(file) as stream: - values = dotenv_values(stream=stream) - - stored_value = values.get(key) - if stored_value: - click.echo(stored_value) - else: - exit(1) - - -@cli.command() -@click.pass_context -@click.argument('key', required=True) -def unset(ctx: click.Context, key: Any) -> None: - """Removes the given key.""" - file = ctx.obj['FILE'] - quote = ctx.obj['QUOTE'] - success, key = unset_key(file, key, quote) - if success: - click.echo(f"Successfully removed {key}") - else: - exit(1) - - -@cli.command(context_settings={'ignore_unknown_options': True}) -@click.pass_context -@click.option( - "--override/--no-override", - default=True, - help="Override variables from the environment file with those from the .env file.", -) -@click.argument('commandline', nargs=-1, type=click.UNPROCESSED) -def run(ctx: click.Context, override: bool, commandline: List[str]) -> None: - """Run command with environment variables present.""" - file = ctx.obj['FILE'] - if not os.path.isfile(file): - raise click.BadParameter( - f'Invalid value for \'-f\' "{file}" does not exist.', - ctx=ctx - ) - dotenv_as_dict = { - k: v - for (k, v) in dotenv_values(file).items() - if v is not None and (override or k not in os.environ) - } - - if not commandline: - click.echo('No command given.') - exit(1) - ret = run_command(commandline, dotenv_as_dict) - exit(ret) - - -def run_command(command: List[str], env: Dict[str, str]) -> int: - """Run command in sub process. - - Runs the command in a sub process with the variables from `env` - added in the current environment variables. 
- - Parameters - ---------- - command: List[str] - The command and it's parameters - env: Dict - The additional environment variables - - Returns - ------- - int - The return code of the command - - """ - # copy the current environment variables and add the vales from - # `env` - cmd_env = os.environ.copy() - cmd_env.update(env) - - p = Popen(command, - universal_newlines=True, - bufsize=0, - shell=False, - env=cmd_env) - _, _ = p.communicate() - - return p.returncode diff --git a/server/venv/Lib/site-packages/dotenv/ipython.py b/server/venv/Lib/site-packages/dotenv/ipython.py deleted file mode 100644 index 7df727c..0000000 --- a/server/venv/Lib/site-packages/dotenv/ipython.py +++ /dev/null @@ -1,39 +0,0 @@ -from IPython.core.magic import Magics, line_magic, magics_class # type: ignore -from IPython.core.magic_arguments import (argument, magic_arguments, # type: ignore - parse_argstring) # type: ignore - -from .main import find_dotenv, load_dotenv - - -@magics_class -class IPythonDotEnv(Magics): - - @magic_arguments() - @argument( - '-o', '--override', action='store_true', - help="Indicate to override existing variables" - ) - @argument( - '-v', '--verbose', action='store_true', - help="Indicate function calls to be verbose" - ) - @argument('dotenv_path', nargs='?', type=str, default='.env', - help='Search in increasingly higher folders for the `dotenv_path`') - @line_magic - def dotenv(self, line): - args = parse_argstring(self.dotenv, line) - # Locate the .env file - dotenv_path = args.dotenv_path - try: - dotenv_path = find_dotenv(dotenv_path, True, True) - except IOError: - print("cannot find .env file") - return - - # Load the .env file - load_dotenv(dotenv_path, verbose=args.verbose, override=args.override) - - -def load_ipython_extension(ipython): - """Register the %dotenv magic.""" - ipython.register_magics(IPythonDotEnv) diff --git a/server/venv/Lib/site-packages/dotenv/main.py b/server/venv/Lib/site-packages/dotenv/main.py deleted file mode 100644 index f40c20e..0000000 --- a/server/venv/Lib/site-packages/dotenv/main.py +++ /dev/null @@ -1,382 +0,0 @@ -import io -import logging -import os -import shutil -import sys -import tempfile -from collections import OrderedDict -from contextlib import contextmanager -from typing import (IO, Dict, Iterable, Iterator, Mapping, Optional, Tuple, - Union) - -from .parser import Binding, parse_stream -from .variables import parse_variables - -# A type alias for a string path to be used for the paths in this file. -# These paths may flow to `open()` and `shutil.move()`; `shutil.move()` -# only accepts string paths, not byte paths or file descriptors. See -# https://github.com/python/typeshed/pull/6832. 
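The %dotenv line magic registered above boils down to this plain-Python sequence (find the file, then load it with the requested flags):

from dotenv import find_dotenv, load_dotenv

load_dotenv(
    find_dotenv('.env', raise_error_if_not_found=True, usecwd=True),
    verbose=True,
    override=True,
)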
-StrPath = Union[str, 'os.PathLike[str]'] - -logger = logging.getLogger(__name__) - - -def with_warn_for_invalid_lines(mappings: Iterator[Binding]) -> Iterator[Binding]: - for mapping in mappings: - if mapping.error: - logger.warning( - "Python-dotenv could not parse statement starting at line %s", - mapping.original.line, - ) - yield mapping - - -class DotEnv: - def __init__( - self, - dotenv_path: Optional[StrPath], - stream: Optional[IO[str]] = None, - verbose: bool = False, - encoding: Optional[str] = None, - interpolate: bool = True, - override: bool = True, - ) -> None: - self.dotenv_path: Optional[StrPath] = dotenv_path - self.stream: Optional[IO[str]] = stream - self._dict: Optional[Dict[str, Optional[str]]] = None - self.verbose: bool = verbose - self.encoding: Optional[str] = encoding - self.interpolate: bool = interpolate - self.override: bool = override - - @contextmanager - def _get_stream(self) -> Iterator[IO[str]]: - if self.dotenv_path and os.path.isfile(self.dotenv_path): - with open(self.dotenv_path, encoding=self.encoding) as stream: - yield stream - elif self.stream is not None: - yield self.stream - else: - if self.verbose: - logger.info( - "Python-dotenv could not find configuration file %s.", - self.dotenv_path or '.env', - ) - yield io.StringIO('') - - def dict(self) -> Dict[str, Optional[str]]: - """Return dotenv as dict""" - if self._dict: - return self._dict - - raw_values = self.parse() - - if self.interpolate: - self._dict = OrderedDict(resolve_variables(raw_values, override=self.override)) - else: - self._dict = OrderedDict(raw_values) - - return self._dict - - def parse(self) -> Iterator[Tuple[str, Optional[str]]]: - with self._get_stream() as stream: - for mapping in with_warn_for_invalid_lines(parse_stream(stream)): - if mapping.key is not None: - yield mapping.key, mapping.value - - def set_as_environment_variables(self) -> bool: - """ - Load the current dotenv as system environment variable. - """ - if not self.dict(): - return False - - for k, v in self.dict().items(): - if k in os.environ and not self.override: - continue - if v is not None: - os.environ[k] = v - - return True - - def get(self, key: str) -> Optional[str]: - """ - """ - data = self.dict() - - if key in data: - return data[key] - - if self.verbose: - logger.warning("Key %s not found in %s.", key, self.dotenv_path) - - return None - - -def get_key( - dotenv_path: StrPath, - key_to_get: str, - encoding: Optional[str] = "utf-8", -) -> Optional[str]: - """ - Get the value of a given key from the given .env. - - Returns `None` if the key isn't found or doesn't have a value. 
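DotEnv parsing plus ${VAR} interpolation can be driven through a stream, as DotEnv._get_stream() above allows:

import io
from dotenv import dotenv_values

values = dotenv_values(stream=io.StringIO('A=1\nB=${A}2\n'))
assert values == {'A': '1', 'B': '12'}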
- """ - return DotEnv(dotenv_path, verbose=True, encoding=encoding).get(key_to_get) - - -@contextmanager -def rewrite( - path: StrPath, - encoding: Optional[str], -) -> Iterator[Tuple[IO[str], IO[str]]]: - if not os.path.isfile(path): - with open(path, mode="w", encoding=encoding) as source: - source.write("") - with tempfile.NamedTemporaryFile(mode="w", encoding=encoding, delete=False) as dest: - try: - with open(path, encoding=encoding) as source: - yield (source, dest) - except BaseException: - os.unlink(dest.name) - raise - shutil.move(dest.name, path) - - -def set_key( - dotenv_path: StrPath, - key_to_set: str, - value_to_set: str, - quote_mode: str = "always", - export: bool = False, - encoding: Optional[str] = "utf-8", -) -> Tuple[Optional[bool], str, str]: - """ - Adds or Updates a key/value to the given .env - - If the .env path given doesn't exist, fails instead of risking creating - an orphan .env somewhere in the filesystem - """ - if quote_mode not in ("always", "auto", "never"): - raise ValueError(f"Unknown quote_mode: {quote_mode}") - - quote = ( - quote_mode == "always" - or (quote_mode == "auto" and not value_to_set.isalnum()) - ) - - if quote: - value_out = "'{}'".format(value_to_set.replace("'", "\\'")) - else: - value_out = value_to_set - if export: - line_out = f'export {key_to_set}={value_out}\n' - else: - line_out = f"{key_to_set}={value_out}\n" - - with rewrite(dotenv_path, encoding=encoding) as (source, dest): - replaced = False - missing_newline = False - for mapping in with_warn_for_invalid_lines(parse_stream(source)): - if mapping.key == key_to_set: - dest.write(line_out) - replaced = True - else: - dest.write(mapping.original.string) - missing_newline = not mapping.original.string.endswith("\n") - if not replaced: - if missing_newline: - dest.write("\n") - dest.write(line_out) - - return True, key_to_set, value_to_set - - -def unset_key( - dotenv_path: StrPath, - key_to_unset: str, - quote_mode: str = "always", - encoding: Optional[str] = "utf-8", -) -> Tuple[Optional[bool], str]: - """ - Removes a given key from the given `.env` file. - - If the .env path given doesn't exist, fails. - If the given key doesn't exist in the .env, fails. 
- """ - if not os.path.exists(dotenv_path): - logger.warning("Can't delete from %s - it doesn't exist.", dotenv_path) - return None, key_to_unset - - removed = False - with rewrite(dotenv_path, encoding=encoding) as (source, dest): - for mapping in with_warn_for_invalid_lines(parse_stream(source)): - if mapping.key == key_to_unset: - removed = True - else: - dest.write(mapping.original.string) - - if not removed: - logger.warning("Key %s not removed from %s - key doesn't exist.", key_to_unset, dotenv_path) - return None, key_to_unset - - return removed, key_to_unset - - -def resolve_variables( - values: Iterable[Tuple[str, Optional[str]]], - override: bool, -) -> Mapping[str, Optional[str]]: - new_values: Dict[str, Optional[str]] = {} - - for (name, value) in values: - if value is None: - result = None - else: - atoms = parse_variables(value) - env: Dict[str, Optional[str]] = {} - if override: - env.update(os.environ) # type: ignore - env.update(new_values) - else: - env.update(new_values) - env.update(os.environ) # type: ignore - result = "".join(atom.resolve(env) for atom in atoms) - - new_values[name] = result - - return new_values - - -def _walk_to_root(path: str) -> Iterator[str]: - """ - Yield directories starting from the given directory up to the root - """ - if not os.path.exists(path): - raise IOError('Starting path not found') - - if os.path.isfile(path): - path = os.path.dirname(path) - - last_dir = None - current_dir = os.path.abspath(path) - while last_dir != current_dir: - yield current_dir - parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir)) - last_dir, current_dir = current_dir, parent_dir - - -def find_dotenv( - filename: str = '.env', - raise_error_if_not_found: bool = False, - usecwd: bool = False, -) -> str: - """ - Search in increasingly higher folders for the given file - - Returns path to the file if found, or an empty string otherwise - """ - - def _is_interactive(): - """ Decide whether this is running in a REPL or IPython notebook """ - main = __import__('__main__', None, None, fromlist=['__file__']) - return not hasattr(main, '__file__') - - if usecwd or _is_interactive() or getattr(sys, 'frozen', False): - # Should work without __file__, e.g. in REPL or IPython notebook. - path = os.getcwd() - else: - # will work for .py files - frame = sys._getframe() - current_file = __file__ - - while frame.f_code.co_filename == current_file: - assert frame.f_back is not None - frame = frame.f_back - frame_filename = frame.f_code.co_filename - path = os.path.dirname(os.path.abspath(frame_filename)) - - for dirname in _walk_to_root(path): - check_path = os.path.join(dirname, filename) - if os.path.isfile(check_path): - return check_path - - if raise_error_if_not_found: - raise IOError('File not found') - - return '' - - -def load_dotenv( - dotenv_path: Optional[StrPath] = None, - stream: Optional[IO[str]] = None, - verbose: bool = False, - override: bool = False, - interpolate: bool = True, - encoding: Optional[str] = "utf-8", -) -> bool: - """Parse a .env file and then load all the variables found as environment variables. - - Parameters: - dotenv_path: Absolute or relative path to .env file. - stream: Text stream (such as `io.StringIO`) with .env content, used if - `dotenv_path` is `None`. - verbose: Whether to output a warning the .env file is missing. - override: Whether to override the system environment variables with the variables - from the `.env` file. - encoding: Encoding to be used to read the file. 
- Returns: - Bool: True if at least one environment variable is set else False - - If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the - .env file. - """ - if dotenv_path is None and stream is None: - dotenv_path = find_dotenv() - - dotenv = DotEnv( - dotenv_path=dotenv_path, - stream=stream, - verbose=verbose, - interpolate=interpolate, - override=override, - encoding=encoding, - ) - return dotenv.set_as_environment_variables() - - -def dotenv_values( - dotenv_path: Optional[StrPath] = None, - stream: Optional[IO[str]] = None, - verbose: bool = False, - interpolate: bool = True, - encoding: Optional[str] = "utf-8", -) -> Dict[str, Optional[str]]: - """ - Parse a .env file and return its content as a dict. - - The returned dict will have `None` values for keys without values in the .env file. - For example, `foo=bar` results in `{"foo": "bar"}` whereas `foo` alone results in - `{"foo": None}` - - Parameters: - dotenv_path: Absolute or relative path to the .env file. - stream: `StringIO` object with .env content, used if `dotenv_path` is `None`. - verbose: Whether to output a warning if the .env file is missing. - encoding: Encoding to be used to read the file. - - If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the - .env file. - """ - if dotenv_path is None and stream is None: - dotenv_path = find_dotenv() - - return DotEnv( - dotenv_path=dotenv_path, - stream=stream, - verbose=verbose, - interpolate=interpolate, - override=True, - encoding=encoding, - ).dict() diff --git a/server/venv/Lib/site-packages/dotenv/parser.py b/server/venv/Lib/site-packages/dotenv/parser.py deleted file mode 100644 index 735f14a..0000000 --- a/server/venv/Lib/site-packages/dotenv/parser.py +++ /dev/null @@ -1,175 +0,0 @@ -import codecs -import re -from typing import (IO, Iterator, Match, NamedTuple, Optional, # noqa:F401 - Pattern, Sequence, Tuple) - - -def make_regex(string: str, extra_flags: int = 0) -> Pattern[str]: - return re.compile(string, re.UNICODE | extra_flags) - - -_newline = make_regex(r"(\r\n|\n|\r)") -_multiline_whitespace = make_regex(r"\s*", extra_flags=re.MULTILINE) -_whitespace = make_regex(r"[^\S\r\n]*") -_export = make_regex(r"(?:export[^\S\r\n]+)?") -_single_quoted_key = make_regex(r"'([^']+)'") -_unquoted_key = make_regex(r"([^=\#\s]+)") -_equal_sign = make_regex(r"(=[^\S\r\n]*)") -_single_quoted_value = make_regex(r"'((?:\\'|[^'])*)'") -_double_quoted_value = make_regex(r'"((?:\\"|[^"])*)"') -_unquoted_value = make_regex(r"([^\r\n]*)") -_comment = make_regex(r"(?:[^\S\r\n]*#[^\r\n]*)?") -_end_of_line = make_regex(r"[^\S\r\n]*(?:\r\n|\n|\r|$)") -_rest_of_line = make_regex(r"[^\r\n]*(?:\r|\n|\r\n)?") -_double_quote_escapes = make_regex(r"\\[\\'\"abfnrtv]") -_single_quote_escapes = make_regex(r"\\[\\']") - - -class Original(NamedTuple): - string: str - line: int - - -class Binding(NamedTuple): - key: Optional[str] - value: Optional[str] - original: Original - error: bool - - -class Position: - def __init__(self, chars: int, line: int) -> None: - self.chars = chars - self.line = line - - @classmethod - def start(cls) -> "Position": - return cls(chars=0, line=1) - - def set(self, other: "Position") -> None: - self.chars = other.chars - self.line = other.line - - def advance(self, string: str) -> None: - self.chars += len(string) - self.line += len(re.findall(_newline, string)) - - -class Error(Exception): - pass - - -class Reader: - def __init__(self, stream: IO[str]) -> None: - self.string = stream.read() - self.position = 
Position.start() - self.mark = Position.start() - - def has_next(self) -> bool: - return self.position.chars < len(self.string) - - def set_mark(self) -> None: - self.mark.set(self.position) - - def get_marked(self) -> Original: - return Original( - string=self.string[self.mark.chars:self.position.chars], - line=self.mark.line, - ) - - def peek(self, count: int) -> str: - return self.string[self.position.chars:self.position.chars + count] - - def read(self, count: int) -> str: - result = self.string[self.position.chars:self.position.chars + count] - if len(result) < count: - raise Error("read: End of string") - self.position.advance(result) - return result - - def read_regex(self, regex: Pattern[str]) -> Sequence[str]: - match = regex.match(self.string, self.position.chars) - if match is None: - raise Error("read_regex: Pattern not found") - self.position.advance(self.string[match.start():match.end()]) - return match.groups() - - -def decode_escapes(regex: Pattern[str], string: str) -> str: - def decode_match(match: Match[str]) -> str: - return codecs.decode(match.group(0), 'unicode-escape') # type: ignore - - return regex.sub(decode_match, string) - - -def parse_key(reader: Reader) -> Optional[str]: - char = reader.peek(1) - if char == "#": - return None - elif char == "'": - (key,) = reader.read_regex(_single_quoted_key) - else: - (key,) = reader.read_regex(_unquoted_key) - return key - - -def parse_unquoted_value(reader: Reader) -> str: - (part,) = reader.read_regex(_unquoted_value) - return re.sub(r"\s+#.*", "", part).rstrip() - - -def parse_value(reader: Reader) -> str: - char = reader.peek(1) - if char == u"'": - (value,) = reader.read_regex(_single_quoted_value) - return decode_escapes(_single_quote_escapes, value) - elif char == u'"': - (value,) = reader.read_regex(_double_quoted_value) - return decode_escapes(_double_quote_escapes, value) - elif char in (u"", u"\n", u"\r"): - return u"" - else: - return parse_unquoted_value(reader) - - -def parse_binding(reader: Reader) -> Binding: - reader.set_mark() - try: - reader.read_regex(_multiline_whitespace) - if not reader.has_next(): - return Binding( - key=None, - value=None, - original=reader.get_marked(), - error=False, - ) - reader.read_regex(_export) - key = parse_key(reader) - reader.read_regex(_whitespace) - if reader.peek(1) == "=": - reader.read_regex(_equal_sign) - value: Optional[str] = parse_value(reader) - else: - value = None - reader.read_regex(_comment) - reader.read_regex(_end_of_line) - return Binding( - key=key, - value=value, - original=reader.get_marked(), - error=False, - ) - except Error: - reader.read_regex(_rest_of_line) - return Binding( - key=None, - value=None, - original=reader.get_marked(), - error=True, - ) - - -def parse_stream(stream: IO[str]) -> Iterator[Binding]: - reader = Reader(stream) - while reader.has_next(): - yield parse_binding(reader) diff --git a/server/venv/Lib/site-packages/dotenv/py.typed b/server/venv/Lib/site-packages/dotenv/py.typed deleted file mode 100644 index 7632ecf..0000000 --- a/server/venv/Lib/site-packages/dotenv/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561 diff --git a/server/venv/Lib/site-packages/dotenv/variables.py b/server/venv/Lib/site-packages/dotenv/variables.py deleted file mode 100644 index 667f2f2..0000000 --- a/server/venv/Lib/site-packages/dotenv/variables.py +++ /dev/null @@ -1,86 +0,0 @@ -import re -from abc import ABCMeta, abstractmethod -from typing import Iterator, Mapping, Optional, Pattern - -_posix_variable: Pattern[str] = re.compile( - 
r""" - \$\{ - (?P[^\}:]*) - (?::- - (?P[^\}]*) - )? - \} - """, - re.VERBOSE, -) - - -class Atom(metaclass=ABCMeta): - def __ne__(self, other: object) -> bool: - result = self.__eq__(other) - if result is NotImplemented: - return NotImplemented - return not result - - @abstractmethod - def resolve(self, env: Mapping[str, Optional[str]]) -> str: ... - - -class Literal(Atom): - def __init__(self, value: str) -> None: - self.value = value - - def __repr__(self) -> str: - return f"Literal(value={self.value})" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, self.__class__): - return NotImplemented - return self.value == other.value - - def __hash__(self) -> int: - return hash((self.__class__, self.value)) - - def resolve(self, env: Mapping[str, Optional[str]]) -> str: - return self.value - - -class Variable(Atom): - def __init__(self, name: str, default: Optional[str]) -> None: - self.name = name - self.default = default - - def __repr__(self) -> str: - return f"Variable(name={self.name}, default={self.default})" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, self.__class__): - return NotImplemented - return (self.name, self.default) == (other.name, other.default) - - def __hash__(self) -> int: - return hash((self.__class__, self.name, self.default)) - - def resolve(self, env: Mapping[str, Optional[str]]) -> str: - default = self.default if self.default is not None else "" - result = env.get(self.name, default) - return result if result is not None else "" - - -def parse_variables(value: str) -> Iterator[Atom]: - cursor = 0 - - for match in _posix_variable.finditer(value): - (start, end) = match.span() - name = match["name"] - default = match["default"] - - if start > cursor: - yield Literal(value=value[cursor:start]) - - yield Variable(name=name, default=default) - cursor = end - - length = len(value) - if cursor < length: - yield Literal(value=value[cursor:length]) diff --git a/server/venv/Lib/site-packages/dotenv/version.py b/server/venv/Lib/site-packages/dotenv/version.py deleted file mode 100644 index 5becc17..0000000 --- a/server/venv/Lib/site-packages/dotenv/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "1.0.0" diff --git a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/INSTALLER b/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/LICENSE b/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/LICENSE deleted file mode 100644 index 0e259d4..0000000 --- a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/LICENSE +++ /dev/null @@ -1,121 +0,0 @@ -Creative Commons Legal Code - -CC0 1.0 Universal - - CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE - LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN - ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS - INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES - REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS - PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM - THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED - HEREUNDER. 
- -Statement of Purpose - -The laws of most jurisdictions throughout the world automatically confer -exclusive Copyright and Related Rights (defined below) upon the creator -and subsequent owner(s) (each and all, an "owner") of an original work of -authorship and/or a database (each, a "Work"). - -Certain owners wish to permanently relinquish those rights to a Work for -the purpose of contributing to a commons of creative, cultural and -scientific works ("Commons") that the public can reliably and without fear -of later claims of infringement build upon, modify, incorporate in other -works, reuse and redistribute as freely as possible in any form whatsoever -and for any purposes, including without limitation commercial purposes. -These owners may contribute to the Commons to promote the ideal of a free -culture and the further production of creative, cultural and scientific -works, or to gain reputation or greater distribution for their Work in -part through the use and efforts of others. - -For these and/or other purposes and motivations, and without any -expectation of additional consideration or compensation, the person -associating CC0 with a Work (the "Affirmer"), to the extent that he or she -is an owner of Copyright and Related Rights in the Work, voluntarily -elects to apply CC0 to the Work and publicly distribute the Work under its -terms, with knowledge of his or her Copyright and Related Rights in the -Work and the meaning and intended legal effect of CC0 on those rights. - -1. Copyright and Related Rights. A Work made available under CC0 may be -protected by copyright and related or neighboring rights ("Copyright and -Related Rights"). Copyright and Related Rights include, but are not -limited to, the following: - - i. the right to reproduce, adapt, distribute, perform, display, - communicate, and translate a Work; - ii. moral rights retained by the original author(s) and/or performer(s); -iii. publicity and privacy rights pertaining to a person's image or - likeness depicted in a Work; - iv. rights protecting against unfair competition in regards to a Work, - subject to the limitations in paragraph 4(a), below; - v. rights protecting the extraction, dissemination, use and reuse of data - in a Work; - vi. database rights (such as those arising under Directive 96/9/EC of the - European Parliament and of the Council of 11 March 1996 on the legal - protection of databases, and under any national implementation - thereof, including any amended or successor version of such - directive); and -vii. other similar, equivalent or corresponding rights throughout the - world based on applicable law or treaty, and any national - implementations thereof. - -2. Waiver. To the greatest extent permitted by, but not in contravention -of, applicable law, Affirmer hereby overtly, fully, permanently, -irrevocably and unconditionally waives, abandons, and surrenders all of -Affirmer's Copyright and Related Rights and associated claims and causes -of action, whether now known or unknown (including existing as well as -future claims and causes of action), in the Work (i) in all territories -worldwide, (ii) for the maximum duration provided by applicable law or -treaty (including future time extensions), (iii) in any current or future -medium and for any number of copies, and (iv) for any purpose whatsoever, -including without limitation commercial, advertising or promotional -purposes (the "Waiver"). 
Affirmer makes the Waiver for the benefit of each -member of the public at large and to the detriment of Affirmer's heirs and -successors, fully intending that such Waiver shall not be subject to -revocation, rescission, cancellation, termination, or any other legal or -equitable action to disrupt the quiet enjoyment of the Work by the public -as contemplated by Affirmer's express Statement of Purpose. - -3. Public License Fallback. Should any part of the Waiver for any reason -be judged legally invalid or ineffective under applicable law, then the -Waiver shall be preserved to the maximum extent permitted taking into -account Affirmer's express Statement of Purpose. In addition, to the -extent the Waiver is so judged Affirmer hereby grants to each affected -person a royalty-free, non transferable, non sublicensable, non exclusive, -irrevocable and unconditional license to exercise Affirmer's Copyright and -Related Rights in the Work (i) in all territories worldwide, (ii) for the -maximum duration provided by applicable law or treaty (including future -time extensions), (iii) in any current or future medium and for any number -of copies, and (iv) for any purpose whatsoever, including without -limitation commercial, advertising or promotional purposes (the -"License"). The License shall be deemed effective as of the date CC0 was -applied by Affirmer to the Work. Should any part of the License for any -reason be judged legally invalid or ineffective under applicable law, such -partial invalidity or ineffectiveness shall not invalidate the remainder -of the License, and in such case Affirmer hereby affirms that he or she -will not (i) exercise any of his or her remaining Copyright and Related -Rights in the Work or (ii) assert any associated claims and causes of -action with respect to the Work, in either case contrary to Affirmer's -express Statement of Purpose. - -4. Limitations and Disclaimers. - - a. No trademark or patent rights held by Affirmer are waived, abandoned, - surrendered, licensed or otherwise affected by this document. - b. Affirmer offers the Work as-is and makes no representations or - warranties of any kind concerning the Work, express, implied, - statutory or otherwise, including without limitation warranties of - title, merchantability, fitness for a particular purpose, non - infringement, or the absence of latent or other defects, accuracy, or - the present or absence of errors, whether or not discoverable, all to - the greatest extent permissible under applicable law. - c. Affirmer disclaims responsibility for clearing rights of other persons - that may apply to the Work or any use thereof, including without - limitation any person's Copyright and Related Rights in the Work. - Further, Affirmer disclaims responsibility for obtaining any necessary - consents, permissions or other rights required for any use of the - Work. - d. Affirmer understands and acknowledges that Creative Commons is not a - party to this document and has no duty or obligation with respect to - this CC0 or use of the Work. diff --git a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/METADATA b/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/METADATA deleted file mode 100644 index 2b6c0f9..0000000 --- a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/METADATA +++ /dev/null @@ -1,482 +0,0 @@ -Metadata-Version: 2.1 -Name: email-validator -Version: 2.1.0.post1 -Summary: A robust email address syntax and deliverability validation library. 
-Home-page: https://github.com/JoshData/python-email-validator -Author: Joshua Tauberer -Author-email: jt@occams.info -License: CC0 (copyright waived) -Keywords: email address validator -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=3.8 -Description-Content-Type: text/markdown -License-File: LICENSE -Requires-Dist: dnspython >=2.0.0 -Requires-Dist: idna >=2.0.0 - -email-validator: Validate Email Addresses -========================================= - -A robust email address syntax and deliverability validation library for -Python 3.8+ by [Joshua Tauberer](https://joshdata.me). - -This library validates that a string is of the form `name@example.com` -and optionally checks that the domain name is set up to receive email. -This is the sort of validation you would want when you are identifying -users by their email address like on a registration/login form (but not -necessarily for composing an email message, see below). - -Key features: - -* Checks that an email address has the correct syntax --- good for - registration/login forms or other uses related to identifying users. -* Gives friendly English error messages when validation fails that you - can display to end-users. -* Checks deliverability (optional): Does the domain name resolve? - (You can override the default DNS resolver to add query caching.) -* Supports internationalized domain names and internationalized local parts. -* Rejects addresses with unsafe Unicode characters, obsolete email address - syntax that you'd find unexpected, special use domain names like - `@localhost`, and domains without a dot by default. This is an - opinionated library! -* Normalizes email addresses (important for internationalized - and quoted-string addresses! see below). -* Python type annotations are used. - -This is an opinionated library. You should definitely also consider using -the less-opinionated [pyIsEmail](https://github.com/michaelherold/pyIsEmail) and -[flanker](https://github.com/mailgun/flanker) if they are better for your -use case. - -[![Build Status](https://github.com/JoshData/python-email-validator/actions/workflows/test_and_build.yaml/badge.svg)](https://github.com/JoshData/python-email-validator/actions/workflows/test_and_build.yaml) - -View the [CHANGELOG / Release Notes](CHANGELOG.md) for the version history of changes in the library. Occasionally this README is ahead of the latest published package --- see the CHANGELOG for details. - ---- - -Installation ------------- - -This package [is on PyPI](https://pypi.org/project/email-validator/), so: - -```sh -pip install email-validator -``` - -(You might need to use `pip3` depending on your local environment.) - -Quick Start ------------ - -If you're validating a user's email address before creating a user -account in your application, you might do this: - -```python -from email_validator import validate_email, EmailNotValidError - -email = "my+address@example.org" - -try: - - # Check that the email address is valid. 
Turn on check_deliverability
-    # for first-time validations like on account creation pages (but not
-    # login pages).
-    emailinfo = validate_email(email, check_deliverability=False)
-
-    # After this point, use only the normalized form of the email address,
-    # especially before going to a database query.
-    email = emailinfo.normalized
-
-except EmailNotValidError as e:
-
-    # The exception message is a human-readable explanation of why it's
-    # not a valid (or deliverable) email address.
-    print(str(e))
-```
-
-This validates the address and gives you its normalized form. You should
-**put the normalized form in your database** and always normalize before
-checking if an address is in your database. When using this in a login form,
-set `check_deliverability` to `False` to avoid unnecessary DNS queries.
-
-Usage
------
-
-### Overview
-
-The module provides a function `validate_email(email_address)` which
-takes an email address and:
-
-- Raises an `EmailNotValidError` with a helpful, human-readable error
-  message explaining why the email address is not valid, or
-- Returns an object with a normalized form of the email address (which
-  you should use!) and other information about it.
-
-When an email address is not valid, `validate_email` raises either an
-`EmailSyntaxError` if the form of the address is invalid or an
-`EmailUndeliverableError` if the domain name fails DNS checks. Both
-exception classes are subclasses of `EmailNotValidError`, which in turn
-is a subclass of `ValueError`.
-
-But when an email address is valid, an object is returned containing
-a normalized form of the email address (which you should use!) and
-other information.
-
-The validator doesn't, by default, permit obsolete forms of email addresses
-that no one uses anymore even though they are still valid and deliverable, since
-they will probably give you grief if you're using email for login. (See
-later in the document about how to allow some obsolete forms.)
-
-The validator optionally checks that the domain name in the email address has
-a DNS MX record indicating that it can receive email. (Except a Null MX record.
-If there is no MX record, a fallback A/AAAA-record is permitted, unless
-a reject-all SPF record is present.) DNS is slow and sometimes unavailable or
-unreliable, so consider whether these checks are useful for your use case and
-turn them off if they aren't.
-There is nothing to be gained by trying to actually contact an SMTP server, so
-that's not done here. For privacy, security, and practicality reasons, servers
-are good at not giving away whether an address is
-deliverable or not: email addresses that appear to accept mail at first
-can bounce mail after a delay, and bounced mail may indicate a temporary
-failure of a good email address (sometimes an intentional failure, like
-greylisting).
-
-### Options
-
-The `validate_email` function also accepts the following keyword arguments
-(defaults are as shown below):
-
-`check_deliverability=True`: If true, DNS queries are made to check that the domain name in the email address (the part after the @-sign) can receive mail, as described above. Set to `False` to skip this DNS-based check. It is recommended to pass `False` when performing validation for login pages (but not account creation pages) since re-validation of a previously validated domain in your database by querying DNS at every login is probably undesirable. You can also set `email_validator.CHECK_DELIVERABILITY` to `False` to turn this off for all calls by default.
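For example, the option just described can be applied per call or process-wide. A minimal sketch (reusing the address from the Quick Start; both forms are documented above, and no DNS query is made):

```python
import email_validator
from email_validator import validate_email

email = "my+address@example.org"

# Per call: skip the DNS-based deliverability check, e.g. on a login form.
info = validate_email(email, check_deliverability=False)

# Process-wide default: later calls skip the DNS check unless they pass
# check_deliverability explicitly.
email_validator.CHECK_DELIVERABILITY = False
info = validate_email(email)

print(info.normalized)
```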
-
-`dns_resolver=None`: Pass an instance of [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to control the DNS resolver, including setting a timeout and [a cache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html). The `caching_resolver` function shown below is a helper function to construct a dns.resolver.Resolver with a [LRUCache](https://dnspython.readthedocs.io/en/latest/resolver-caching.html#dns.resolver.LRUCache). Reuse the same resolver instance across calls to `validate_email` to make use of the cache.
-
-`test_environment=False`: If `True`, DNS-based deliverability checks are disabled and `test` and `**.test` domain names are permitted (see below). You can also set `email_validator.TEST_ENVIRONMENT` to `True` to turn it on for all calls by default.
-
-`allow_smtputf8=True`: Set to `False` to prohibit internationalized addresses that would require the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) extension. You can also set `email_validator.ALLOW_SMTPUTF8` to `False` to turn it off for all calls by default.
-
-`allow_quoted_local=False`: Set to `True` to allow obscure and potentially problematic email addresses in which the part of the address before the @-sign contains spaces, @-signs, or other surprising characters when the local part is surrounded in quotes (so-called quoted-string local parts). In the object returned by `validate_email`, the normalized local part removes any unnecessary backslash-escaping and even removes the surrounding quotes if the address would be valid without them. You can also set `email_validator.ALLOW_QUOTED_LOCAL` to `True` to turn this on for all calls by default.
-
-`allow_domain_literal=False`: Set to `True` to allow bracketed IPv4 and "IPv6:"-prefixed IPv6 addresses in the domain part of the email address. No deliverability checks are performed for these addresses. In the object returned by `validate_email`, the normalized domain will use the condensed IPv6 format, if applicable. The object's `domain_address` attribute will hold the parsed `ipaddress.IPv4Address` or `ipaddress.IPv6Address` object if applicable. You can also set `email_validator.ALLOW_DOMAIN_LITERAL` to `True` to turn this on for all calls by default.
-
-`allow_empty_local=False`: Set to `True` to allow an empty local part (i.e. `@example.com`), e.g. for validating Postfix aliases.
-
-
-### DNS timeout and cache
-
-When validating many email addresses or to control the timeout (the default is 15 seconds), create a caching [dns.resolver.Resolver](https://dnspython.readthedocs.io/en/latest/resolver-class.html) to reuse in each call. The `caching_resolver` function returns one easily for you:
-
-```python
-from email_validator import validate_email, caching_resolver
-
-resolver = caching_resolver(timeout=10)
-
-while True:
-    validate_email(email, dns_resolver=resolver)
-```
-
-### Test addresses
-
-This library rejects email addresses that use the [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) `invalid`, `localhost`, `test`, and some others by raising `EmailSyntaxError`. This is to protect your system from abuse: You probably don't want a user to be able to cause an email to be sent to `localhost` (although they might be able to still do so via a malicious MX record). However, in your non-production test environments you may want to use `@test` or `@myname.test` email addresses. There are three ways you can allow this:
-
-1. Add `test_environment=True` to the call to `validate_email` (see above).
-2. Set `email_validator.TEST_ENVIRONMENT` to `True` globally.
-3. Remove the special-use domain name that you want to use from `email_validator.SPECIAL_USE_DOMAIN_NAMES`, e.g.:
-
-```python
-import email_validator
-email_validator.SPECIAL_USE_DOMAIN_NAMES.remove("test")
-```
-
-It is tempting to use `@example.com/net/org` in tests. They are *not* in this library's `SPECIAL_USE_DOMAIN_NAMES` list so you can, but shouldn't, use them. These domains are reserved to IANA for use in documentation so there is no risk of accidentally emailing someone at those domains. But beware that this library will nevertheless reject these domain names if DNS-based deliverability checks are not disabled because these domains do not resolve to domains that accept email. In tests, consider using your own domain name or `@test` or `@myname.test` instead.
-
-Internationalized email addresses
----------------------------------
-
-The email protocol SMTP and the domain name system DNS have historically
-only allowed English (ASCII) characters in email addresses and domain names,
-respectively. Each has adapted to internationalization in a separate
-way, creating two separate aspects to email address
-internationalization.
-
-### Internationalized domain names (IDN)
-
-The first is [internationalized domain names (RFC
-5891)](https://tools.ietf.org/html/rfc5891), a.k.a. IDNA 2008. The DNS
-system has not been updated with Unicode support. Instead, internationalized
-domain names are converted into a special IDNA ASCII "[Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)"
-form starting with `xn--`. When an email address has non-ASCII
-characters in its domain part, the domain part is replaced with its IDNA
-ASCII equivalent form in the process of mail transmission. Your mail
-submission library probably does this for you transparently. ([Compliance
-around the web is not very good though](http://archives.miloush.net/michkap/archive/2012/02/27/10273315.html).) This library conforms to IDNA 2008
-using the [idna](https://github.com/kjd/idna) module by Kim Davies.
-
-### Internationalized local parts
-
-The second sort of internationalization is internationalization in the
-*local* part of the address (before the @-sign). In non-internationalized
-email addresses, only English letters, numbers, and some punctuation
-(`._!#$%&'^``*+-=~/?{|}`) are allowed. In internationalized email address
-local parts, a wider range of Unicode characters are allowed.
-
-A surprisingly large number of Unicode characters are not safe to display,
-especially when the email address is concatenated with other text, so this
-library tries to protect you by not permitting reserved, non-, private use,
-formatting (which can be used to alter the display order of characters),
-whitespace, and control characters, and combining characters
-as the first character of the local part and the domain name (so that they
-cannot combine with something outside of the email address string or with
-the @-sign). See https://qntm.org/safe and https://trojansource.codes/
-for relevant prior work. (Other than whitespace, these are checks that
-you should be applying to nearly all user inputs in a security-sensitive
-context.)
-
-These character checks are performed after Unicode normalization (see below),
-so you are only fully protected if you replace all user-provided email addresses
-with the normalized email address string returned by this library.
This does not -guard against the well known problem that many Unicode characters look alike -(or are identical), which can be used to fool humans reading displayed text. - -Email addresses with these non-ASCII characters require that your mail -submission library and the mail servers along the route to the destination, -including your own outbound mail server, all support the -[SMTPUTF8 (RFC 6531)](https://tools.ietf.org/html/rfc6531) extension. -Support for SMTPUTF8 varies. See the `allow_smtputf8` parameter. - -### If you know ahead of time that SMTPUTF8 is not supported by your mail submission stack - -By default all internationalized forms are accepted by the validator. -But if you know ahead of time that SMTPUTF8 is not supported by your -mail submission stack, then you must filter out addresses that require -SMTPUTF8 using the `allow_smtputf8=False` keyword argument (see above). -This will cause the validation function to raise a `EmailSyntaxError` if -delivery would require SMTPUTF8. That's just in those cases where -non-ASCII characters appear before the @-sign. If you do not set -`allow_smtputf8=False`, you can also check the value of the `smtputf8` -field in the returned object. - -If your mail submission library doesn't support Unicode at all --- even -in the domain part of the address --- then immediately prior to mail -submission you must replace the email address with its ASCII-ized form. -This library gives you back the ASCII-ized form in the `ascii_email` -field in the returned object, which you can get like this: - -```python -emailinfo = validate_email(email, allow_smtputf8=False) -email = emailinfo.ascii_email -``` - -The local part is left alone (if it has internationalized characters -`allow_smtputf8=False` will force validation to fail) and the domain -part is converted to [IDNA ASCII](https://tools.ietf.org/html/rfc5891). -(You probably should not do this at account creation time so you don't -change the user's login information without telling them.) - -Normalization -------------- - -### Unicode Normalization - -The use of Unicode in email addresses introduced a normalization -problem. Different Unicode strings can look identical and have the same -semantic meaning to the user. The `normalized` field returned on successful -validation provides the correctly normalized form of the given email -address. - -For example, the CJK fullwidth Latin letters are considered semantically -equivalent in domain names to their ASCII counterparts. This library -normalizes them to their ASCII counterparts: - -```python -emailinfo = validate_email("me@Domain.com") -print(emailinfo.normalized) -print(emailinfo.ascii_email) -# prints "me@domain.com" twice -``` - -Because an end-user might type their email address in different (but -equivalent) un-normalized forms at different times, you ought to -replace what they enter with the normalized form immediately prior to -going into your database (during account creation), querying your database -(during login), or sending outbound mail. Normalization may also change -the length of an email address, and this may affect whether it is valid -and acceptable by your SMTP provider. 
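For instance, two inputs that differ only in domain case name the same mailbox, and only their normalized forms compare equal. A small sketch (deliverability checks are skipped so it runs without DNS; the lowercasing rule is among the normalizations listed next):

```python
from email_validator import validate_email

# The same mailbox typed two different ways; domain names are
# case-insensitive, so both normalize to the same string.
a = validate_email("me@Domain.com", check_deliverability=False)
b = validate_email("me@DOMAIN.COM", check_deliverability=False)

assert a.normalized == b.normalized == "me@domain.com"
```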
-
-The normalizations include lowercasing the domain part of the email
-address (domain names are case-insensitive), [Unicode "NFC"
-normalization](https://en.wikipedia.org/wiki/Unicode_equivalence) of the
-whole address (which turns characters plus [combining
-characters](https://en.wikipedia.org/wiki/Combining_character) into
-precomposed characters where possible), replacement of [fullwidth and
-halfwidth
-characters](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms)
-in the domain part, possibly other
-[UTS46](http://unicode.org/reports/tr46) mappings on the domain part,
-and conversion from Punycode to Unicode characters.
-
-(See [RFC 6532 (internationalized email) section
-3.1](https://tools.ietf.org/html/rfc6532#section-3.1) and [RFC 5895
-(IDNA 2008) section 2](http://www.ietf.org/rfc/rfc5895.txt).)
-
-### Other Normalization
-
-Normalization is also applied to quoted-string local parts and domain
-literal IPv6 addresses if you have allowed them by the `allow_quoted_local`
-and `allow_domain_literal` options. In quoted-string local parts, unnecessary
-backslash escaping is removed and even the surrounding quotes are removed if
-they are unnecessary. For IPv6 domain literals, the IPv6 address is
-normalized to condensed form. [RFC 2142](https://datatracker.ietf.org/doc/html/rfc2142)
-also requires lowercase normalization for some specific mailbox names like `postmaster@`.
-
-### Length checks
-
-This library checks that the length of the email address is not longer than
-the maximum length. The check is performed on the normalized form of the
-address, which might be different from a string provided by a user. If you
-send email to the original string and not the normalized address, the email
-might be rejected because the original address could be too long.
-
-Examples
---------
-
-For the email address `test@joshdata.me`, the returned object is:
-
-```python
-ValidatedEmail(
-  normalized='test@joshdata.me',
-  local_part='test',
-  domain='joshdata.me',
-  ascii_email='test@joshdata.me',
-  ascii_local_part='test',
-  ascii_domain='joshdata.me',
-  smtputf8=False)
-```
-
-For the fictitious but valid address `example@ツ.ⓁⒾⒻⒺ`, which has an
-internationalized domain but ASCII local part, the returned object is:
-
-```python
-ValidatedEmail(
-  normalized='example@ツ.life',
-  local_part='example',
-  domain='ツ.life',
-  ascii_email='example@xn--bdk.life',
-  ascii_local_part='example',
-  ascii_domain='xn--bdk.life',
-  smtputf8=False)
-```
-
-Note that `normalized` and other fields provide a normalized form of the
-email address, domain name, and (in other cases) local part (see earlier
-discussion of normalization), which you should use in your database.
-
-Calling `validate_email` with the ASCII form of the above email address,
-`example@xn--bdk.life`, returns the exact same information (i.e., the
-`normalized` field always will contain Unicode characters, not Punycode).
-
-For the fictitious address `ツ-test@joshdata.me`, which has an
-internationalized local part, the returned object is:
-
-```python
-ValidatedEmail(
-  normalized='ツ-test@joshdata.me',
-  local_part='ツ-test',
-  domain='joshdata.me',
-  ascii_email=None,
-  ascii_local_part=None,
-  ascii_domain='joshdata.me',
-  smtputf8=True)
-```
-
-Now `smtputf8` is `True` and `ascii_email` is `None` because the local
-part of the address is internationalized. The `local_part` and `normalized` fields
-return the normalized form of the address.
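A short sketch of acting on those two fields at send time, for a mail stack without SMTPUTF8 support (the address is the fictitious example from above; deliverability checks are skipped so it runs without DNS):

```python
from email_validator import validate_email

info = validate_email("example@ツ.ⓁⒾⒻⒺ", check_deliverability=False)

if info.smtputf8:
    # The local part contains non-ASCII characters: delivery requires
    # SMTPUTF8 (RFC 6531) end-to-end, and ascii_email is None.
    print("requires SMTPUTF8:", info.normalized)
else:
    # An all-ASCII equivalent exists; a non-SMTPUTF8 stack can submit
    # to the IDNA-encoded form.
    print("submit to", info.ascii_email)  # example@xn--bdk.life
```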
- -Return value ------------- - -When an email address passes validation, the fields in the returned object -are: - -| Field | Value | -| -----:|-------| -| `normalized` | The normalized form of the email address that you should put in your database. This combines the `local_part` and `domain` fields (see below). | -| `ascii_email` | If set, an ASCII-only form of the normalized email address by replacing the domain part with [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt). This field will be present when an ASCII-only form of the email address exists (including if the email address is already ASCII). If the local part of the email address contains internationalized characters, `ascii_email` will be `None`. If set, it merely combines `ascii_local_part` and `ascii_domain`. | -| `local_part` | The normalized local part of the given email address (before the @-sign). Normalization includes Unicode NFC normalization and removing unnecessary quoted-string quotes and backslashes. If `allow_quoted_local` is True and the surrounding quotes are necessary, the quotes _will_ be present in this field. | -| `ascii_local_part` | If set, the local part, which is composed of ASCII characters only. | -| `domain` | The canonical internationalized Unicode form of the domain part of the email address. If the returned string contains non-ASCII characters, either the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit the message or else the email address's domain part must be converted to IDNA ASCII first: Use `ascii_domain` field instead. | -| `ascii_domain` | The [IDNA](https://tools.ietf.org/html/rfc5891) [Punycode](https://www.rfc-editor.org/rfc/rfc3492.txt)-encoded form of the domain part of the given email address, as it would be transmitted on the wire. | -| `domain_address` | If domain literals are allowed and if the email address contains one, an `ipaddress.IPv4Address` or `ipaddress.IPv6Address` object. | -| `smtputf8` | A boolean indicating that the [SMTPUTF8](https://tools.ietf.org/html/rfc6531) feature of your mail relay will be required to transmit messages to this address because the local part of the address has non-ASCII characters (the local part cannot be IDNA-encoded). If `allow_smtputf8=False` is passed as an argument, this flag will always be false because an exception is raised if it would have been true. | -| `mx` | A list of (priority, domain) tuples of MX records specified in the DNS for the domain (see [RFC 5321 section 5](https://tools.ietf.org/html/rfc5321#section-5)). May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. | -| `mx_fallback_type` | `None` if an `MX` record is found. If no MX records are actually specified in DNS and instead are inferred, through an obsolete mechanism, from A or AAAA records, the value is the type of DNS record used instead (`A` or `AAAA`). May be `None` if the deliverability check could not be completed because of a temporary issue like a timeout. | -| `spf` | Any SPF record found while checking deliverability. Only set if the SPF record is queried. | - -Assumptions ------------ - -By design, this validator does not pass all email addresses that -strictly conform to the standards. Many email address forms are obsolete -or likely to cause trouble: - -* The validator assumes the email address is intended to be - usable on the public Internet. 
The domain part - of the email address must be a resolvable domain name - (see the deliverability checks described above). - Most [Special Use Domain Names](https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.xhtml) - and their subdomains, as well as - domain names without a `.`, are rejected as a syntax error - (except see the `test_environment` parameter above). -* Obsolete email syntaxes are rejected: - The unusual ["(comment)" syntax](https://github.com/JoshData/python-email-validator/issues/77) - is rejected. Extremely old obsolete syntaxes are - rejected. Quoted-string local parts and domain-literal addresses - are rejected by default, but there are options to allow them (see above). - No one uses these forms anymore, and I can't think of any reason why anyone - using this library would need to accept them. - - -Testing -------- - -Tests can be run using - -```sh -pip install -r test_requirements.txt -make test -``` - -Tests run with mocked DNS responses. When adding or changing tests, temporarily turn on the `BUILD_MOCKED_DNS_RESPONSE_DATA` flag in `tests/mocked_dns_responses.py` to re-build the database of mocked responses from live queries. - -For Project Maintainers ------------------------ - -The package is distributed as a universal wheel and as a source package. - -To release: - -* Update CHANGELOG.md. -* Update the version number in `email_validator/version.py`. -* Make & push a commit with the new version number and make sure tests pass. -* Make & push a tag (see command below). -* Make a release at https://github.com/JoshData/python-email-validator/releases/new. -* Publish a source and wheel distribution to pypi (see command below). - -```sh -git tag v$(cat email_validator/version.py | sed "s/.* = //" | sed 's/"//g') -git push --tags -./release_to_pypi.sh -``` diff --git a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/RECORD b/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/RECORD deleted file mode 100644 index 3c93ef2..0000000 --- a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/RECORD +++ /dev/null @@ -1,26 +0,0 @@ -../../Scripts/email_validator.exe,sha256=kHouteFnjEwWQ2XQh9a4Y-UvJ2GLtryK1oqfhOX6E6s,108464 -email_validator-2.1.0.post1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -email_validator-2.1.0.post1.dist-info/LICENSE,sha256=ogEPNDSH0_dhiv_lT3ifVIdgIzHAqNA_SemnxUfPBJk,7048 -email_validator-2.1.0.post1.dist-info/METADATA,sha256=fP4R-XKjWy52NPZsIr5ADKBX-juH2S3cHTkCLhSBc_k,25840 -email_validator-2.1.0.post1.dist-info/RECORD,, -email_validator-2.1.0.post1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -email_validator-2.1.0.post1.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92 -email_validator-2.1.0.post1.dist-info/entry_points.txt,sha256=zRM_6bNIUSHTbNx5u6M3nK1MAguvryrc9hICC6HyrBg,66 -email_validator-2.1.0.post1.dist-info/top_level.txt,sha256=fYDOSWFZke46ut7WqdOAJjjhlpPYAaOwOwIsh3s8oWI,16 -email_validator/__init__.py,sha256=_GA_uk_JuZcy6dPSVpvyyQuEPZt7iD-sXDbOd4f9sIQ,4236 -email_validator/__main__.py,sha256=SgarDcfH3W5KlcuUi6aaiQPqMdL3C-mOZVnTS6WesS4,2146 -email_validator/__pycache__/__init__.cpython-311.pyc,, -email_validator/__pycache__/__main__.cpython-311.pyc,, -email_validator/__pycache__/deliverability.cpython-311.pyc,, -email_validator/__pycache__/exceptions_types.cpython-311.pyc,, -email_validator/__pycache__/rfc_constants.cpython-311.pyc,, -email_validator/__pycache__/syntax.cpython-311.pyc,, 
-email_validator/__pycache__/validate_email.cpython-311.pyc,, -email_validator/__pycache__/version.cpython-311.pyc,, -email_validator/deliverability.py,sha256=it2lcdg-uML_-AVXOWQ6FbUW0bJjtViVEh-w2Im1E3g,5861 -email_validator/exceptions_types.py,sha256=yX1-0rSQ-f7HEIy8br_dEhW_Ctw9PCtgCceUSRFEdPo,5807 -email_validator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -email_validator/rfc_constants.py,sha256=2yJVQgKFaVcG2CV-XT4PWevBUjAPVHRycgzOpBaqRZE,2720 -email_validator/syntax.py,sha256=6YF_2M5oSEKgtaX6E5wFnZHCU_9sGrO-We4UReqXGME,27307 -email_validator/validate_email.py,sha256=DDdTj7Xwqhq0pCCMw-fRy213SuM0kmGzu-b2I1yTRNU,6585 -email_validator/version.py,sha256=-WsUdwvZYItaNZsUXpRhmhfg_uLnDu7pXpEtlYuwd7M,28 diff --git a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/REQUESTED b/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/WHEEL b/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/WHEEL deleted file mode 100644 index 7e68873..0000000 --- a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.2) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/entry_points.txt b/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/entry_points.txt deleted file mode 100644 index 03c6e23..0000000 --- a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -email_validator = email_validator.__main__:main diff --git a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/top_level.txt b/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/top_level.txt deleted file mode 100644 index 798fd5e..0000000 --- a/server/venv/Lib/site-packages/email_validator-2.1.0.post1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -email_validator diff --git a/server/venv/Lib/site-packages/email_validator/__init__.py b/server/venv/Lib/site-packages/email_validator/__init__.py deleted file mode 100644 index c3b5929..0000000 --- a/server/venv/Lib/site-packages/email_validator/__init__.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- - -# Export the main method, helper methods, and the public data types. -from .exceptions_types import ValidatedEmail, EmailNotValidError, \ - EmailSyntaxError, EmailUndeliverableError -from .validate_email import validate_email -from .version import __version__ - -__all__ = ["validate_email", - "ValidatedEmail", "EmailNotValidError", - "EmailSyntaxError", "EmailUndeliverableError", - "caching_resolver", "__version__"] - - -def caching_resolver(*args, **kwargs): - # Lazy load `deliverability` as it is slow to import (due to dns.resolver) - from .deliverability import caching_resolver - - return caching_resolver(*args, **kwargs) - - -# These global attributes are a part of the library's API and can be -# changed by library users. - -# Default values for keyword arguments. 
-
-ALLOW_SMTPUTF8 = True
-ALLOW_QUOTED_LOCAL = False
-ALLOW_DOMAIN_LITERAL = False
-GLOBALLY_DELIVERABLE = True
-CHECK_DELIVERABILITY = True
-TEST_ENVIRONMENT = False
-DEFAULT_TIMEOUT = 15  # secs
-
-# IANA Special Use Domain Names
-# Last Updated 2021-09-21
-# https://www.iana.org/assignments/special-use-domain-names/special-use-domain-names.txt
-#
-# The domain names without dots would be caught by the check that the domain
-# name in an email address must have a period, but this list will also catch
-# subdomains of these domains, which are also reserved.
-SPECIAL_USE_DOMAIN_NAMES = [
-    # The "arpa" entry here is consolidated from a lot of arpa subdomains
-    # for private address (i.e. non-routable IP addresses like 172.16.x.x)
-    # reverse mapping, plus some other subdomains. Although RFC 6761 says
-    # that application software should not treat these domains as special,
-    # they are private-use domains and so cannot have globally deliverable
-    # email addresses, which is an assumption of this library, and probably
-    # all of arpa is similarly special-use, so we reject it all.
-    "arpa",
-
-    # RFC 6761 says applications "SHOULD NOT" treat the "example" domains
-    # as special, i.e. applications should accept these domains.
-    #
-    # The domain "example" alone fails our syntax validation because it
-    # lacks a dot (we assume no one has an email address on a TLD directly).
-    # "@example.com/net/org" will currently fail DNS-based deliverability
-    # checks because IANA publishes a NULL MX for these domains, and
-    # "@mail.example[.com/net/org]" and other subdomains will fail DNS-
-    # based deliverability checks because IANA does not publish MX or A
-    # DNS records for these subdomains.
-    # "example", # i.e. "www.example"
-    # "example.com",
-    # "example.net",
-    # "example.org",
-
-    # RFC 6761 says that applications are permitted to treat this domain
-    # as special and that DNS should return an immediate negative response,
-    # so we also immediately reject this domain, which also follows the
-    # purpose of the domain.
-    "invalid",
-
-    # RFC 6762 says that applications "may" treat ".local" as special and
-    # that "name resolution APIs and libraries SHOULD recognize these names
-    # as special," and since ".local" has no global definition, we reject
-    # it, as we expect email addresses to be globally routable.
-    "local",
-
-    # RFC 6761 says that applications (like this library) are permitted
-    # to treat "localhost" as special, and since it cannot have a globally
-    # deliverable email address, we reject it.
-    "localhost",
-
-    # RFC 7686 says "applications that do not implement the Tor protocol
-    # SHOULD generate an error upon the use of .onion and SHOULD NOT
-    # perform a DNS lookup."
-    "onion",
-
-    # Although RFC 6761 says that application software should not treat
-    # these domains as special, it also warns users that the address may
-    # resolve differently in different systems, and therefore it cannot
-    # have a globally routable email address, which is an assumption of
-    # this library, so we reject "@test" and "@*.test" addresses, unless
-    # the test_environment keyword argument is given, to allow their use
-    # in application-level test environments. These domains will generally
-    # fail deliverability checks because "test" is not an actual TLD.
- "test", -] diff --git a/server/venv/Lib/site-packages/email_validator/__main__.py b/server/venv/Lib/site-packages/email_validator/__main__.py deleted file mode 100644 index a414ff6..0000000 --- a/server/venv/Lib/site-packages/email_validator/__main__.py +++ /dev/null @@ -1,59 +0,0 @@ -# A command-line tool for testing. -# -# Usage: -# -# python -m email_validator test@example.org -# python -m email_validator < LIST_OF_ADDRESSES.TXT -# -# Provide email addresses to validate either as a command-line argument -# or in STDIN separated by newlines. Validation errors will be printed for -# invalid email addresses. When passing an email address on the command -# line, if the email address is valid, information about it will be printed. -# When using STDIN, no output will be given for valid email addresses. -# -# Keyword arguments to validate_email can be set in environment variables -# of the same name but upprcase (see below). - -import json -import os -import sys - -from .validate_email import validate_email -from .deliverability import caching_resolver -from .exceptions_types import EmailNotValidError - - -def main(dns_resolver=None): - # The dns_resolver argument is for tests. - - # Set options from environment variables. - options = {} - for varname in ('ALLOW_SMTPUTF8', 'ALLOW_QUOTED_LOCAL', 'ALLOW_DOMAIN_LITERAL', - 'GLOBALLY_DELIVERABLE', 'CHECK_DELIVERABILITY', 'TEST_ENVIRONMENT'): - if varname in os.environ: - options[varname.lower()] = bool(os.environ[varname]) - for varname in ('DEFAULT_TIMEOUT',): - if varname in os.environ: - options[varname.lower()] = float(os.environ[varname]) - - if len(sys.argv) == 1: - # Validate the email addresses pased line-by-line on STDIN. - dns_resolver = dns_resolver or caching_resolver() - for line in sys.stdin: - email = line.strip() - try: - validate_email(email, dns_resolver=dns_resolver, **options) - except EmailNotValidError as e: - print(f"{email} {e}") - else: - # Validate the email address passed on the command line. - email = sys.argv[1] - try: - result = validate_email(email, dns_resolver=dns_resolver, **options) - print(json.dumps(result.as_dict(), indent=2, sort_keys=True, ensure_ascii=False)) - except EmailNotValidError as e: - print(e) - - -if __name__ == "__main__": - main() diff --git a/server/venv/Lib/site-packages/email_validator/deliverability.py b/server/venv/Lib/site-packages/email_validator/deliverability.py deleted file mode 100644 index 4846091..0000000 --- a/server/venv/Lib/site-packages/email_validator/deliverability.py +++ /dev/null @@ -1,127 +0,0 @@ -from typing import Optional, Any, Dict - -from .exceptions_types import EmailUndeliverableError - -import dns.resolver -import dns.exception - - -def caching_resolver(*, timeout: Optional[int] = None, cache=None): - if timeout is None: - from . import DEFAULT_TIMEOUT - timeout = DEFAULT_TIMEOUT - resolver = dns.resolver.Resolver() - resolver.cache = cache or dns.resolver.LRUCache() # type: ignore - resolver.lifetime = timeout # type: ignore # timeout, in seconds - return resolver - - -def validate_email_deliverability(domain: str, domain_i18n: str, timeout: Optional[int] = None, dns_resolver=None): - # Check that the domain resolves to an MX record. If there is no MX record, - # try an A or AAAA record which is a deprecated fallback for deliverability. - # Raises an EmailUndeliverableError on failure. On success, returns a dict - # with deliverability information. - - # If no dns.resolver.Resolver was given, get dnspython's default resolver. 
- # Override the default resolver's timeout. This may affect other uses of - # dnspython in this process. - if dns_resolver is None: - from . import DEFAULT_TIMEOUT - if timeout is None: - timeout = DEFAULT_TIMEOUT - dns_resolver = dns.resolver.get_default_resolver() - dns_resolver.lifetime = timeout - elif timeout is not None: - raise ValueError("It's not valid to pass both timeout and dns_resolver.") - - deliverability_info: Dict[str, Any] = {} - - try: - try: - # Try resolving for MX records (RFC 5321 Section 5). - response = dns_resolver.resolve(domain, "MX") - - # For reporting, put them in priority order and remove the trailing dot in the qnames. - mtas = sorted([(r.preference, str(r.exchange).rstrip('.')) for r in response]) - - # RFC 7505: Null MX (0, ".") records signify the domain does not accept email. - # Remove null MX records from the mtas list (but we've stripped trailing dots, - # so the 'exchange' is just "") so we can check if there are no non-null MX - # records remaining. - mtas = [(preference, exchange) for preference, exchange in mtas - if exchange != ""] - if len(mtas) == 0: # null MX only; if there were no MX records originally, a NoAnswer exception would have occurred - raise EmailUndeliverableError(f"The domain name {domain_i18n} does not accept email.") - - deliverability_info["mx"] = mtas - deliverability_info["mx_fallback_type"] = None - - except dns.resolver.NoAnswer: - # If there was no MX record, fall back to an A record. (RFC 5321 Section 5) - try: - response = dns_resolver.resolve(domain, "A") - deliverability_info["mx"] = [(0, str(r)) for r in response] - deliverability_info["mx_fallback_type"] = "A" - - except dns.resolver.NoAnswer: - - # If there was no A record, fall back to an AAAA record. - # (It's unclear if SMTP servers actually do this.) - try: - response = dns_resolver.resolve(domain, "AAAA") - deliverability_info["mx"] = [(0, str(r)) for r in response] - deliverability_info["mx_fallback_type"] = "AAAA" - - except dns.resolver.NoAnswer: - # If there was no MX, A, or AAAA record, then mail to - # this domain is not deliverable, although the domain - # name has other records (otherwise NXDOMAIN would - # have been raised). - raise EmailUndeliverableError(f"The domain name {domain_i18n} does not accept email.") - - # Check for an SPF (RFC 7208) reject-all record ("v=spf1 -all") which indicates - # no emails are sent from this domain (similar to a Null MX record - # but for sending rather than receiving). In combination with the - # absence of an MX record, this is probably a good sign that the - # domain is not used for email. - try: - response = dns_resolver.resolve(domain, "TXT") - for rec in response: - value = b"".join(rec.strings) - if value.startswith(b"v=spf1 "): - deliverability_info["spf"] = value.decode("ascii", errors='replace') - if value == b"v=spf1 -all": - raise EmailUndeliverableError(f"The domain name {domain_i18n} does not send email.") - except dns.resolver.NoAnswer: - # No TXT records means there is no SPF policy, so we cannot take any action. - pass - - except dns.resolver.NXDOMAIN: - # The domain name does not exist --- there are no records of any sort - # for the domain name. - raise EmailUndeliverableError(f"The domain name {domain_i18n} does not exist.") - - except dns.resolver.NoNameservers: - # All nameservers failed to answer the query. This might be a problem - # with local nameservers, maybe? We'll allow the domain to go through. 
- return { - "unknown-deliverability": "no_nameservers", - } - - except dns.exception.Timeout: - # A timeout could occur for various reasons, so don't treat it as a failure. - return { - "unknown-deliverability": "timeout", - } - - except EmailUndeliverableError: - # Don't let these get clobbered by the wider except block below. - raise - - except Exception as e: - # Unhandled conditions should not propagate. - raise EmailUndeliverableError( - "There was an error while checking if the domain name in the email address is deliverable: " + str(e) - ) - - return deliverability_info diff --git a/server/venv/Lib/site-packages/email_validator/exceptions_types.py b/server/venv/Lib/site-packages/email_validator/exceptions_types.py deleted file mode 100644 index 3f6409c..0000000 --- a/server/venv/Lib/site-packages/email_validator/exceptions_types.py +++ /dev/null @@ -1,144 +0,0 @@ -import warnings -from typing import Optional - - -class EmailNotValidError(ValueError): - """Parent class of all exceptions raised by this module.""" - pass - - -class EmailSyntaxError(EmailNotValidError): - """Exception raised when an email address fails validation because of its form.""" - pass - - -class EmailUndeliverableError(EmailNotValidError): - """Exception raised when an email address fails validation because its domain name does not appear deliverable.""" - pass - - -class ValidatedEmail(object): - """The validate_email function returns objects of this type holding the normalized form of the email address - and other information.""" - - """The email address that was passed to validate_email. (If passed as bytes, this will be a string.)""" - original: str - - """The normalized email address, which should always be used in preference to the original address. - The normalized address converts an IDNA ASCII domain name to Unicode, if possible, and performs - Unicode normalization on the local part and on the domain (if originally Unicode). It is the - concatenation of the local_part and domain attributes, separated by an @-sign.""" - normalized: str - - """The local part of the email address after Unicode normalization.""" - local_part: str - - """The domain part of the email address after Unicode normalization or conversion to - Unicode from IDNA ascii.""" - domain: str - - """If the domain part is a domain literal, the IPv4Address or IPv6Address object.""" - domain_address: object - - """If not None, a form of the email address that uses 7-bit ASCII characters only.""" - ascii_email: Optional[str] - - """If not None, the local part of the email address using 7-bit ASCII characters only.""" - ascii_local_part: Optional[str] - - """A form of the domain name that uses 7-bit ASCII characters only.""" - ascii_domain: str - - """If True, the SMTPUTF8 feature of your mail relay will be required to transmit messages - to this address. This flag is True just when ascii_local_part is missing. 
Otherwise it - is False.""" - smtputf8: bool - - """If a deliverability check is performed and if it succeeds, a list of (priority, domain) - tuples of MX records specified in the DNS for the domain.""" - mx: list - - """If no MX records are actually specified in DNS and instead are inferred, through an obsolete - mechanism, from A or AAAA records, the value is the type of DNS record used instead (`A` or `AAAA`).""" - mx_fallback_type: str - - """Tests use this constructor.""" - def __init__(self, **kwargs): - for k, v in kwargs.items(): - setattr(self, k, v) - - def __repr__(self): - return f"<ValidatedEmail {self.normalized}>" - - """For backwards compatibility, support old field names.""" - def __getattr__(self, key): - if key == "original_email": - return self.original - if key == "email": - return self.normalized - raise AttributeError(key) - - @property - def email(self): - import warnings - warnings.warn("ValidatedEmail.email is deprecated and will be removed, use ValidatedEmail.normalized instead", DeprecationWarning) - return self.normalized - - """For backwards compatibility, some fields are also exposed through a dict-like interface. Note - that some of the names changed when they became attributes.""" - def __getitem__(self, key): - warnings.warn("dict-like access to the return value of validate_email is deprecated and may not be supported in the future.", DeprecationWarning, stacklevel=2) - if key == "email": - return self.normalized - if key == "email_ascii": - return self.ascii_email - if key == "local": - return self.local_part - if key == "domain": - return self.ascii_domain - if key == "domain_i18n": - return self.domain - if key == "smtputf8": - return self.smtputf8 - if key == "mx": - return self.mx - if key == "mx-fallback": - return self.mx_fallback_type - raise KeyError() - - """Tests use this.""" - def __eq__(self, other): - if not isinstance(other, ValidatedEmail): - return False - return ( - self.normalized == other.normalized - and self.local_part == other.local_part - and self.domain == other.domain - and getattr(self, 'ascii_email', None) == getattr(other, 'ascii_email', None) - and getattr(self, 'ascii_local_part', None) == getattr(other, 'ascii_local_part', None) - and getattr(self, 'ascii_domain', None) == getattr(other, 'ascii_domain', None) - and self.smtputf8 == other.smtputf8 - and repr(sorted(self.mx) if getattr(self, 'mx', None) else None) - == repr(sorted(other.mx) if getattr(other, 'mx', None) else None) - and getattr(self, 'mx_fallback_type', None) == getattr(other, 'mx_fallback_type', None) - ) - - """This helps produce the README.""" - def as_constructor(self): - return "ValidatedEmail(" \ - + ",".join("\n {}={}".format( - key, - repr(getattr(self, key))) - for key in ('normalized', 'local_part', 'domain', - 'ascii_email', 'ascii_local_part', 'ascii_domain', - 'smtputf8', 'mx', 'mx_fallback_type') - if hasattr(self, key) - ) \ - + ")" - - """Convenience method for accessing ValidatedEmail as a dict""" - def as_dict(self): - d = self.__dict__ - if d.get('domain_address'): - d['domain_address'] = repr(d['domain_address']) - return d diff --git a/server/venv/Lib/site-packages/email_validator/py.typed b/server/venv/Lib/site-packages/email_validator/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/email_validator/rfc_constants.py b/server/venv/Lib/site-packages/email_validator/rfc_constants.py deleted file mode 100644 index d5961d6..0000000 --- a/server/venv/Lib/site-packages/email_validator/rfc_constants.py +++ /dev/null @@ -1,52 
+0,0 @@ -# These constants are defined by the email specifications. - -import re - -# Based on RFC 5322 3.2.3, these characters are permitted in email -# addresses (not taking into account internationalization) separated by dots: -ATEXT = r'a-zA-Z0-9_!#\$%&\'\*\+\-/=\?\^`\{\|\}~' -ATEXT_RE = re.compile('[.' + ATEXT + ']') # ATEXT plus dots -DOT_ATOM_TEXT = re.compile('[' + ATEXT + ']+(?:\\.[' + ATEXT + r']+)*\Z') - -# RFC 6531 3.3 extends the allowed characters in internationalized -# addresses to also include three specific ranges of UTF8 defined in -# RFC 3629 section 4, which appear to be the Unicode code points from -# U+0080 to U+10FFFF. -ATEXT_INTL = ATEXT + u"\u0080-\U0010FFFF" -ATEXT_INTL_RE = re.compile('[.' + ATEXT_INTL + ']') # ATEXT_INTL plus dots -DOT_ATOM_TEXT_INTL = re.compile('[' + ATEXT_INTL + ']+(?:\\.[' + ATEXT_INTL + r']+)*\Z') - -# The domain part of the email address, after IDNA (ASCII) encoding, -# must also satisfy the requirements of RFC 952/RFC 1123 2.1 which -# restrict the allowed characters of hostnames further. -ATEXT_HOSTNAME_INTL = re.compile(r"[a-zA-Z0-9\-\." + "\u0080-\U0010FFFF" + "]") -HOSTNAME_LABEL = r'(?:(?:[a-zA-Z0-9][a-zA-Z0-9\-]*)?[a-zA-Z0-9])' -DOT_ATOM_TEXT_HOSTNAME = re.compile(HOSTNAME_LABEL + r'(?:\.' + HOSTNAME_LABEL + r')*\Z') -DOMAIN_NAME_REGEX = re.compile(r"[A-Za-z]\Z") # all TLDs currently end with a letter - -# Domain literal (RFC 5322 3.4.1) -DOMAIN_LITERAL_CHARS = re.compile(r"[\u0021-\u00FA\u005E-\u007E]") - -# Quoted-string local part (RFC 5321 4.1.2, internationalized by RFC 6531 3.3) -# The permitted characters in a quoted string are the characters in the range -# 32-126, except that quotes and (literal) backslashes can only appear when escaped -# by a backslash. When internationalized, UTF8 strings are also permitted except -# the ASCII characters that are not previously permitted (see above). -# QUOTED_LOCAL_PART_ADDR = re.compile(r"^\"((?:[\u0020-\u0021\u0023-\u005B\u005D-\u007E]|\\[\u0020-\u007E])*)\"@(.*)") -QUOTED_LOCAL_PART_ADDR = re.compile(r"^\"((?:[^\"\\]|\\.)*)\"@(.*)") -QTEXT_INTL = re.compile(r"[\u0020-\u007E\u0080-\U0010FFFF]") - -# Length constants -# RFC 3696 + errata 1003 + errata 1690 (https://www.rfc-editor.org/errata_search.php?rfc=3696&eid=1690) -# explains the maximum length of an email address is 254 octets. 
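# A quick sketch of the dot-atom rule defined above (not part of the deleted
# file; the local parts are placeholders):
from email_validator.rfc_constants import DOT_ATOM_TEXT

assert DOT_ATOM_TEXT.match("john.doe")       # plain ASCII local part matches
assert not DOT_ATOM_TEXT.match(".john")      # leading dot is rejected
assert not DOT_ATOM_TEXT.match("john..doe")  # consecutive dots are rejected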
-EMAIL_MAX_LENGTH = 254 -LOCAL_PART_MAX_LENGTH = 64 -DNS_LABEL_LENGTH_LIMIT = 63 # in "octets", RFC 1035 2.3.1 -DOMAIN_MAX_LENGTH = 255 # in "octets", RFC 1035 2.3.4 and RFC 5321 4.5.3.1.2 - -# RFC 2142 -CASE_INSENSITIVE_MAILBOX_NAMES = [ - 'info', 'marketing', 'sales', 'support', # section 3 - 'abuse', 'noc', 'security', # section 4 - 'postmaster', 'hostmaster', 'usenet', 'news', 'webmaster', 'www', 'uucp', 'ftp', # section 5 -] diff --git a/server/venv/Lib/site-packages/email_validator/syntax.py b/server/venv/Lib/site-packages/email_validator/syntax.py deleted file mode 100644 index fef785b..0000000 --- a/server/venv/Lib/site-packages/email_validator/syntax.py +++ /dev/null @@ -1,557 +0,0 @@ -from .exceptions_types import EmailSyntaxError -from .rfc_constants import EMAIL_MAX_LENGTH, LOCAL_PART_MAX_LENGTH, DOMAIN_MAX_LENGTH, \ - DOT_ATOM_TEXT, DOT_ATOM_TEXT_INTL, ATEXT_RE, ATEXT_INTL_RE, ATEXT_HOSTNAME_INTL, QTEXT_INTL, \ - DNS_LABEL_LENGTH_LIMIT, DOT_ATOM_TEXT_HOSTNAME, DOMAIN_NAME_REGEX, DOMAIN_LITERAL_CHARS, \ - QUOTED_LOCAL_PART_ADDR - -import re -import unicodedata -import idna # implements IDNA 2008; Python's codec is only IDNA 2003 -import ipaddress -from typing import Optional - - -def split_email(email): - # Return the local part and domain part of the address and - # whether the local part was quoted as a three-tuple. - - # Typical email addresses have a single @-sign, but the - # awkward "quoted string" local part form (RFC 5321 4.1.2) - # allows @-signs (and escaped quotes) to appear in the local - # part if the local part is quoted. If the address is quoted, - # split it at a non-escaped @-sign and unescape the escaping. - if m := QUOTED_LOCAL_PART_ADDR.match(email): - local_part, domain_part = m.groups() - - # Since backslash-escaping is no longer needed because - # the quotes are removed, remove backslash-escaping - # to return in the normalized form. - import re - local_part = re.sub(r"\\(.)", "\\1", local_part) - - return local_part, domain_part, True - - else: - # Split at the one and only at-sign. - parts = email.split('@') - if len(parts) != 2: - raise EmailSyntaxError("The email address is not valid. It must have exactly one @-sign.") - local_part, domain_part = parts - return local_part, domain_part, False - - -def get_length_reason(addr, utf8=False, limit=EMAIL_MAX_LENGTH): - """Helper function to return an error message related to invalid length.""" - diff = len(addr) - limit - prefix = "at least " if utf8 else "" - suffix = "s" if diff > 1 else "" - return f"({prefix}{diff} character{suffix} too many)" - - -def safe_character_display(c): - # Return safely displayable characters in quotes. - if c == '\\': - return f"\"{c}\"" # can't use repr because it escapes it - if unicodedata.category(c)[0] in ("L", "N", "P", "S"): - return repr(c) - - # Construct a hex string in case the unicode name doesn't exist. - if ord(c) < 0xFFFF: - h = f"U+{ord(c):04x}".upper() - else: - h = f"U+{ord(c):08x}".upper() - - # Return the character name or, if it has no name, the hex string. - return unicodedata.name(c, h) - - -def validate_email_local_part(local: str, allow_smtputf8: bool = True, allow_empty_local: bool = False, - quoted_local_part: bool = False): - """Validates the syntax of the local part of an email address.""" - - if len(local) == 0: - if not allow_empty_local: - raise EmailSyntaxError("There must be something before the @-sign.") - else: - # The caller allows an empty local part. Useful for validating certain - # Postfix aliases. 
- return { - "local_part": local, - "ascii_local_part": local, - "smtputf8": False, - } - - # Check the length of the local part by counting characters. - # (RFC 5321 4.5.3.1.1) - # We're checking the number of characters here. If the local part - # is ASCII-only, then that's the same as bytes (octets). If it's - # internationalized, then the UTF-8 encoding may be longer, but - # that may not be relevant. We will check the total address length - # instead. - if len(local) > LOCAL_PART_MAX_LENGTH: - reason = get_length_reason(local, limit=LOCAL_PART_MAX_LENGTH) - raise EmailSyntaxError(f"The email address is too long before the @-sign {reason}.") - - # Check the local part against the non-internationalized regular expression. - # Most email addresses match this regex so it's probably fastest to check this first. - # (RFC 5322 3.2.3) - # All local parts matching the dot-atom rule are also valid as a quoted string - # so if it was originally quoted (quoted_local_part is True) and this regex matches, - # it's ok. - # (RFC 5321 4.1.2 / RFC 5322 3.2.4). - if DOT_ATOM_TEXT.match(local): - # It's valid. And since it's just the permitted ASCII characters, - # it's normalized and safe. If the local part was originally quoted, - # the quoting was unnecessary and it'll be returned as normalized to - # non-quoted form. - - # Return the local part and flag that SMTPUTF8 is not needed. - return { - "local_part": local, - "ascii_local_part": local, - "smtputf8": False, - } - - # The local part failed the basic dot-atom check. Try the extended character set - # for internationalized addresses. It's the same pattern but with additional - # characters permitted. - # RFC 6531 section 3.3. - valid: Optional[str] = None - requires_smtputf8 = False - if DOT_ATOM_TEXT_INTL.match(local): - # But international characters in the local part may not be permitted. - if not allow_smtputf8: - # Check for invalid characters against the non-internationalized - # permitted character set. - # (RFC 5322 3.2.3) - bad_chars = set( - safe_character_display(c) - for c in local - if not ATEXT_RE.match(c) - ) - if bad_chars: - raise EmailSyntaxError("Internationalized characters before the @-sign are not supported: " + ", ".join(sorted(bad_chars)) + ".") - - # Although the check above should always find something, fall back to this just in case. - raise EmailSyntaxError("Internationalized characters before the @-sign are not supported.") - - # It's valid. - valid = "dot-atom" - requires_smtputf8 = True - - # There are no syntactic restrictions on quoted local parts, so if - # it was originally quoted, it is probably valid. More characters - # are allowed, like @-signs, spaces, and quotes, and there are no - # restrictions on the placement of dots, as in dot-atom local parts. - elif quoted_local_part: - # Check for invalid characters in a quoted string local part. - # (RFC 5321 4.1.2. RFC 5322 lists additional permitted *obsolete* - # characters which are *not* allowed here. RFC 6531 section 3.3 - # extends the range to UTF8 strings.) - bad_chars = set( - safe_character_display(c) - for c in local - if not QTEXT_INTL.match(c) - ) - if bad_chars: - raise EmailSyntaxError("The email address contains invalid characters in quotes before the @-sign: " + ", ".join(sorted(bad_chars)) + ".") - - # See if any characters are outside of the ASCII range. 
- bad_chars = set( - safe_character_display(c) - for c in local - if not (32 <= ord(c) <= 126) - ) - if bad_chars: - requires_smtputf8 = True - - # International characters in the local part may not be permitted. - if not allow_smtputf8: - raise EmailSyntaxError("Internationalized characters before the @-sign are not supported: " + ", ".join(sorted(bad_chars)) + ".") - - # It's valid. - valid = "quoted" - - # If the local part matches the internationalized dot-atom form or was quoted, - # perform normalization and additional checks for Unicode strings. - if valid: - # RFC 6532 section 3.1 says that Unicode NFC normalization should be applied, - # so we'll return the normalized local part in the return value. - local = unicodedata.normalize("NFC", local) - - # Check that the local part is a valid, safe, and sensible Unicode string. - # Some of this may be redundant with the range U+0080 to U+10FFFF that is checked - # by DOT_ATOM_TEXT_INTL and QTEXT_INTL. Other characters may be permitted by the - # email specs, but they may not be valid, safe, or sensible Unicode strings. - # See the function for rationale. - check_unsafe_chars(local, allow_space=(valid == "quoted")) - - # Try encoding to UTF-8. Failure is possible with some characters like - # surrogate code points, but those are checked above. Still, we don't - # want to have an unhandled exception later. - try: - local.encode("utf8") - except ValueError: - raise EmailSyntaxError("The email address contains an invalid character.") - - # If this address passes only by the quoted string form, re-quote it - # and backslash-escape quotes and backslashes (removing any unnecessary - # escapes). Per RFC 5321 4.1.2, "all quoted forms MUST be treated as equivalent, - # and the sending system SHOULD transmit the form that uses the minimum quoting possible." - if valid == "quoted": - local = '"' + re.sub(r'(["\\])', r'\\\1', local) + '"' - - return { - "local_part": local, - "ascii_local_part": local if not requires_smtputf8 else None, - "smtputf8": requires_smtputf8, - } - - # It's not a valid local part. Let's find out why. - # (Since quoted local parts are all valid or handled above, these checks - # don't apply in those cases.) - - # Check for invalid characters. - # (RFC 5322 3.2.3, plus RFC 6531 3.3) - bad_chars = set( - safe_character_display(c) - for c in local - if not ATEXT_INTL_RE.match(c) - ) - if bad_chars: - raise EmailSyntaxError("The email address contains invalid characters before the @-sign: " + ", ".join(sorted(bad_chars)) + ".") - - # Check for dot errors imposed by the dot-atom rule. - # (RFC 5322 3.2.3) - check_dot_atom(local, 'An email address cannot start with a {}.', 'An email address cannot have a {} immediately before the @-sign.', is_hostname=False) - - # All of the reasons should already have been checked, but just in case - # we have a fallback message. - raise EmailSyntaxError("The email address contains invalid characters before the @-sign.") - - -def check_unsafe_chars(s, allow_space=False): - # Check for unsafe characters or characters that would make the string - # invalid or non-sensible Unicode. - bad_chars = set() - for i, c in enumerate(s): - category = unicodedata.category(c) - if category[0] in ("L", "N", "P", "S"): - # Letters, numbers, punctuation, and symbols are permitted. - pass - elif category[0] == "M": - # Combining character in first position would combine with something - # outside of the email address if concatenated, so they are not safe. 
- # We also check if this occurs after the @-sign, which would not be - # sensible. - if i == 0: - bad_chars.add(c) - elif category == "Zs": - # Spaces outside of the ASCII range are not specifically disallowed in - # internationalized addresses as far as I can tell, but they violate - # the spirit of the non-internationalized specification that email - # addresses do not contain ASCII spaces when not quoted. Excluding - # ASCII spaces when not quoted is handled directly by the atom regex. - # - # In quoted-string local parts, spaces are explicitly permitted, and - # the ASCII space has category Zs, so we must allow it here, and we'll - # allow all Unicode spaces to be consistent. - if not allow_space: - bad_chars.add(c) - elif category[0] == "Z": - # The two line and paragraph separator characters (in categories Zl and Zp) - # are not specifically disallowed in internationalized addresses - # as far as I can tell, but they violate the spirit of the non-internationalized - # specification that email addresses do not contain line breaks when not quoted. - bad_chars.add(c) - elif category[0] in ("C", "Z"): - # Control, format, surrogate, private use, and unassigned code points (C) - # are all unsafe in various ways. Control and format characters can affect - # text rendering if the email address is concatenated with other text. - # Bidirectional format characters are unsafe, even if used properly, because - # they cause an email address to render as a different email address. - # Private use characters do not make sense for publicly deliverable - # email addresses. - bad_chars.add(c) - else: - # All categories should be handled above, but in case there is something new - # to the Unicode specification in the future, reject all other categories. - bad_chars.add(c) - if bad_chars: - raise EmailSyntaxError("The email address contains unsafe characters: " - + ", ".join(safe_character_display(c) for c in sorted(bad_chars)) + ".") - - -def check_dot_atom(label, start_descr, end_descr, is_hostname): - # RFC 5322 3.2.3 - if label.endswith("."): - raise EmailSyntaxError(end_descr.format("period")) - if label.startswith("."): - raise EmailSyntaxError(start_descr.format("period")) - if ".." in label: - raise EmailSyntaxError("An email address cannot have two periods in a row.") - - if is_hostname: - # RFC 952 - if label.endswith("-"): - raise EmailSyntaxError(end_descr.format("hyphen")) - if label.startswith("-"): - raise EmailSyntaxError(start_descr.format("hyphen")) - if ".-" in label or "-." in label: - raise EmailSyntaxError("An email address cannot have a period and a hyphen next to each other.") - - -def validate_email_domain_name(domain, test_environment=False, globally_deliverable=True): - """Validates the syntax of the domain part of an email address.""" - - # Check for invalid characters before normalization. - # (RFC 952 plus RFC 6531 section 3.3 for internationalized addresses) - bad_chars = set( - safe_character_display(c) - for c in domain - if not ATEXT_HOSTNAME_INTL.match(c) - ) - if bad_chars: - raise EmailSyntaxError("The part after the @-sign contains invalid characters: " + ", ".join(sorted(bad_chars)) + ".") - - # Check for unsafe characters. - # Some of this may be redundant with the range U+0080 to U+10FFFF that is checked - # by DOT_ATOM_TEXT_INTL. Other characters may be permitted by the email specs, but - # they may not be valid, safe, or sensible Unicode strings. 
- check_unsafe_chars(domain) - - # Perform UTS-46 normalization, which includes casefolding, NFC normalization, - # and converting all label separators (the period/full stop, fullwidth full stop, - # ideographic full stop, and halfwidth ideographic full stop) to basic periods. - # It will also raise an exception if there is an invalid character in the input, - # such as "⒈" which is invalid because it would expand to include a period. - try: - domain = idna.uts46_remap(domain, std3_rules=False, transitional=False) - except idna.IDNAError as e: - raise EmailSyntaxError(f"The part after the @-sign contains invalid characters ({e}).") - - # The domain part is made up period-separated "labels." Each label must - # have at least one character and cannot start or end with dashes, which - # means there are some surprising restrictions on periods and dashes. - # Check that before we do IDNA encoding because the IDNA library gives - # unfriendly errors for these cases, but after UTS-46 normalization because - # it can insert periods and hyphens (from fullwidth characters). - # (RFC 952, RFC 1123 2.1, RFC 5322 3.2.3) - check_dot_atom(domain, 'An email address cannot have a {} immediately after the @-sign.', 'An email address cannot end with a {}.', is_hostname=True) - - # Check for RFC 5890's invalid R-LDH labels, which are labels that start - # with two characters other than "xn" and two dashes. - for label in domain.split("."): - if re.match(r"(?!xn)..--", label, re.I): - raise EmailSyntaxError("An email address cannot have two letters followed by two dashes immediately after the @-sign or after a period, except Punycode.") - - if DOT_ATOM_TEXT_HOSTNAME.match(domain): - # This is a valid non-internationalized domain. - ascii_domain = domain - else: - # If international characters are present in the domain name, convert - # the domain to IDNA ASCII. If internationalized characters are present, - # the MTA must either support SMTPUTF8 or the mail client must convert the - # domain name to IDNA before submission. - # - # Unfortunately this step incorrectly 'fixes' domain names with leading - # periods by removing them, so we have to check for this above. It also gives - # a funky error message ("No input") when there are two periods in a - # row, also checked separately above. - # - # For ASCII-only domains, the transformation does nothing and is safe to - # apply. However, to ensure we don't rely on the idna library for basic - # syntax checks, we don't use it if it's not needed. - try: - ascii_domain = idna.encode(domain, uts46=False).decode("ascii") - except idna.IDNAError as e: - if "Domain too long" in str(e): - # We can't really be more specific because UTS-46 normalization means - # the length check is applied to a string that is different from the - # one the user supplied. Also I'm not sure if the length check applies - # to the internationalized form, the IDNA ASCII form, or even both! - raise EmailSyntaxError("The email address is too long after the @-sign.") - raise EmailSyntaxError(f"The part after the @-sign contains invalid characters ({e}).") - - # Check the syntax of the string returned by idna.encode. - # It should never fail. - if not DOT_ATOM_TEXT_HOSTNAME.match(ascii_domain): - raise EmailSyntaxError("The email address contains invalid characters after the @-sign after IDNA encoding.") - - # Check the length of the domain name in bytes. 
- # (RFC 1035 2.3.4 and RFC 5321 4.5.3.1.2) - # We're checking the number of bytes ("octets") here, which can be much - # higher than the number of characters in internationalized domains, - # on the assumption that the domain may be transmitted without SMTPUTF8 - # as IDNA ASCII. (This is also checked by idna.encode, so this exception - # is never reached for internationalized domains.) - if len(ascii_domain) > DOMAIN_MAX_LENGTH: - reason = get_length_reason(ascii_domain, limit=DOMAIN_MAX_LENGTH) - raise EmailSyntaxError(f"The email address is too long after the @-sign {reason}.") - - # Also check the label length limit. - # (RFC 1035 2.3.1) - for label in ascii_domain.split("."): - if len(label) > DNS_LABEL_LENGTH_LIMIT: - reason = get_length_reason(label, limit=DNS_LABEL_LENGTH_LIMIT) - raise EmailSyntaxError(f"After the @-sign, periods cannot be separated by so many characters {reason}.") - - if globally_deliverable: - # All publicly deliverable addresses have domain names with at least - # one period, at least for gTLDs created since 2013 (per the ICANN Board - # New gTLD Program Committee, https://www.icann.org/en/announcements/details/new-gtld-dotless-domain-names-prohibited-30-8-2013-en). - # We'll consider the lack of a period a syntax error - # since that will match people's sense of what an email address looks - # like. We'll skip this in test environments to allow '@test' email - # addresses. - if "." not in ascii_domain and not (ascii_domain == "test" and test_environment): - raise EmailSyntaxError("The part after the @-sign is not valid. It should have a period.") - - # We also know that all TLDs currently end with a letter. - if not DOMAIN_NAME_REGEX.search(ascii_domain): - raise EmailSyntaxError("The part after the @-sign is not valid. It is not within a valid top-level domain.") - - # Check special-use and reserved domain names. - # Some might fail DNS-based deliverability checks, but that - # can be turned off, so we should fail them all sooner. - # See the references in __init__.py. - from . import SPECIAL_USE_DOMAIN_NAMES - for d in SPECIAL_USE_DOMAIN_NAMES: - # See the note near the definition of SPECIAL_USE_DOMAIN_NAMES. - if d == "test" and test_environment: - continue - - if ascii_domain == d or ascii_domain.endswith("." + d): - raise EmailSyntaxError("The part after the @-sign is a special-use or reserved name that cannot be used with email.") - - # We may have been given an IDNA ASCII domain to begin with. Check - # that the domain actually conforms to IDNA. It could look like IDNA - # but not be actual IDNA. For ASCII-only domains, the conversion out - # of IDNA just gives the same thing back. - # - # This gives us the canonical internationalized form of the domain. - try: - domain_i18n = idna.decode(ascii_domain.encode('ascii')) - except idna.IDNAError as e: - raise EmailSyntaxError(f"The part after the @-sign is not valid IDNA ({e}).") - - # Check for invalid characters after normalization. These - # should never arise. See the similar checks above. 
- bad_chars = set( - safe_character_display(c) - for c in domain - if not ATEXT_HOSTNAME_INTL.match(c) - ) - if bad_chars: - raise EmailSyntaxError("The part after the @-sign contains invalid characters: " + ", ".join(sorted(bad_chars)) + ".") - check_unsafe_chars(domain) - - # Return the IDNA ASCII-encoded form of the domain, which is how it - # would be transmitted on the wire (except when used with SMTPUTF8 - # possibly), as well as the canonical Unicode form of the domain, - # which is better for display purposes. This should also take care - # of RFC 6532 section 3.1's suggestion to apply Unicode NFC - # normalization to addresses. - return { - "ascii_domain": ascii_domain, - "domain": domain_i18n, - } - - -def validate_email_length(addrinfo): - # If the email address has an ASCII representation, then we assume it may be - # transmitted in ASCII (we can't assume SMTPUTF8 will be used on all hops to - # the destination) and the length limit applies to ASCII characters (which is - # the same as octets). The number of characters in the internationalized form - # may be many fewer (because IDNA ASCII is verbose) and could be less than 254 - # Unicode characters, and of course the number of octets over the limit may - # not be the number of characters over the limit, so if the email address is - # internationalized, we can't give any simple information about why the address - # is too long. - if addrinfo.ascii_email and len(addrinfo.ascii_email) > EMAIL_MAX_LENGTH: - if addrinfo.ascii_email == addrinfo.normalized: - reason = get_length_reason(addrinfo.ascii_email) - elif len(addrinfo.normalized) > EMAIL_MAX_LENGTH: - # If there are more than 254 characters, then the ASCII - # form is definitely going to be too long. - reason = get_length_reason(addrinfo.normalized, utf8=True) - else: - reason = "(when converted to IDNA ASCII)" - raise EmailSyntaxError(f"The email address is too long {reason}.") - - # In addition, check that the UTF-8 encoding (i.e. not IDNA ASCII and not - # Unicode characters) is at most 254 octets. If the address is transmitted using - # SMTPUTF8, then the length limit probably applies to the UTF-8 encoded octets. - # If the email address has an ASCII form that differs from its internationalized - # form, I don't think the internationalized form can be longer, and so the ASCII - # form length check would be sufficient. If there is no ASCII form, then we have - # to check the UTF-8 encoding. The UTF-8 encoding could be up to about four times - # longer than the number of characters. - # - # See the length checks on the local part and the domain. - if len(addrinfo.normalized.encode("utf8")) > EMAIL_MAX_LENGTH: - if len(addrinfo.normalized) > EMAIL_MAX_LENGTH: - # If there are more than 254 characters, then the UTF-8 - # encoding is definitely going to be too long. - reason = get_length_reason(addrinfo.normalized, utf8=True) - else: - reason = "(when encoded in bytes)" - raise EmailSyntaxError(f"The email address is too long {reason}.") - - -def validate_email_domain_literal(domain_literal): - # This is obscure domain-literal syntax. Parse it and return - # a compressed/normalized address. - # RFC 5321 4.1.3 and RFC 5322 3.4.1. - - # Try to parse the domain literal as an IPv4 address. - # There is no tag for IPv4 addresses, so we can never - # be sure if the user intends an IPv4 address. 
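# A usage sketch for the domain-literal parser below (not part of the deleted
# file; the address is a placeholder, and bracketed literals must be enabled
# explicitly with the keyword argument shown):
from email_validator import validate_email

v = validate_email("postmaster@[IPv6:0:0:0:0:0:0:0:1]", allow_domain_literal=True)
print(v.domain)  # "[IPv6:::1]" -- the compressed, normalized form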
- if re.match(r"^[0-9\.]+$", domain_literal): - try: - addr = ipaddress.IPv4Address(domain_literal) - except ValueError as e: - raise EmailSyntaxError(f"The address in brackets after the @-sign is not valid: It is not an IPv4 address ({e}) or is missing an address literal tag.") - - # Return the IPv4Address object and the domain back unchanged. - return { - "domain_address": addr, - "domain": f"[{addr}]", - } - - # If it begins with "IPv6:" it's an IPv6 address. - if domain_literal.startswith("IPv6:"): - try: - addr = ipaddress.IPv6Address(domain_literal[5:]) - except ValueError as e: - raise EmailSyntaxError(f"The IPv6 address in brackets after the @-sign is not valid ({e}).") - - # Return the IPv6Address object and construct a normalized - # domain literal. - return { - "domain_address": addr, - "domain": f"[IPv6:{addr.compressed}]", - } - - # Nothing else is valid. - - if ":" not in domain_literal: - raise EmailSyntaxError("The part after the @-sign in brackets is not an IPv4 address and has no address literal tag.") - - # The tag (the part before the colon) has character restrictions, - # but since it must come from a registry of tags (in which only "IPv6" is defined), - # there's no need to check the syntax of the tag. See RFC 5321 4.1.2. - - # Check for permitted ASCII characters. This actually doesn't matter - # since there will be an exception after anyway. - bad_chars = set( - safe_character_display(c) - for c in domain_literal - if not DOMAIN_LITERAL_CHARS.match(c) - ) - if bad_chars: - raise EmailSyntaxError("The part after the @-sign contains invalid characters in brackets: " + ", ".join(sorted(bad_chars)) + ".") - - # There are no other domain literal tags. - # https://www.iana.org/assignments/address-literal-tags/address-literal-tags.xhtml - raise EmailSyntaxError("The part after the @-sign contains an invalid address literal tag in brackets.") diff --git a/server/venv/Lib/site-packages/email_validator/validate_email.py b/server/venv/Lib/site-packages/email_validator/validate_email.py deleted file mode 100644 index d2791fe..0000000 --- a/server/venv/Lib/site-packages/email_validator/validate_email.py +++ /dev/null @@ -1,146 +0,0 @@ -from typing import Optional, Union - -from .exceptions_types import EmailSyntaxError, ValidatedEmail -from .syntax import split_email, validate_email_local_part, validate_email_domain_name, validate_email_domain_literal, validate_email_length -from .rfc_constants import CASE_INSENSITIVE_MAILBOX_NAMES - - -def validate_email( - email: Union[str, bytes], - /, # prior arguments are positional-only - *, # subsequent arguments are keyword-only - allow_smtputf8: Optional[bool] = None, - allow_empty_local: bool = False, - allow_quoted_local: Optional[bool] = None, - allow_domain_literal: Optional[bool] = None, - check_deliverability: Optional[bool] = None, - test_environment: Optional[bool] = None, - globally_deliverable: Optional[bool] = None, - timeout: Optional[int] = None, - dns_resolver: Optional[object] = None -) -> ValidatedEmail: - """ - Given an email address, and some options, returns a ValidatedEmail instance - with information about the address if it is valid or, if the address is not - valid, raises an EmailNotValidError. This is the main function of the module. - """ - - # Fill in default values of arguments. - from . 
import ALLOW_SMTPUTF8, ALLOW_QUOTED_LOCAL, ALLOW_DOMAIN_LITERAL, \ - GLOBALLY_DELIVERABLE, CHECK_DELIVERABILITY, TEST_ENVIRONMENT, DEFAULT_TIMEOUT - if allow_smtputf8 is None: - allow_smtputf8 = ALLOW_SMTPUTF8 - if allow_quoted_local is None: - allow_quoted_local = ALLOW_QUOTED_LOCAL - if allow_domain_literal is None: - allow_domain_literal = ALLOW_DOMAIN_LITERAL - if check_deliverability is None: - check_deliverability = CHECK_DELIVERABILITY - if test_environment is None: - test_environment = TEST_ENVIRONMENT - if globally_deliverable is None: - globally_deliverable = GLOBALLY_DELIVERABLE - if timeout is None and dns_resolver is None: - timeout = DEFAULT_TIMEOUT - - # Allow email to be a str or bytes instance. If bytes, - # it must be ASCII because that's how the bytes work - # on the wire with SMTP. - if not isinstance(email, str): - try: - email = email.decode("ascii") - except ValueError: - raise EmailSyntaxError("The email address is not valid ASCII.") - - # Split the address into the local part (before the @-sign) - # and the domain part (after the @-sign). Normally, there - # is only one @-sign. But the awkward "quoted string" local - # part form (RFC 5321 4.1.2) allows @-signs in the local - # part if the local part is quoted. - local_part, domain_part, is_quoted_local_part \ - = split_email(email) - - # Collect return values in this instance. - ret = ValidatedEmail() - ret.original = email - - # Validate the email address's local part syntax and get a normalized form. - # If the original address was quoted and the decoded local part is a valid - # unquoted local part, then we'll get back a normalized (unescaped) local - # part. - local_part_info = validate_email_local_part(local_part, - allow_smtputf8=allow_smtputf8, - allow_empty_local=allow_empty_local, - quoted_local_part=is_quoted_local_part) - ret.local_part = local_part_info["local_part"] - ret.ascii_local_part = local_part_info["ascii_local_part"] - ret.smtputf8 = local_part_info["smtputf8"] - - # If a quoted local part isn't allowed but is present, now raise an exception. - # This is done after any exceptions raised by validate_email_local_part so - # that mandatory checks have highest precedence. - if is_quoted_local_part and not allow_quoted_local: - raise EmailSyntaxError("Quoting the part before the @-sign is not allowed here.") - - # Some local parts are required to be case-insensitive, so we should normalize - # to lowercase. - # RFC 2142 - if ret.ascii_local_part is not None \ - and ret.ascii_local_part.lower() in CASE_INSENSITIVE_MAILBOX_NAMES \ - and ret.local_part is not None: - ret.ascii_local_part = ret.ascii_local_part.lower() - ret.local_part = ret.local_part.lower() - - # Validate the email address's domain part syntax and get a normalized form. - is_domain_literal = False - if len(domain_part) == 0: - raise EmailSyntaxError("There must be something after the @-sign.") - - elif domain_part.startswith("[") and domain_part.endswith("]"): - # Parse the address in the domain literal and get back a normalized domain. - domain_part_info = validate_email_domain_literal(domain_part[1:-1]) - if not allow_domain_literal: - raise EmailSyntaxError("A bracketed IP address after the @-sign is not allowed here.") - ret.domain = domain_part_info["domain"] - ret.ascii_domain = domain_part_info["domain"] # Domain literals are always ASCII. - ret.domain_address = domain_part_info["domain_address"] - is_domain_literal = True # Prevent deliverability checks. 
- - else: - # Check the syntax of the domain and get back a normalized - # internationalized and ASCII form. - domain_part_info = validate_email_domain_name(domain_part, test_environment=test_environment, globally_deliverable=globally_deliverable) - ret.domain = domain_part_info["domain"] - ret.ascii_domain = domain_part_info["ascii_domain"] - - # Construct the complete normalized form. - ret.normalized = ret.local_part + "@" + ret.domain - - # If the email address has an ASCII form, add it. - if not ret.smtputf8: - if not ret.ascii_domain: - raise Exception("Missing ASCII domain.") - ret.ascii_email = (ret.ascii_local_part or "") + "@" + ret.ascii_domain - else: - ret.ascii_email = None - - # Check the length of the address. - validate_email_length(ret) - - if check_deliverability and not test_environment: - # Validate the email address's deliverability using DNS - # and update the returned ValidatedEmail object with metadata. - - if is_domain_literal: - # There is nothing to check --- skip deliverability checks. - return ret - - # Lazy load `deliverability` as it is slow to import (due to dns.resolver) - from .deliverability import validate_email_deliverability - deliverability_info = validate_email_deliverability( - ret.ascii_domain, ret.domain, timeout, dns_resolver - ) - for key, value in deliverability_info.items(): - setattr(ret, key, value) - - return ret diff --git a/server/venv/Lib/site-packages/email_validator/version.py b/server/venv/Lib/site-packages/email_validator/version.py deleted file mode 100644 index acc96f7..0000000 --- a/server/venv/Lib/site-packages/email_validator/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "2.1.0.post1" diff --git a/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/INSTALLER b/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/LICENSE b/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/LICENSE deleted file mode 100644 index 50d4fa5..0000000 --- a/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/LICENSE +++ /dev/null @@ -1,73 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2022 Alex Grönholm - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - -This project contains code copied from the Python standard library. -The following is the required license notice for those parts. 
- -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation; -All Rights Reserved" are retained in Python alone or in any derivative version -prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. 
diff --git a/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/METADATA b/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/METADATA deleted file mode 100644 index f2dd55b..0000000 --- a/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/METADATA +++ /dev/null @@ -1,141 +0,0 @@ -Metadata-Version: 2.1 -Name: exceptiongroup -Version: 1.1.3 -Summary: Backport of PEP 654 (exception groups) -Author-email: Alex Grönholm -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Typing :: Typed -Requires-Dist: pytest >= 6 ; extra == "test" -Project-URL: Changelog, https://github.com/agronholm/exceptiongroup/blob/main/CHANGES.rst -Project-URL: Issue Tracker, https://github.com/agronholm/exceptiongroup/issues -Project-URL: Source code, https://github.com/agronholm/exceptiongroup -Provides-Extra: test - -.. image:: https://github.com/agronholm/exceptiongroup/actions/workflows/test.yml/badge.svg - :target: https://github.com/agronholm/exceptiongroup/actions/workflows/test.yml - :alt: Build Status -.. image:: https://coveralls.io/repos/github/agronholm/exceptiongroup/badge.svg?branch=main - :target: https://coveralls.io/github/agronholm/exceptiongroup?branch=main - :alt: Code Coverage - -This is a backport of the ``BaseExceptionGroup`` and ``ExceptionGroup`` classes from -Python 3.11. - -It contains the following: - -* The ``exceptiongroup.BaseExceptionGroup`` and ``exceptiongroup.ExceptionGroup`` - classes -* A utility function (``exceptiongroup.catch()``) for catching exceptions possibly - nested in an exception group -* Patches to the ``TracebackException`` class that properly formats exception groups - (installed on import) -* An exception hook that handles formatting of exception groups through - ``TracebackException`` (installed on import) -* Special versions of some of the functions from the ``traceback`` module, modified to - correctly handle exception groups even when monkey patching is disabled, or blocked by - another custom exception hook: - - * ``traceback.format_exception()`` - * ``traceback.format_exception_only()`` - * ``traceback.print_exception()`` - * ``traceback.print_exc()`` - -If this package is imported on Python 3.11 or later, the built-in implementations of the -exception group classes are used instead, ``TracebackException`` is not monkey patched -and the exception hook won't be installed. - -See the `standard library documentation`_ for more information on exception groups. - -.. _standard library documentation: https://docs.python.org/3/library/exceptions.html - -Catching exceptions -=================== - -Due to the lack of the ``except*`` syntax introduced by `PEP 654`_ in earlier Python -versions, you need to use ``exceptiongroup.catch()`` to catch exceptions that are -potentially nested inside an exception group. This function returns a context manager -that calls the given handler for any exceptions matching the sole argument. - -The argument to ``catch()`` must be a dict (or any ``Mapping``) where each key is either -an exception class or an iterable of exception classes. Each value must be a callable -that takes a single positional argument. 
The handler will be called at most once, with -an exception group as an argument which will contain all the exceptions that are any -of the given types, or their subclasses. The exception group may contain nested groups -containing more matching exceptions. - -Thus, the following Python 3.11+ code: - -.. code-block:: python3 - - try: - ... - except* (ValueError, KeyError) as excgroup: - for exc in excgroup.exceptions: - print('Caught exception:', type(exc)) - except* RuntimeError: - print('Caught runtime error') - -would be written with this backport like this: - -.. code-block:: python3 - - from exceptiongroup import ExceptionGroup, catch - - def value_key_err_handler(excgroup: ExceptionGroup) -> None: - for exc in excgroup.exceptions: - print('Caught exception:', type(exc)) - - def runtime_err_handler(exc: ExceptionGroup) -> None: - print('Caught runtime error') - - with catch({ - (ValueError, KeyError): value_key_err_handler, - RuntimeError: runtime_err_handler - }): - ... - -**NOTE**: Just like with ``except*``, you cannot handle ``BaseExceptionGroup`` or -``ExceptionGroup`` with ``catch()``. - -Notes on monkey patching -======================== - -To make exception groups render properly when an unhandled exception group is being -printed out, this package does two things when it is imported on any Python version -earlier than 3.11: - -#. The ``traceback.TracebackException`` class is monkey patched to store extra - information about exception groups (in ``__init__()``) and properly format them (in - ``format()``) -#. An exception hook is installed at ``sys.excepthook``, provided that no other hook is - already present. This hook causes the exception to be formatted using - ``traceback.TracebackException`` rather than the built-in rendering. - -If ``sys.excepthook`` is found to be set to something other than the default when -``exceptiongroup`` is imported, no monkeypatching is done at all. - -To prevent the exception hook and patches from being installed, set the environment -variable ``EXCEPTIONGROUP_NO_PATCH`` to ``1``. - -Formatting exception groups ---------------------------- - -Normally, the monkey patching applied by this library on import will cause exception -groups to be printed properly in tracebacks. But in cases when the monkey patching is -blocked by a third party exception hook, or monkey patching is explicitly disabled, -you can still manually format exceptions using the special versions of the ``traceback`` -functions, like ``format_exception()``, listed at the top of this page. They work just -like their counterparts in the ``traceback`` module, except that they use a separately -patched subclass of ``TracebackException`` to perform the rendering. - -Particularly in cases where a library installs its own exception hook, it is recommended -to use these special versions to do the actual formatting of exceptions/tracebacks. - -.. 
_PEP 654: https://www.python.org/dev/peps/pep-0654/ - diff --git a/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/RECORD b/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/RECORD deleted file mode 100644 index 13de349..0000000 --- a/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/RECORD +++ /dev/null @@ -1,17 +0,0 @@ -exceptiongroup-1.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -exceptiongroup-1.1.3.dist-info/LICENSE,sha256=blBw12UDHgrUA6HL-Qrm0ZoCKPgC4yC3rP9GCqcu1Hw,3704 -exceptiongroup-1.1.3.dist-info/METADATA,sha256=4hAoEFfdm7skPuGC6sZ8Vp_OyL2sOFf6-lCoFlAWkb8,6083 -exceptiongroup-1.1.3.dist-info/RECORD,, -exceptiongroup-1.1.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -exceptiongroup-1.1.3.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -exceptiongroup/__init__.py,sha256=Zr-MMWDXFMdrRBC8nS8UFqATRQL0Z-Lb-SANqs-uu0I,920 -exceptiongroup/__pycache__/__init__.cpython-311.pyc,, -exceptiongroup/__pycache__/_catch.cpython-311.pyc,, -exceptiongroup/__pycache__/_exceptions.cpython-311.pyc,, -exceptiongroup/__pycache__/_formatting.cpython-311.pyc,, -exceptiongroup/__pycache__/_version.cpython-311.pyc,, -exceptiongroup/_catch.py,sha256=g8808SCitG7uoMR5y01fU6-HTGMsxG-mRoAuuTmWLQw,4541 -exceptiongroup/_exceptions.py,sha256=x8oC2XOKyAYTWl0kjt_Jqx90J1G-PENK3nycdIgO4aM,11009 -exceptiongroup/_formatting.py,sha256=HvArXuIyt0I8Wh4S3akamBSekPslCdCFqTnR6byEPVQ,19475 -exceptiongroup/_version.py,sha256=vwU9nkVx7zfW2_Q0-iIUmzZsr1Ozo7oPKy0nUfjkQy4,160 -exceptiongroup/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/REQUESTED b/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/WHEEL b/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/server/venv/Lib/site-packages/exceptiongroup-1.1.3.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/server/venv/Lib/site-packages/exceptiongroup/__init__.py b/server/venv/Lib/site-packages/exceptiongroup/__init__.py deleted file mode 100644 index 0e7e02b..0000000 --- a/server/venv/Lib/site-packages/exceptiongroup/__init__.py +++ /dev/null @@ -1,40 +0,0 @@ -__all__ = [ - "BaseExceptionGroup", - "ExceptionGroup", - "catch", - "format_exception", - "format_exception_only", - "print_exception", - "print_exc", -] - -import os -import sys - -from ._catch import catch -from ._version import version as __version__ # noqa: F401 - -if sys.version_info < (3, 11): - from ._exceptions import BaseExceptionGroup, ExceptionGroup - from ._formatting import ( - format_exception, - format_exception_only, - print_exc, - print_exception, - ) - - if os.getenv("EXCEPTIONGROUP_NO_PATCH") != "1": - from . 
import _formatting # noqa: F401 - - BaseExceptionGroup.__module__ = __name__ - ExceptionGroup.__module__ = __name__ -else: - from traceback import ( - format_exception, - format_exception_only, - print_exc, - print_exception, - ) - - BaseExceptionGroup = BaseExceptionGroup - ExceptionGroup = ExceptionGroup diff --git a/server/venv/Lib/site-packages/exceptiongroup/_catch.py b/server/venv/Lib/site-packages/exceptiongroup/_catch.py deleted file mode 100644 index b76f51f..0000000 --- a/server/venv/Lib/site-packages/exceptiongroup/_catch.py +++ /dev/null @@ -1,135 +0,0 @@ -from __future__ import annotations - -import inspect -import sys -from collections.abc import Callable, Iterable, Mapping -from contextlib import AbstractContextManager -from types import TracebackType -from typing import TYPE_CHECKING, Any - -if sys.version_info < (3, 11): - from ._exceptions import BaseExceptionGroup - -if TYPE_CHECKING: - _Handler = Callable[[BaseException], Any] - - -class _Catcher: - def __init__(self, handler_map: Mapping[tuple[type[BaseException], ...], _Handler]): - self._handler_map = handler_map - - def __enter__(self) -> None: - pass - - def __exit__( - self, - etype: type[BaseException] | None, - exc: BaseException | None, - tb: TracebackType | None, - ) -> bool: - if exc is not None: - unhandled = self.handle_exception(exc) - if unhandled is exc: - return False - elif unhandled is None: - return True - else: - if isinstance(exc, BaseExceptionGroup): - try: - raise unhandled from exc.__cause__ - except BaseExceptionGroup: - # Change __context__ to __cause__ because Python 3.11 does this - # too - unhandled.__context__ = exc.__cause__ - raise - - raise unhandled from exc - - return False - - def handle_exception(self, exc: BaseException) -> BaseException | None: - excgroup: BaseExceptionGroup | None - if isinstance(exc, BaseExceptionGroup): - excgroup = exc - else: - excgroup = BaseExceptionGroup("", [exc]) - - new_exceptions: list[BaseException] = [] - for exc_types, handler in self._handler_map.items(): - matched, excgroup = excgroup.split(exc_types) - if matched: - try: - try: - raise matched - except BaseExceptionGroup: - result = handler(matched) - except BaseExceptionGroup as new_exc: - new_exceptions.extend(new_exc.exceptions) - except BaseException as new_exc: - new_exceptions.append(new_exc) - else: - if inspect.iscoroutine(result): - raise TypeError( - f"Error trying to handle {matched!r} with {handler!r}. " - "Exception handler must be a sync function." 
- ) from exc - - if not excgroup: - break - - if new_exceptions: - if len(new_exceptions) == 1: - return new_exceptions[0] - - return BaseExceptionGroup("", new_exceptions) - elif ( - excgroup and len(excgroup.exceptions) == 1 and excgroup.exceptions[0] is exc - ): - return exc - else: - return excgroup - - -def catch( - __handlers: Mapping[type[BaseException] | Iterable[type[BaseException]], _Handler] -) -> AbstractContextManager[None]: - if not isinstance(__handlers, Mapping): - raise TypeError("the argument must be a mapping") - - handler_map: dict[ - tuple[type[BaseException], ...], Callable[[BaseExceptionGroup], Any] - ] = {} - for type_or_iterable, handler in __handlers.items(): - # Normalize each key to a tuple of exception types - iterable: tuple[type[BaseException], ...] - if isinstance(type_or_iterable, type) and issubclass( - type_or_iterable, BaseException - ): - iterable = (type_or_iterable,) - elif isinstance(type_or_iterable, Iterable): - iterable = tuple(type_or_iterable) - else: - raise TypeError( - "each key must be either an exception class or an iterable thereof" - ) - - if not callable(handler): - raise TypeError("handlers must be callable") - - for exc_type in iterable: - if not isinstance(exc_type, type) or not issubclass( - exc_type, BaseException - ): - raise TypeError( - "each key must be either an exception class or an iterable " - "thereof" - ) - - if issubclass(exc_type, BaseExceptionGroup): - raise TypeError( - "catching ExceptionGroup with catch() is not allowed. " - "Use except instead." - ) - - handler_map[iterable] = handler - - return _Catcher(handler_map) diff --git a/server/venv/Lib/site-packages/exceptiongroup/_exceptions.py b/server/venv/Lib/site-packages/exceptiongroup/_exceptions.py deleted file mode 100644 index 339735e..0000000 --- a/server/venv/Lib/site-packages/exceptiongroup/_exceptions.py +++ /dev/null @@ -1,327 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable, Sequence -from functools import partial -from inspect import getmro, isclass -from typing import TYPE_CHECKING, Generic, Type, TypeVar, cast, overload - -if TYPE_CHECKING: - from typing import Self - -_BaseExceptionT_co = TypeVar("_BaseExceptionT_co", bound=BaseException, covariant=True) -_BaseExceptionT = TypeVar("_BaseExceptionT", bound=BaseException) -_ExceptionT_co = TypeVar("_ExceptionT_co", bound=Exception, covariant=True) -_ExceptionT = TypeVar("_ExceptionT", bound=Exception) - - -def check_direct_subclass( - exc: BaseException, parents: tuple[type[BaseException], ...] -) -> bool: - # Walk the MRO (minus object) looking for an exact match with a parent - for cls in getmro(exc.__class__)[:-1]: - if cls in parents: - return True - - return False - - -def get_condition_filter( - condition: type[_BaseExceptionT] - | tuple[type[_BaseExceptionT], ...] 
- | Callable[[_BaseExceptionT_co], bool] -) -> Callable[[_BaseExceptionT_co], bool]: - if isclass(condition) and issubclass( - cast(Type[BaseException], condition), BaseException - ): - return partial(check_direct_subclass, parents=(condition,)) - elif isinstance(condition, tuple): - if all(isclass(x) and issubclass(x, BaseException) for x in condition): - return partial(check_direct_subclass, parents=condition) - elif callable(condition): - return cast("Callable[[BaseException], bool]", condition) - - raise TypeError("expected a function, exception type or tuple of exception types") - - -class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): - """A combination of multiple unrelated exceptions.""" - - def __new__( - cls, __message: str, __exceptions: Sequence[_BaseExceptionT_co] - ) -> Self: - if not isinstance(__message, str): - raise TypeError(f"argument 1 must be str, not {type(__message)}") - if not isinstance(__exceptions, Sequence): - raise TypeError("second argument (exceptions) must be a sequence") - if not __exceptions: - raise ValueError( - "second argument (exceptions) must be a non-empty sequence" - ) - - for i, exc in enumerate(__exceptions): - if not isinstance(exc, BaseException): - raise ValueError( - f"Item {i} of second argument (exceptions) is not an exception" - ) - - if cls is BaseExceptionGroup: - if all(isinstance(exc, Exception) for exc in __exceptions): - cls = ExceptionGroup - - if issubclass(cls, Exception): - for exc in __exceptions: - if not isinstance(exc, Exception): - if cls is ExceptionGroup: - raise TypeError( - "Cannot nest BaseExceptions in an ExceptionGroup" - ) - else: - raise TypeError( - f"Cannot nest BaseExceptions in {cls.__name__!r}" - ) - - instance = super().__new__(cls, __message, __exceptions) - instance._message = __message - instance._exceptions = __exceptions - return instance - - def add_note(self, note: str) -> None: - if not isinstance(note, str): - raise TypeError( - f"Expected a string, got note={note!r} (type {type(note).__name__})" - ) - - if not hasattr(self, "__notes__"): - self.__notes__: list[str] = [] - - self.__notes__.append(note) - - @property - def message(self) -> str: - return self._message - - @property - def exceptions( - self, - ) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: - return tuple(self._exceptions) - - @overload - def subgroup( - self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] - ) -> ExceptionGroup[_ExceptionT] | None: - ... - - @overload - def subgroup( - self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] - ) -> BaseExceptionGroup[_BaseExceptionT] | None: - ... - - @overload - def subgroup( - self, __condition: Callable[[_BaseExceptionT_co | Self], bool] - ) -> BaseExceptionGroup[_BaseExceptionT_co] | None: - ... - - def subgroup( - self, - __condition: type[_BaseExceptionT] - | tuple[type[_BaseExceptionT], ...] 
- | Callable[[_BaseExceptionT_co | Self], bool], - ) -> BaseExceptionGroup[_BaseExceptionT] | None: - condition = get_condition_filter(__condition) - modified = False - if condition(self): - return self - - exceptions: list[BaseException] = [] - for exc in self.exceptions: - if isinstance(exc, BaseExceptionGroup): - subgroup = exc.subgroup(__condition) - if subgroup is not None: - exceptions.append(subgroup) - - if subgroup is not exc: - modified = True - elif condition(exc): - exceptions.append(exc) - else: - modified = True - - if not modified: - return self - elif exceptions: - group = self.derive(exceptions) - group.__cause__ = self.__cause__ - group.__context__ = self.__context__ - group.__traceback__ = self.__traceback__ - return group - else: - return None - - @overload - def split( - self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] - ) -> tuple[ - ExceptionGroup[_ExceptionT] | None, - BaseExceptionGroup[_BaseExceptionT_co] | None, - ]: - ... - - @overload - def split( - self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] - ) -> tuple[ - BaseExceptionGroup[_BaseExceptionT] | None, - BaseExceptionGroup[_BaseExceptionT_co] | None, - ]: - ... - - @overload - def split( - self, __condition: Callable[[_BaseExceptionT_co | Self], bool] - ) -> tuple[ - BaseExceptionGroup[_BaseExceptionT_co] | None, - BaseExceptionGroup[_BaseExceptionT_co] | None, - ]: - ... - - def split( - self, - __condition: type[_BaseExceptionT] - | tuple[type[_BaseExceptionT], ...] - | Callable[[_BaseExceptionT_co], bool], - ) -> ( - tuple[ - ExceptionGroup[_ExceptionT] | None, - BaseExceptionGroup[_BaseExceptionT_co] | None, - ] - | tuple[ - BaseExceptionGroup[_BaseExceptionT] | None, - BaseExceptionGroup[_BaseExceptionT_co] | None, - ] - | tuple[ - BaseExceptionGroup[_BaseExceptionT_co] | None, - BaseExceptionGroup[_BaseExceptionT_co] | None, - ] - ): - condition = get_condition_filter(__condition) - if condition(self): - return self, None - - matching_exceptions: list[BaseException] = [] - nonmatching_exceptions: list[BaseException] = [] - for exc in self.exceptions: - if isinstance(exc, BaseExceptionGroup): - matching, nonmatching = exc.split(condition) - if matching is not None: - matching_exceptions.append(matching) - - if nonmatching is not None: - nonmatching_exceptions.append(nonmatching) - elif condition(exc): - matching_exceptions.append(exc) - else: - nonmatching_exceptions.append(exc) - - matching_group: Self | None = None - if matching_exceptions: - matching_group = self.derive(matching_exceptions) - matching_group.__cause__ = self.__cause__ - matching_group.__context__ = self.__context__ - matching_group.__traceback__ = self.__traceback__ - - nonmatching_group: Self | None = None - if nonmatching_exceptions: - nonmatching_group = self.derive(nonmatching_exceptions) - nonmatching_group.__cause__ = self.__cause__ - nonmatching_group.__context__ = self.__context__ - nonmatching_group.__traceback__ = self.__traceback__ - - return matching_group, nonmatching_group - - @overload - def derive(self, __excs: Sequence[_ExceptionT]) -> ExceptionGroup[_ExceptionT]: - ... - - @overload - def derive( - self, __excs: Sequence[_BaseExceptionT] - ) -> BaseExceptionGroup[_BaseExceptionT]: - ... 
- - def derive( - self, __excs: Sequence[_BaseExceptionT] - ) -> BaseExceptionGroup[_BaseExceptionT]: - eg = BaseExceptionGroup(self.message, __excs) - if hasattr(self, "__notes__"): - # Create a new list so that add_note() only affects one exceptiongroup - eg.__notes__ = list(self.__notes__) - - return eg - - def __str__(self) -> str: - suffix = "" if len(self._exceptions) == 1 else "s" - return f"{self.message} ({len(self._exceptions)} sub-exception{suffix})" - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.message!r}, {self._exceptions!r})" - - -class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): - def __new__(cls, __message: str, __exceptions: Sequence[_ExceptionT_co]) -> Self: - return super().__new__(cls, __message, __exceptions) - - if TYPE_CHECKING: - - @property - def exceptions( - self, - ) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: - ... - - @overload # type: ignore[override] - def subgroup( - self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] - ) -> ExceptionGroup[_ExceptionT] | None: - ... - - @overload - def subgroup( - self, __condition: Callable[[_ExceptionT_co | Self], bool] - ) -> ExceptionGroup[_ExceptionT_co] | None: - ... - - def subgroup( - self, - __condition: type[_ExceptionT] - | tuple[type[_ExceptionT], ...] - | Callable[[_ExceptionT_co], bool], - ) -> ExceptionGroup[_ExceptionT] | None: - return super().subgroup(__condition) - - @overload - def split( - self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] - ) -> tuple[ - ExceptionGroup[_ExceptionT] | None, ExceptionGroup[_ExceptionT_co] | None - ]: - ... - - @overload - def split( - self, __condition: Callable[[_ExceptionT_co | Self], bool] - ) -> tuple[ - ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None - ]: - ... - - def split( - self: Self, - __condition: type[_ExceptionT] - | tuple[type[_ExceptionT], ...] 
- | Callable[[_ExceptionT_co], bool], - ) -> tuple[ - ExceptionGroup[_ExceptionT_co] | None, ExceptionGroup[_ExceptionT_co] | None - ]: - return super().split(__condition) diff --git a/server/venv/Lib/site-packages/exceptiongroup/_formatting.py b/server/venv/Lib/site-packages/exceptiongroup/_formatting.py deleted file mode 100644 index c0402ba..0000000 --- a/server/venv/Lib/site-packages/exceptiongroup/_formatting.py +++ /dev/null @@ -1,563 +0,0 @@ -# traceback_exception_init() adapted from trio -# -# _ExceptionPrintContext and traceback_exception_format() copied from the standard -# library -from __future__ import annotations - -import collections.abc -import sys -import textwrap -import traceback -from functools import singledispatch -from types import TracebackType -from typing import Any, List, Optional - -from ._exceptions import BaseExceptionGroup - -max_group_width = 15 -max_group_depth = 10 -_cause_message = ( - "\nThe above exception was the direct cause of the following exception:\n\n" -) - -_context_message = ( - "\nDuring handling of the above exception, another exception occurred:\n\n" -) - - -def _format_final_exc_line(etype, value): - valuestr = _safe_string(value, "exception") - if value is None or not valuestr: - line = f"{etype}\n" - else: - line = f"{etype}: {valuestr}\n" - - return line - - -def _safe_string(value, what, func=str): - try: - return func(value) - except BaseException: - return f"<{what} {func.__name__}() failed>" - - -class _ExceptionPrintContext: - def __init__(self): - self.seen = set() - self.exception_group_depth = 0 - self.need_close = False - - def indent(self): - return " " * (2 * self.exception_group_depth) - - def emit(self, text_gen, margin_char=None): - if margin_char is None: - margin_char = "|" - indent_str = self.indent() - if self.exception_group_depth: - indent_str += margin_char + " " - - if isinstance(text_gen, str): - yield textwrap.indent(text_gen, indent_str, lambda line: True) - else: - for text in text_gen: - yield textwrap.indent(text, indent_str, lambda line: True) - - -def exceptiongroup_excepthook( - etype: type[BaseException], value: BaseException, tb: TracebackType | None -) -> None: - sys.stderr.write("".join(traceback.format_exception(etype, value, tb))) - - -class PatchedTracebackException(traceback.TracebackException): - def __init__( - self, - exc_type: type[BaseException], - exc_value: BaseException, - exc_traceback: TracebackType | None, - *, - limit: int | None = None, - lookup_lines: bool = True, - capture_locals: bool = False, - compact: bool = False, - _seen: set[int] | None = None, - ) -> None: - kwargs: dict[str, Any] = {} - if sys.version_info >= (3, 10): - kwargs["compact"] = compact - - is_recursive_call = _seen is not None - if _seen is None: - _seen = set() - _seen.add(id(exc_value)) - - self.stack = traceback.StackSummary.extract( - traceback.walk_tb(exc_traceback), - limit=limit, - lookup_lines=lookup_lines, - capture_locals=capture_locals, - ) - self.exc_type = exc_type - # Capture now to permit freeing resources: only complication is in the - # unofficial API _format_final_exc_line - self._str = _safe_string(exc_value, "exception") - try: - self.__notes__ = getattr(exc_value, "__notes__", None) - except KeyError: - # Workaround for https://github.com/python/cpython/issues/98778 on Python - # <= 3.9, and some 3.10 and 3.11 patch versions. 
- HTTPError = getattr(sys.modules.get("urllib.error", None), "HTTPError", ()) - if sys.version_info[:2] <= (3, 11) and isinstance(exc_value, HTTPError): - self.__notes__ = None - else: - raise - - if exc_type and issubclass(exc_type, SyntaxError): - # Handle SyntaxError's specially - self.filename = exc_value.filename - lno = exc_value.lineno - self.lineno = str(lno) if lno is not None else None - self.text = exc_value.text - self.offset = exc_value.offset - self.msg = exc_value.msg - if sys.version_info >= (3, 10): - end_lno = exc_value.end_lineno - self.end_lineno = str(end_lno) if end_lno is not None else None - self.end_offset = exc_value.end_offset - elif ( - exc_type - and issubclass(exc_type, (NameError, AttributeError)) - and getattr(exc_value, "name", None) is not None - ): - suggestion = _compute_suggestion_error(exc_value, exc_traceback) - if suggestion: - self._str += f". Did you mean: '{suggestion}'?" - - if lookup_lines: - # Force all lines in the stack to be loaded - for frame in self.stack: - frame.line - - self.__suppress_context__ = ( - exc_value.__suppress_context__ if exc_value is not None else False - ) - - # Convert __cause__ and __context__ to `TracebackExceptions`s, use a - # queue to avoid recursion (only the top-level call gets _seen == None) - if not is_recursive_call: - queue = [(self, exc_value)] - while queue: - te, e = queue.pop() - - if e and e.__cause__ is not None and id(e.__cause__) not in _seen: - cause = PatchedTracebackException( - type(e.__cause__), - e.__cause__, - e.__cause__.__traceback__, - limit=limit, - lookup_lines=lookup_lines, - capture_locals=capture_locals, - _seen=_seen, - ) - else: - cause = None - - if compact: - need_context = ( - cause is None and e is not None and not e.__suppress_context__ - ) - else: - need_context = True - if ( - e - and e.__context__ is not None - and need_context - and id(e.__context__) not in _seen - ): - context = PatchedTracebackException( - type(e.__context__), - e.__context__, - e.__context__.__traceback__, - limit=limit, - lookup_lines=lookup_lines, - capture_locals=capture_locals, - _seen=_seen, - ) - else: - context = None - - # Capture each of the exceptions in the ExceptionGroup along with each - # of their causes and contexts - if e and isinstance(e, BaseExceptionGroup): - exceptions = [] - for exc in e.exceptions: - texc = PatchedTracebackException( - type(exc), - exc, - exc.__traceback__, - lookup_lines=lookup_lines, - capture_locals=capture_locals, - _seen=_seen, - ) - exceptions.append(texc) - else: - exceptions = None - - te.__cause__ = cause - te.__context__ = context - te.exceptions = exceptions - if cause: - queue.append((te.__cause__, e.__cause__)) - if context: - queue.append((te.__context__, e.__context__)) - if exceptions: - queue.extend(zip(te.exceptions, e.exceptions)) - - def format(self, *, chain=True, _ctx=None): - if _ctx is None: - _ctx = _ExceptionPrintContext() - - output = [] - exc = self - if chain: - while exc: - if exc.__cause__ is not None: - chained_msg = _cause_message - chained_exc = exc.__cause__ - elif exc.__context__ is not None and not exc.__suppress_context__: - chained_msg = _context_message - chained_exc = exc.__context__ - else: - chained_msg = None - chained_exc = None - - output.append((chained_msg, exc)) - exc = chained_exc - else: - output.append((None, exc)) - - for msg, exc in reversed(output): - if msg is not None: - yield from _ctx.emit(msg) - if exc.exceptions is None: - if exc.stack: - yield from _ctx.emit("Traceback (most recent call last):\n") - yield 
from _ctx.emit(exc.stack.format()) - yield from _ctx.emit(exc.format_exception_only()) - elif _ctx.exception_group_depth > max_group_depth: - # exception group, but depth exceeds limit - yield from _ctx.emit(f"... (max_group_depth is {max_group_depth})\n") - else: - # format exception group - is_toplevel = _ctx.exception_group_depth == 0 - if is_toplevel: - _ctx.exception_group_depth += 1 - - if exc.stack: - yield from _ctx.emit( - "Exception Group Traceback (most recent call last):\n", - margin_char="+" if is_toplevel else None, - ) - yield from _ctx.emit(exc.stack.format()) - - yield from _ctx.emit(exc.format_exception_only()) - num_excs = len(exc.exceptions) - if num_excs <= max_group_width: - n = num_excs - else: - n = max_group_width + 1 - _ctx.need_close = False - for i in range(n): - last_exc = i == n - 1 - if last_exc: - # The closing frame may be added by a recursive call - _ctx.need_close = True - - if max_group_width is not None: - truncated = i >= max_group_width - else: - truncated = False - title = f"{i + 1}" if not truncated else "..." - yield ( - _ctx.indent() - + ("+-" if i == 0 else " ") - + f"+---------------- {title} ----------------\n" - ) - _ctx.exception_group_depth += 1 - if not truncated: - yield from exc.exceptions[i].format(chain=chain, _ctx=_ctx) - else: - remaining = num_excs - max_group_width - plural = "s" if remaining > 1 else "" - yield from _ctx.emit( - f"and {remaining} more exception{plural}\n" - ) - - if last_exc and _ctx.need_close: - yield _ctx.indent() + "+------------------------------------\n" - _ctx.need_close = False - _ctx.exception_group_depth -= 1 - - if is_toplevel: - assert _ctx.exception_group_depth == 1 - _ctx.exception_group_depth = 0 - - def format_exception_only(self): - """Format the exception part of the traceback. - The return value is a generator of strings, each ending in a newline. - Normally, the generator emits a single string; however, for - SyntaxError exceptions, it emits several lines that (when - printed) display detailed information about where the syntax - error occurred. - The message indicating which exception occurred is always the last - string in the output. - """ - if self.exc_type is None: - yield traceback._format_final_exc_line(None, self._str) - return - - stype = self.exc_type.__qualname__ - smod = self.exc_type.__module__ - if smod not in ("__main__", "builtins"): - if not isinstance(smod, str): - smod = "" - stype = smod + "." 
+ stype - - if not issubclass(self.exc_type, SyntaxError): - yield _format_final_exc_line(stype, self._str) - elif traceback_exception_format_syntax_error is not None: - yield from traceback_exception_format_syntax_error(self, stype) - else: - yield from traceback_exception_original_format_exception_only(self) - - if isinstance(self.__notes__, collections.abc.Sequence): - for note in self.__notes__: - note = _safe_string(note, "note") - yield from [line + "\n" for line in note.split("\n")] - elif self.__notes__ is not None: - yield _safe_string(self.__notes__, "__notes__", func=repr) - - -traceback_exception_original_format = traceback.TracebackException.format -traceback_exception_original_format_exception_only = ( - traceback.TracebackException.format_exception_only -) -traceback_exception_format_syntax_error = getattr( - traceback.TracebackException, "_format_syntax_error", None -) -if sys.excepthook is sys.__excepthook__: - traceback.TracebackException.__init__ = ( # type: ignore[assignment] - PatchedTracebackException.__init__ - ) - traceback.TracebackException.format = ( # type: ignore[assignment] - PatchedTracebackException.format - ) - traceback.TracebackException.format_exception_only = ( # type: ignore[assignment] - PatchedTracebackException.format_exception_only - ) - sys.excepthook = exceptiongroup_excepthook - - -@singledispatch -def format_exception_only(__exc: BaseException) -> List[str]: - return list( - PatchedTracebackException( - type(__exc), __exc, None, compact=True - ).format_exception_only() - ) - - -@format_exception_only.register -def _(__exc: type, value: BaseException) -> List[str]: - return format_exception_only(value) - - -@singledispatch -def format_exception( - __exc: BaseException, - limit: Optional[int] = None, - chain: bool = True, -) -> List[str]: - return list( - PatchedTracebackException( - type(__exc), __exc, __exc.__traceback__, limit=limit, compact=True - ).format(chain=chain) - ) - - -@format_exception.register -def _( - __exc: type, - value: BaseException, - tb: TracebackType, - limit: Optional[int] = None, - chain: bool = True, -) -> List[str]: - return format_exception(value, limit, chain) - - -@singledispatch -def print_exception( - __exc: BaseException, - limit: Optional[int] = None, - file: Any = None, - chain: bool = True, -) -> None: - if file is None: - file = sys.stderr - - for line in PatchedTracebackException( - type(__exc), __exc, __exc.__traceback__, limit=limit - ).format(chain=chain): - print(line, file=file, end="") - - -@print_exception.register -def _( - __exc: type, - value: BaseException, - tb: TracebackType, - limit: Optional[int] = None, - file: Any = None, - chain: bool = True, -) -> None: - print_exception(value, limit, file, chain) - - -def print_exc( - limit: Optional[int] = None, - file: Any | None = None, - chain: bool = True, -) -> None: - value = sys.exc_info()[1] - print_exception(value, limit, file, chain) - - -# Python levenshtein edit distance code for NameError/AttributeError -# suggestions, backported from 3.12 - -_MAX_CANDIDATE_ITEMS = 750 -_MAX_STRING_SIZE = 40 -_MOVE_COST = 2 -_CASE_COST = 1 -_SENTINEL = object() - - -def _substitution_cost(ch_a, ch_b): - if ch_a == ch_b: - return 0 - if ch_a.lower() == ch_b.lower(): - return _CASE_COST - return _MOVE_COST - - -def _compute_suggestion_error(exc_value, tb): - wrong_name = getattr(exc_value, "name", None) - if wrong_name is None or not isinstance(wrong_name, str): - return None - if isinstance(exc_value, AttributeError): - obj = getattr(exc_value, "obj", 
_SENTINEL) - if obj is _SENTINEL: - return None - obj = exc_value.obj - try: - d = dir(obj) - except Exception: - return None - else: - assert isinstance(exc_value, NameError) - # find most recent frame - if tb is None: - return None - while tb.tb_next is not None: - tb = tb.tb_next - frame = tb.tb_frame - - d = list(frame.f_locals) + list(frame.f_globals) + list(frame.f_builtins) - if len(d) > _MAX_CANDIDATE_ITEMS: - return None - wrong_name_len = len(wrong_name) - if wrong_name_len > _MAX_STRING_SIZE: - return None - best_distance = wrong_name_len - suggestion = None - for possible_name in d: - if possible_name == wrong_name: - # A missing attribute is "found". Don't suggest it (see GH-88821). - continue - # No more than 1/3 of the involved characters should need changed. - max_distance = (len(possible_name) + wrong_name_len + 3) * _MOVE_COST // 6 - # Don't take matches we've already beaten. - max_distance = min(max_distance, best_distance - 1) - current_distance = _levenshtein_distance( - wrong_name, possible_name, max_distance - ) - if current_distance > max_distance: - continue - if not suggestion or current_distance < best_distance: - suggestion = possible_name - best_distance = current_distance - return suggestion - - -def _levenshtein_distance(a, b, max_cost): - # A Python implementation of Python/suggestions.c:levenshtein_distance. - - # Both strings are the same - if a == b: - return 0 - - # Trim away common affixes - pre = 0 - while a[pre:] and b[pre:] and a[pre] == b[pre]: - pre += 1 - a = a[pre:] - b = b[pre:] - post = 0 - while a[: post or None] and b[: post or None] and a[post - 1] == b[post - 1]: - post -= 1 - a = a[: post or None] - b = b[: post or None] - if not a or not b: - return _MOVE_COST * (len(a) + len(b)) - if len(a) > _MAX_STRING_SIZE or len(b) > _MAX_STRING_SIZE: - return max_cost + 1 - - # Prefer shorter buffer - if len(b) < len(a): - a, b = b, a - - # Quick fail when a match is impossible - if (len(b) - len(a)) * _MOVE_COST > max_cost: - return max_cost + 1 - - # Instead of producing the whole traditional len(a)-by-len(b) - # matrix, we can update just one row in place. - # Initialize the buffer row - row = list(range(_MOVE_COST, _MOVE_COST * (len(a) + 1), _MOVE_COST)) - - result = 0 - for bindex in range(len(b)): - bchar = b[bindex] - distance = result = bindex * _MOVE_COST - minimum = sys.maxsize - for index in range(len(a)): - # 1) Previous distance in this row is cost(b[:b_index], a[:index]) - substitute = distance + _substitution_cost(bchar, a[index]) - # 2) cost(b[:b_index], a[:index+1]) from previous row - distance = row[index] - # 3) existing result is cost(b[:b_index+1], a[index]) - - insert_delete = min(result, distance) + _MOVE_COST - result = min(insert_delete, substitute) - - # cost(b[:b_index+1], a[:index+1]) - row[index] = result - if result < minimum: - minimum = result - if minimum > max_cost: - # Everything in this row is too big, so bail early. 
- return max_cost + 1 - return result diff --git a/server/venv/Lib/site-packages/exceptiongroup/_version.py b/server/venv/Lib/site-packages/exceptiongroup/_version.py deleted file mode 100644 index 760ce26..0000000 --- a/server/venv/Lib/site-packages/exceptiongroup/_version.py +++ /dev/null @@ -1,4 +0,0 @@ -# file generated by setuptools_scm -# don't change, don't track in version control -__version__ = version = '1.1.3' -__version_tuple__ = version_tuple = (1, 1, 3) diff --git a/server/venv/Lib/site-packages/exceptiongroup/py.typed b/server/venv/Lib/site-packages/exceptiongroup/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/INSTALLER b/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/METADATA b/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/METADATA deleted file mode 100644 index a010938..0000000 --- a/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/METADATA +++ /dev/null @@ -1,530 +0,0 @@ -Metadata-Version: 2.1 -Name: fastapi -Version: 0.104.0 -Summary: FastAPI framework, high performance, easy to learn, fast to code, ready for production -Project-URL: Homepage, https://github.com/tiangolo/fastapi -Project-URL: Documentation, https://fastapi.tiangolo.com/ -Project-URL: Repository, https://github.com/tiangolo/fastapi -Author-email: Sebastián Ramírez -License-Expression: MIT -License-File: LICENSE -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Web Environment -Classifier: Framework :: AsyncIO -Classifier: Framework :: FastAPI -Classifier: Framework :: Pydantic -Classifier: Framework :: Pydantic :: 1 -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Information Technology -Classifier: Intended Audience :: System Administrators -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Topic :: Internet -Classifier: Topic :: Internet :: WWW/HTTP -Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers -Classifier: Topic :: Software Development -Classifier: Topic :: Software Development :: Libraries -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Typing :: Typed -Requires-Python: >=3.8 -Requires-Dist: anyio<4.0.0,>=3.7.1 -Requires-Dist: pydantic!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0,>=1.7.4 -Requires-Dist: starlette<0.28.0,>=0.27.0 -Requires-Dist: typing-extensions>=4.8.0 -Provides-Extra: all -Requires-Dist: email-validator>=2.0.0; extra == 'all' -Requires-Dist: httpx>=0.23.0; extra == 'all' -Requires-Dist: itsdangerous>=1.1.0; extra == 'all' -Requires-Dist: jinja2>=2.11.2; extra == 'all' -Requires-Dist: orjson>=3.2.1; extra == 'all' -Requires-Dist: pydantic-extra-types>=2.0.0; extra == 'all' -Requires-Dist: pydantic-settings>=2.0.0; extra == 'all' -Requires-Dist: 
python-multipart>=0.0.5; extra == 'all' -Requires-Dist: pyyaml>=5.3.1; extra == 'all' -Requires-Dist: ujson!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,>=4.0.1; extra == 'all' -Requires-Dist: uvicorn[standard]>=0.12.0; extra == 'all' -Description-Content-Type: text/markdown - -

- [FastAPI logo] - - FastAPI framework, high performance, easy to learn, fast to code, ready for production - - [Badges: Test, Coverage, Package version, Supported Python versions] - -
- ---- - -**Documentation**: https://fastapi.tiangolo.com - -**Source Code**: https://github.com/tiangolo/fastapi - ---- - -FastAPI is a modern, fast (high-performance) web framework for building APIs with Python 3.8+ based on standard Python type hints. - -The key features are: - -* **Fast**: Very high performance, on par with **NodeJS** and **Go** (thanks to Starlette and Pydantic). [One of the fastest Python frameworks available](#performance). -* **Fast to code**: Increase the speed to develop features by about 200% to 300%. * -* **Fewer bugs**: Reduce about 40% of human (developer) induced errors. * -* **Intuitive**: Great editor support. Completion everywhere. Less time debugging. -* **Easy**: Designed to be easy to use and learn. Less time reading docs. -* **Short**: Minimize code duplication. Multiple features from each parameter declaration. Fewer bugs. -* **Robust**: Get production-ready code. With automatic interactive documentation. -* **Standards-based**: Based on (and fully compatible with) the open standards for APIs: OpenAPI (previously known as Swagger) and JSON Schema. - -* estimation based on tests on an internal development team, building production applications. - -## Sponsors - -[Sponsor logos] - -Other sponsors - -## Opinions - -"_[...] I'm using **FastAPI** a ton these days. [...] I'm actually planning to use it for all of my team's **ML services at Microsoft**. Some of them are getting integrated into the core **Windows** product and some **Office** products._" - -
Kabir Khan - Microsoft (ref)
- ---- - -"_We adopted the **FastAPI** library to spawn a **REST** server that can be queried to obtain **predictions**. [for Ludwig]_" - -
Piero Molino, Yaroslav Dudin, and Sai Sumanth Miryala - Uber (ref)
- ---- - -"_**Netflix** is pleased to announce the open-source release of our **crisis management** orchestration framework: **Dispatch**! [built with **FastAPI**]_" - -
Kevin Glisson, Marc Vilanova, Forest Monsen - Netflix (ref)
- ---- - -"_I’m over the moon excited about **FastAPI**. It’s so fun!_" - -
Brian Okken - Python Bytes podcast host (ref)
- ---- - -"_Honestly, what you've built looks super solid and polished. In many ways, it's what I wanted **Hug** to be - it's really inspiring to see someone build that._" - -
Timothy Crosley - Hug creator (ref)
- ---- - -"_If you're looking to learn one **modern framework** for building REST APIs, check out **FastAPI** [...] It's fast, easy to use and easy to learn [...]_" - -"_We've switched over to **FastAPI** for our **APIs** [...] I think you'll like it [...]_" - -
Ines Montani - Matthew Honnibal - Explosion AI founders - spaCy creators (ref) - (ref)
- ---- - -"_If anyone is looking to build a production Python API, I would highly recommend **FastAPI**. It is **beautifully designed**, **simple to use** and **highly scalable**, it has become a **key component** in our API first development strategy and is driving many automations and services such as our Virtual TAC Engineer._" - -
Deon Pillsbury - Cisco (ref)
- ---- - -## **Typer**, the FastAPI of CLIs - - - -If you are building a CLI app to be used in the terminal instead of a web API, check out **Typer**. - -**Typer** is FastAPI's little sibling. And it's intended to be the **FastAPI of CLIs**. ⌨️ 🚀 - -## Requirements - -Python 3.8+ - -FastAPI stands on the shoulders of giants: - -* Starlette for the web parts. -* Pydantic for the data parts. - -## Installation - -
- -```console -$ pip install fastapi - ----> 100% -``` - -
- -For production, you will also need an ASGI server, such as Uvicorn or Hypercorn. - -
- -```console -$ pip install "uvicorn[standard]" - ----> 100% -``` - -
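- -As an aside (an editor's note, not part of the original README at this point, though mentioned in its Optional Dependencies section at the bottom): both of the above, plus all the optional extras, can be installed in one step: - -```console -$ pip install "fastapi[all]" -``` - -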
- -## Example - -### Create it - -* Create a file `main.py` with: - -```Python -from typing import Union - -from fastapi import FastAPI - -app = FastAPI() - - -@app.get("/") -def read_root(): - return {"Hello": "World"} - - -@app.get("/items/{item_id}") -def read_item(item_id: int, q: Union[str, None] = None): - return {"item_id": item_id, "q": q} -``` - -
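- -As a quick aside (an editor's sketch, not part of the original README), the example app can also be exercised in-process with FastAPI's `TestClient`, which requires the `httpx` package: - -```Python -from fastapi.testclient import TestClient - -from main import app # the main.py file created above - -client = TestClient(app) - -# Drives the ASGI app directly; no running server is needed. -response = client.get("/items/5", params={"q": "somequery"}) -assert response.status_code == 200 -assert response.json() == {"item_id": 5, "q": "somequery"} -``` - -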
-Or use async def... - -If your code uses `async` / `await`, use `async def`: - -```Python hl_lines="9 14" -from typing import Union - -from fastapi import FastAPI - -app = FastAPI() - - -@app.get("/") -async def read_root(): - return {"Hello": "World"} - - -@app.get("/items/{item_id}") -async def read_item(item_id: int, q: Union[str, None] = None): - return {"item_id": item_id, "q": q} -``` - -**Note**: - -If you don't know, check the _"In a hurry?"_ section about `async` and `await` in the docs. - -
- -### Run it - -Run the server with: - -
- -```console -$ uvicorn main:app --reload - -INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit) -INFO: Started reloader process [28720] -INFO: Started server process [28722] -INFO: Waiting for application startup. -INFO: Application startup complete. -``` - -
- -
-About the command uvicorn main:app --reload... - -The command `uvicorn main:app` refers to: - -* `main`: the file `main.py` (the Python "module"). -* `app`: the object created inside of `main.py` with the line `app = FastAPI()`. -* `--reload`: make the server restart after code changes. Only do this for development. - -
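- -The server can also be started programmatically (an editor's sketch, not part of the original README): - -```Python -import uvicorn - -if __name__ == "__main__": - # Equivalent to `uvicorn main:app --reload` when run as `python main.py`. - # The app must be given as an import string for reload to work. - uvicorn.run("main:app", host="127.0.0.1", port=8000, reload=True) -``` - -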
- -### Check it - -Open your browser at http://127.0.0.1:8000/items/5?q=somequery. - -You will see the JSON response as: - -```JSON -{"item_id": 5, "q": "somequery"} -``` - -You already created an API that: - -* Receives HTTP requests in the _paths_ `/` and `/items/{item_id}`. -* Both _paths_ take `GET` operations (also known as HTTP _methods_). -* The _path_ `/items/{item_id}` has a _path parameter_ `item_id` that should be an `int`. -* The _path_ `/items/{item_id}` has an optional `str` _query parameter_ `q`. - -### Interactive API docs - -Now go to http://127.0.0.1:8000/docs. - -You will see the automatic interactive API documentation (provided by Swagger UI): - -![Swagger UI](https://fastapi.tiangolo.com/img/index/index-01-swagger-ui-simple.png) - -### Alternative API docs - -And now, go to http://127.0.0.1:8000/redoc. - -You will see the alternative automatic documentation (provided by ReDoc): - -![ReDoc](https://fastapi.tiangolo.com/img/index/index-02-redoc-simple.png) - -## Example upgrade - -Now modify the file `main.py` to receive a body from a `PUT` request. - -Declare the body using standard Python types, thanks to Pydantic. - -```Python hl_lines="4 9-12 25-27" -from typing import Union - -from fastapi import FastAPI -from pydantic import BaseModel - -app = FastAPI() - - -class Item(BaseModel): - name: str - price: float - is_offer: Union[bool, None] = None - - -@app.get("/") -def read_root(): - return {"Hello": "World"} - - -@app.get("/items/{item_id}") -def read_item(item_id: int, q: Union[str, None] = None): - return {"item_id": item_id, "q": q} - - -@app.put("/items/{item_id}") -def update_item(item_id: int, item: Item): - return {"item_name": item.name, "item_id": item_id} -``` - -The server should reload automatically (because you added `--reload` to the `uvicorn` command above). - -### Interactive API docs upgrade - -Now go to http://127.0.0.1:8000/docs. - -* The interactive API documentation will be automatically updated, including the new body: - -![Swagger UI](https://fastapi.tiangolo.com/img/index/index-03-swagger-02.png) - -* Click on the button "Try it out", it allows you to fill the parameters and directly interact with the API: - -![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-04-swagger-03.png) - -* Then click on the "Execute" button, the user interface will communicate with your API, send the parameters, get the results and show them on the screen: - -![Swagger UI interaction](https://fastapi.tiangolo.com/img/index/index-05-swagger-04.png) - -### Alternative API docs upgrade - -And now, go to http://127.0.0.1:8000/redoc. - -* The alternative documentation will also reflect the new query parameter and body: - -![ReDoc](https://fastapi.tiangolo.com/img/index/index-06-redoc-02.png) - -### Recap - -In summary, you declare **once** the types of parameters, body, etc. as function parameters. - -You do that with standard modern Python types. - -You don't have to learn a new syntax, the methods or classes of a specific library, etc. - -Just standard **Python 3.8+**. - -For example, for an `int`: - -```Python -item_id: int -``` - -or for a more complex `Item` model: - -```Python -item: Item -``` - -...and with that single declaration you get: - -* Editor support, including: - * Completion. - * Type checks. -* Validation of data: - * Automatic and clear errors when the data is invalid. - * Validation even for deeply nested JSON objects. -* Conversion of input data: coming from the network to Python data and types. Reading from: - * JSON. 
- * Path parameters. - * Query parameters. - * Cookies. - * Headers. - * Forms. - * Files. -* Conversion of output data: converting from Python data and types to network data (as JSON): - * Convert Python types (`str`, `int`, `float`, `bool`, `list`, etc.). - * `datetime` objects. - * `UUID` objects. - * Database models. - * ...and many more. -* Automatic interactive API documentation, including 2 alternative user interfaces: - * Swagger UI. - * ReDoc. - ---- - -Coming back to the previous code example, **FastAPI** will: - -* Validate that there is an `item_id` in the path for `GET` and `PUT` requests. -* Validate that the `item_id` is of type `int` for `GET` and `PUT` requests. - * If it is not, the client will see a useful, clear error. -* Check if there is an optional query parameter named `q` (as in `http://127.0.0.1:8000/items/foo?q=somequery`) for `GET` requests. - * As the `q` parameter is declared with `= None`, it is optional. - * Without the `None` it would be required (as is the body in the case with `PUT`). -* For `PUT` requests to `/items/{item_id}`, read the body as JSON: - * Check that it has a required attribute `name` that should be a `str`. - * Check that it has a required attribute `price` that has to be a `float`. - * Check that it has an optional attribute `is_offer`, that should be a `bool`, if present. - * All this would also work for deeply nested JSON objects. -* Convert from and to JSON automatically. -* Document everything with OpenAPI, which can be used by: - * Interactive documentation systems. - * Automatic client code generation systems, for many languages. -* Provide 2 interactive documentation web interfaces directly. - ---- - -We just scratched the surface, but you already get the idea of how it all works. - -Try changing the line with: - -```Python - return {"item_name": item.name, "item_id": item_id} -``` - -...from: - -```Python - ... "item_name": item.name ... -``` - -...to: - -```Python - ... "item_price": item.price ... -``` - -...and see how your editor will auto-complete the attributes and know their types: - -![editor support](https://fastapi.tiangolo.com/img/vscode-completion.png) - -For a more complete example including more features, see the Tutorial - User Guide. - -**Spoiler alert**: the tutorial - user guide includes: - -* Declaration of **parameters** from other places, such as: **headers**, **cookies**, **form fields** and **files**. -* How to set **validation constraints** such as `maximum_length` or `regex`. -* A very powerful and easy to use **Dependency Injection** system. -* Security and authentication, including support for **OAuth2** with **JWT tokens** and **HTTP Basic** auth. -* More advanced (but equally easy) techniques for declaring **deeply nested JSON models** (thanks to Pydantic). -* **GraphQL** integration with Strawberry and other libraries. -* Many extra features (thanks to Starlette) such as: - * **WebSockets** - * extremely easy tests based on HTTPX and `pytest` - * **CORS** - * **Cookie Sessions** - * ...and more. - -## Performance - -Independent TechEmpower benchmarks show **FastAPI** applications running under Uvicorn as one of the fastest Python frameworks available, only below Starlette and Uvicorn themselves (used internally by FastAPI). (*) - -To understand more about it, see the section Benchmarks. - -## Optional Dependencies - -Used by Pydantic: - -* email_validator - for email validation. -* pydantic-settings - for settings management. -* pydantic-extra-types - for extra types to be used with Pydantic. 
- -Used by Starlette: - -* httpx - Required if you want to use the `TestClient`. -* jinja2 - Required if you want to use the default template configuration. -* python-multipart - Required if you want to support form "parsing", with `request.form()`. -* itsdangerous - Required for `SessionMiddleware` support. -* pyyaml - Required for Starlette's `SchemaGenerator` support (you probably don't need it with FastAPI). -* ujson - Required if you want to use `UJSONResponse`. - -Used by FastAPI / Starlette: - -* uvicorn - for the server that loads and serves your application. -* orjson - Required if you want to use `ORJSONResponse`. - -You can install all of these with `pip install "fastapi[all]"`. - -## License - -This project is licensed under the terms of the MIT license. diff --git a/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/RECORD b/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/RECORD deleted file mode 100644 index 7aa86a0..0000000 --- a/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/RECORD +++ /dev/null @@ -1,93 +0,0 @@ -fastapi-0.104.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -fastapi-0.104.0.dist-info/METADATA,sha256=CkzeBpixDOSeV0EHeSYTRlEY06F6nJwCcgTBbBTqcyY,24298 -fastapi-0.104.0.dist-info/RECORD,, -fastapi-0.104.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -fastapi-0.104.0.dist-info/WHEEL,sha256=9QBuHhg6FNW7lppboF2vKVbCGTVzsFykgRQjjlajrhA,87 -fastapi-0.104.0.dist-info/licenses/LICENSE,sha256=Tsif_IFIW5f-xYSy1KlhAy7v_oNEU4lP2cEnSQbMdE4,1086 -fastapi/__init__.py,sha256=gn98glQhIF340JPgqEZxbeyUUHwWya-Wdin8FALRQc0,1081 -fastapi/__pycache__/__init__.cpython-311.pyc,, -fastapi/__pycache__/_compat.cpython-311.pyc,, -fastapi/__pycache__/applications.cpython-311.pyc,, -fastapi/__pycache__/background.cpython-311.pyc,, -fastapi/__pycache__/concurrency.cpython-311.pyc,, -fastapi/__pycache__/datastructures.cpython-311.pyc,, -fastapi/__pycache__/encoders.cpython-311.pyc,, -fastapi/__pycache__/exception_handlers.cpython-311.pyc,, -fastapi/__pycache__/exceptions.cpython-311.pyc,, -fastapi/__pycache__/logger.cpython-311.pyc,, -fastapi/__pycache__/param_functions.cpython-311.pyc,, -fastapi/__pycache__/params.cpython-311.pyc,, -fastapi/__pycache__/requests.cpython-311.pyc,, -fastapi/__pycache__/responses.cpython-311.pyc,, -fastapi/__pycache__/routing.cpython-311.pyc,, -fastapi/__pycache__/staticfiles.cpython-311.pyc,, -fastapi/__pycache__/templating.cpython-311.pyc,, -fastapi/__pycache__/testclient.cpython-311.pyc,, -fastapi/__pycache__/types.cpython-311.pyc,, -fastapi/__pycache__/utils.cpython-311.pyc,, -fastapi/__pycache__/websockets.cpython-311.pyc,, -fastapi/_compat.py,sha256=xAFxZVkeuyOGlAHgCU_tmCtSLZjGkXvUElaeyoKoVrk,22796 -fastapi/applications.py,sha256=0x_D9p1AwI8EoBWgL_AsG0Qw8bp6TSZiDKqoAXR5W-w,179291 -fastapi/background.py,sha256=F1tsrJKfDZaRchNgF9ykB2PcRaPBJTbL4htN45TJAIc,1799 -fastapi/concurrency.py,sha256=NAK9SMlTCOALLjTAR6KzWUDEkVj7_EyNRz0-lDVW_W8,1467 -fastapi/datastructures.py,sha256=FF1s2g6cAQ5XxlNToB3scgV94Zf3DjdzcaI7ToaTrmg,5797 -fastapi/dependencies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -fastapi/dependencies/__pycache__/__init__.cpython-311.pyc,, -fastapi/dependencies/__pycache__/models.cpython-311.pyc,, -fastapi/dependencies/__pycache__/utils.cpython-311.pyc,, -fastapi/dependencies/models.py,sha256=-n-YCxzgVBkurQi49qOTooT71v_oeAhHJ-qQFonxh5o,2494 -fastapi/dependencies/utils.py,sha256=DjRdd_NVdXh_jDYKTRjUIXkwkLD0WE4oFXQC4peMr2c,29915 
-fastapi/encoders.py,sha256=90lbmIW8NZjpPVzbgKhpY49B7TFqa7hrdQDQa70SM9U,11024 -fastapi/exception_handlers.py,sha256=MBrIOA-ugjJDivIi4rSsUJBdTsjuzN76q4yh0q1COKw,1332 -fastapi/exceptions.py,sha256=SQsPxq-QYBZUhq6L4K3B3W7gaSD3Gub2f17erStRagY,5000 -fastapi/logger.py,sha256=I9NNi3ov8AcqbsbC9wl1X-hdItKgYt2XTrx1f99Zpl4,54 -fastapi/middleware/__init__.py,sha256=oQDxiFVcc1fYJUOIFvphnK7pTT5kktmfL32QXpBFvvo,58 -fastapi/middleware/__pycache__/__init__.cpython-311.pyc,, -fastapi/middleware/__pycache__/asyncexitstack.cpython-311.pyc,, -fastapi/middleware/__pycache__/cors.cpython-311.pyc,, -fastapi/middleware/__pycache__/gzip.cpython-311.pyc,, -fastapi/middleware/__pycache__/httpsredirect.cpython-311.pyc,, -fastapi/middleware/__pycache__/trustedhost.cpython-311.pyc,, -fastapi/middleware/__pycache__/wsgi.cpython-311.pyc,, -fastapi/middleware/asyncexitstack.py,sha256=LvMyVI1QdmWNWYPZqx295VFavssUfVpUsonPOsMWz1E,1035 -fastapi/middleware/cors.py,sha256=ynwjWQZoc_vbhzZ3_ZXceoaSrslHFHPdoM52rXr0WUU,79 -fastapi/middleware/gzip.py,sha256=xM5PcsH8QlAimZw4VDvcmTnqQamslThsfe3CVN2voa0,79 -fastapi/middleware/httpsredirect.py,sha256=rL8eXMnmLijwVkH7_400zHri1AekfeBd6D6qs8ix950,115 -fastapi/middleware/trustedhost.py,sha256=eE5XGRxGa7c5zPnMJDGp3BxaL25k5iVQlhnv-Pk0Pss,109 -fastapi/middleware/wsgi.py,sha256=Z3Ue-7wni4lUZMvH3G9ek__acgYdJstbnpZX_HQAboY,79 -fastapi/openapi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -fastapi/openapi/__pycache__/__init__.cpython-311.pyc,, -fastapi/openapi/__pycache__/constants.cpython-311.pyc,, -fastapi/openapi/__pycache__/docs.cpython-311.pyc,, -fastapi/openapi/__pycache__/models.cpython-311.pyc,, -fastapi/openapi/__pycache__/utils.cpython-311.pyc,, -fastapi/openapi/constants.py,sha256=adGzmis1L1HJRTE3kJ5fmHS_Noq6tIY6pWv_SFzoFDU,153 -fastapi/openapi/docs.py,sha256=gnnivCh7N0AOTVLpGrDOlo4P4h1CrbU6vcqeNhvZGkI,10379 -fastapi/openapi/models.py,sha256=DEmsWA-9sNqv2H4YneZUW86r1nMwD920EiTvan5kndI,17763 -fastapi/openapi/utils.py,sha256=PUuz_ISarHVPBRyIgfyHz8uwH0eEsDY3rJUfW__I9GI,22303 -fastapi/param_functions.py,sha256=VWEsJbkH8lJZgcJ6fI6uzquui1kgHrDv1i_wXM7cW3M,63896 -fastapi/params.py,sha256=LzjihAvODd3w7-GddraUyVtH1xfwR9smIoQn-Z_g4mg,27807 -fastapi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -fastapi/requests.py,sha256=zayepKFcienBllv3snmWI20Gk0oHNVLU4DDhqXBb4LU,142 -fastapi/responses.py,sha256=QNQQlwpKhQoIPZTTWkpc9d_QGeGZ_aVQPaDV3nQ8m7c,1761 -fastapi/routing.py,sha256=wtZXvIphGDMUBkBkZHMWgiiiQZX28HXNvPxxT0HWwXA,172396 -fastapi/security/__init__.py,sha256=bO8pNmxqVRXUjfl2mOKiVZLn0FpBQ61VUYVjmppnbJw,881 -fastapi/security/__pycache__/__init__.cpython-311.pyc,, -fastapi/security/__pycache__/api_key.cpython-311.pyc,, -fastapi/security/__pycache__/base.cpython-311.pyc,, -fastapi/security/__pycache__/http.cpython-311.pyc,, -fastapi/security/__pycache__/oauth2.cpython-311.pyc,, -fastapi/security/__pycache__/open_id_connect_url.cpython-311.pyc,, -fastapi/security/__pycache__/utils.cpython-311.pyc,, -fastapi/security/api_key.py,sha256=bcZbUzTqeR_CI_LXuJdDq1qL322kmhgy5ApOCqgGDi4,9399 -fastapi/security/base.py,sha256=dl4pvbC-RxjfbWgPtCWd8MVU-7CB2SZ22rJDXVCXO6c,141 -fastapi/security/http.py,sha256=RVnZNavpUNHr6wguijiM3uNKt-b4i_bXgdT_JInuVZY,13523 -fastapi/security/oauth2.py,sha256=sjJgO5WL7j0cxnigzfF1zIIR1hgK_YSvh05B8KtxE94,21636 -fastapi/security/open_id_connect_url.py,sha256=Mb8wFxrRh4CrsFW0RcjBEQLASPHGDtZRP6c2dCrspAg,2753 -fastapi/security/utils.py,sha256=bd8T0YM7UQD5ATKucr1bNtAvz_Y3__dVNAv5UebiPvc,293 
-fastapi/staticfiles.py,sha256=iirGIt3sdY2QZXd36ijs3Cj-T0FuGFda3cd90kM9Ikw,69 -fastapi/templating.py,sha256=4zsuTWgcjcEainMJFAlW6-gnslm6AgOS1SiiDWfmQxk,76 -fastapi/testclient.py,sha256=nBvaAmX66YldReJNZXPOk1sfuo2Q6hs8bOvIaCep6LQ,66 -fastapi/types.py,sha256=WZJ1jvm1MCwIrxxRYxKwtXS9HqcGk0RnCbLzrMZh-lI,428 -fastapi/utils.py,sha256=_vUwlqa4dq8M0Rl3Pro051teIccx36Z4hgecGH8F_oA,8179 -fastapi/websockets.py,sha256=419uncYObEKZG0YcrXscfQQYLSWoE10jqxVMetGdR98,222 diff --git a/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/REQUESTED b/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/WHEEL b/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/WHEEL deleted file mode 100644 index ba1a8af..0000000 --- a/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.18.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/licenses/LICENSE b/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/licenses/LICENSE deleted file mode 100644 index 3e92463..0000000 --- a/server/venv/Lib/site-packages/fastapi-0.104.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2018 Sebastián Ramírez - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/server/venv/Lib/site-packages/fastapi/__init__.py b/server/venv/Lib/site-packages/fastapi/__init__.py deleted file mode 100644 index 4fdb155..0000000 --- a/server/venv/Lib/site-packages/fastapi/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -"""FastAPI framework, high performance, easy to learn, fast to code, ready for production""" - -__version__ = "0.104.0" - -from starlette import status as status - -from .applications import FastAPI as FastAPI -from .background import BackgroundTasks as BackgroundTasks -from .datastructures import UploadFile as UploadFile -from .exceptions import HTTPException as HTTPException -from .exceptions import WebSocketException as WebSocketException -from .param_functions import Body as Body -from .param_functions import Cookie as Cookie -from .param_functions import Depends as Depends -from .param_functions import File as File -from .param_functions import Form as Form -from .param_functions import Header as Header -from .param_functions import Path as Path -from .param_functions import Query as Query -from .param_functions import Security as Security -from .requests import Request as Request -from .responses import Response as Response -from .routing import APIRouter as APIRouter -from .websockets import WebSocket as WebSocket -from .websockets import WebSocketDisconnect as WebSocketDisconnect diff --git a/server/venv/Lib/site-packages/fastapi/_compat.py b/server/venv/Lib/site-packages/fastapi/_compat.py deleted file mode 100644 index a4b305d..0000000 --- a/server/venv/Lib/site-packages/fastapi/_compat.py +++ /dev/null @@ -1,629 +0,0 @@ -from collections import deque -from copy import copy -from dataclasses import dataclass, is_dataclass -from enum import Enum -from typing import ( - Any, - Callable, - Deque, - Dict, - FrozenSet, - List, - Mapping, - Sequence, - Set, - Tuple, - Type, - Union, -) - -from fastapi.exceptions import RequestErrorModel -from fastapi.types import IncEx, ModelNameMap, UnionType -from pydantic import BaseModel, create_model -from pydantic.version import VERSION as PYDANTIC_VERSION -from starlette.datastructures import UploadFile -from typing_extensions import Annotated, Literal, get_args, get_origin - -PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.") - - -sequence_annotation_to_type = { - Sequence: list, - List: list, - list: list, - Tuple: tuple, - tuple: tuple, - Set: set, - set: set, - FrozenSet: frozenset, - frozenset: frozenset, - Deque: deque, - deque: deque, -} - -sequence_types = tuple(sequence_annotation_to_type.keys()) - -if PYDANTIC_V2: - from pydantic import PydanticSchemaGenerationError as PydanticSchemaGenerationError - from pydantic import TypeAdapter - from pydantic import ValidationError as ValidationError - from pydantic._internal._schema_generation_shared import ( # type: ignore[attr-defined] - GetJsonSchemaHandler as GetJsonSchemaHandler, - ) - from pydantic._internal._typing_extra import eval_type_lenient - from pydantic._internal._utils import lenient_issubclass as lenient_issubclass - from pydantic.fields import FieldInfo - from pydantic.json_schema import GenerateJsonSchema as GenerateJsonSchema - from pydantic.json_schema import JsonSchemaValue as JsonSchemaValue - from pydantic_core import CoreSchema as CoreSchema - from pydantic_core import PydanticUndefined, PydanticUndefinedType - from pydantic_core import Url as Url - - try: - from pydantic_core.core_schema import ( - with_info_plain_validator_function as with_info_plain_validator_function, - ) - except ImportError: # pragma: no cover - from 
pydantic_core.core_schema import ( - general_plain_validator_function as with_info_plain_validator_function, # noqa: F401 - ) - - Required = PydanticUndefined - Undefined = PydanticUndefined - UndefinedType = PydanticUndefinedType - evaluate_forwardref = eval_type_lenient - Validator = Any - - class BaseConfig: - pass - - class ErrorWrapper(Exception): - pass - - @dataclass - class ModelField: - field_info: FieldInfo - name: str - mode: Literal["validation", "serialization"] = "validation" - - @property - def alias(self) -> str: - a = self.field_info.alias - return a if a is not None else self.name - - @property - def required(self) -> bool: - return self.field_info.is_required() - - @property - def default(self) -> Any: - return self.get_default() - - @property - def type_(self) -> Any: - return self.field_info.annotation - - def __post_init__(self) -> None: - self._type_adapter: TypeAdapter[Any] = TypeAdapter( - Annotated[self.field_info.annotation, self.field_info] - ) - - def get_default(self) -> Any: - if self.field_info.is_required(): - return Undefined - return self.field_info.get_default(call_default_factory=True) - - def validate( - self, - value: Any, - values: Dict[str, Any] = {}, # noqa: B006 - *, - loc: Tuple[Union[int, str], ...] = (), - ) -> Tuple[Any, Union[List[Dict[str, Any]], None]]: - try: - return ( - self._type_adapter.validate_python(value, from_attributes=True), - None, - ) - except ValidationError as exc: - return None, _regenerate_error_with_loc( - errors=exc.errors(), loc_prefix=loc - ) - - def serialize( - self, - value: Any, - *, - mode: Literal["json", "python"] = "json", - include: Union[IncEx, None] = None, - exclude: Union[IncEx, None] = None, - by_alias: bool = True, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - ) -> Any: - # What calls this code passes a value that already called - # self._type_adapter.validate_python(value) - return self._type_adapter.dump_python( - value, - mode=mode, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - - def __hash__(self) -> int: - # Each ModelField is unique for our purposes, to allow making a dict from - # ModelField to its JSON Schema. 
- return id(self) - - def get_annotation_from_field_info( - annotation: Any, field_info: FieldInfo, field_name: str - ) -> Any: - return annotation - - def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]: - return errors # type: ignore[return-value] - - def _model_rebuild(model: Type[BaseModel]) -> None: - model.model_rebuild() - - def _model_dump( - model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any - ) -> Any: - return model.model_dump(mode=mode, **kwargs) - - def _get_model_config(model: BaseModel) -> Any: - return model.model_config - - def get_schema_from_model_field( - *, - field: ModelField, - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - field_mapping: Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - separate_input_output_schemas: bool = True, - ) -> Dict[str, Any]: - override_mode: Union[Literal["validation"], None] = ( - None if separate_input_output_schemas else "validation" - ) - # This expects that GenerateJsonSchema was already used to generate the definitions - json_schema = field_mapping[(field, override_mode or field.mode)] - if "$ref" not in json_schema: - # TODO remove when deprecating Pydantic v1 - # Ref: https://github.com/pydantic/pydantic/blob/d61792cc42c80b13b23e3ffa74bc37ec7c77f7d1/pydantic/schema.py#L207 - json_schema[ - "title" - ] = field.field_info.title or field.alias.title().replace("_", " ") - return json_schema - - def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap: - return {} - - def get_definitions( - *, - fields: List[ModelField], - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - separate_input_output_schemas: bool = True, - ) -> Tuple[ - Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - Dict[str, Dict[str, Any]], - ]: - override_mode: Union[Literal["validation"], None] = ( - None if separate_input_output_schemas else "validation" - ) - inputs = [ - (field, override_mode or field.mode, field._type_adapter.core_schema) - for field in fields - ] - field_mapping, definitions = schema_generator.generate_definitions( - inputs=inputs - ) - return field_mapping, definitions # type: ignore[return-value] - - def is_scalar_field(field: ModelField) -> bool: - from fastapi import params - - return field_annotation_is_scalar( - field.field_info.annotation - ) and not isinstance(field.field_info, params.Body) - - def is_sequence_field(field: ModelField) -> bool: - return field_annotation_is_sequence(field.field_info.annotation) - - def is_scalar_sequence_field(field: ModelField) -> bool: - return field_annotation_is_scalar_sequence(field.field_info.annotation) - - def is_bytes_field(field: ModelField) -> bool: - return is_bytes_or_nonable_bytes_annotation(field.type_) - - def is_bytes_sequence_field(field: ModelField) -> bool: - return is_bytes_sequence_annotation(field.type_) - - def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo: - return type(field_info).from_annotation(annotation) - - def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]: - origin_type = ( - get_origin(field.field_info.annotation) or field.field_info.annotation - ) - assert issubclass(origin_type, sequence_types) # type: ignore[arg-type] - return sequence_annotation_to_type[origin_type](value) # type: ignore[no-any-return] - - def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]: - error = ValidationError.from_exception_data( - "Field 
required", [{"type": "missing", "loc": loc, "input": {}}] - ).errors()[0] - error["input"] = None - return error # type: ignore[return-value] - - def create_body_model( - *, fields: Sequence[ModelField], model_name: str - ) -> Type[BaseModel]: - field_params = {f.name: (f.field_info.annotation, f.field_info) for f in fields} - BodyModel: Type[BaseModel] = create_model(model_name, **field_params) # type: ignore[call-overload] - return BodyModel - -else: - from fastapi.openapi.constants import REF_PREFIX as REF_PREFIX - from pydantic import AnyUrl as Url # noqa: F401 - from pydantic import ( # type: ignore[assignment] - BaseConfig as BaseConfig, # noqa: F401 - ) - from pydantic import ValidationError as ValidationError # noqa: F401 - from pydantic.class_validators import ( # type: ignore[no-redef] - Validator as Validator, # noqa: F401 - ) - from pydantic.error_wrappers import ( # type: ignore[no-redef] - ErrorWrapper as ErrorWrapper, # noqa: F401 - ) - from pydantic.errors import MissingError - from pydantic.fields import ( # type: ignore[attr-defined] - SHAPE_FROZENSET, - SHAPE_LIST, - SHAPE_SEQUENCE, - SHAPE_SET, - SHAPE_SINGLETON, - SHAPE_TUPLE, - SHAPE_TUPLE_ELLIPSIS, - ) - from pydantic.fields import FieldInfo as FieldInfo - from pydantic.fields import ( # type: ignore[no-redef,attr-defined] - ModelField as ModelField, # noqa: F401 - ) - from pydantic.fields import ( # type: ignore[no-redef,attr-defined] - Required as Required, # noqa: F401 - ) - from pydantic.fields import ( # type: ignore[no-redef,attr-defined] - Undefined as Undefined, - ) - from pydantic.fields import ( # type: ignore[no-redef, attr-defined] - UndefinedType as UndefinedType, # noqa: F401 - ) - from pydantic.schema import ( - field_schema, - get_flat_models_from_fields, - get_model_name_map, - model_process_schema, - ) - from pydantic.schema import ( # type: ignore[no-redef] # noqa: F401 - get_annotation_from_field_info as get_annotation_from_field_info, - ) - from pydantic.typing import ( # type: ignore[no-redef] - evaluate_forwardref as evaluate_forwardref, # noqa: F401 - ) - from pydantic.utils import ( # type: ignore[no-redef] - lenient_issubclass as lenient_issubclass, # noqa: F401 - ) - - GetJsonSchemaHandler = Any # type: ignore[assignment,misc] - JsonSchemaValue = Dict[str, Any] # type: ignore[misc] - CoreSchema = Any # type: ignore[assignment,misc] - - sequence_shapes = { - SHAPE_LIST, - SHAPE_SET, - SHAPE_FROZENSET, - SHAPE_TUPLE, - SHAPE_SEQUENCE, - SHAPE_TUPLE_ELLIPSIS, - } - sequence_shape_to_type = { - SHAPE_LIST: list, - SHAPE_SET: set, - SHAPE_TUPLE: tuple, - SHAPE_SEQUENCE: list, - SHAPE_TUPLE_ELLIPSIS: list, - } - - @dataclass - class GenerateJsonSchema: # type: ignore[no-redef] - ref_template: str - - class PydanticSchemaGenerationError(Exception): # type: ignore[no-redef] - pass - - def with_info_plain_validator_function( # type: ignore[misc] - function: Callable[..., Any], - *, - ref: Union[str, None] = None, - metadata: Any = None, - serialization: Any = None, - ) -> Any: - return {} - - def get_model_definitions( - *, - flat_models: Set[Union[Type[BaseModel], Type[Enum]]], - model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str], - ) -> Dict[str, Any]: - definitions: Dict[str, Dict[str, Any]] = {} - for model in flat_models: - m_schema, m_definitions, m_nested_models = model_process_schema( - model, model_name_map=model_name_map, ref_prefix=REF_PREFIX - ) - definitions.update(m_definitions) - model_name = model_name_map[model] - if "description" in m_schema: - m_schema["description"] = 
m_schema["description"].split("\f")[0] - definitions[model_name] = m_schema - return definitions - - def is_pv1_scalar_field(field: ModelField) -> bool: - from fastapi import params - - field_info = field.field_info - if not ( - field.shape == SHAPE_SINGLETON # type: ignore[attr-defined] - and not lenient_issubclass(field.type_, BaseModel) - and not lenient_issubclass(field.type_, dict) - and not field_annotation_is_sequence(field.type_) - and not is_dataclass(field.type_) - and not isinstance(field_info, params.Body) - ): - return False - if field.sub_fields: # type: ignore[attr-defined] - if not all( - is_pv1_scalar_field(f) - for f in field.sub_fields # type: ignore[attr-defined] - ): - return False - return True - - def is_pv1_scalar_sequence_field(field: ModelField) -> bool: - if (field.shape in sequence_shapes) and not lenient_issubclass( # type: ignore[attr-defined] - field.type_, BaseModel - ): - if field.sub_fields is not None: # type: ignore[attr-defined] - for sub_field in field.sub_fields: # type: ignore[attr-defined] - if not is_pv1_scalar_field(sub_field): - return False - return True - if _annotation_is_sequence(field.type_): - return True - return False - - def _normalize_errors(errors: Sequence[Any]) -> List[Dict[str, Any]]: - use_errors: List[Any] = [] - for error in errors: - if isinstance(error, ErrorWrapper): - new_errors = ValidationError( # type: ignore[call-arg] - errors=[error], model=RequestErrorModel - ).errors() - use_errors.extend(new_errors) - elif isinstance(error, list): - use_errors.extend(_normalize_errors(error)) - else: - use_errors.append(error) - return use_errors - - def _model_rebuild(model: Type[BaseModel]) -> None: - model.update_forward_refs() - - def _model_dump( - model: BaseModel, mode: Literal["json", "python"] = "json", **kwargs: Any - ) -> Any: - return model.dict(**kwargs) - - def _get_model_config(model: BaseModel) -> Any: - return model.__config__ # type: ignore[attr-defined] - - def get_schema_from_model_field( - *, - field: ModelField, - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - field_mapping: Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - separate_input_output_schemas: bool = True, - ) -> Dict[str, Any]: - # This expects that GenerateJsonSchema was already used to generate the definitions - return field_schema( # type: ignore[no-any-return] - field, model_name_map=model_name_map, ref_prefix=REF_PREFIX - )[0] - - def get_compat_model_name_map(fields: List[ModelField]) -> ModelNameMap: - models = get_flat_models_from_fields(fields, known_models=set()) - return get_model_name_map(models) # type: ignore[no-any-return] - - def get_definitions( - *, - fields: List[ModelField], - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - separate_input_output_schemas: bool = True, - ) -> Tuple[ - Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - Dict[str, Dict[str, Any]], - ]: - models = get_flat_models_from_fields(fields, known_models=set()) - return {}, get_model_definitions( - flat_models=models, model_name_map=model_name_map - ) - - def is_scalar_field(field: ModelField) -> bool: - return is_pv1_scalar_field(field) - - def is_sequence_field(field: ModelField) -> bool: - return field.shape in sequence_shapes or _annotation_is_sequence(field.type_) # type: ignore[attr-defined] - - def is_scalar_sequence_field(field: ModelField) -> bool: - return is_pv1_scalar_sequence_field(field) - - def is_bytes_field(field: 
ModelField) -> bool: - return lenient_issubclass(field.type_, bytes) - - def is_bytes_sequence_field(field: ModelField) -> bool: - return field.shape in sequence_shapes and lenient_issubclass(field.type_, bytes) # type: ignore[attr-defined] - - def copy_field_info(*, field_info: FieldInfo, annotation: Any) -> FieldInfo: - return copy(field_info) - - def serialize_sequence_value(*, field: ModelField, value: Any) -> Sequence[Any]: - return sequence_shape_to_type[field.shape](value) # type: ignore[no-any-return,attr-defined] - - def get_missing_field_error(loc: Tuple[str, ...]) -> Dict[str, Any]: - missing_field_error = ErrorWrapper(MissingError(), loc=loc) # type: ignore[call-arg] - new_error = ValidationError([missing_field_error], RequestErrorModel) - return new_error.errors()[0] # type: ignore[return-value] - - def create_body_model( - *, fields: Sequence[ModelField], model_name: str - ) -> Type[BaseModel]: - BodyModel = create_model(model_name) - for f in fields: - BodyModel.__fields__[f.name] = f # type: ignore[index] - return BodyModel - - -def _regenerate_error_with_loc( - *, errors: Sequence[Any], loc_prefix: Tuple[Union[str, int], ...] -) -> List[Dict[str, Any]]: - updated_loc_errors: List[Any] = [ - {**err, "loc": loc_prefix + err.get("loc", ())} - for err in _normalize_errors(errors) - ] - - return updated_loc_errors - - -def _annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool: - if lenient_issubclass(annotation, (str, bytes)): - return False - return lenient_issubclass(annotation, sequence_types) - - -def field_annotation_is_sequence(annotation: Union[Type[Any], None]) -> bool: - return _annotation_is_sequence(annotation) or _annotation_is_sequence( - get_origin(annotation) - ) - - -def value_is_sequence(value: Any) -> bool: - return isinstance(value, sequence_types) and not isinstance(value, (str, bytes)) # type: ignore[arg-type] - - -def _annotation_is_complex(annotation: Union[Type[Any], None]) -> bool: - return ( - lenient_issubclass(annotation, (BaseModel, Mapping, UploadFile)) - or _annotation_is_sequence(annotation) - or is_dataclass(annotation) - ) - - -def field_annotation_is_complex(annotation: Union[Type[Any], None]) -> bool: - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - return any(field_annotation_is_complex(arg) for arg in get_args(annotation)) - - return ( - _annotation_is_complex(annotation) - or _annotation_is_complex(origin) - or hasattr(origin, "__pydantic_core_schema__") - or hasattr(origin, "__get_pydantic_core_schema__") - ) - - -def field_annotation_is_scalar(annotation: Any) -> bool: - # handle Ellipsis here to make tuple[int, ...] 
work nicely - return annotation is Ellipsis or not field_annotation_is_complex(annotation) - - -def field_annotation_is_scalar_sequence(annotation: Union[Type[Any], None]) -> bool: - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - at_least_one_scalar_sequence = False - for arg in get_args(annotation): - if field_annotation_is_scalar_sequence(arg): - at_least_one_scalar_sequence = True - continue - elif not field_annotation_is_scalar(arg): - return False - return at_least_one_scalar_sequence - return field_annotation_is_sequence(annotation) and all( - field_annotation_is_scalar(sub_annotation) - for sub_annotation in get_args(annotation) - ) - - -def is_bytes_or_nonable_bytes_annotation(annotation: Any) -> bool: - if lenient_issubclass(annotation, bytes): - return True - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - for arg in get_args(annotation): - if lenient_issubclass(arg, bytes): - return True - return False - - -def is_uploadfile_or_nonable_uploadfile_annotation(annotation: Any) -> bool: - if lenient_issubclass(annotation, UploadFile): - return True - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - for arg in get_args(annotation): - if lenient_issubclass(arg, UploadFile): - return True - return False - - -def is_bytes_sequence_annotation(annotation: Any) -> bool: - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - at_least_one = False - for arg in get_args(annotation): - if is_bytes_sequence_annotation(arg): - at_least_one = True - continue - return at_least_one - return field_annotation_is_sequence(annotation) and all( - is_bytes_or_nonable_bytes_annotation(sub_annotation) - for sub_annotation in get_args(annotation) - ) - - -def is_uploadfile_sequence_annotation(annotation: Any) -> bool: - origin = get_origin(annotation) - if origin is Union or origin is UnionType: - at_least_one = False - for arg in get_args(annotation): - if is_uploadfile_sequence_annotation(arg): - at_least_one = True - continue - return at_least_one - return field_annotation_is_sequence(annotation) and all( - is_uploadfile_or_nonable_uploadfile_annotation(sub_annotation) - for sub_annotation in get_args(annotation) - ) diff --git a/server/venv/Lib/site-packages/fastapi/applications.py b/server/venv/Lib/site-packages/fastapi/applications.py deleted file mode 100644 index 46b7ae8..0000000 --- a/server/venv/Lib/site-packages/fastapi/applications.py +++ /dev/null @@ -1,4646 +0,0 @@ -from enum import Enum -from typing import ( - Any, - Awaitable, - Callable, - Coroutine, - Dict, - List, - Optional, - Sequence, - Type, - TypeVar, - Union, -) - -from fastapi import routing -from fastapi.datastructures import Default, DefaultPlaceholder -from fastapi.exception_handlers import ( - http_exception_handler, - request_validation_exception_handler, - websocket_request_validation_exception_handler, -) -from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError -from fastapi.logger import logger -from fastapi.middleware.asyncexitstack import AsyncExitStackMiddleware -from fastapi.openapi.docs import ( - get_redoc_html, - get_swagger_ui_html, - get_swagger_ui_oauth2_redirect_html, -) -from fastapi.openapi.utils import get_openapi -from fastapi.params import Depends -from fastapi.types import DecoratedCallable, IncEx -from fastapi.utils import generate_unique_id -from starlette.applications import Starlette -from starlette.datastructures import State -from starlette.exceptions 
import HTTPException -from starlette.middleware import Middleware -from starlette.middleware.base import BaseHTTPMiddleware -from starlette.middleware.errors import ServerErrorMiddleware -from starlette.middleware.exceptions import ExceptionMiddleware -from starlette.requests import Request -from starlette.responses import HTMLResponse, JSONResponse, Response -from starlette.routing import BaseRoute -from starlette.types import ASGIApp, Lifespan, Receive, Scope, Send -from typing_extensions import Annotated, Doc, deprecated # type: ignore [attr-defined] - -AppType = TypeVar("AppType", bound="FastAPI") - - -class FastAPI(Starlette): - """ - `FastAPI` app class, the main entrypoint to use FastAPI. - - Read more in the - [FastAPI docs for First Steps](https://fastapi.tiangolo.com/tutorial/first-steps/). - - ## Example - - ```python - from fastapi import FastAPI - - app = FastAPI() - ``` - """ - - def __init__( - self: AppType, - *, - debug: Annotated[ - bool, - Doc( - """ - Boolean indicating if debug tracebacks should be returned on server - errors. - - Read more in the - [Starlette docs for Applications](https://www.starlette.io/applications/#instantiating-the-application). - """ - ), - ] = False, - routes: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - **Note**: you probably shouldn't use this parameter, it is inherited - from Starlette and supported for compatibility. - - In FastAPI, you normally would use the *path operation* decorators, - like: - - * `app.get()` - * `app.post()` - * etc. - - --- - - A list of routes to serve incoming HTTP and WebSocket requests. - """ - ), - deprecated( - """ - You normally wouldn't use this parameter with FastAPI, it is inherited - from Starlette and supported for compatibility. - - In FastAPI, you normally would use the *path operation methods*, - like `app.get()`, `app.post()`, etc. - """ - ), - ] = None, - title: Annotated[ - str, - Doc( - """ - The title of the API. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(title="ChimichangApp") - ``` - """ - ), - ] = "FastAPI", - summary: Annotated[ - Optional[str], - Doc( - """ - A short summary of the API. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(summary="Deadpond's favorite app. Nuff said.") - ``` - """ - ), - ] = None, - description: Annotated[ - str, - Doc( - ''' - A description of the API. Supports Markdown (using - [CommonMark syntax](https://commonmark.org/)). - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI( - description=""" - ChimichangApp API helps you do awesome stuff. 🚀 - - ## Items - - You can **read items**. - - ## Users - - You will be able to: - - * **Create users** (_not implemented_). - * **Read users** (_not implemented_). - - """ - ) - ``` - ''' - ), - ] = "", - version: Annotated[ - str, - Doc( - """ - The version of the API. 
- - **Note** This is the version of your application, not the version of - the OpenAPI specification nor the version of FastAPI being used. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(version="0.0.1") - ``` - """ - ), - ] = "0.1.0", - openapi_url: Annotated[ - Optional[str], - Doc( - """ - The URL where the OpenAPI schema will be served from. - - If you set it to `None`, no OpenAPI schema will be served publicly, and - the default automatic endpoints `/docs` and `/redoc` will also be - disabled. - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#openapi-url). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(openapi_url="/api/v1/openapi.json") - ``` - """ - ), - ] = "/openapi.json", - openapi_tags: Annotated[ - Optional[List[Dict[str, Any]]], - Doc( - """ - A list of tags used by OpenAPI, these are the same `tags` you can set - in the *path operations*, like: - - * `@app.get("/users/", tags=["users"])` - * `@app.get("/items/", tags=["items"])` - - The order of the tags can be used to specify the order shown in - tools like Swagger UI, used in the automatic path `/docs`. - - It's not required to specify all the tags used. - - The tags that are not declared MAY be organized randomly or based - on the tools' logic. Each tag name in the list MUST be unique. - - The value of each item is a `dict` containing: - - * `name`: The name of the tag. - * `description`: A short description of the tag. - [CommonMark syntax](https://commonmark.org/) MAY be used for rich - text representation. - * `externalDocs`: Additional external documentation for this tag. If - provided, it would contain a `dict` with: - * `description`: A short description of the target documentation. - [CommonMark syntax](https://commonmark.org/) MAY be used for - rich text representation. - * `url`: The URL for the target documentation. Value MUST be in - the form of a URL. - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-tags). - - **Example** - - ```python - from fastapi import FastAPI - - tags_metadata = [ - { - "name": "users", - "description": "Operations with users. The **login** logic is also here.", - }, - { - "name": "items", - "description": "Manage items. So _fancy_ they have their own docs.", - "externalDocs": { - "description": "Items external docs", - "url": "https://fastapi.tiangolo.com/", - }, - }, - ] - - app = FastAPI(openapi_tags=tags_metadata) - ``` - """ - ), - ] = None, - servers: Annotated[ - Optional[List[Dict[str, Union[str, Any]]]], - Doc( - """ - A `list` of `dict`s with connectivity information to a target server. - - You would use it, for example, if your application is served from - different domains and you want to use the same Swagger UI in the - browser to interact with each of them (instead of having multiple - browser tabs open). Or if you want to leave fixed the possible URLs. - - If the servers `list` is not provided, or is an empty `list`, the - default value would be a a `dict` with a `url` value of `/`. - - Each item in the `list` is a `dict` containing: - - * `url`: A URL to the target host. 
This URL supports Server Variables - and MAY be relative, to indicate that the host location is relative - to the location where the OpenAPI document is being served. Variable - substitutions will be made when a variable is named in `{`brackets`}`. - * `description`: An optional string describing the host designated by - the URL. [CommonMark syntax](https://commonmark.org/) MAY be used for - rich text representation. - * `variables`: A `dict` between a variable name and its value. The value - is used for substitution in the server's URL template. - - Read more in the - [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/#additional-servers). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI( - servers=[ - {"url": "https://stag.example.com", "description": "Staging environment"}, - {"url": "https://prod.example.com", "description": "Production environment"}, - ] - ) - ``` - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of global dependencies, they will be applied to each - *path operation*, including in sub-routers. - - Read more about it in the - [FastAPI docs for Global Dependencies](https://fastapi.tiangolo.com/tutorial/dependencies/global-dependencies/). - - **Example** - - ```python - from fastapi import Depends, FastAPI - - from .dependencies import func_dep_1, func_dep_2 - - app = FastAPI(dependencies=[Depends(func_dep_1), Depends(func_dep_2)]) - ``` - """ - ), - ] = None, - default_response_class: Annotated[ - Type[Response], - Doc( - """ - The default response class to be used. - - Read more in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). - - **Example** - - ```python - from fastapi import FastAPI - from fastapi.responses import ORJSONResponse - - app = FastAPI(default_response_class=ORJSONResponse) - ``` - """ - ), - ] = Default(JSONResponse), - redirect_slashes: Annotated[ - bool, - Doc( - """ - Whether to detect and redirect slashes in URLs when the client doesn't - use the same format. - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(redirect_slashes=True) # the default - - @app.get("/items/") - async def read_items(): - return [{"item_id": "Foo"}] - ``` - - With this app, if a client goes to `/items` (without a trailing slash), - they will be automatically redirected with an HTTP status code of 307 - to `/items/`. - """ - ), - ] = True, - docs_url: Annotated[ - Optional[str], - Doc( - """ - The path to the automatic interactive API documentation. - It is handled in the browser by Swagger UI. - - The default URL is `/docs`. You can disable it by setting it to `None`. - - If `openapi_url` is set to `None`, this will be automatically disabled. - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#docs-urls). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(docs_url="/documentation", redoc_url=None) - ``` - """ - ), - ] = "/docs", - redoc_url: Annotated[ - Optional[str], - Doc( - """ - The path to the alternative automatic interactive API documentation - provided by ReDoc. - - The default URL is `/redoc`. You can disable it by setting it to `None`. - - If `openapi_url` is set to `None`, this will be automatically disabled. - - Read more in the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#docs-urls). 
- - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(docs_url="/documentation", redoc_url="redocumentation") - ``` - """ - ), - ] = "/redoc", - swagger_ui_oauth2_redirect_url: Annotated[ - Optional[str], - Doc( - """ - The OAuth2 redirect endpoint for the Swagger UI. - - By default it is `/docs/oauth2-redirect`. - - This is only used if you use OAuth2 (with the "Authorize" button) - with Swagger UI. - """ - ), - ] = "/docs/oauth2-redirect", - swagger_ui_init_oauth: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - OAuth2 configuration for the Swagger UI, by default shown at `/docs`. - - Read more about the available configuration options in the - [Swagger UI docs](https://swagger.io/docs/open-source-tools/swagger-ui/usage/oauth2/). - """ - ), - ] = None, - middleware: Annotated[ - Optional[Sequence[Middleware]], - Doc( - """ - List of middleware to be added when creating the application. - - In FastAPI you would normally do this with `app.add_middleware()` - instead. - - Read more in the - [FastAPI docs for Middleware](https://fastapi.tiangolo.com/tutorial/middleware/). - """ - ), - ] = None, - exception_handlers: Annotated[ - Optional[ - Dict[ - Union[int, Type[Exception]], - Callable[[Request, Any], Coroutine[Any, Any, Response]], - ] - ], - Doc( - """ - A dictionary with handlers for exceptions. - - In FastAPI, you would normally use the decorator - `@app.exception_handler()`. - - Read more in the - [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/). - """ - ), - ] = None, - on_startup: Annotated[ - Optional[Sequence[Callable[[], Any]]], - Doc( - """ - A list of startup event handler functions. - - You should instead use the `lifespan` handlers. - - Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - on_shutdown: Annotated[ - Optional[Sequence[Callable[[], Any]]], - Doc( - """ - A list of shutdown event handler functions. - - You should instead use the `lifespan` handlers. - - Read more in the - [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - lifespan: Annotated[ - Optional[Lifespan[AppType]], - Doc( - """ - A `Lifespan` context manager handler. This replaces `startup` and - `shutdown` functions with a single context manager. - - Read more in the - [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - terms_of_service: Annotated[ - Optional[str], - Doc( - """ - A URL to the Terms of Service for your API. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more at the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - app = FastAPI(terms_of_service="http://example.com/terms/") - ``` - """ - ), - ] = None, - contact: Annotated[ - Optional[Dict[str, Union[str, Any]]], - Doc( - """ - A dictionary with the contact information for the exposed API. - - It can contain several fields. - - * `name`: (`str`) The name of the contact person/organization. - * `url`: (`str`) A URL pointing to the contact information. MUST be in - the format of a URL. - * `email`: (`str`) The email address of the contact person/organization. - MUST be in the format of an email address. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
- - Read more at the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - app = FastAPI( - contact={ - "name": "Deadpoolio the Amazing", - "url": "http://x-force.example.com/contact/", - "email": "dp@x-force.example.com", - } - ) - ``` - """ - ), - ] = None, - license_info: Annotated[ - Optional[Dict[str, Union[str, Any]]], - Doc( - """ - A dictionary with the license information for the exposed API. - - It can contain several fields. - - * `name`: (`str`) **REQUIRED** (if a `license_info` is set). The - license name used for the API. - * `identifier`: (`str`) An [SPDX](https://spdx.dev/) license expression - for the API. The `identifier` field is mutually exclusive of the `url` - field. Available since OpenAPI 3.1.0, FastAPI 0.99.0. - * `url`: (`str`) A URL to the license used for the API. This MUST be - the format of a URL. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more at the - [FastAPI docs for Metadata and Docs URLs](https://fastapi.tiangolo.com/tutorial/metadata/#metadata-for-api). - - **Example** - - ```python - app = FastAPI( - license_info={ - "name": "Apache 2.0", - "url": "https://www.apache.org/licenses/LICENSE-2.0.html", - } - ) - ``` - """ - ), - ] = None, - openapi_prefix: Annotated[ - str, - Doc( - """ - A URL prefix for the OpenAPI URL. - """ - ), - deprecated( - """ - "openapi_prefix" has been deprecated in favor of "root_path", which - follows more closely the ASGI standard, is simpler, and more - automatic. - """ - ), - ] = "", - root_path: Annotated[ - str, - Doc( - """ - A path prefix handled by a proxy that is not seen by the application - but is seen by external clients, which affects things like Swagger UI. - - Read more about it at the - [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(root_path="/api/v1") - ``` - """ - ), - ] = "", - root_path_in_servers: Annotated[ - bool, - Doc( - """ - To disable automatically generating the URLs in the `servers` field - in the autogenerated OpenAPI using the `root_path`. - - Read more about it in the - [FastAPI docs for Behind a Proxy](https://fastapi.tiangolo.com/advanced/behind-a-proxy/#disable-automatic-server-from-root_path). - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI(root_path_in_servers=False) - ``` - """ - ), - ] = True, - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses to be shown in OpenAPI. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). - - And in the - [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - OpenAPI callbacks that should apply to all *path operations*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - webhooks: Annotated[ - Optional[routing.APIRouter], - Doc( - """ - Add OpenAPI webhooks. 
This is similar to `callbacks` but it doesn't - depend on specific *path operations*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - **Note**: This is available since OpenAPI 3.1.0, FastAPI 0.99.0. - - Read more about it in the - [FastAPI docs for OpenAPI Webhooks](https://fastapi.tiangolo.com/advanced/openapi-webhooks/). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark all *path operations* as deprecated. You probably don't need it, - but it's available. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) all the *path operations* in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - swagger_ui_parameters: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Parameters to configure Swagger UI, the autogenerated interactive API - documentation (by default at `/docs`). - - Read more about it in the - [FastAPI docs about how to Configure Swagger UI](https://fastapi.tiangolo.com/how-to/configure-swagger-ui/). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - separate_input_output_schemas: Annotated[ - bool, - Doc( - """ - Whether to generate separate OpenAPI schemas for request body and - response body when the results would be more precise. - - This is particularly useful when automatically generating clients. - - For example, if you have a model like: - - ```python - from pydantic import BaseModel - - class Item(BaseModel): - name: str - tags: list[str] = [] - ``` - - When `Item` is used for input, a request body, `tags` is not required, - the client doesn't have to provide it. - - But when using `Item` for output, for a response body, `tags` is always - available because it has a default value, even if it's just an empty - list. So, the client should be able to always expect it. - - In this case, there would be two different schemas, one for input and - another one for output. - """ - ), - ] = True, - **extra: Annotated[ - Any, - Doc( - """ - Extra keyword arguments to be stored in the app, not used by FastAPI - anywhere. 
- """ - ), - ], - ) -> None: - self.debug = debug - self.title = title - self.summary = summary - self.description = description - self.version = version - self.terms_of_service = terms_of_service - self.contact = contact - self.license_info = license_info - self.openapi_url = openapi_url - self.openapi_tags = openapi_tags - self.root_path_in_servers = root_path_in_servers - self.docs_url = docs_url - self.redoc_url = redoc_url - self.swagger_ui_oauth2_redirect_url = swagger_ui_oauth2_redirect_url - self.swagger_ui_init_oauth = swagger_ui_init_oauth - self.swagger_ui_parameters = swagger_ui_parameters - self.servers = servers or [] - self.separate_input_output_schemas = separate_input_output_schemas - self.extra = extra - self.openapi_version: Annotated[ - str, - Doc( - """ - The version string of OpenAPI. - - FastAPI will generate OpenAPI version 3.1.0, and will output that as - the OpenAPI version. But some tools, even though they might be - compatible with OpenAPI 3.1.0, might not recognize it as a valid. - - So you could override this value to trick those tools into using - the generated OpenAPI. Have in mind that this is a hack. But if you - avoid using features added in OpenAPI 3.1.0, it might work for your - use case. - - This is not passed as a parameter to the `FastAPI` class to avoid - giving the false idea that FastAPI would generate a different OpenAPI - schema. It is only available as an attribute. - - **Example** - - ```python - from fastapi import FastAPI - - app = FastAPI() - - app.openapi_version = "3.0.2" - ``` - """ - ), - ] = "3.1.0" - self.openapi_schema: Optional[Dict[str, Any]] = None - if self.openapi_url: - assert self.title, "A title must be provided for OpenAPI, e.g.: 'My API'" - assert self.version, "A version must be provided for OpenAPI, e.g.: '2.1.0'" - # TODO: remove when discarding the openapi_prefix parameter - if openapi_prefix: - logger.warning( - '"openapi_prefix" has been deprecated in favor of "root_path", which ' - "follows more closely the ASGI standard, is simpler, and more " - "automatic. Check the docs at " - "https://fastapi.tiangolo.com/advanced/sub-applications/" - ) - self.webhooks: Annotated[ - routing.APIRouter, - Doc( - """ - The `app.webhooks` attribute is an `APIRouter` with the *path - operations* that will be used just for documentation of webhooks. - - Read more about it in the - [FastAPI docs for OpenAPI Webhooks](https://fastapi.tiangolo.com/advanced/openapi-webhooks/). - """ - ), - ] = ( - webhooks or routing.APIRouter() - ) - self.root_path = root_path or openapi_prefix - self.state: Annotated[ - State, - Doc( - """ - A state object for the application. This is the same object for the - entire application, it doesn't change from request to request. - - You normally woudln't use this in FastAPI, for most of the cases you - would instead use FastAPI dependencies. - - This is simply inherited from Starlette. - - Read more about it in the - [Starlette docs for Applications](https://www.starlette.io/applications/#storing-state-on-the-app-instance). - """ - ), - ] = State() - self.dependency_overrides: Annotated[ - Dict[Callable[..., Any], Callable[..., Any]], - Doc( - """ - A dictionary with overrides for the dependencies. - - Each key is the original dependency callable, and the value is the - actual dependency that should be called. - - This is for testing, to replace expensive dependencies with testing - versions. 
- - Read more about it in the - [FastAPI docs for Testing Dependencies with Overrides](https://fastapi.tiangolo.com/advanced/testing-dependencies/). - """ - ), - ] = {} - self.router: routing.APIRouter = routing.APIRouter( - routes=routes, - redirect_slashes=redirect_slashes, - dependency_overrides_provider=self, - on_startup=on_startup, - on_shutdown=on_shutdown, - lifespan=lifespan, - default_response_class=default_response_class, - dependencies=dependencies, - callbacks=callbacks, - deprecated=deprecated, - include_in_schema=include_in_schema, - responses=responses, - generate_unique_id_function=generate_unique_id_function, - ) - self.exception_handlers: Dict[ - Any, Callable[[Request, Any], Union[Response, Awaitable[Response]]] - ] = ({} if exception_handlers is None else dict(exception_handlers)) - self.exception_handlers.setdefault(HTTPException, http_exception_handler) - self.exception_handlers.setdefault( - RequestValidationError, request_validation_exception_handler - ) - self.exception_handlers.setdefault( - WebSocketRequestValidationError, - # Starlette still has incorrect type specification for the handlers - websocket_request_validation_exception_handler, # type: ignore - ) - - self.user_middleware: List[Middleware] = ( - [] if middleware is None else list(middleware) - ) - self.middleware_stack: Union[ASGIApp, None] = None - self.setup() - - def build_middleware_stack(self) -> ASGIApp: - # Duplicate/override from Starlette to add AsyncExitStackMiddleware - # inside of ExceptionMiddleware, inside of custom user middlewares - debug = self.debug - error_handler = None - exception_handlers = {} - - for key, value in self.exception_handlers.items(): - if key in (500, Exception): - error_handler = value - else: - exception_handlers[key] = value - - middleware = ( - [Middleware(ServerErrorMiddleware, handler=error_handler, debug=debug)] - + self.user_middleware - + [ - Middleware( - ExceptionMiddleware, handlers=exception_handlers, debug=debug - ), - # Add FastAPI-specific AsyncExitStackMiddleware for dependencies with - # contextvars. - # This needs to happen after user middlewares because those create a - # new contextvars context copy by using a new AnyIO task group. - # The initial part of dependencies with 'yield' is executed in the - # FastAPI code, inside all the middlewares. However, the teardown part - # (after 'yield') is executed in the AsyncExitStack in this middleware. - # If the AsyncExitStack lived outside of the custom middlewares and - # contextvars were set in a dependency with 'yield' in that internal - # contextvars context, the values would not be available in the - # outer context of the AsyncExitStack. - # By placing the middleware and the AsyncExitStack here, inside all - # user middlewares, the code before and after 'yield' in dependencies - # with 'yield' is executed in the same contextvars context. Thus, all values - # set in contextvars before 'yield' are still available after 'yield,' as - # expected. - # Additionally, by having this AsyncExitStack here, after the - # ExceptionMiddleware, dependencies can now catch handled exceptions, - # e.g. HTTPException, to customize the teardown code (e.g. DB session - # rollback). - Middleware(AsyncExitStackMiddleware), - ] - ) - - app = self.router - for cls, options in reversed(middleware): - app = cls(app=app, **options) - return app - - def openapi(self) -> Dict[str, Any]: - """ - Generate the OpenAPI schema of the application. This is called by FastAPI - internally. 
- - The first time it is called it stores the result in the attribute - `app.openapi_schema`, and next times it is called, it just returns that same - result. To avoid the cost of generating the schema every time. - - If you need to modify the generated OpenAPI schema, you could modify it. - - Read more in the - [FastAPI docs for OpenAPI](https://fastapi.tiangolo.com/how-to/extending-openapi/). - """ - if not self.openapi_schema: - self.openapi_schema = get_openapi( - title=self.title, - version=self.version, - openapi_version=self.openapi_version, - summary=self.summary, - description=self.description, - terms_of_service=self.terms_of_service, - contact=self.contact, - license_info=self.license_info, - routes=self.routes, - webhooks=self.webhooks.routes, - tags=self.openapi_tags, - servers=self.servers, - separate_input_output_schemas=self.separate_input_output_schemas, - ) - return self.openapi_schema - - def setup(self) -> None: - if self.openapi_url: - urls = (server_data.get("url") for server_data in self.servers) - server_urls = {url for url in urls if url} - - async def openapi(req: Request) -> JSONResponse: - root_path = req.scope.get("root_path", "").rstrip("/") - if root_path not in server_urls: - if root_path and self.root_path_in_servers: - self.servers.insert(0, {"url": root_path}) - server_urls.add(root_path) - return JSONResponse(self.openapi()) - - self.add_route(self.openapi_url, openapi, include_in_schema=False) - if self.openapi_url and self.docs_url: - - async def swagger_ui_html(req: Request) -> HTMLResponse: - root_path = req.scope.get("root_path", "").rstrip("/") - openapi_url = root_path + self.openapi_url - oauth2_redirect_url = self.swagger_ui_oauth2_redirect_url - if oauth2_redirect_url: - oauth2_redirect_url = root_path + oauth2_redirect_url - return get_swagger_ui_html( - openapi_url=openapi_url, - title=self.title + " - Swagger UI", - oauth2_redirect_url=oauth2_redirect_url, - init_oauth=self.swagger_ui_init_oauth, - swagger_ui_parameters=self.swagger_ui_parameters, - ) - - self.add_route(self.docs_url, swagger_ui_html, include_in_schema=False) - - if self.swagger_ui_oauth2_redirect_url: - - async def swagger_ui_redirect(req: Request) -> HTMLResponse: - return get_swagger_ui_oauth2_redirect_html() - - self.add_route( - self.swagger_ui_oauth2_redirect_url, - swagger_ui_redirect, - include_in_schema=False, - ) - if self.openapi_url and self.redoc_url: - - async def redoc_html(req: Request) -> HTMLResponse: - root_path = req.scope.get("root_path", "").rstrip("/") - openapi_url = root_path + self.openapi_url - return get_redoc_html( - openapi_url=openapi_url, title=self.title + " - ReDoc" - ) - - self.add_route(self.redoc_url, redoc_html, include_in_schema=False) - - async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: - if self.root_path: - scope["root_path"] = self.root_path - await super().__call__(scope, receive, send) - - def add_api_route( - self, - path: str, - endpoint: Callable[..., Coroutine[Any, Any, Response]], - *, - response_model: Any = Default(None), - status_code: Optional[int] = None, - tags: Optional[List[Union[str, Enum]]] = None, - dependencies: Optional[Sequence[Depends]] = None, - summary: Optional[str] = None, - description: Optional[str] = None, - response_description: str = "Successful Response", - responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, - deprecated: Optional[bool] = None, - methods: Optional[List[str]] = None, - operation_id: Optional[str] = None, - response_model_include: 
Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - include_in_schema: bool = True, - response_class: Union[Type[Response], DefaultPlaceholder] = Default( - JSONResponse - ), - name: Optional[str] = None, - openapi_extra: Optional[Dict[str, Any]] = None, - generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( - generate_unique_id - ), - ) -> None: - self.router.add_api_route( - path, - endpoint=endpoint, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=methods, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def api_route( - self, - path: str, - *, - response_model: Any = Default(None), - status_code: Optional[int] = None, - tags: Optional[List[Union[str, Enum]]] = None, - dependencies: Optional[Sequence[Depends]] = None, - summary: Optional[str] = None, - description: Optional[str] = None, - response_description: str = "Successful Response", - responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, - deprecated: Optional[bool] = None, - methods: Optional[List[str]] = None, - operation_id: Optional[str] = None, - response_model_include: Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - include_in_schema: bool = True, - response_class: Type[Response] = Default(JSONResponse), - name: Optional[str] = None, - openapi_extra: Optional[Dict[str, Any]] = None, - generate_unique_id_function: Callable[[routing.APIRoute], str] = Default( - generate_unique_id - ), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.router.add_api_route( - path, - func, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=methods, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - return func - - return decorator - - def add_api_websocket_route( 
- self, - path: str, - endpoint: Callable[..., Any], - name: Optional[str] = None, - *, - dependencies: Optional[Sequence[Depends]] = None, - ) -> None: - self.router.add_api_websocket_route( - path, - endpoint, - name=name, - dependencies=dependencies, - ) - - def websocket( - self, - path: Annotated[ - str, - Doc( - """ - WebSocket path. - """ - ), - ], - name: Annotated[ - Optional[str], - Doc( - """ - A name for the WebSocket. Only used internally. - """ - ), - ] = None, - *, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be used for this - WebSocket. - - Read more about it in the - [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). - """ - ), - ] = None, - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Decorate a WebSocket function. - - Read more about it in the - [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). - - **Example** - - ```python - from fastapi import FastAPI, WebSocket - - app = FastAPI() - - @app.websocket("/ws") - async def websocket_endpoint(websocket: WebSocket): - await websocket.accept() - while True: - data = await websocket.receive_text() - await websocket.send_text(f"Message text was: {data}") - ``` - """ - - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_api_websocket_route( - path, - func, - name=name, - dependencies=dependencies, - ) - return func - - return decorator - - def include_router( - self, - router: Annotated[routing.APIRouter, Doc("The `APIRouter` to include.")], - *, - prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "", - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to all the *path operations* in this - router. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to all the - *path operations* in this router. - - Read more about it in the - [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). - - **Example** - - ```python - from fastapi import Depends, FastAPI - - from .dependencies import get_token_header - from .internal import admin - - app = FastAPI() - - app.include_router( - admin.router, - dependencies=[Depends(get_token_header)], - ) - ``` - """ - ), - ] = None, - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses to be shown in OpenAPI. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). - - And in the - [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark all the *path operations* in this router as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
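The `prefix`, `tags`, and `dependencies` parameters documented above compose: every route in the included router is served under the prefix, tagged in the OpenAPI docs, and guarded by the dependencies. A minimal sketch, where the `items_router` name and the `verify_token` dependency are hypothetical:

```python
from fastapi import APIRouter, Depends, FastAPI, Header, HTTPException

app = FastAPI()
items_router = APIRouter()  # hypothetical router


@items_router.get("/")
def list_items():
    return [{"name": "Plumbus"}]


async def verify_token(x_token: str = Header()):  # hypothetical dependency
    if x_token != "secret-token":
        raise HTTPException(status_code=400, detail="X-Token header invalid")


# Every route in items_router is now served under /items, tagged "items"
# in the generated OpenAPI, and runs verify_token before the handler.
app.include_router(
    items_router,
    prefix="/items",
    tags=["items"],
    dependencies=[Depends(verify_token)],
)
```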
- - **Example** - - ```python - from fastapi import FastAPI - - from .internal import old_api - - app = FastAPI() - - app.include_router( - old_api.router, - deprecated=True, - ) - ``` - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include (or not) all the *path operations* in this router in the - generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - **Example** - - ```python - from fastapi import FastAPI - - from .internal import old_api - - app = FastAPI() - - app.include_router( - old_api.router, - include_in_schema=False, - ) - ``` - """ - ), - ] = True, - default_response_class: Annotated[ - Type[Response], - Doc( - """ - Default response class to be used for the *path operations* in this - router. - - Read more in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). - - **Example** - - ```python - from fastapi import FastAPI - from fastapi.responses import ORJSONResponse - - from .internal import old_api - - app = FastAPI() - - app.include_router( - old_api.router, - default_response_class=ORJSONResponse, - ) - ``` - """ - ), - ] = Default(JSONResponse), - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> None: - """ - Include an `APIRouter` in the same app. - - Read more about it in the - [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/). - - ## Example - - ```python - from fastapi import FastAPI - - from .users import users_router - - app = FastAPI() - - app.include_router(users_router) - ``` - """ - self.router.include_router( - router, - prefix=prefix, - tags=tags, - dependencies=dependencies, - responses=responses, - deprecated=deprecated, - include_in_schema=include_in_schema, - default_response_class=default_response_class, - callbacks=callbacks, - generate_unique_id_function=generate_unique_id_function, - ) - - def get( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). 
- * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
- """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. 
- - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP GET operation. 
- - ## Example - - ```python - from fastapi import FastAPI - - app = FastAPI() - - @app.get("/items/") - def read_items(): - return [{"name": "Empanada"}, {"name": "Arepa"}] - ``` - """ - return self.router.get( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def put( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. 
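Dependencies attached this way run before the handler, but their return values are not passed to it; they are typically used for checks and side effects. A minimal sketch with a hypothetical `verify_key` dependency:

```python
from fastapi import Depends, FastAPI, Header, HTTPException

app = FastAPI()


async def verify_key(x_key: str = Header()):  # hypothetical dependency
    if x_key != "expected-key":
        raise HTTPException(status_code=403, detail="X-Key header invalid")


# verify_key runs for every request to this operation; the handler
# itself never sees its return value.
@app.put("/items/{item_id}", dependencies=[Depends(verify_key)])
def update_item(item_id: str):
    return {"item_id": item_id}
```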
- - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
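When a model field declares an alias, `response_model_by_alias` controls which name appears in the serialized JSON; a sketch with a hypothetical aliased field:

```python
from fastapi import FastAPI
from pydantic import BaseModel, Field

app = FastAPI()


class User(BaseModel):  # hypothetical model
    user_name: str = Field(alias="userName")


# With the default response_model_by_alias=True this returns
# {"userName": "alice"}; with False it would return {"user_name": "alice"}.
@app.get("/user", response_model=User, response_model_by_alias=True)
def read_user():
    return User(userName="alice")
```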
- """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. 
- - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP PUT operation. - - ## Example - - ```python - from fastapi import FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - - @app.put("/items/{item_id}") - def replace_item(item_id: str, item: Item): - return {"message": "Item replaced", "id": item_id} - ``` - """ - return self.router.put( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def post( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). 
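The filtering behavior described above is what makes it safe to return a richer internal object than the client should see; a sketch with hypothetical `UserIn`/`UserOut` models:

```python
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class UserIn(BaseModel):  # hypothetical input model
    username: str
    password: str


class UserOut(BaseModel):  # hypothetical output model
    username: str


# The handler returns a UserIn including the password, but the
# response_model filters the JSON down to UserOut's fields, so the
# password never reaches the client.
@app.post("/users/", response_model=UserOut)
def create_user(user: UserIn):
    return user
```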
- - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. 
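`response_model_include` and `response_model_exclude` are two views of the same filtering: select the fields to keep, or name the fields to strip. A quick sketch with a hypothetical `Item` model:

```python
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class Item(BaseModel):  # hypothetical model
    name: str
    price: float
    tax: float = 0.0


# Only the listed fields are serialized: {"name": ..., "price": ...}.
@app.post(
    "/items/summary",
    response_model=Item,
    response_model_include={"name", "price"},
)
def create_item_summary(item: Item):
    return item


# The listed fields are stripped, producing the same shape as above,
# this time by removing tax instead of selecting the rest.
@app.post(
    "/items/no-tax",
    response_model=Item,
    response_model_exclude={"tax"},
)
def create_item_without_tax(item: Item):
    return item
```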
- - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. 
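A sketch swapping the default JSON response for plain text on a single operation; `PlainTextResponse` ships with FastAPI, and the declared class also sets the media type advertised in the OpenAPI schema:

```python
from fastapi import FastAPI
from fastapi.responses import PlainTextResponse

app = FastAPI()


# The return value is rendered as text/plain instead of being
# JSON-encoded; message arrives as a query parameter here.
@app.post("/echo", response_class=PlainTextResponse)
def echo(message: str):
    return message
```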
- - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP POST operation. - - ## Example - - ```python - from fastapi import FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - - @app.post("/items/") - def create_item(item: Item): - return {"message": "Item created"} - ``` - """ - return self.router.post( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def delete( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). 
- * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
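A minimal sketch combining a 204 status with the `deprecated` flag documented above; both only change the contract and the docs, not how the operation is matched:

```python
from fastapi import FastAPI

app = FastAPI()


# 204 responses carry no body, so the handler returns None; the
# deprecated flag only marks the operation in the OpenAPI docs, the
# endpoint itself keeps working.
@app.delete("/items/{item_id}", status_code=204, deprecated=True)
def delete_item(item_id: str) -> None:
    pass
```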
- """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. 
- - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP DELETE operation. 
- - ## Example - - ```python - from fastapi import FastAPI - - app = FastAPI() - - @app.delete("/items/{item_id}") - def delete_item(item_id: str): - return {"message": "Item deleted"} - ``` - """ - return self.router.delete( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def options( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. 
- - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
- """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. 
- - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP OPTIONS operation. - - ## Example - - ```python - from fastapi import FastAPI - - app = FastAPI() - - @app.options("/items/") - def get_item_options(): - return {"additions": ["Aji", "Guacamole"]} - ``` - """ - return self.router.options( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def head( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). 
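
An OPTIONS handler often advertises which methods a resource supports; a hedged sketch (the `Allow` header is standard HTTP, while the route and values here are illustrative, not from the source):

```python
# Illustrative sketch: advertising supported methods from an OPTIONS handler.
from fastapi import FastAPI, Response

app = FastAPI()

@app.options("/items/")
def items_options(response: Response):
    response.headers["Allow"] = "GET, POST, OPTIONS"
    return {"allow": ["GET", "POST", "OPTIONS"]}
```
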
- """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
- """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). 
- """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP HEAD operation. - - ## Example - - ```python - from fastapi import FastAPI, Response - - app = FastAPI() - - @app.head("/items/", status_code=204) - def get_items_headers(response: Response): - response.headers["X-Cat-Dog"] = "Alone in the world" - ``` - """ - return self.router.head( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def patch( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. 
- * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. 
- - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). 
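
To make the `response_model_exclude_unset` behavior documented above concrete, a small sketch following the pattern from the FastAPI response-model tutorial (the model and route names are illustrative):

```python
# Illustrative: with response_model_exclude_unset=True, fields the handler
# never set (here `tax` and `description`) are dropped from the JSON body.
from typing import Optional

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()

class Item(BaseModel):
    name: str
    tax: float = 10.5
    description: Optional[str] = None

@app.get("/items/{item_id}", response_model=Item, response_model_exclude_unset=True)
def read_item(item_id: str):
    return {"name": "Foo"}  # response body: {"name": "Foo"}
```
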
- """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[routing.APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP PATCH operation. 
- - ## Example - - ```python - from fastapi import FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - - @app.patch("/items/") - def update_item(item: Item): - return {"message": "Item updated in place"} - ``` - """ - return self.router.patch( - path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def trace( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
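
A common companion to the PATCH example above is a partial update that applies only the fields the client actually sent; a hedged sketch (the in-memory `items` store is illustrative; `.model_dump()` is the Pydantic v2 spelling, use `.dict()` on v1):

```python
# Illustrative partial update: exclude_unset=True keeps only client-sent fields.
from typing import Optional

from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()
items = {"foo": {"name": "Foo", "description": None}}

class ItemPatch(BaseModel):
    name: Optional[str] = None
    description: Optional[str] = None

@app.patch("/items/{item_id}")
def patch_item(item_id: str, item: ItemPatch):
    stored = items[item_id]
    stored.update(item.model_dump(exclude_unset=True))  # .dict() on Pydantic v1
    return stored
```
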
- """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
- """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. 
-
-                Read more about it in the
-                [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema).
-                """
-            ),
-        ] = None,
-        generate_unique_id_function: Annotated[
-            Callable[[routing.APIRoute], str],
-            Doc(
-                """
-                Customize the function used to generate unique IDs for the *path
-                operations* shown in the generated OpenAPI.
-
-                This is particularly useful when automatically generating clients or
-                SDKs for your API.
-
-                Read more about it in the
-                [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function).
-                """
-            ),
-        ] = Default(generate_unique_id),
-    ) -> Callable[[DecoratedCallable], DecoratedCallable]:
-        """
-        Add a *path operation* using an HTTP TRACE operation.
-
-        ## Example
-
-        ```python
-        from fastapi import FastAPI
-
-        app = FastAPI()
-
-        @app.trace("/items/{item_id}")
-        def trace_item(item_id: str):
-            return None
-        ```
-        """
-        return self.router.trace(
-            path,
-            response_model=response_model,
-            status_code=status_code,
-            tags=tags,
-            dependencies=dependencies,
-            summary=summary,
-            description=description,
-            response_description=response_description,
-            responses=responses,
-            deprecated=deprecated,
-            operation_id=operation_id,
-            response_model_include=response_model_include,
-            response_model_exclude=response_model_exclude,
-            response_model_by_alias=response_model_by_alias,
-            response_model_exclude_unset=response_model_exclude_unset,
-            response_model_exclude_defaults=response_model_exclude_defaults,
-            response_model_exclude_none=response_model_exclude_none,
-            include_in_schema=include_in_schema,
-            response_class=response_class,
-            name=name,
-            callbacks=callbacks,
-            openapi_extra=openapi_extra,
-            generate_unique_id_function=generate_unique_id_function,
-        )
-
-    def websocket_route(
-        self, path: str, name: Union[str, None] = None
-    ) -> Callable[[DecoratedCallable], DecoratedCallable]:
-        def decorator(func: DecoratedCallable) -> DecoratedCallable:
-            self.router.add_websocket_route(path, func, name=name)
-            return func
-
-        return decorator
-
-    @deprecated(
-        """
-        on_event is deprecated, use lifespan event handlers instead.
-
-        Read more about it in the
-        [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/).
-        """
-    )
-    def on_event(
-        self,
-        event_type: Annotated[
-            str,
-            Doc(
-                """
-                The type of event. `startup` or `shutdown`.
-                """
-            ),
-        ],
-    ) -> Callable[[DecoratedCallable], DecoratedCallable]:
-        """
-        Add an event handler for the application.
-
-        `on_event` is deprecated, use `lifespan` event handlers instead.
-
-        Read more about it in the
-        [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/#alternative-events-deprecated).
-        """
-        return self.router.on_event(event_type)
-
-    def middleware(
-        self,
-        middleware_type: Annotated[
-            str,
-            Doc(
-                """
-                The type of middleware. Currently only supports `http`.
-                """
-            ),
-        ],
-    ) -> Callable[[DecoratedCallable], DecoratedCallable]:
-        """
-        Add a middleware to the application.
-
-        Read more about it in the
-        [FastAPI docs for Middleware](https://fastapi.tiangolo.com/tutorial/middleware/).
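
Since `on_event` above is deprecated, the replacement it points to is a lifespan context manager; a minimal sketch (the `app.state` attribute name is illustrative):

```python
# Minimal lifespan sketch: code before `yield` runs at startup, after at shutdown.
from contextlib import asynccontextmanager

from fastapi import FastAPI

@asynccontextmanager
async def lifespan(app: FastAPI):
    app.state.ready = True   # startup work
    yield
    app.state.ready = False  # shutdown work

app = FastAPI(lifespan=lifespan)
```
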
- - ## Example - - ```python - import time - - from fastapi import FastAPI, Request - - app = FastAPI() - - - @app.middleware("http") - async def add_process_time_header(request: Request, call_next): - start_time = time.time() - response = await call_next(request) - process_time = time.time() - start_time - response.headers["X-Process-Time"] = str(process_time) - return response - ``` - """ - - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_middleware(BaseHTTPMiddleware, dispatch=func) - return func - - return decorator - - def exception_handler( - self, - exc_class_or_status_code: Annotated[ - Union[int, Type[Exception]], - Doc( - """ - The Exception class this would handle, or a status code. - """ - ), - ], - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add an exception handler to the app. - - Read more about it in the - [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/). - - ## Example - - ```python - from fastapi import FastAPI, Request - from fastapi.responses import JSONResponse - - - class UnicornException(Exception): - def __init__(self, name: str): - self.name = name - - - app = FastAPI() - - - @app.exception_handler(UnicornException) - async def unicorn_exception_handler(request: Request, exc: UnicornException): - return JSONResponse( - status_code=418, - content={"message": f"Oops! {exc.name} did something. There goes a rainbow..."}, - ) - ``` - """ - - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_exception_handler(exc_class_or_status_code, func) - return func - - return decorator diff --git a/server/venv/Lib/site-packages/fastapi/background.py b/server/venv/Lib/site-packages/fastapi/background.py deleted file mode 100644 index 35ab1b2..0000000 --- a/server/venv/Lib/site-packages/fastapi/background.py +++ /dev/null @@ -1,59 +0,0 @@ -from typing import Any, Callable - -from starlette.background import BackgroundTasks as StarletteBackgroundTasks -from typing_extensions import Annotated, Doc, ParamSpec # type: ignore [attr-defined] - -P = ParamSpec("P") - - -class BackgroundTasks(StarletteBackgroundTasks): - """ - A collection of background tasks that will be called after a response has been - sent to the client. - - Read more about it in the - [FastAPI docs for Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/). - - ## Example - - ```python - from fastapi import BackgroundTasks, FastAPI - - app = FastAPI() - - - def write_notification(email: str, message=""): - with open("log.txt", mode="w") as email_file: - content = f"notification for {email}: {message}" - email_file.write(content) - - - @app.post("/send-notification/{email}") - async def send_notification(email: str, background_tasks: BackgroundTasks): - background_tasks.add_task(write_notification, email, message="some notification") - return {"message": "Notification sent in the background"} - ``` - """ - - def add_task( - self, - func: Annotated[ - Callable[P, Any], - Doc( - """ - The function to call after the response is sent. - - It can be a regular `def` function or an `async def` function. - """ - ), - ], - *args: P.args, - **kwargs: P.kwargs, - ) -> None: - """ - Add a function to be called in the background after the response is sent. - - Read more about it in the - [FastAPI docs for Background Tasks](https://fastapi.tiangolo.com/tutorial/background-tasks/). 
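
As the `add_task` signature above suggests, extra positional and keyword arguments are forwarded to the task function once the response has gone out; a small sketch (the `audit` function and route are illustrative):

```python
# Illustrative: *args/**kwargs given to add_task are passed to the task later.
from fastapi import BackgroundTasks, FastAPI

app = FastAPI()

def audit(event: str, *, user: str) -> None:
    print(f"{event} by {user}")

@app.post("/orders/")
def create_order(background_tasks: BackgroundTasks):
    background_tasks.add_task(audit, "order_created", user="alice")
    return {"status": "accepted"}
```
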
- """ - return super().add_task(func, *args, **kwargs) diff --git a/server/venv/Lib/site-packages/fastapi/concurrency.py b/server/venv/Lib/site-packages/fastapi/concurrency.py deleted file mode 100644 index 754061c..0000000 --- a/server/venv/Lib/site-packages/fastapi/concurrency.py +++ /dev/null @@ -1,40 +0,0 @@ -from contextlib import AsyncExitStack as AsyncExitStack # noqa -from contextlib import asynccontextmanager as asynccontextmanager -from typing import AsyncGenerator, ContextManager, TypeVar - -import anyio -from anyio import CapacityLimiter -from starlette.concurrency import iterate_in_threadpool as iterate_in_threadpool # noqa -from starlette.concurrency import run_in_threadpool as run_in_threadpool # noqa -from starlette.concurrency import ( # noqa - run_until_first_complete as run_until_first_complete, -) - -_T = TypeVar("_T") - - -@asynccontextmanager -async def contextmanager_in_threadpool( - cm: ContextManager[_T], -) -> AsyncGenerator[_T, None]: - # blocking __exit__ from running waiting on a free thread - # can create race conditions/deadlocks if the context manager itself - # has its own internal pool (e.g. a database connection pool) - # to avoid this we let __exit__ run without a capacity limit - # since we're creating a new limiter for each call, any non-zero limit - # works (1 is arbitrary) - exit_limiter = CapacityLimiter(1) - try: - yield await run_in_threadpool(cm.__enter__) - except Exception as e: - ok = bool( - await anyio.to_thread.run_sync( - cm.__exit__, type(e), e, None, limiter=exit_limiter - ) - ) - if not ok: - raise e - else: - await anyio.to_thread.run_sync( - cm.__exit__, None, None, None, limiter=exit_limiter - ) diff --git a/server/venv/Lib/site-packages/fastapi/datastructures.py b/server/venv/Lib/site-packages/fastapi/datastructures.py deleted file mode 100644 index ce03e3c..0000000 --- a/server/venv/Lib/site-packages/fastapi/datastructures.py +++ /dev/null @@ -1,204 +0,0 @@ -from typing import ( - Any, - BinaryIO, - Callable, - Dict, - Iterable, - Optional, - Type, - TypeVar, - cast, -) - -from fastapi._compat import ( - PYDANTIC_V2, - CoreSchema, - GetJsonSchemaHandler, - JsonSchemaValue, - with_info_plain_validator_function, -) -from starlette.datastructures import URL as URL # noqa: F401 -from starlette.datastructures import Address as Address # noqa: F401 -from starlette.datastructures import FormData as FormData # noqa: F401 -from starlette.datastructures import Headers as Headers # noqa: F401 -from starlette.datastructures import QueryParams as QueryParams # noqa: F401 -from starlette.datastructures import State as State # noqa: F401 -from starlette.datastructures import UploadFile as StarletteUploadFile -from typing_extensions import Annotated, Doc # type: ignore [attr-defined] - - -class UploadFile(StarletteUploadFile): - """ - A file uploaded in a request. - - Define it as a *path operation function* (or dependency) parameter. - - If you are using a regular `def` function, you can use the `upload_file.file` - attribute to access the raw standard Python file (blocking, not async), useful and - needed for non-async code. - - Read more about it in the - [FastAPI docs for Request Files](https://fastapi.tiangolo.com/tutorial/request-files/). 
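
To see why the `contextmanager_in_threadpool` helper above exists, consider a blocking context manager used from async code; a hedged sketch (the resource is simulated with `time.sleep`):

```python
# Sketch: a blocking context manager run via the threadpool wrapper so the
# event loop is never blocked; __exit__ runs under its own capacity limiter.
import time
from contextlib import contextmanager

import anyio
from fastapi.concurrency import contextmanager_in_threadpool

@contextmanager
def blocking_resource():
    time.sleep(0.1)  # simulate slow, blocking setup
    yield "resource"
    time.sleep(0.1)  # simulate slow, blocking teardown

async def main() -> None:
    async with contextmanager_in_threadpool(blocking_resource()) as value:
        print(value)

anyio.run(main)
```
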
- - ## Example - - ```python - from typing import Annotated - - from fastapi import FastAPI, File, UploadFile - - app = FastAPI() - - - @app.post("/files/") - async def create_file(file: Annotated[bytes, File()]): - return {"file_size": len(file)} - - - @app.post("/uploadfile/") - async def create_upload_file(file: UploadFile): - return {"filename": file.filename} - ``` - """ - - file: Annotated[ - BinaryIO, - Doc("The standard Python file object (non-async)."), - ] - filename: Annotated[Optional[str], Doc("The original file name.")] - size: Annotated[Optional[int], Doc("The size of the file in bytes.")] - headers: Annotated[Headers, Doc("The headers of the request.")] - content_type: Annotated[ - Optional[str], Doc("The content type of the request, from the headers.") - ] - - async def write( - self, - data: Annotated[ - bytes, - Doc( - """ - The bytes to write to the file. - """ - ), - ], - ) -> None: - """ - Write some bytes to the file. - - You normally wouldn't use this from a file you read in a request. - - To be awaitable, compatible with async, this is run in threadpool. - """ - return await super().write(data) - - async def read( - self, - size: Annotated[ - int, - Doc( - """ - The number of bytes to read from the file. - """ - ), - ] = -1, - ) -> bytes: - """ - Read some bytes from the file. - - To be awaitable, compatible with async, this is run in threadpool. - """ - return await super().read(size) - - async def seek( - self, - offset: Annotated[ - int, - Doc( - """ - The position in bytes to seek to in the file. - """ - ), - ], - ) -> None: - """ - Move to a position in the file. - - Any next read or write will be done from that position. - - To be awaitable, compatible with async, this is run in threadpool. - """ - return await super().seek(offset) - - async def close(self) -> None: - """ - Close the file. - - To be awaitable, compatible with async, this is run in threadpool. - """ - return await super().close() - - @classmethod - def __get_validators__(cls: Type["UploadFile"]) -> Iterable[Callable[..., Any]]: - yield cls.validate - - @classmethod - def validate(cls: Type["UploadFile"], v: Any) -> Any: - if not isinstance(v, StarletteUploadFile): - raise ValueError(f"Expected UploadFile, received: {type(v)}") - return v - - @classmethod - def _validate(cls, __input_value: Any, _: Any) -> "UploadFile": - if not isinstance(__input_value, StarletteUploadFile): - raise ValueError(f"Expected UploadFile, received: {type(__input_value)}") - return cast(UploadFile, __input_value) - - if not PYDANTIC_V2: - - @classmethod - def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: - field_schema.update({"type": "string", "format": "binary"}) - - @classmethod - def __get_pydantic_json_schema__( - cls, core_schema: CoreSchema, handler: GetJsonSchemaHandler - ) -> JsonSchemaValue: - return {"type": "string", "format": "binary"} - - @classmethod - def __get_pydantic_core_schema__( - cls, source: Type[Any], handler: Callable[[Any], CoreSchema] - ) -> CoreSchema: - return with_info_plain_validator_function(cls._validate) - - -class DefaultPlaceholder: - """ - You shouldn't use this class directly. - - It's used internally to recognize when a default value has been overwritten, even - if the overridden default value was truthy. 
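
The async `read`/`close` methods above make streaming large uploads straightforward; a minimal sketch (the checksum route is illustrative, not from the source):

```python
# Illustrative: hash a large upload in 1 MiB chunks without loading it all.
import hashlib

from fastapi import FastAPI, UploadFile

app = FastAPI()

@app.post("/checksum/")
async def checksum(file: UploadFile):
    digest = hashlib.sha256()
    while chunk := await file.read(1024 * 1024):
        digest.update(chunk)
    await file.close()
    return {"filename": file.filename, "sha256": digest.hexdigest()}
```
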
- """ - - def __init__(self, value: Any): - self.value = value - - def __bool__(self) -> bool: - return bool(self.value) - - def __eq__(self, o: object) -> bool: - return isinstance(o, DefaultPlaceholder) and o.value == self.value - - -DefaultType = TypeVar("DefaultType") - - -def Default(value: DefaultType) -> DefaultType: - """ - You shouldn't use this function directly. - - It's used internally to recognize when a default value has been overwritten, even - if the overridden default value was truthy. - """ - return DefaultPlaceholder(value) # type: ignore diff --git a/server/venv/Lib/site-packages/fastapi/dependencies/__init__.py b/server/venv/Lib/site-packages/fastapi/dependencies/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/fastapi/dependencies/models.py b/server/venv/Lib/site-packages/fastapi/dependencies/models.py deleted file mode 100644 index 61ef006..0000000 --- a/server/venv/Lib/site-packages/fastapi/dependencies/models.py +++ /dev/null @@ -1,58 +0,0 @@ -from typing import Any, Callable, List, Optional, Sequence - -from fastapi._compat import ModelField -from fastapi.security.base import SecurityBase - - -class SecurityRequirement: - def __init__( - self, security_scheme: SecurityBase, scopes: Optional[Sequence[str]] = None - ): - self.security_scheme = security_scheme - self.scopes = scopes - - -class Dependant: - def __init__( - self, - *, - path_params: Optional[List[ModelField]] = None, - query_params: Optional[List[ModelField]] = None, - header_params: Optional[List[ModelField]] = None, - cookie_params: Optional[List[ModelField]] = None, - body_params: Optional[List[ModelField]] = None, - dependencies: Optional[List["Dependant"]] = None, - security_schemes: Optional[List[SecurityRequirement]] = None, - name: Optional[str] = None, - call: Optional[Callable[..., Any]] = None, - request_param_name: Optional[str] = None, - websocket_param_name: Optional[str] = None, - http_connection_param_name: Optional[str] = None, - response_param_name: Optional[str] = None, - background_tasks_param_name: Optional[str] = None, - security_scopes_param_name: Optional[str] = None, - security_scopes: Optional[List[str]] = None, - use_cache: bool = True, - path: Optional[str] = None, - ) -> None: - self.path_params = path_params or [] - self.query_params = query_params or [] - self.header_params = header_params or [] - self.cookie_params = cookie_params or [] - self.body_params = body_params or [] - self.dependencies = dependencies or [] - self.security_requirements = security_schemes or [] - self.request_param_name = request_param_name - self.websocket_param_name = websocket_param_name - self.http_connection_param_name = http_connection_param_name - self.response_param_name = response_param_name - self.background_tasks_param_name = background_tasks_param_name - self.security_scopes = security_scopes - self.security_scopes_param_name = security_scopes_param_name - self.name = name - self.call = call - self.use_cache = use_cache - # Store the path to be able to re-generate a dependable from it in overrides - self.path = path - # Save the cache key at creation to optimize performance - self.cache_key = (self.call, tuple(sorted(set(self.security_scopes or [])))) diff --git a/server/venv/Lib/site-packages/fastapi/dependencies/utils.py b/server/venv/Lib/site-packages/fastapi/dependencies/utils.py deleted file mode 100644 index 96e07a4..0000000 --- a/server/venv/Lib/site-packages/fastapi/dependencies/utils.py +++ /dev/null @@ -1,810 +0,0 @@ 
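
The `cache_key` stored on `Dependant` above, a tuple of the callable and its sorted security scopes, is what lets the resolver in the `dependencies/utils.py` module below reuse a single result per request when `use_cache=True`; a small observable sketch (route and dependency names are illustrative):

```python
# Illustrative: both parameters share one cached call of get_settings per request.
from fastapi import Depends, FastAPI

app = FastAPI()

def get_settings() -> dict:
    return {"debug": True}

@app.get("/cache-demo")
def cache_demo(s1: dict = Depends(get_settings), s2: dict = Depends(get_settings)):
    return {"same_object": s1 is s2}  # True: the dependency ran once
```
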
-import inspect -from contextlib import contextmanager -from copy import deepcopy -from typing import ( - Any, - Callable, - Coroutine, - Dict, - ForwardRef, - List, - Mapping, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) - -import anyio -from fastapi import params -from fastapi._compat import ( - PYDANTIC_V2, - ErrorWrapper, - ModelField, - Required, - Undefined, - _regenerate_error_with_loc, - copy_field_info, - create_body_model, - evaluate_forwardref, - field_annotation_is_scalar, - get_annotation_from_field_info, - get_missing_field_error, - is_bytes_field, - is_bytes_sequence_field, - is_scalar_field, - is_scalar_sequence_field, - is_sequence_field, - is_uploadfile_or_nonable_uploadfile_annotation, - is_uploadfile_sequence_annotation, - lenient_issubclass, - sequence_types, - serialize_sequence_value, - value_is_sequence, -) -from fastapi.background import BackgroundTasks -from fastapi.concurrency import ( - AsyncExitStack, - asynccontextmanager, - contextmanager_in_threadpool, -) -from fastapi.dependencies.models import Dependant, SecurityRequirement -from fastapi.logger import logger -from fastapi.security.base import SecurityBase -from fastapi.security.oauth2 import OAuth2, SecurityScopes -from fastapi.security.open_id_connect_url import OpenIdConnect -from fastapi.utils import create_response_field, get_path_param_names -from pydantic.fields import FieldInfo -from starlette.background import BackgroundTasks as StarletteBackgroundTasks -from starlette.concurrency import run_in_threadpool -from starlette.datastructures import FormData, Headers, QueryParams, UploadFile -from starlette.requests import HTTPConnection, Request -from starlette.responses import Response -from starlette.websockets import WebSocket -from typing_extensions import Annotated, get_args, get_origin - -multipart_not_installed_error = ( - 'Form data requires "python-multipart" to be installed. \n' - 'You can install "python-multipart" with: \n\n' - "pip install python-multipart\n" -) -multipart_incorrect_install_error = ( - 'Form data requires "python-multipart" to be installed. ' - 'It seems you installed "multipart" instead. 
\n' - 'You can remove "multipart" with: \n\n' - "pip uninstall multipart\n\n" - 'And then install "python-multipart" with: \n\n' - "pip install python-multipart\n" -) - - -def check_file_field(field: ModelField) -> None: - field_info = field.field_info - if isinstance(field_info, params.Form): - try: - # __version__ is available in both multiparts, and can be mocked - from multipart import __version__ # type: ignore - - assert __version__ - try: - # parse_options_header is only available in the right multipart - from multipart.multipart import parse_options_header # type: ignore - - assert parse_options_header - except ImportError: - logger.error(multipart_incorrect_install_error) - raise RuntimeError(multipart_incorrect_install_error) from None - except ImportError: - logger.error(multipart_not_installed_error) - raise RuntimeError(multipart_not_installed_error) from None - - -def get_param_sub_dependant( - *, - param_name: str, - depends: params.Depends, - path: str, - security_scopes: Optional[List[str]] = None, -) -> Dependant: - assert depends.dependency - return get_sub_dependant( - depends=depends, - dependency=depends.dependency, - path=path, - name=param_name, - security_scopes=security_scopes, - ) - - -def get_parameterless_sub_dependant(*, depends: params.Depends, path: str) -> Dependant: - assert callable( - depends.dependency - ), "A parameter-less dependency must have a callable dependency" - return get_sub_dependant(depends=depends, dependency=depends.dependency, path=path) - - -def get_sub_dependant( - *, - depends: params.Depends, - dependency: Callable[..., Any], - path: str, - name: Optional[str] = None, - security_scopes: Optional[List[str]] = None, -) -> Dependant: - security_requirement = None - security_scopes = security_scopes or [] - if isinstance(depends, params.Security): - dependency_scopes = depends.scopes - security_scopes.extend(dependency_scopes) - if isinstance(dependency, SecurityBase): - use_scopes: List[str] = [] - if isinstance(dependency, (OAuth2, OpenIdConnect)): - use_scopes = security_scopes - security_requirement = SecurityRequirement( - security_scheme=dependency, scopes=use_scopes - ) - sub_dependant = get_dependant( - path=path, - call=dependency, - name=name, - security_scopes=security_scopes, - use_cache=depends.use_cache, - ) - if security_requirement: - sub_dependant.security_requirements.append(security_requirement) - return sub_dependant - - -CacheKey = Tuple[Optional[Callable[..., Any]], Tuple[str, ...]] - - -def get_flat_dependant( - dependant: Dependant, - *, - skip_repeats: bool = False, - visited: Optional[List[CacheKey]] = None, -) -> Dependant: - if visited is None: - visited = [] - visited.append(dependant.cache_key) - - flat_dependant = Dependant( - path_params=dependant.path_params.copy(), - query_params=dependant.query_params.copy(), - header_params=dependant.header_params.copy(), - cookie_params=dependant.cookie_params.copy(), - body_params=dependant.body_params.copy(), - security_schemes=dependant.security_requirements.copy(), - use_cache=dependant.use_cache, - path=dependant.path, - ) - for sub_dependant in dependant.dependencies: - if skip_repeats and sub_dependant.cache_key in visited: - continue - flat_sub = get_flat_dependant( - sub_dependant, skip_repeats=skip_repeats, visited=visited - ) - flat_dependant.path_params.extend(flat_sub.path_params) - flat_dependant.query_params.extend(flat_sub.query_params) - flat_dependant.header_params.extend(flat_sub.header_params) - 
flat_dependant.cookie_params.extend(flat_sub.cookie_params) - flat_dependant.body_params.extend(flat_sub.body_params) - flat_dependant.security_requirements.extend(flat_sub.security_requirements) - return flat_dependant - - -def get_flat_params(dependant: Dependant) -> List[ModelField]: - flat_dependant = get_flat_dependant(dependant, skip_repeats=True) - return ( - flat_dependant.path_params - + flat_dependant.query_params - + flat_dependant.header_params - + flat_dependant.cookie_params - ) - - -def get_typed_signature(call: Callable[..., Any]) -> inspect.Signature: - signature = inspect.signature(call) - globalns = getattr(call, "__globals__", {}) - typed_params = [ - inspect.Parameter( - name=param.name, - kind=param.kind, - default=param.default, - annotation=get_typed_annotation(param.annotation, globalns), - ) - for param in signature.parameters.values() - ] - typed_signature = inspect.Signature(typed_params) - return typed_signature - - -def get_typed_annotation(annotation: Any, globalns: Dict[str, Any]) -> Any: - if isinstance(annotation, str): - annotation = ForwardRef(annotation) - annotation = evaluate_forwardref(annotation, globalns, globalns) - return annotation - - -def get_typed_return_annotation(call: Callable[..., Any]) -> Any: - signature = inspect.signature(call) - annotation = signature.return_annotation - - if annotation is inspect.Signature.empty: - return None - - globalns = getattr(call, "__globals__", {}) - return get_typed_annotation(annotation, globalns) - - -def get_dependant( - *, - path: str, - call: Callable[..., Any], - name: Optional[str] = None, - security_scopes: Optional[List[str]] = None, - use_cache: bool = True, -) -> Dependant: - path_param_names = get_path_param_names(path) - endpoint_signature = get_typed_signature(call) - signature_params = endpoint_signature.parameters - dependant = Dependant( - call=call, - name=name, - path=path, - security_scopes=security_scopes, - use_cache=use_cache, - ) - for param_name, param in signature_params.items(): - is_path_param = param_name in path_param_names - type_annotation, depends, param_field = analyze_param( - param_name=param_name, - annotation=param.annotation, - value=param.default, - is_path_param=is_path_param, - ) - if depends is not None: - sub_dependant = get_param_sub_dependant( - param_name=param_name, - depends=depends, - path=path, - security_scopes=security_scopes, - ) - dependant.dependencies.append(sub_dependant) - continue - if add_non_field_param_to_dependency( - param_name=param_name, - type_annotation=type_annotation, - dependant=dependant, - ): - assert ( - param_field is None - ), f"Cannot specify multiple FastAPI annotations for {param_name!r}" - continue - assert param_field is not None - if is_body_param(param_field=param_field, is_path_param=is_path_param): - dependant.body_params.append(param_field) - else: - add_param_to_fields(field=param_field, dependant=dependant) - return dependant - - -def add_non_field_param_to_dependency( - *, param_name: str, type_annotation: Any, dependant: Dependant -) -> Optional[bool]: - if lenient_issubclass(type_annotation, Request): - dependant.request_param_name = param_name - return True - elif lenient_issubclass(type_annotation, WebSocket): - dependant.websocket_param_name = param_name - return True - elif lenient_issubclass(type_annotation, HTTPConnection): - dependant.http_connection_param_name = param_name - return True - elif lenient_issubclass(type_annotation, Response): - dependant.response_param_name = param_name - return True - elif 
lenient_issubclass(type_annotation, StarletteBackgroundTasks): - dependant.background_tasks_param_name = param_name - return True - elif lenient_issubclass(type_annotation, SecurityScopes): - dependant.security_scopes_param_name = param_name - return True - return None - - -def analyze_param( - *, - param_name: str, - annotation: Any, - value: Any, - is_path_param: bool, -) -> Tuple[Any, Optional[params.Depends], Optional[ModelField]]: - field_info = None - depends = None - type_annotation: Any = Any - if ( - annotation is not inspect.Signature.empty - and get_origin(annotation) is Annotated - ): - annotated_args = get_args(annotation) - type_annotation = annotated_args[0] - fastapi_annotations = [ - arg - for arg in annotated_args[1:] - if isinstance(arg, (FieldInfo, params.Depends)) - ] - assert ( - len(fastapi_annotations) <= 1 - ), f"Cannot specify multiple `Annotated` FastAPI arguments for {param_name!r}" - fastapi_annotation = next(iter(fastapi_annotations), None) - if isinstance(fastapi_annotation, FieldInfo): - # Copy `field_info` because we mutate `field_info.default` below. - field_info = copy_field_info( - field_info=fastapi_annotation, annotation=annotation - ) - assert field_info.default is Undefined or field_info.default is Required, ( - f"`{field_info.__class__.__name__}` default value cannot be set in" - f" `Annotated` for {param_name!r}. Set the default value with `=` instead." - ) - if value is not inspect.Signature.empty: - assert not is_path_param, "Path parameters cannot have default values" - field_info.default = value - else: - field_info.default = Required - elif isinstance(fastapi_annotation, params.Depends): - depends = fastapi_annotation - elif annotation is not inspect.Signature.empty: - type_annotation = annotation - - if isinstance(value, params.Depends): - assert depends is None, ( - "Cannot specify `Depends` in `Annotated` and default value" - f" together for {param_name!r}" - ) - assert field_info is None, ( - "Cannot specify a FastAPI annotation in `Annotated` and `Depends` as a" - f" default value together for {param_name!r}" - ) - depends = value - elif isinstance(value, FieldInfo): - assert field_info is None, ( - "Cannot specify FastAPI annotations in `Annotated` and default value" - f" together for {param_name!r}" - ) - field_info = value - if PYDANTIC_V2: - field_info.annotation = type_annotation - - if depends is not None and depends.dependency is None: - depends.dependency = type_annotation - - if lenient_issubclass( - type_annotation, - ( - Request, - WebSocket, - HTTPConnection, - Response, - StarletteBackgroundTasks, - SecurityScopes, - ), - ): - assert depends is None, f"Cannot specify `Depends` for type {type_annotation!r}" - assert ( - field_info is None - ), f"Cannot specify FastAPI annotation for type {type_annotation!r}" - elif field_info is None and depends is None: - default_value = value if value is not inspect.Signature.empty else Required - if is_path_param: - # We might check here that `default_value is Required`, but the fact is that the same - # parameter might sometimes be a path parameter and sometimes not. See - # `tests/test_infer_param_optionality.py` for an example. 
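[Editor's note: an illustrative sketch, not part of the deleted file. The test module named in the comment above covers endpoints registered under two routes at once, so the same parameter acts as a path parameter on one route and as an optional query parameter on the other; that is why a default value is tolerated for a path parameter here. A minimal, hypothetical version (route paths and names are invented):]

    from typing import Optional

    from fastapi import FastAPI

    app = FastAPI()

    @app.get("/users/{user_id}")  # here `user_id` is a path parameter
    @app.get("/users")  # here the same `user_id` is an optional query parameter
    def read_user(user_id: Optional[str] = None):
        return {"user_id": user_id}

[End of editor's note.]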
- field_info = params.Path(annotation=type_annotation) - elif is_uploadfile_or_nonable_uploadfile_annotation( - type_annotation - ) or is_uploadfile_sequence_annotation(type_annotation): - field_info = params.File(annotation=type_annotation, default=default_value) - elif not field_annotation_is_scalar(annotation=type_annotation): - field_info = params.Body(annotation=type_annotation, default=default_value) - else: - field_info = params.Query(annotation=type_annotation, default=default_value) - - field = None - if field_info is not None: - if is_path_param: - assert isinstance(field_info, params.Path), ( - f"Cannot use `{field_info.__class__.__name__}` for path param" - f" {param_name!r}" - ) - elif ( - isinstance(field_info, params.Param) - and getattr(field_info, "in_", None) is None - ): - field_info.in_ = params.ParamTypes.query - use_annotation = get_annotation_from_field_info( - type_annotation, - field_info, - param_name, - ) - if not field_info.alias and getattr(field_info, "convert_underscores", None): - alias = param_name.replace("_", "-") - else: - alias = field_info.alias or param_name - field_info.alias = alias - field = create_response_field( - name=param_name, - type_=use_annotation, - default=field_info.default, - alias=alias, - required=field_info.default in (Required, Undefined), - field_info=field_info, - ) - - return type_annotation, depends, field - - -def is_body_param(*, param_field: ModelField, is_path_param: bool) -> bool: - if is_path_param: - assert is_scalar_field( - field=param_field - ), "Path params must be of one of the supported types" - return False - elif is_scalar_field(field=param_field): - return False - elif isinstance( - param_field.field_info, (params.Query, params.Header) - ) and is_scalar_sequence_field(param_field): - return False - else: - assert isinstance( - param_field.field_info, params.Body - ), f"Param: {param_field.name} can only be a request body, using Body()" - return True - - -def add_param_to_fields(*, field: ModelField, dependant: Dependant) -> None: - field_info = cast(params.Param, field.field_info) - if field_info.in_ == params.ParamTypes.path: - dependant.path_params.append(field) - elif field_info.in_ == params.ParamTypes.query: - dependant.query_params.append(field) - elif field_info.in_ == params.ParamTypes.header: - dependant.header_params.append(field) - else: - assert ( - field_info.in_ == params.ParamTypes.cookie - ), f"non-body parameters must be in path, query, header or cookie: {field.name}" - dependant.cookie_params.append(field) - - -def is_coroutine_callable(call: Callable[..., Any]) -> bool: - if inspect.isroutine(call): - return inspect.iscoroutinefunction(call) - if inspect.isclass(call): - return False - dunder_call = getattr(call, "__call__", None) # noqa: B004 - return inspect.iscoroutinefunction(dunder_call) - - -def is_async_gen_callable(call: Callable[..., Any]) -> bool: - if inspect.isasyncgenfunction(call): - return True - dunder_call = getattr(call, "__call__", None) # noqa: B004 - return inspect.isasyncgenfunction(dunder_call) - - -def is_gen_callable(call: Callable[..., Any]) -> bool: - if inspect.isgeneratorfunction(call): - return True - dunder_call = getattr(call, "__call__", None) # noqa: B004 - return inspect.isgeneratorfunction(dunder_call) - - -async def solve_generator( - *, call: Callable[..., Any], stack: AsyncExitStack, sub_values: Dict[str, Any] -) -> Any: - if is_gen_callable(call): - cm = contextmanager_in_threadpool(contextmanager(call)(**sub_values)) - elif is_async_gen_callable(call): - 
cm = asynccontextmanager(call)(**sub_values) - return await stack.enter_async_context(cm) - - -async def solve_dependencies( - *, - request: Union[Request, WebSocket], - dependant: Dependant, - body: Optional[Union[Dict[str, Any], FormData]] = None, - background_tasks: Optional[StarletteBackgroundTasks] = None, - response: Optional[Response] = None, - dependency_overrides_provider: Optional[Any] = None, - dependency_cache: Optional[Dict[Tuple[Callable[..., Any], Tuple[str]], Any]] = None, -) -> Tuple[ - Dict[str, Any], - List[Any], - Optional[StarletteBackgroundTasks], - Response, - Dict[Tuple[Callable[..., Any], Tuple[str]], Any], -]: - values: Dict[str, Any] = {} - errors: List[Any] = [] - if response is None: - response = Response() - del response.headers["content-length"] - response.status_code = None # type: ignore - dependency_cache = dependency_cache or {} - sub_dependant: Dependant - for sub_dependant in dependant.dependencies: - sub_dependant.call = cast(Callable[..., Any], sub_dependant.call) - sub_dependant.cache_key = cast( - Tuple[Callable[..., Any], Tuple[str]], sub_dependant.cache_key - ) - call = sub_dependant.call - use_sub_dependant = sub_dependant - if ( - dependency_overrides_provider - and dependency_overrides_provider.dependency_overrides - ): - original_call = sub_dependant.call - call = getattr( - dependency_overrides_provider, "dependency_overrides", {} - ).get(original_call, original_call) - use_path: str = sub_dependant.path # type: ignore - use_sub_dependant = get_dependant( - path=use_path, - call=call, - name=sub_dependant.name, - security_scopes=sub_dependant.security_scopes, - ) - - solved_result = await solve_dependencies( - request=request, - dependant=use_sub_dependant, - body=body, - background_tasks=background_tasks, - response=response, - dependency_overrides_provider=dependency_overrides_provider, - dependency_cache=dependency_cache, - ) - ( - sub_values, - sub_errors, - background_tasks, - _, # the subdependency returns the same response we have - sub_dependency_cache, - ) = solved_result - dependency_cache.update(sub_dependency_cache) - if sub_errors: - errors.extend(sub_errors) - continue - if sub_dependant.use_cache and sub_dependant.cache_key in dependency_cache: - solved = dependency_cache[sub_dependant.cache_key] - elif is_gen_callable(call) or is_async_gen_callable(call): - stack = request.scope.get("fastapi_astack") - assert isinstance(stack, AsyncExitStack) - solved = await solve_generator( - call=call, stack=stack, sub_values=sub_values - ) - elif is_coroutine_callable(call): - solved = await call(**sub_values) - else: - solved = await run_in_threadpool(call, **sub_values) - if sub_dependant.name is not None: - values[sub_dependant.name] = solved - if sub_dependant.cache_key not in dependency_cache: - dependency_cache[sub_dependant.cache_key] = solved - path_values, path_errors = request_params_to_args( - dependant.path_params, request.path_params - ) - query_values, query_errors = request_params_to_args( - dependant.query_params, request.query_params - ) - header_values, header_errors = request_params_to_args( - dependant.header_params, request.headers - ) - cookie_values, cookie_errors = request_params_to_args( - dependant.cookie_params, request.cookies - ) - values.update(path_values) - values.update(query_values) - values.update(header_values) - values.update(cookie_values) - errors += path_errors + query_errors + header_errors + cookie_errors - if dependant.body_params: - ( - body_values, - body_errors, - ) = await 
request_body_to_args( # body_params checked above - required_params=dependant.body_params, received_body=body - ) - values.update(body_values) - errors.extend(body_errors) - if dependant.http_connection_param_name: - values[dependant.http_connection_param_name] = request - if dependant.request_param_name and isinstance(request, Request): - values[dependant.request_param_name] = request - elif dependant.websocket_param_name and isinstance(request, WebSocket): - values[dependant.websocket_param_name] = request - if dependant.background_tasks_param_name: - if background_tasks is None: - background_tasks = BackgroundTasks() - values[dependant.background_tasks_param_name] = background_tasks - if dependant.response_param_name: - values[dependant.response_param_name] = response - if dependant.security_scopes_param_name: - values[dependant.security_scopes_param_name] = SecurityScopes( - scopes=dependant.security_scopes - ) - return values, errors, background_tasks, response, dependency_cache - - -def request_params_to_args( - required_params: Sequence[ModelField], - received_params: Union[Mapping[str, Any], QueryParams, Headers], -) -> Tuple[Dict[str, Any], List[Any]]: - values = {} - errors = [] - for field in required_params: - if is_scalar_sequence_field(field) and isinstance( - received_params, (QueryParams, Headers) - ): - value = received_params.getlist(field.alias) or field.default - else: - value = received_params.get(field.alias) - field_info = field.field_info - assert isinstance( - field_info, params.Param - ), "Params must be subclasses of Param" - loc = (field_info.in_.value, field.alias) - if value is None: - if field.required: - errors.append(get_missing_field_error(loc=loc)) - else: - values[field.name] = deepcopy(field.default) - continue - v_, errors_ = field.validate(value, values, loc=loc) - if isinstance(errors_, ErrorWrapper): - errors.append(errors_) - elif isinstance(errors_, list): - new_errors = _regenerate_error_with_loc(errors=errors_, loc_prefix=()) - errors.extend(new_errors) - else: - values[field.name] = v_ - return values, errors - - -async def request_body_to_args( - required_params: List[ModelField], - received_body: Optional[Union[Dict[str, Any], FormData]], -) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: - values = {} - errors: List[Dict[str, Any]] = [] - if required_params: - field = required_params[0] - field_info = field.field_info - embed = getattr(field_info, "embed", None) - field_alias_omitted = len(required_params) == 1 and not embed - if field_alias_omitted: - received_body = {field.alias: received_body} - - for field in required_params: - loc: Tuple[str, ...] 
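[Editor's note: an illustrative sketch, not part of the deleted file. The `field_alias_omitted` wrapping above is what lets a single, non-embedded body parameter consume the whole JSON body, while `embed=True` keeps it nested under its own key; `loc` below is `("body",)` in the first case and `("body", <alias>)` otherwise. A minimal, hypothetical endpoint pair showing the two request shapes (the model and paths are invented):]

    from fastapi import Body, FastAPI
    from pydantic import BaseModel

    class Item(BaseModel):
        name: str

    app = FastAPI()

    @app.post("/items/")
    def create_item(item: Item):  # expects {"name": "..."}
        return item

    @app.post("/items-embedded/")
    def create_item_embedded(item: Item = Body(embed=True)):
        return item  # expects {"item": {"name": "..."}}

[End of editor's note.]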
-            if field_alias_omitted:
-                loc = ("body",)
-            else:
-                loc = ("body", field.alias)
-
-            value: Optional[Any] = None
-            if received_body is not None:
-                if (is_sequence_field(field)) and isinstance(received_body, FormData):
-                    value = received_body.getlist(field.alias)
-                else:
-                    try:
-                        value = received_body.get(field.alias)
-                    except AttributeError:
-                        errors.append(get_missing_field_error(loc))
-                        continue
-            if (
-                value is None
-                or (isinstance(field_info, params.Form) and value == "")
-                or (
-                    isinstance(field_info, params.Form)
-                    and is_sequence_field(field)
-                    and len(value) == 0
-                )
-            ):
-                if field.required:
-                    errors.append(get_missing_field_error(loc))
-                else:
-                    values[field.name] = deepcopy(field.default)
-                continue
-            if (
-                isinstance(field_info, params.File)
-                and is_bytes_field(field)
-                and isinstance(value, UploadFile)
-            ):
-                value = await value.read()
-            elif (
-                is_bytes_sequence_field(field)
-                and isinstance(field_info, params.File)
-                and value_is_sequence(value)
-            ):
-                # For type checkers
-                assert isinstance(value, sequence_types)  # type: ignore[arg-type]
-                results: List[Union[bytes, str]] = []
-
-                async def process_fn(
-                    fn: Callable[[], Coroutine[Any, Any, Any]]
-                ) -> None:
-                    result = await fn()
-                    results.append(result)  # noqa: B023
-
-                async with anyio.create_task_group() as tg:
-                    for sub_value in value:
-                        tg.start_soon(process_fn, sub_value.read)
-                value = serialize_sequence_value(field=field, value=results)
-
-            v_, errors_ = field.validate(value, values, loc=loc)
-
-            if isinstance(errors_, list):
-                errors.extend(errors_)
-            elif errors_:
-                errors.append(errors_)
-            else:
-                values[field.name] = v_
-    return values, errors
-
-
-def get_body_field(*, dependant: Dependant, name: str) -> Optional[ModelField]:
-    flat_dependant = get_flat_dependant(dependant)
-    if not flat_dependant.body_params:
-        return None
-    first_param = flat_dependant.body_params[0]
-    field_info = first_param.field_info
-    embed = getattr(field_info, "embed", None)
-    body_param_names_set = {param.name for param in flat_dependant.body_params}
-    if len(body_param_names_set) == 1 and not embed:
-        check_file_field(first_param)
-        return first_param
-    # If one field requires embedding, all have to be embedded,
-    # in case a sub-dependency is evaluated with a single unique body field
-    # that is combined (embedded) with other body fields
-    for param in flat_dependant.body_params:
-        setattr(param.field_info, "embed", True)  # noqa: B010
-    model_name = "Body_" + name
-    BodyModel = create_body_model(
-        fields=flat_dependant.body_params, model_name=model_name
-    )
-    required = any(True for f in flat_dependant.body_params if f.required)
-    BodyFieldInfo_kwargs: Dict[str, Any] = {
-        "annotation": BodyModel,
-        "alias": "body",
-    }
-    if not required:
-        BodyFieldInfo_kwargs["default"] = None
-    if any(isinstance(f.field_info, params.File) for f in flat_dependant.body_params):
-        BodyFieldInfo: Type[params.Body] = params.File
-    elif any(isinstance(f.field_info, params.Form) for f in flat_dependant.body_params):
-        BodyFieldInfo = params.Form
-    else:
-        BodyFieldInfo = params.Body
-
-    body_param_media_types = [
-        f.field_info.media_type
-        for f in flat_dependant.body_params
-        if isinstance(f.field_info, params.Body)
-    ]
-    if len(set(body_param_media_types)) == 1:
-        BodyFieldInfo_kwargs["media_type"] = body_param_media_types[0]
-    final_field = create_response_field(
-        name="body",
-        type_=BodyModel,
-        required=required,
-        alias="body",
-        field_info=BodyFieldInfo(**BodyFieldInfo_kwargs),
-    )
-    check_file_field(final_field)
-    return final_field
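[Editor's note: an illustrative sketch, not part of the deleted file. The module that ends here, fastapi/dependencies/utils.py, resolves dependency trees at request time: plain callables are awaited or run in a threadpool, and generator ("yield") dependencies are entered as context managers on the request's AsyncExitStack by solve_generator(). From user code (names and routes are invented), the two styles look like this:]

    from fastapi import Depends, FastAPI

    app = FastAPI()

    def get_settings() -> dict:
        # Plain dependency: with use_cache=True (the default) the value is
        # computed once per request and reused across the dependency tree.
        return {"env": "dev"}

    def get_db():
        # Generator dependency: setup runs before the endpoint...
        db = {"conn": "open"}
        try:
            yield db  # the yielded value is what gets injected
        finally:
            # ...and teardown runs when the request's exit stack closes.
            db["conn"] = "closed"

    @app.get("/ping")
    def ping(settings: dict = Depends(get_settings), db: dict = Depends(get_db)):
        return {"env": settings["env"], "db": db["conn"]}

[End of editor's note.]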
diff --git a/server/venv/Lib/site-packages/fastapi/encoders.py b/server/venv/Lib/site-packages/fastapi/encoders.py
deleted file mode 100644
index e501713..0000000
--- a/server/venv/Lib/site-packages/fastapi/encoders.py
+++ /dev/null
@@ -1,341 +0,0 @@
-import dataclasses
-import datetime
-from collections import defaultdict, deque
-from decimal import Decimal
-from enum import Enum
-from ipaddress import (
-    IPv4Address,
-    IPv4Interface,
-    IPv4Network,
-    IPv6Address,
-    IPv6Interface,
-    IPv6Network,
-)
-from pathlib import Path, PurePath
-from re import Pattern
-from types import GeneratorType
-from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
-from uuid import UUID
-
-from fastapi.types import IncEx
-from pydantic import BaseModel
-from pydantic.color import Color
-from pydantic.networks import AnyUrl, NameEmail
-from pydantic.types import SecretBytes, SecretStr
-from typing_extensions import Annotated, Doc  # type: ignore [attr-defined]
-
-from ._compat import PYDANTIC_V2, Url, _model_dump
-
-
-# Taken from Pydantic v1 as is
-def isoformat(o: Union[datetime.date, datetime.time]) -> str:
-    return o.isoformat()
-
-
-# Taken from Pydantic v1 as is
-# TODO: pv2 should this return strings instead?
-def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
-    """
-    Encodes a Decimal as int if there's no exponent, otherwise float
-
-    This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
-    where an integer (but not int typed) is used. Encoding this as a float
-    results in failed round-tripping between encode and parse.
-    Our Id type is a prime example of this.
-
-    >>> decimal_encoder(Decimal("1.0"))
-    1.0
-
-    >>> decimal_encoder(Decimal("1"))
-    1
-    """
-    if dec_value.as_tuple().exponent >= 0:  # type: ignore[operator]
-        return int(dec_value)
-    else:
-        return float(dec_value)
-
-
-ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = {
-    bytes: lambda o: o.decode(),
-    Color: str,
-    datetime.date: isoformat,
-    datetime.datetime: isoformat,
-    datetime.time: isoformat,
-    datetime.timedelta: lambda td: td.total_seconds(),
-    Decimal: decimal_encoder,
-    Enum: lambda o: o.value,
-    frozenset: list,
-    deque: list,
-    GeneratorType: list,
-    IPv4Address: str,
-    IPv4Interface: str,
-    IPv4Network: str,
-    IPv6Address: str,
-    IPv6Interface: str,
-    IPv6Network: str,
-    NameEmail: str,
-    Path: str,
-    Pattern: lambda o: o.pattern,
-    SecretBytes: str,
-    SecretStr: str,
-    set: list,
-    UUID: str,
-    Url: str,
-    AnyUrl: str,
-}
-
-
-def generate_encoders_by_class_tuples(
-    type_encoder_map: Dict[Any, Callable[[Any], Any]]
-) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]:
-    encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(
-        tuple
-    )
-    for type_, encoder in type_encoder_map.items():
-        encoders_by_class_tuples[encoder] += (type_,)
-    return encoders_by_class_tuples
-
-
-encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE)
-
-
-def jsonable_encoder(
-    obj: Annotated[
-        Any,
-        Doc(
-            """
-            The input object to convert to JSON.
-            """
-        ),
-    ],
-    include: Annotated[
-        Optional[IncEx],
-        Doc(
-            """
-            Pydantic's `include` parameter, passed to Pydantic models to set the
-            fields to include.
-            """
-        ),
-    ] = None,
-    exclude: Annotated[
-        Optional[IncEx],
-        Doc(
-            """
-            Pydantic's `exclude` parameter, passed to Pydantic models to set the
-            fields to exclude.
- """ - ), - ] = None, - by_alias: Annotated[ - bool, - Doc( - """ - Pydantic's `by_alias` parameter, passed to Pydantic models to define if - the output should use the alias names (when provided) or the Python - attribute names. In an API, if you set an alias, it's probably because you - want to use it in the result, so you probably want to leave this set to - `True`. - """ - ), - ] = True, - exclude_unset: Annotated[ - bool, - Doc( - """ - Pydantic's `exclude_unset` parameter, passed to Pydantic models to define - if it should exclude from the output the fields that were not explicitly - set (and that only had their default values). - """ - ), - ] = False, - exclude_defaults: Annotated[ - bool, - Doc( - """ - Pydantic's `exclude_defaults` parameter, passed to Pydantic models to define - if it should exclude from the output the fields that had the same default - value, even when they were explicitly set. - """ - ), - ] = False, - exclude_none: Annotated[ - bool, - Doc( - """ - Pydantic's `exclude_none` parameter, passed to Pydantic models to define - if it should exclude from the output any fields that have a `None` value. - """ - ), - ] = False, - custom_encoder: Annotated[ - Optional[Dict[Any, Callable[[Any], Any]]], - Doc( - """ - Pydantic's `custom_encoder` parameter, passed to Pydantic models to define - a custom encoder. - """ - ), - ] = None, - sqlalchemy_safe: Annotated[ - bool, - Doc( - """ - Exclude from the output any fields that start with the name `_sa`. - - This is mainly a hack for compatibility with SQLAlchemy objects, they - store internal SQLAlchemy-specific state in attributes named with `_sa`, - and those objects can't (and shouldn't be) serialized to JSON. - """ - ), - ] = True, -) -> Any: - """ - Convert any object to something that can be encoded in JSON. - - This is used internally by FastAPI to make sure anything you return can be - encoded as JSON before it is sent to the client. - - You can also use it yourself, for example to convert objects before saving them - in a database that supports only JSON. - - Read more about it in the - [FastAPI docs for JSON Compatible Encoder](https://fastapi.tiangolo.com/tutorial/encoder/). 
- """ - custom_encoder = custom_encoder or {} - if custom_encoder: - if type(obj) in custom_encoder: - return custom_encoder[type(obj)](obj) - else: - for encoder_type, encoder_instance in custom_encoder.items(): - if isinstance(obj, encoder_type): - return encoder_instance(obj) - if include is not None and not isinstance(include, (set, dict)): - include = set(include) - if exclude is not None and not isinstance(exclude, (set, dict)): - exclude = set(exclude) - if isinstance(obj, BaseModel): - # TODO: remove when deprecating Pydantic v1 - encoders: Dict[Any, Any] = {} - if not PYDANTIC_V2: - encoders = getattr(obj.__config__, "json_encoders", {}) # type: ignore[attr-defined] - if custom_encoder: - encoders.update(custom_encoder) - obj_dict = _model_dump( - obj, - mode="json", - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_none=exclude_none, - exclude_defaults=exclude_defaults, - ) - if "__root__" in obj_dict: - obj_dict = obj_dict["__root__"] - return jsonable_encoder( - obj_dict, - exclude_none=exclude_none, - exclude_defaults=exclude_defaults, - # TODO: remove when deprecating Pydantic v1 - custom_encoder=encoders, - sqlalchemy_safe=sqlalchemy_safe, - ) - if dataclasses.is_dataclass(obj): - obj_dict = dataclasses.asdict(obj) - return jsonable_encoder( - obj_dict, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - custom_encoder=custom_encoder, - sqlalchemy_safe=sqlalchemy_safe, - ) - if isinstance(obj, Enum): - return obj.value - if isinstance(obj, PurePath): - return str(obj) - if isinstance(obj, (str, int, float, type(None))): - return obj - if isinstance(obj, dict): - encoded_dict = {} - allowed_keys = set(obj.keys()) - if include is not None: - allowed_keys &= set(include) - if exclude is not None: - allowed_keys -= set(exclude) - for key, value in obj.items(): - if ( - ( - not sqlalchemy_safe - or (not isinstance(key, str)) - or (not key.startswith("_sa")) - ) - and (value is not None or not exclude_none) - and key in allowed_keys - ): - encoded_key = jsonable_encoder( - key, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_none=exclude_none, - custom_encoder=custom_encoder, - sqlalchemy_safe=sqlalchemy_safe, - ) - encoded_value = jsonable_encoder( - value, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_none=exclude_none, - custom_encoder=custom_encoder, - sqlalchemy_safe=sqlalchemy_safe, - ) - encoded_dict[encoded_key] = encoded_value - return encoded_dict - if isinstance(obj, (list, set, frozenset, GeneratorType, tuple, deque)): - encoded_list = [] - for item in obj: - encoded_list.append( - jsonable_encoder( - item, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - custom_encoder=custom_encoder, - sqlalchemy_safe=sqlalchemy_safe, - ) - ) - return encoded_list - - if type(obj) in ENCODERS_BY_TYPE: - return ENCODERS_BY_TYPE[type(obj)](obj) - for encoder, classes_tuple in encoders_by_class_tuples.items(): - if isinstance(obj, classes_tuple): - return encoder(obj) - - try: - data = dict(obj) - except Exception as e: - errors: List[Exception] = [] - errors.append(e) - try: - data = vars(obj) - except Exception as e: - errors.append(e) - raise ValueError(errors) from e - return jsonable_encoder( - data, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - 
exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - custom_encoder=custom_encoder, - sqlalchemy_safe=sqlalchemy_safe, - ) diff --git a/server/venv/Lib/site-packages/fastapi/exception_handlers.py b/server/venv/Lib/site-packages/fastapi/exception_handlers.py deleted file mode 100644 index 6c2ba7f..0000000 --- a/server/venv/Lib/site-packages/fastapi/exception_handlers.py +++ /dev/null @@ -1,34 +0,0 @@ -from fastapi.encoders import jsonable_encoder -from fastapi.exceptions import RequestValidationError, WebSocketRequestValidationError -from fastapi.utils import is_body_allowed_for_status_code -from fastapi.websockets import WebSocket -from starlette.exceptions import HTTPException -from starlette.requests import Request -from starlette.responses import JSONResponse, Response -from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY, WS_1008_POLICY_VIOLATION - - -async def http_exception_handler(request: Request, exc: HTTPException) -> Response: - headers = getattr(exc, "headers", None) - if not is_body_allowed_for_status_code(exc.status_code): - return Response(status_code=exc.status_code, headers=headers) - return JSONResponse( - {"detail": exc.detail}, status_code=exc.status_code, headers=headers - ) - - -async def request_validation_exception_handler( - request: Request, exc: RequestValidationError -) -> JSONResponse: - return JSONResponse( - status_code=HTTP_422_UNPROCESSABLE_ENTITY, - content={"detail": jsonable_encoder(exc.errors())}, - ) - - -async def websocket_request_validation_exception_handler( - websocket: WebSocket, exc: WebSocketRequestValidationError -) -> None: - await websocket.close( - code=WS_1008_POLICY_VIOLATION, reason=jsonable_encoder(exc.errors()) - ) diff --git a/server/venv/Lib/site-packages/fastapi/exceptions.py b/server/venv/Lib/site-packages/fastapi/exceptions.py deleted file mode 100644 index 680d288..0000000 --- a/server/venv/Lib/site-packages/fastapi/exceptions.py +++ /dev/null @@ -1,176 +0,0 @@ -from typing import Any, Dict, Optional, Sequence, Type, Union - -from pydantic import BaseModel, create_model -from starlette.exceptions import HTTPException as StarletteHTTPException -from starlette.exceptions import WebSocketException as StarletteWebSocketException -from typing_extensions import Annotated, Doc # type: ignore [attr-defined] - - -class HTTPException(StarletteHTTPException): - """ - An HTTP exception you can raise in your own code to show errors to the client. - - This is for client errors, invalid authentication, invalid data, etc. Not for server - errors in your code. - - Read more about it in the - [FastAPI docs for Handling Errors](https://fastapi.tiangolo.com/tutorial/handling-errors/). - - ## Example - - ```python - from fastapi import FastAPI, HTTPException - - app = FastAPI() - - items = {"foo": "The Foo Wrestlers"} - - - @app.get("/items/{item_id}") - async def read_item(item_id: str): - if item_id not in items: - raise HTTPException(status_code=404, detail="Item not found") - return {"item": items[item_id]} - ``` - """ - - def __init__( - self, - status_code: Annotated[ - int, - Doc( - """ - HTTP status code to send to the client. - """ - ), - ], - detail: Annotated[ - Any, - Doc( - """ - Any data to be sent to the client in the `detail` key of the JSON - response. - """ - ), - ] = None, - headers: Annotated[ - Optional[Dict[str, str]], - Doc( - """ - Any headers to send to the client in the response. 
- """ - ), - ] = None, - ) -> None: - super().__init__(status_code=status_code, detail=detail, headers=headers) - - -class WebSocketException(StarletteWebSocketException): - """ - A WebSocket exception you can raise in your own code to show errors to the client. - - This is for client errors, invalid authentication, invalid data, etc. Not for server - errors in your code. - - Read more about it in the - [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). - - ## Example - - ```python - from typing import Annotated - - from fastapi import ( - Cookie, - FastAPI, - WebSocket, - WebSocketException, - status, - ) - - app = FastAPI() - - @app.websocket("/items/{item_id}/ws") - async def websocket_endpoint( - *, - websocket: WebSocket, - session: Annotated[str | None, Cookie()] = None, - item_id: str, - ): - if session is None: - raise WebSocketException(code=status.WS_1008_POLICY_VIOLATION) - await websocket.accept() - while True: - data = await websocket.receive_text() - await websocket.send_text(f"Session cookie is: {session}") - await websocket.send_text(f"Message text was: {data}, for item ID: {item_id}") - ``` - """ - - def __init__( - self, - code: Annotated[ - int, - Doc( - """ - A closing code from the - [valid codes defined in the specification](https://datatracker.ietf.org/doc/html/rfc6455#section-7.4.1). - """ - ), - ], - reason: Annotated[ - Union[str, None], - Doc( - """ - The reason to close the WebSocket connection. - - It is UTF-8-encoded data. The interpretation of the reason is up to the - application, it is not specified by the WebSocket specification. - - It could contain text that could be human-readable or interpretable - by the client code, etc. - """ - ), - ] = None, - ) -> None: - super().__init__(code=code, reason=reason) - - -RequestErrorModel: Type[BaseModel] = create_model("Request") -WebSocketErrorModel: Type[BaseModel] = create_model("WebSocket") - - -class FastAPIError(RuntimeError): - """ - A generic, FastAPI-specific error. 
- """ - - -class ValidationException(Exception): - def __init__(self, errors: Sequence[Any]) -> None: - self._errors = errors - - def errors(self) -> Sequence[Any]: - return self._errors - - -class RequestValidationError(ValidationException): - def __init__(self, errors: Sequence[Any], *, body: Any = None) -> None: - super().__init__(errors) - self.body = body - - -class WebSocketRequestValidationError(ValidationException): - pass - - -class ResponseValidationError(ValidationException): - def __init__(self, errors: Sequence[Any], *, body: Any = None) -> None: - super().__init__(errors) - self.body = body - - def __str__(self) -> str: - message = f"{len(self._errors)} validation errors:\n" - for err in self._errors: - message += f" {err}\n" - return message diff --git a/server/venv/Lib/site-packages/fastapi/logger.py b/server/venv/Lib/site-packages/fastapi/logger.py deleted file mode 100644 index 5b2c4ad..0000000 --- a/server/venv/Lib/site-packages/fastapi/logger.py +++ /dev/null @@ -1,3 +0,0 @@ -import logging - -logger = logging.getLogger("fastapi") diff --git a/server/venv/Lib/site-packages/fastapi/middleware/__init__.py b/server/venv/Lib/site-packages/fastapi/middleware/__init__.py deleted file mode 100644 index 620296d..0000000 --- a/server/venv/Lib/site-packages/fastapi/middleware/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.middleware import Middleware as Middleware diff --git a/server/venv/Lib/site-packages/fastapi/middleware/asyncexitstack.py b/server/venv/Lib/site-packages/fastapi/middleware/asyncexitstack.py deleted file mode 100644 index 30a0ae6..0000000 --- a/server/venv/Lib/site-packages/fastapi/middleware/asyncexitstack.py +++ /dev/null @@ -1,25 +0,0 @@ -from typing import Optional - -from fastapi.concurrency import AsyncExitStack -from starlette.types import ASGIApp, Receive, Scope, Send - - -class AsyncExitStackMiddleware: - def __init__(self, app: ASGIApp, context_name: str = "fastapi_astack") -> None: - self.app = app - self.context_name = context_name - - async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: - dependency_exception: Optional[Exception] = None - async with AsyncExitStack() as stack: - scope[self.context_name] = stack - try: - await self.app(scope, receive, send) - except Exception as e: - dependency_exception = e - raise e - if dependency_exception: - # This exception was possibly handled by the dependency but it should - # still bubble up so that the ServerErrorMiddleware can return a 500 - # or the ExceptionMiddleware can catch and handle any other exceptions - raise dependency_exception diff --git a/server/venv/Lib/site-packages/fastapi/middleware/cors.py b/server/venv/Lib/site-packages/fastapi/middleware/cors.py deleted file mode 100644 index 8dfaad0..0000000 --- a/server/venv/Lib/site-packages/fastapi/middleware/cors.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.middleware.cors import CORSMiddleware as CORSMiddleware # noqa diff --git a/server/venv/Lib/site-packages/fastapi/middleware/gzip.py b/server/venv/Lib/site-packages/fastapi/middleware/gzip.py deleted file mode 100644 index bbeb2cc..0000000 --- a/server/venv/Lib/site-packages/fastapi/middleware/gzip.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.middleware.gzip import GZipMiddleware as GZipMiddleware # noqa diff --git a/server/venv/Lib/site-packages/fastapi/middleware/httpsredirect.py b/server/venv/Lib/site-packages/fastapi/middleware/httpsredirect.py deleted file mode 100644 index b7a3d8e..0000000 --- 
a/server/venv/Lib/site-packages/fastapi/middleware/httpsredirect.py +++ /dev/null @@ -1,3 +0,0 @@ -from starlette.middleware.httpsredirect import ( # noqa - HTTPSRedirectMiddleware as HTTPSRedirectMiddleware, -) diff --git a/server/venv/Lib/site-packages/fastapi/middleware/trustedhost.py b/server/venv/Lib/site-packages/fastapi/middleware/trustedhost.py deleted file mode 100644 index 08d7e03..0000000 --- a/server/venv/Lib/site-packages/fastapi/middleware/trustedhost.py +++ /dev/null @@ -1,3 +0,0 @@ -from starlette.middleware.trustedhost import ( # noqa - TrustedHostMiddleware as TrustedHostMiddleware, -) diff --git a/server/venv/Lib/site-packages/fastapi/middleware/wsgi.py b/server/venv/Lib/site-packages/fastapi/middleware/wsgi.py deleted file mode 100644 index c4c6a79..0000000 --- a/server/venv/Lib/site-packages/fastapi/middleware/wsgi.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.middleware.wsgi import WSGIMiddleware as WSGIMiddleware # noqa diff --git a/server/venv/Lib/site-packages/fastapi/openapi/__init__.py b/server/venv/Lib/site-packages/fastapi/openapi/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/fastapi/openapi/constants.py b/server/venv/Lib/site-packages/fastapi/openapi/constants.py deleted file mode 100644 index d724ee3..0000000 --- a/server/venv/Lib/site-packages/fastapi/openapi/constants.py +++ /dev/null @@ -1,3 +0,0 @@ -METHODS_WITH_BODY = {"GET", "HEAD", "POST", "PUT", "DELETE", "PATCH"} -REF_PREFIX = "#/components/schemas/" -REF_TEMPLATE = "#/components/schemas/{model}" diff --git a/server/venv/Lib/site-packages/fastapi/openapi/docs.py b/server/venv/Lib/site-packages/fastapi/openapi/docs.py deleted file mode 100644 index 8cf0d17..0000000 --- a/server/venv/Lib/site-packages/fastapi/openapi/docs.py +++ /dev/null @@ -1,344 +0,0 @@ -import json -from typing import Any, Dict, Optional - -from fastapi.encoders import jsonable_encoder -from starlette.responses import HTMLResponse -from typing_extensions import Annotated, Doc # type: ignore [attr-defined] - -swagger_ui_default_parameters: Annotated[ - Dict[str, Any], - Doc( - """ - Default configurations for Swagger UI. - - You can use it as a template to add any other configurations needed. - """ - ), -] = { - "dom_id": "#swagger-ui", - "layout": "BaseLayout", - "deepLinking": True, - "showExtensions": True, - "showCommonExtensions": True, -} - - -def get_swagger_ui_html( - *, - openapi_url: Annotated[ - str, - Doc( - """ - The OpenAPI URL that Swagger UI should load and use. - - This is normally done automatically by FastAPI using the default URL - `/openapi.json`. - """ - ), - ], - title: Annotated[ - str, - Doc( - """ - The HTML `` content, normally shown in the browser tab. - """ - ), - ], - swagger_js_url: Annotated[ - str, - Doc( - """ - The URL to use to load the Swagger UI JavaScript. - - It is normally set to a CDN URL. - """ - ), - ] = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui-bundle.js", - swagger_css_url: Annotated[ - str, - Doc( - """ - The URL to use to load the Swagger UI CSS. - - It is normally set to a CDN URL. - """ - ), - ] = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui.css", - swagger_favicon_url: Annotated[ - str, - Doc( - """ - The URL of the favicon to use. It is normally shown in the browser tab. - """ - ), - ] = "https://fastapi.tiangolo.com/img/favicon.png", - oauth2_redirect_url: Annotated[ - Optional[str], - Doc( - """ - The OAuth2 redirect URL, it is normally automatically handled by FastAPI. 
- """ - ), - ] = None, - init_oauth: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - A dictionary with Swagger UI OAuth2 initialization configurations. - """ - ), - ] = None, - swagger_ui_parameters: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Configuration parameters for Swagger UI. - - It defaults to [swagger_ui_default_parameters][fastapi.openapi.docs.swagger_ui_default_parameters]. - """ - ), - ] = None, -) -> HTMLResponse: - """ - Generate and return the HTML that loads Swagger UI for the interactive - API docs (normally served at `/docs`). - - You would only call this function yourself if you needed to override some parts, - for example the URLs to use to load Swagger UI's JavaScript and CSS. - - Read more about it in the - [FastAPI docs for Configure Swagger UI](https://fastapi.tiangolo.com/how-to/configure-swagger-ui/) - and the [FastAPI docs for Custom Docs UI Static Assets (Self-Hosting)](https://fastapi.tiangolo.com/how-to/custom-docs-ui-assets/). - """ - current_swagger_ui_parameters = swagger_ui_default_parameters.copy() - if swagger_ui_parameters: - current_swagger_ui_parameters.update(swagger_ui_parameters) - - html = f""" - <!DOCTYPE html> - <html> - <head> - <link type="text/css" rel="stylesheet" href="{swagger_css_url}"> - <link rel="shortcut icon" href="{swagger_favicon_url}"> - <title>{title} - - -
-
- - - - - - """ - return HTMLResponse(html) - - -def get_redoc_html( - *, - openapi_url: Annotated[ - str, - Doc( - """ - The OpenAPI URL that ReDoc should load and use. - - This is normally done automatically by FastAPI using the default URL - `/openapi.json`. - """ - ), - ], - title: Annotated[ - str, - Doc( - """ - The HTML `` content, normally shown in the browser tab. - """ - ), - ], - redoc_js_url: Annotated[ - str, - Doc( - """ - The URL to use to load the ReDoc JavaScript. - - It is normally set to a CDN URL. - """ - ), - ] = "https://cdn.jsdelivr.net/npm/redoc@next/bundles/redoc.standalone.js", - redoc_favicon_url: Annotated[ - str, - Doc( - """ - The URL of the favicon to use. It is normally shown in the browser tab. - """ - ), - ] = "https://fastapi.tiangolo.com/img/favicon.png", - with_google_fonts: Annotated[ - bool, - Doc( - """ - Load and use Google Fonts. - """ - ), - ] = True, -) -> HTMLResponse: - """ - Generate and return the HTML response that loads ReDoc for the alternative - API docs (normally served at `/redoc`). - - You would only call this function yourself if you needed to override some parts, - for example the URLs to use to load ReDoc's JavaScript and CSS. - - Read more about it in the - [FastAPI docs for Custom Docs UI Static Assets (Self-Hosting)](https://fastapi.tiangolo.com/how-to/custom-docs-ui-assets/). - """ - html = f""" - <!DOCTYPE html> - <html> - <head> - <title>{title} - - - - """ - if with_google_fonts: - html += """ - - """ - html += f""" - - - - - - - - - - - """ - return HTMLResponse(html) - - -def get_swagger_ui_oauth2_redirect_html() -> HTMLResponse: - """ - Generate the HTML response with the OAuth2 redirection for Swagger UI. - - You normally don't need to use or change this. - """ - # copied from https://github.com/swagger-api/swagger-ui/blob/v4.14.0/dist/oauth2-redirect.html - html = """ - - - - Swagger UI: OAuth2 Redirect - - - - - - """ - return HTMLResponse(content=html) diff --git a/server/venv/Lib/site-packages/fastapi/openapi/models.py b/server/venv/Lib/site-packages/fastapi/openapi/models.py deleted file mode 100644 index 5f3bdbb..0000000 --- a/server/venv/Lib/site-packages/fastapi/openapi/models.py +++ /dev/null @@ -1,611 +0,0 @@ -from enum import Enum -from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Type, Union - -from fastapi._compat import ( - PYDANTIC_V2, - CoreSchema, - GetJsonSchemaHandler, - JsonSchemaValue, - _model_rebuild, - with_info_plain_validator_function, -) -from fastapi.logger import logger -from pydantic import AnyUrl, BaseModel, Field -from typing_extensions import Annotated, Literal, TypedDict -from typing_extensions import deprecated as typing_deprecated - -try: - import email_validator - - assert email_validator # make autoflake ignore the unused import - from pydantic import EmailStr -except ImportError: # pragma: no cover - - class EmailStr(str): # type: ignore - @classmethod - def __get_validators__(cls) -> Iterable[Callable[..., Any]]: - yield cls.validate - - @classmethod - def validate(cls, v: Any) -> str: - logger.warning( - "email-validator not installed, email fields will be treated as str.\n" - "To install, run: pip install email-validator" - ) - return str(v) - - @classmethod - def _validate(cls, __input_value: Any, _: Any) -> str: - logger.warning( - "email-validator not installed, email fields will be treated as str.\n" - "To install, run: pip install email-validator" - ) - return str(__input_value) - - @classmethod - def __get_pydantic_json_schema__( - cls, core_schema: 
CoreSchema, handler: GetJsonSchemaHandler - ) -> JsonSchemaValue: - return {"type": "string", "format": "email"} - - @classmethod - def __get_pydantic_core_schema__( - cls, source: Type[Any], handler: Callable[[Any], CoreSchema] - ) -> CoreSchema: - return with_info_plain_validator_function(cls._validate) - - -class Contact(BaseModel): - name: Optional[str] = None - url: Optional[AnyUrl] = None - email: Optional[EmailStr] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class License(BaseModel): - name: str - identifier: Optional[str] = None - url: Optional[AnyUrl] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class Info(BaseModel): - title: str - summary: Optional[str] = None - description: Optional[str] = None - termsOfService: Optional[str] = None - contact: Optional[Contact] = None - license: Optional[License] = None - version: str - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class ServerVariable(BaseModel): - enum: Annotated[Optional[List[str]], Field(min_length=1)] = None - default: str - description: Optional[str] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class Server(BaseModel): - url: Union[AnyUrl, str] - description: Optional[str] = None - variables: Optional[Dict[str, ServerVariable]] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class Reference(BaseModel): - ref: str = Field(alias="$ref") - - -class Discriminator(BaseModel): - propertyName: str - mapping: Optional[Dict[str, str]] = None - - -class XML(BaseModel): - name: Optional[str] = None - namespace: Optional[str] = None - prefix: Optional[str] = None - attribute: Optional[bool] = None - wrapped: Optional[bool] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class ExternalDocumentation(BaseModel): - description: Optional[str] = None - url: AnyUrl - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class Schema(BaseModel): - # Ref: JSON Schema 2020-12: https://json-schema.org/draft/2020-12/json-schema-core.html#name-the-json-schema-core-vocabu - # Core Vocabulary - schema_: Optional[str] = Field(default=None, alias="$schema") - vocabulary: Optional[str] = Field(default=None, alias="$vocabulary") - id: Optional[str] = Field(default=None, alias="$id") - anchor: Optional[str] = Field(default=None, alias="$anchor") - dynamicAnchor: Optional[str] = Field(default=None, alias="$dynamicAnchor") - ref: Optional[str] = Field(default=None, alias="$ref") - dynamicRef: Optional[str] = Field(default=None, alias="$dynamicRef") - defs: Optional[Dict[str, "SchemaOrBool"]] = Field(default=None, alias="$defs") - comment: Optional[str] = Field(default=None, alias="$comment") - # Ref: JSON Schema 2020-12: https://json-schema.org/draft/2020-12/json-schema-core.html#name-a-vocabulary-for-applying-s - # A Vocabulary for Applying Subschemas - allOf: Optional[List["SchemaOrBool"]] = None - anyOf: Optional[List["SchemaOrBool"]] = None - oneOf: Optional[List["SchemaOrBool"]] = None - not_: Optional["SchemaOrBool"] = Field(default=None, alias="not") - if_: Optional["SchemaOrBool"] = Field(default=None, alias="if") - then: Optional["SchemaOrBool"] = None - else_: Optional["SchemaOrBool"] = 
Field(default=None, alias="else")
-    dependentSchemas: Optional[Dict[str, "SchemaOrBool"]] = None
-    prefixItems: Optional[List["SchemaOrBool"]] = None
-    # TODO: uncomment and remove below when deprecating Pydantic v1
-    # It generates a list of schemas for tuples, before prefixItems was available
-    # items: Optional["SchemaOrBool"] = None
-    items: Optional[Union["SchemaOrBool", List["SchemaOrBool"]]] = None
-    contains: Optional["SchemaOrBool"] = None
-    properties: Optional[Dict[str, "SchemaOrBool"]] = None
-    patternProperties: Optional[Dict[str, "SchemaOrBool"]] = None
-    additionalProperties: Optional["SchemaOrBool"] = None
-    propertyNames: Optional["SchemaOrBool"] = None
-    unevaluatedItems: Optional["SchemaOrBool"] = None
-    unevaluatedProperties: Optional["SchemaOrBool"] = None
-    # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-structural
-    # A Vocabulary for Structural Validation
-    type: Optional[str] = None
-    enum: Optional[List[Any]] = None
-    const: Optional[Any] = None
-    multipleOf: Optional[float] = Field(default=None, gt=0)
-    maximum: Optional[float] = None
-    exclusiveMaximum: Optional[float] = None
-    minimum: Optional[float] = None
-    exclusiveMinimum: Optional[float] = None
-    maxLength: Optional[int] = Field(default=None, ge=0)
-    minLength: Optional[int] = Field(default=None, ge=0)
-    pattern: Optional[str] = None
-    maxItems: Optional[int] = Field(default=None, ge=0)
-    minItems: Optional[int] = Field(default=None, ge=0)
-    uniqueItems: Optional[bool] = None
-    maxContains: Optional[int] = Field(default=None, ge=0)
-    minContains: Optional[int] = Field(default=None, ge=0)
-    maxProperties: Optional[int] = Field(default=None, ge=0)
-    minProperties: Optional[int] = Field(default=None, ge=0)
-    required: Optional[List[str]] = None
-    dependentRequired: Optional[Dict[str, Set[str]]] = None
-    # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-vocabularies-for-semantic-c
-    # Vocabularies for Semantic Content With "format"
-    format: Optional[str] = None
-    # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-the-conten
-    # A Vocabulary for the Contents of String-Encoded Data
-    contentEncoding: Optional[str] = None
-    contentMediaType: Optional[str] = None
-    contentSchema: Optional["SchemaOrBool"] = None
-    # Ref: JSON Schema Validation 2020-12: https://json-schema.org/draft/2020-12/json-schema-validation.html#name-a-vocabulary-for-basic-meta
-    # A Vocabulary for Basic Meta-Data Annotations
-    title: Optional[str] = None
-    description: Optional[str] = None
-    default: Optional[Any] = None
-    deprecated: Optional[bool] = None
-    readOnly: Optional[bool] = None
-    writeOnly: Optional[bool] = None
-    examples: Optional[List[Any]] = None
-    # Ref: OpenAPI 3.1.0: https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.0.md#schema-object
-    # Schema Object
-    discriminator: Optional[Discriminator] = None
-    xml: Optional[XML] = None
-    externalDocs: Optional[ExternalDocumentation] = None
-    example: Annotated[
-        Optional[Any],
-        typing_deprecated(
-            "Deprecated in OpenAPI 3.1.0, which now uses JSON Schema 2020-12, "
-            "although still supported. Use examples instead."
- ), - ] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -# Ref: https://json-schema.org/draft/2020-12/json-schema-core.html#name-json-schema-documents -# A JSON Schema MUST be an object or a boolean. -SchemaOrBool = Union[Schema, bool] - - -class Example(TypedDict, total=False): - summary: Optional[str] - description: Optional[str] - value: Optional[Any] - externalValue: Optional[AnyUrl] - - if PYDANTIC_V2: # type: ignore [misc] - __pydantic_config__ = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class ParameterInType(Enum): - query = "query" - header = "header" - path = "path" - cookie = "cookie" - - -class Encoding(BaseModel): - contentType: Optional[str] = None - headers: Optional[Dict[str, Union["Header", Reference]]] = None - style: Optional[str] = None - explode: Optional[bool] = None - allowReserved: Optional[bool] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class MediaType(BaseModel): - schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema") - example: Optional[Any] = None - examples: Optional[Dict[str, Union[Example, Reference]]] = None - encoding: Optional[Dict[str, Encoding]] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class ParameterBase(BaseModel): - description: Optional[str] = None - required: Optional[bool] = None - deprecated: Optional[bool] = None - # Serialization rules for simple scenarios - style: Optional[str] = None - explode: Optional[bool] = None - allowReserved: Optional[bool] = None - schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema") - example: Optional[Any] = None - examples: Optional[Dict[str, Union[Example, Reference]]] = None - # Serialization rules for more complex scenarios - content: Optional[Dict[str, MediaType]] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class Parameter(ParameterBase): - name: str - in_: ParameterInType = Field(alias="in") - - -class Header(ParameterBase): - pass - - -class RequestBody(BaseModel): - description: Optional[str] = None - content: Dict[str, MediaType] - required: Optional[bool] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class Link(BaseModel): - operationRef: Optional[str] = None - operationId: Optional[str] = None - parameters: Optional[Dict[str, Union[Any, str]]] = None - requestBody: Optional[Union[Any, str]] = None - description: Optional[str] = None - server: Optional[Server] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class Response(BaseModel): - description: str - headers: Optional[Dict[str, Union[Header, Reference]]] = None - content: Optional[Dict[str, MediaType]] = None - links: Optional[Dict[str, Union[Link, Reference]]] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class Operation(BaseModel): - tags: Optional[List[str]] = None - summary: Optional[str] = None - description: Optional[str] = None - externalDocs: Optional[ExternalDocumentation] = None - operationId: Optional[str] = None - parameters: Optional[List[Union[Parameter, Reference]]] = None - requestBody: Optional[Union[RequestBody, Reference]] = None - # Using Any for Specification Extensions - 
responses: Optional[Dict[str, Union[Response, Any]]] = None - callbacks: Optional[Dict[str, Union[Dict[str, "PathItem"], Reference]]] = None - deprecated: Optional[bool] = None - security: Optional[List[Dict[str, List[str]]]] = None - servers: Optional[List[Server]] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class PathItem(BaseModel): - ref: Optional[str] = Field(default=None, alias="$ref") - summary: Optional[str] = None - description: Optional[str] = None - get: Optional[Operation] = None - put: Optional[Operation] = None - post: Optional[Operation] = None - delete: Optional[Operation] = None - options: Optional[Operation] = None - head: Optional[Operation] = None - patch: Optional[Operation] = None - trace: Optional[Operation] = None - servers: Optional[List[Server]] = None - parameters: Optional[List[Union[Parameter, Reference]]] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class SecuritySchemeType(Enum): - apiKey = "apiKey" - http = "http" - oauth2 = "oauth2" - openIdConnect = "openIdConnect" - - -class SecurityBase(BaseModel): - type_: SecuritySchemeType = Field(alias="type") - description: Optional[str] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class APIKeyIn(Enum): - query = "query" - header = "header" - cookie = "cookie" - - -class APIKey(SecurityBase): - type_: SecuritySchemeType = Field(default=SecuritySchemeType.apiKey, alias="type") - in_: APIKeyIn = Field(alias="in") - name: str - - -class HTTPBase(SecurityBase): - type_: SecuritySchemeType = Field(default=SecuritySchemeType.http, alias="type") - scheme: str - - -class HTTPBearer(HTTPBase): - scheme: Literal["bearer"] = "bearer" - bearerFormat: Optional[str] = None - - -class OAuthFlow(BaseModel): - refreshUrl: Optional[str] = None - scopes: Dict[str, str] = {} - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class OAuthFlowImplicit(OAuthFlow): - authorizationUrl: str - - -class OAuthFlowPassword(OAuthFlow): - tokenUrl: str - - -class OAuthFlowClientCredentials(OAuthFlow): - tokenUrl: str - - -class OAuthFlowAuthorizationCode(OAuthFlow): - authorizationUrl: str - tokenUrl: str - - -class OAuthFlows(BaseModel): - implicit: Optional[OAuthFlowImplicit] = None - password: Optional[OAuthFlowPassword] = None - clientCredentials: Optional[OAuthFlowClientCredentials] = None - authorizationCode: Optional[OAuthFlowAuthorizationCode] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class OAuth2(SecurityBase): - type_: SecuritySchemeType = Field(default=SecuritySchemeType.oauth2, alias="type") - flows: OAuthFlows - - -class OpenIdConnect(SecurityBase): - type_: SecuritySchemeType = Field( - default=SecuritySchemeType.openIdConnect, alias="type" - ) - openIdConnectUrl: str - - -SecurityScheme = Union[APIKey, HTTPBase, OAuth2, OpenIdConnect, HTTPBearer] - - -class Components(BaseModel): - schemas: Optional[Dict[str, Union[Schema, Reference]]] = None - responses: Optional[Dict[str, Union[Response, Reference]]] = None - parameters: Optional[Dict[str, Union[Parameter, Reference]]] = None - examples: Optional[Dict[str, Union[Example, Reference]]] = None - requestBodies: Optional[Dict[str, Union[RequestBody, Reference]]] = None - headers: Optional[Dict[str, Union[Header, Reference]]] = None - securitySchemes: 
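# Sketch of how the security-scheme models above compose (names are the
# classes defined in this module; values illustrative):
#     scheme = OAuth2(
#         flows=OAuthFlows(
#             password=OAuthFlowPassword(tokenUrl="token", scopes={"read": "Read"})
#         )
#     )
#     # jsonable_encoder(scheme, by_alias=True) -> {"type": "oauth2", ...}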
Optional[Dict[str, Union[SecurityScheme, Reference]]] = None - links: Optional[Dict[str, Union[Link, Reference]]] = None - # Using Any for Specification Extensions - callbacks: Optional[Dict[str, Union[Dict[str, PathItem], Reference, Any]]] = None - pathItems: Optional[Dict[str, Union[PathItem, Reference]]] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class Tag(BaseModel): - name: str - description: Optional[str] = None - externalDocs: Optional[ExternalDocumentation] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -class OpenAPI(BaseModel): - openapi: str - info: Info - jsonSchemaDialect: Optional[str] = None - servers: Optional[List[Server]] = None - # Using Any for Specification Extensions - paths: Optional[Dict[str, Union[PathItem, Any]]] = None - webhooks: Optional[Dict[str, Union[PathItem, Reference]]] = None - components: Optional[Components] = None - security: Optional[List[Dict[str, List[str]]]] = None - tags: Optional[List[Tag]] = None - externalDocs: Optional[ExternalDocumentation] = None - - if PYDANTIC_V2: - model_config = {"extra": "allow"} - - else: - - class Config: - extra = "allow" - - -_model_rebuild(Schema) -_model_rebuild(Operation) -_model_rebuild(Encoding) diff --git a/server/venv/Lib/site-packages/fastapi/openapi/utils.py b/server/venv/Lib/site-packages/fastapi/openapi/utils.py deleted file mode 100644 index 5bfb5ac..0000000 --- a/server/venv/Lib/site-packages/fastapi/openapi/utils.py +++ /dev/null @@ -1,530 +0,0 @@ -import http.client -import inspect -import warnings -from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Type, Union, cast - -from fastapi import routing -from fastapi._compat import ( - GenerateJsonSchema, - JsonSchemaValue, - ModelField, - Undefined, - get_compat_model_name_map, - get_definitions, - get_schema_from_model_field, - lenient_issubclass, -) -from fastapi.datastructures import DefaultPlaceholder -from fastapi.dependencies.models import Dependant -from fastapi.dependencies.utils import get_flat_dependant, get_flat_params -from fastapi.encoders import jsonable_encoder -from fastapi.openapi.constants import METHODS_WITH_BODY, REF_PREFIX, REF_TEMPLATE -from fastapi.openapi.models import OpenAPI -from fastapi.params import Body, Param -from fastapi.responses import Response -from fastapi.types import ModelNameMap -from fastapi.utils import ( - deep_dict_update, - generate_operation_id_for_path, - is_body_allowed_for_status_code, -) -from starlette.responses import JSONResponse -from starlette.routing import BaseRoute -from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY -from typing_extensions import Literal - -validation_error_definition = { - "title": "ValidationError", - "type": "object", - "properties": { - "loc": { - "title": "Location", - "type": "array", - "items": {"anyOf": [{"type": "string"}, {"type": "integer"}]}, - }, - "msg": {"title": "Message", "type": "string"}, - "type": {"title": "Error Type", "type": "string"}, - }, - "required": ["loc", "msg", "type"], -} - -validation_error_response_definition = { - "title": "HTTPValidationError", - "type": "object", - "properties": { - "detail": { - "title": "Detail", - "type": "array", - "items": {"$ref": REF_PREFIX + "ValidationError"}, - } - }, -} - -status_code_ranges: Dict[str, str] = { - "1XX": "Information", - "2XX": "Success", - "3XX": "Redirection", - "4XX": "Client Error", - "5XX": "Server Error", - "DEFAULT": "Default 
Response", -} - - -def get_openapi_security_definitions( - flat_dependant: Dependant, -) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: - security_definitions = {} - operation_security = [] - for security_requirement in flat_dependant.security_requirements: - security_definition = jsonable_encoder( - security_requirement.security_scheme.model, - by_alias=True, - exclude_none=True, - ) - security_name = security_requirement.security_scheme.scheme_name - security_definitions[security_name] = security_definition - operation_security.append({security_name: security_requirement.scopes}) - return security_definitions, operation_security - - -def get_openapi_operation_parameters( - *, - all_route_params: Sequence[ModelField], - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - field_mapping: Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - separate_input_output_schemas: bool = True, -) -> List[Dict[str, Any]]: - parameters = [] - for param in all_route_params: - field_info = param.field_info - field_info = cast(Param, field_info) - if not field_info.include_in_schema: - continue - param_schema = get_schema_from_model_field( - field=param, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - parameter = { - "name": param.alias, - "in": field_info.in_.value, - "required": param.required, - "schema": param_schema, - } - if field_info.description: - parameter["description"] = field_info.description - if field_info.openapi_examples: - parameter["examples"] = jsonable_encoder(field_info.openapi_examples) - elif field_info.example != Undefined: - parameter["example"] = jsonable_encoder(field_info.example) - if field_info.deprecated: - parameter["deprecated"] = field_info.deprecated - parameters.append(parameter) - return parameters - - -def get_openapi_operation_request_body( - *, - body_field: Optional[ModelField], - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - field_mapping: Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - separate_input_output_schemas: bool = True, -) -> Optional[Dict[str, Any]]: - if not body_field: - return None - assert isinstance(body_field, ModelField) - body_schema = get_schema_from_model_field( - field=body_field, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - field_info = cast(Body, body_field.field_info) - request_media_type = field_info.media_type - required = body_field.required - request_body_oai: Dict[str, Any] = {} - if required: - request_body_oai["required"] = required - request_media_content: Dict[str, Any] = {"schema": body_schema} - if field_info.openapi_examples: - request_media_content["examples"] = jsonable_encoder( - field_info.openapi_examples - ) - elif field_info.example != Undefined: - request_media_content["example"] = jsonable_encoder(field_info.example) - request_body_oai["content"] = {request_media_type: request_media_content} - return request_body_oai - - -def generate_operation_id( - *, route: routing.APIRoute, method: str -) -> str: # pragma: nocover - warnings.warn( - "fastapi.openapi.utils.generate_operation_id() was deprecated, " - "it is not used internally, and will be removed soon", - DeprecationWarning, - stacklevel=2, - ) - if route.operation_id: - return route.operation_id - 
path: str = route.path_format - return generate_operation_id_for_path(name=route.name, path=path, method=method) - - -def generate_operation_summary(*, route: routing.APIRoute, method: str) -> str: - if route.summary: - return route.summary - return route.name.replace("_", " ").title() - - -def get_openapi_operation_metadata( - *, route: routing.APIRoute, method: str, operation_ids: Set[str] -) -> Dict[str, Any]: - operation: Dict[str, Any] = {} - if route.tags: - operation["tags"] = route.tags - operation["summary"] = generate_operation_summary(route=route, method=method) - if route.description: - operation["description"] = route.description - operation_id = route.operation_id or route.unique_id - if operation_id in operation_ids: - message = ( - f"Duplicate Operation ID {operation_id} for function " - + f"{route.endpoint.__name__}" - ) - file_name = getattr(route.endpoint, "__globals__", {}).get("__file__") - if file_name: - message += f" at {file_name}" - warnings.warn(message, stacklevel=1) - operation_ids.add(operation_id) - operation["operationId"] = operation_id - if route.deprecated: - operation["deprecated"] = route.deprecated - return operation - - -def get_openapi_path( - *, - route: routing.APIRoute, - operation_ids: Set[str], - schema_generator: GenerateJsonSchema, - model_name_map: ModelNameMap, - field_mapping: Dict[ - Tuple[ModelField, Literal["validation", "serialization"]], JsonSchemaValue - ], - separate_input_output_schemas: bool = True, -) -> Tuple[Dict[str, Any], Dict[str, Any], Dict[str, Any]]: - path = {} - security_schemes: Dict[str, Any] = {} - definitions: Dict[str, Any] = {} - assert route.methods is not None, "Methods must be a list" - if isinstance(route.response_class, DefaultPlaceholder): - current_response_class: Type[Response] = route.response_class.value - else: - current_response_class = route.response_class - assert current_response_class, "A response class is needed to generate OpenAPI" - route_response_media_type: Optional[str] = current_response_class.media_type - if route.include_in_schema: - for method in route.methods: - operation = get_openapi_operation_metadata( - route=route, method=method, operation_ids=operation_ids - ) - parameters: List[Dict[str, Any]] = [] - flat_dependant = get_flat_dependant(route.dependant, skip_repeats=True) - security_definitions, operation_security = get_openapi_security_definitions( - flat_dependant=flat_dependant - ) - if operation_security: - operation.setdefault("security", []).extend(operation_security) - if security_definitions: - security_schemes.update(security_definitions) - all_route_params = get_flat_params(route.dependant) - operation_parameters = get_openapi_operation_parameters( - all_route_params=all_route_params, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - parameters.extend(operation_parameters) - if parameters: - all_parameters = { - (param["in"], param["name"]): param for param in parameters - } - required_parameters = { - (param["in"], param["name"]): param - for param in parameters - if param.get("required") - } - # Make sure required definitions of the same parameter take precedence - # over non-required definitions - all_parameters.update(required_parameters) - operation["parameters"] = list(all_parameters.values()) - if method in METHODS_WITH_BODY: - request_body_oai = get_openapi_operation_request_body( - body_field=route.body_field, - schema_generator=schema_generator, 
- model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - if request_body_oai: - operation["requestBody"] = request_body_oai - if route.callbacks: - callbacks = {} - for callback in route.callbacks: - if isinstance(callback, routing.APIRoute): - ( - cb_path, - cb_security_schemes, - cb_definitions, - ) = get_openapi_path( - route=callback, - operation_ids=operation_ids, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - callbacks[callback.name] = {callback.path: cb_path} - operation["callbacks"] = callbacks - if route.status_code is not None: - status_code = str(route.status_code) - else: - # It would probably make more sense for all response classes to have an - # explicit default status_code, and to extract it from them, instead of - # doing this inspection tricks, that would probably be in the future - # TODO: probably make status_code a default class attribute for all - # responses in Starlette - response_signature = inspect.signature(current_response_class.__init__) - status_code_param = response_signature.parameters.get("status_code") - if status_code_param is not None: - if isinstance(status_code_param.default, int): - status_code = str(status_code_param.default) - operation.setdefault("responses", {}).setdefault(status_code, {})[ - "description" - ] = route.response_description - if route_response_media_type and is_body_allowed_for_status_code( - route.status_code - ): - response_schema = {"type": "string"} - if lenient_issubclass(current_response_class, JSONResponse): - if route.response_field: - response_schema = get_schema_from_model_field( - field=route.response_field, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - else: - response_schema = {} - operation.setdefault("responses", {}).setdefault( - status_code, {} - ).setdefault("content", {}).setdefault(route_response_media_type, {})[ - "schema" - ] = response_schema - if route.responses: - operation_responses = operation.setdefault("responses", {}) - for ( - additional_status_code, - additional_response, - ) in route.responses.items(): - process_response = additional_response.copy() - process_response.pop("model", None) - status_code_key = str(additional_status_code).upper() - if status_code_key == "DEFAULT": - status_code_key = "default" - openapi_response = operation_responses.setdefault( - status_code_key, {} - ) - assert isinstance( - process_response, dict - ), "An additional response must be a dict" - field = route.response_fields.get(additional_status_code) - additional_field_schema: Optional[Dict[str, Any]] = None - if field: - additional_field_schema = get_schema_from_model_field( - field=field, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - media_type = route_response_media_type or "application/json" - additional_schema = ( - process_response.setdefault("content", {}) - .setdefault(media_type, {}) - .setdefault("schema", {}) - ) - deep_dict_update(additional_schema, additional_field_schema) - status_text: Optional[str] = status_code_ranges.get( - str(additional_status_code).upper() - ) or http.client.responses.get(int(additional_status_code)) - description = ( - 
process_response.get("description") - or openapi_response.get("description") - or status_text - or "Additional Response" - ) - deep_dict_update(openapi_response, process_response) - openapi_response["description"] = description - http422 = str(HTTP_422_UNPROCESSABLE_ENTITY) - if (all_route_params or route.body_field) and not any( - status in operation["responses"] - for status in [http422, "4XX", "default"] - ): - operation["responses"][http422] = { - "description": "Validation Error", - "content": { - "application/json": { - "schema": {"$ref": REF_PREFIX + "HTTPValidationError"} - } - }, - } - if "ValidationError" not in definitions: - definitions.update( - { - "ValidationError": validation_error_definition, - "HTTPValidationError": validation_error_response_definition, - } - ) - if route.openapi_extra: - deep_dict_update(operation, route.openapi_extra) - path[method.lower()] = operation - return path, security_schemes, definitions - - -def get_fields_from_routes( - routes: Sequence[BaseRoute], -) -> List[ModelField]: - body_fields_from_routes: List[ModelField] = [] - responses_from_routes: List[ModelField] = [] - request_fields_from_routes: List[ModelField] = [] - callback_flat_models: List[ModelField] = [] - for route in routes: - if getattr(route, "include_in_schema", None) and isinstance( - route, routing.APIRoute - ): - if route.body_field: - assert isinstance( - route.body_field, ModelField - ), "A request body must be a Pydantic Field" - body_fields_from_routes.append(route.body_field) - if route.response_field: - responses_from_routes.append(route.response_field) - if route.response_fields: - responses_from_routes.extend(route.response_fields.values()) - if route.callbacks: - callback_flat_models.extend(get_fields_from_routes(route.callbacks)) - params = get_flat_params(route.dependant) - request_fields_from_routes.extend(params) - - flat_models = callback_flat_models + list( - body_fields_from_routes + responses_from_routes + request_fields_from_routes - ) - return flat_models - - -def get_openapi( - *, - title: str, - version: str, - openapi_version: str = "3.1.0", - summary: Optional[str] = None, - description: Optional[str] = None, - routes: Sequence[BaseRoute], - webhooks: Optional[Sequence[BaseRoute]] = None, - tags: Optional[List[Dict[str, Any]]] = None, - servers: Optional[List[Dict[str, Union[str, Any]]]] = None, - terms_of_service: Optional[str] = None, - contact: Optional[Dict[str, Union[str, Any]]] = None, - license_info: Optional[Dict[str, Union[str, Any]]] = None, - separate_input_output_schemas: bool = True, -) -> Dict[str, Any]: - info: Dict[str, Any] = {"title": title, "version": version} - if summary: - info["summary"] = summary - if description: - info["description"] = description - if terms_of_service: - info["termsOfService"] = terms_of_service - if contact: - info["contact"] = contact - if license_info: - info["license"] = license_info - output: Dict[str, Any] = {"openapi": openapi_version, "info": info} - if servers: - output["servers"] = servers - components: Dict[str, Dict[str, Any]] = {} - paths: Dict[str, Dict[str, Any]] = {} - webhook_paths: Dict[str, Dict[str, Any]] = {} - operation_ids: Set[str] = set() - all_fields = get_fields_from_routes(list(routes or []) + list(webhooks or [])) - model_name_map = get_compat_model_name_map(all_fields) - schema_generator = GenerateJsonSchema(ref_template=REF_TEMPLATE) - field_mapping, definitions = get_definitions( - fields=all_fields, - schema_generator=schema_generator, - model_name_map=model_name_map, - 
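# The block above auto-documents validation failures: any operation with
# parameters or a body gets this 422 response unless it already declares 422,
# "4XX", or "default" (sketch; REF_PREFIX is "#/components/schemas/"):
#     "422": {
#         "description": "Validation Error",
#         "content": {
#             "application/json": {
#                 "schema": {"$ref": "#/components/schemas/HTTPValidationError"}
#             }
#         },
#     }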
separate_input_output_schemas=separate_input_output_schemas, - ) - for route in routes or []: - if isinstance(route, routing.APIRoute): - result = get_openapi_path( - route=route, - operation_ids=operation_ids, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - if result: - path, security_schemes, path_definitions = result - if path: - paths.setdefault(route.path_format, {}).update(path) - if security_schemes: - components.setdefault("securitySchemes", {}).update( - security_schemes - ) - if path_definitions: - definitions.update(path_definitions) - for webhook in webhooks or []: - if isinstance(webhook, routing.APIRoute): - result = get_openapi_path( - route=webhook, - operation_ids=operation_ids, - schema_generator=schema_generator, - model_name_map=model_name_map, - field_mapping=field_mapping, - separate_input_output_schemas=separate_input_output_schemas, - ) - if result: - path, security_schemes, path_definitions = result - if path: - webhook_paths.setdefault(webhook.path_format, {}).update(path) - if security_schemes: - components.setdefault("securitySchemes", {}).update( - security_schemes - ) - if path_definitions: - definitions.update(path_definitions) - if definitions: - components["schemas"] = {k: definitions[k] for k in sorted(definitions)} - if components: - output["components"] = components - output["paths"] = paths - if webhook_paths: - output["webhooks"] = webhook_paths - if tags: - output["tags"] = tags - return jsonable_encoder(OpenAPI(**output), by_alias=True, exclude_none=True) # type: ignore diff --git a/server/venv/Lib/site-packages/fastapi/param_functions.py b/server/venv/Lib/site-packages/fastapi/param_functions.py deleted file mode 100644 index 3f6dbc9..0000000 --- a/server/venv/Lib/site-packages/fastapi/param_functions.py +++ /dev/null @@ -1,2360 +0,0 @@ -from typing import Any, Callable, Dict, List, Optional, Sequence, Union - -from fastapi import params -from fastapi._compat import Undefined -from fastapi.openapi.models import Example -from typing_extensions import Annotated, Doc, deprecated # type: ignore [attr-defined] - -_Unset: Any = Undefined - - -def Path( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = ..., - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. 
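# A minimal usage sketch for the get_openapi() helper defined above; the title
# and version values are illustrative:
from fastapi import FastAPI
from fastapi.openapi.utils import get_openapi

app = FastAPI()
spec = get_openapi(title="Demo API", version="0.1.0", routes=app.routes)
assert spec["openapi"] == "3.1.0"
assert spec["info"] == {"title": "Demo API", "version": "0.1.0"}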
The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. - """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). 
- """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. - """ - ), - ], -) -> Any: - """ - Declare a path parameter for a *path operation*. - - Read more about it in the - [FastAPI docs for Path Parameters and Numeric Validations](https://fastapi.tiangolo.com/tutorial/path-params-numeric-validations/). - - ```python - from typing import Annotated - - from fastapi import FastAPI, Path - - app = FastAPI() - - - @app.get("/items/{item_id}") - async def read_items( - item_id: Annotated[int, Path(title="The ID of the item to get")], - ): - return {"item_id": item_id} - ``` - """ - return params.Path( - default=default, - default_factory=default_factory, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Query( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. 
- """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. - """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. 
visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. - """ - ), - ], -) -> Any: - return params.Query( - default=default, - default_factory=default_factory, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Header( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - convert_underscores: Annotated[ - bool, - Doc( - """ - Automatically convert underscores to hyphens in the parameter field name. - - Read more about it in the - [FastAPI docs for Header Parameters](https://fastapi.tiangolo.com/tutorial/header-params/#automatic-conversion) - """ - ), - ] = True, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. 
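# A usage sketch for Query() as declared above; the route and parameter name
# are illustrative:
from typing import Optional

from typing_extensions import Annotated

from fastapi import FastAPI, Query

app = FastAPI()


@app.get("/items/")
async def read_items(q: Annotated[Optional[str], Query(max_length=50)] = None):
    return {"q": q}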
If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. - """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
- """ - ), - ], -) -> Any: - return params.Header( - default=default, - default_factory=default_factory, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - convert_underscores=convert_underscores, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Cookie( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. - """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. 
- """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
- """ - ), - ], -) -> Any: - return params.Cookie( - default=default, - default_factory=default_factory, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Body( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - embed: Annotated[ - bool, - Doc( - """ - When `embed` is `True`, the parameter will be expected in a JSON body as a - key instead of being the JSON body itself. - - This happens automatically when more than one `Body` parameter is declared. - - Read more about it in the - [FastAPI docs for Body - Multiple Parameters](https://fastapi.tiangolo.com/tutorial/body-multiple-params/#embed-a-single-body-parameter). - """ - ), - ] = False, - media_type: Annotated[ - str, - Doc( - """ - The media type of this parameter field. Changing it would affect the - generated OpenAPI, but currently it doesn't affect the parsing of the data. - """ - ), - ] = "application/json", - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. 
If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. - """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
- """ - ), - ], -) -> Any: - return params.Body( - default=default, - default_factory=default_factory, - embed=embed, - media_type=media_type, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Form( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - media_type: Annotated[ - str, - Doc( - """ - The media type of this parameter field. Changing it would affect the - generated OpenAPI, but currently it doesn't affect the parsing of the data. - """ - ), - ] = "application/x-www-form-urlencoded", - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. 
- """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
- """ - ), - ], -) -> Any: - return params.Form( - default=default, - default_factory=default_factory, - media_type=media_type, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def File( # noqa: N802 - default: Annotated[ - Any, - Doc( - """ - Default value if the parameter field is not set. - """ - ), - ] = Undefined, - *, - default_factory: Annotated[ - Union[Callable[[], Any], None], - Doc( - """ - A callable to generate the default value. - - This doesn't affect `Path` parameters as the value is always required. - The parameter is available only for compatibility. - """ - ), - ] = _Unset, - media_type: Annotated[ - str, - Doc( - """ - The media type of this parameter field. Changing it would affect the - generated OpenAPI, but currently it doesn't affect the parsing of the data. - """ - ), - ] = "multipart/form-data", - alias: Annotated[ - Optional[str], - Doc( - """ - An alternative name for the parameter field. - - This will be used to extract the data and for the generated OpenAPI. - It is particularly useful when you can't use the name you want because it - is a Python reserved keyword or similar. - """ - ), - ] = None, - alias_priority: Annotated[ - Union[int, None], - Doc( - """ - Priority of the alias. This affects whether an alias generator is used. - """ - ), - ] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Whitelist' validation step. The parameter field will be the single one - allowed by the alias or set of aliases defined. - """ - ), - ] = None, - serialization_alias: Annotated[ - Union[str, None], - Doc( - """ - 'Blacklist' validation step. The vanilla parameter field will be the - single one of the alias' or set of aliases' fields and all the other - fields will be ignored at serialization time. - """ - ), - ] = None, - title: Annotated[ - Optional[str], - Doc( - """ - Human-readable title. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Human-readable description. - """ - ), - ] = None, - gt: Annotated[ - Optional[float], - Doc( - """ - Greater than. If set, value must be greater than this. Only applicable to - numbers. - """ - ), - ] = None, - ge: Annotated[ - Optional[float], - Doc( - """ - Greater than or equal. If set, value must be greater than or equal to - this. Only applicable to numbers. - """ - ), - ] = None, - lt: Annotated[ - Optional[float], - Doc( - """ - Less than. If set, value must be less than this. Only applicable to numbers. - """ - ), - ] = None, - le: Annotated[ - Optional[float], - Doc( - """ - Less than or equal. If set, value must be less than or equal to this. - Only applicable to numbers. - """ - ), - ] = None, - min_length: Annotated[ - Optional[int], - Doc( - """ - Minimum length for strings. 
- """ - ), - ] = None, - max_length: Annotated[ - Optional[int], - Doc( - """ - Maximum length for strings. - """ - ), - ] = None, - pattern: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - ] = None, - regex: Annotated[ - Optional[str], - Doc( - """ - RegEx pattern for strings. - """ - ), - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Annotated[ - Union[str, None], - Doc( - """ - Parameter field name for discriminating the type in a tagged union. - """ - ), - ] = None, - strict: Annotated[ - Union[bool, None], - Doc( - """ - If `True`, strict validation is applied to the field. - """ - ), - ] = _Unset, - multiple_of: Annotated[ - Union[float, None], - Doc( - """ - Value must be a multiple of this. Only applicable to numbers. - """ - ), - ] = _Unset, - allow_inf_nan: Annotated[ - Union[bool, None], - Doc( - """ - Allow `inf`, `-inf`, `nan`. Only applicable to numbers. - """ - ), - ] = _Unset, - max_digits: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of allow digits for strings. - """ - ), - ] = _Unset, - decimal_places: Annotated[ - Union[int, None], - Doc( - """ - Maximum number of decimal places allowed for numbers. - """ - ), - ] = _Unset, - examples: Annotated[ - Optional[List[Any]], - Doc( - """ - Example values for this field. - """ - ), - ] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." - ), - ] = _Unset, - openapi_examples: Annotated[ - Optional[Dict[str, Example]], - Doc( - """ - OpenAPI-specific examples. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Swagger UI (that provides the `/docs` interface) has better support for the - OpenAPI-specific examples than the JSON Schema `examples`, that's the main - use case for this. - - Read more about it in the - [FastAPI docs for Declare Request Example Data](https://fastapi.tiangolo.com/tutorial/schema-extra-example/#using-the-openapi_examples-parameter). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this parameter field as deprecated. - - It will affect the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) this parameter field in the generated OpenAPI. - You probably don't need it, but it's available. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - json_schema_extra: Annotated[ - Union[Dict[str, Any], None], - Doc( - """ - Any additional JSON schema data. - """ - ), - ] = None, - **extra: Annotated[ - Any, - Doc( - """ - Include extra fields used by the JSON Schema. - """ - ), - deprecated( - """ - The `extra` kwargs is deprecated. Use `json_schema_extra` instead. 
- """ - ), - ], -) -> Any: - return params.File( - default=default, - default_factory=default_factory, - media_type=media_type, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - example=example, - examples=examples, - openapi_examples=openapi_examples, - deprecated=deprecated, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -def Depends( # noqa: N802 - dependency: Annotated[ - Optional[Callable[..., Any]], - Doc( - """ - A "dependable" callable (like a function). - - Don't call it directly, FastAPI will call it for you, just pass the object - directly. - """ - ), - ] = None, - *, - use_cache: Annotated[ - bool, - Doc( - """ - By default, after a dependency is called the first time in a request, if - the dependency is declared again for the rest of the request (for example - if the dependency is needed by several dependencies), the value will be - re-used for the rest of the request. - - Set `use_cache` to `False` to disable this behavior and ensure the - dependency is called again (if declared more than once) in the same request. - """ - ), - ] = True, -) -> Any: - """ - Declare a FastAPI dependency. - - It takes a single "dependable" callable (like a function). - - Don't call it directly, FastAPI will call it for you. - - Read more about it in the - [FastAPI docs for Dependencies](https://fastapi.tiangolo.com/tutorial/dependencies/). - - **Example** - - ```python - from typing import Annotated - - from fastapi import Depends, FastAPI - - app = FastAPI() - - - async def common_parameters(q: str | None = None, skip: int = 0, limit: int = 100): - return {"q": q, "skip": skip, "limit": limit} - - - @app.get("/items/") - async def read_items(commons: Annotated[dict, Depends(common_parameters)]): - return commons - ``` - """ - return params.Depends(dependency=dependency, use_cache=use_cache) - - -def Security( # noqa: N802 - dependency: Annotated[ - Optional[Callable[..., Any]], - Doc( - """ - A "dependable" callable (like a function). - - Don't call it directly, FastAPI will call it for you, just pass the object - directly. - """ - ), - ] = None, - *, - scopes: Annotated[ - Optional[Sequence[str]], - Doc( - """ - OAuth2 scopes required for the *path operation* that uses this Security - dependency. - - The term "scope" comes from the OAuth2 specification, it seems to be - intentionaly vague and interpretable. It normally refers to permissions, - in cases to roles. - - These scopes are integrated with OpenAPI (and the API docs at `/docs`). - So they are visible in the OpenAPI specification. - ) - """ - ), - ] = None, - use_cache: Annotated[ - bool, - Doc( - """ - By default, after a dependency is called the first time in a request, if - the dependency is declared again for the rest of the request (for example - if the dependency is needed by several dependencies), the value will be - re-used for the rest of the request. - - Set `use_cache` to `False` to disable this behavior and ensure the - dependency is called again (if declared more than once) in the same request. - """ - ), - ] = True, -) -> Any: - """ - Declare a FastAPI Security dependency. 
- - The only difference with a regular dependency is that it can declare OAuth2 - scopes that will be integrated with OpenAPI and the automatic UI docs (by default - at `/docs`). - - It takes a single "dependable" callable (like a function). - - Don't call it directly, FastAPI will call it for you. - - Read more about it in the - [FastAPI docs for Security](https://fastapi.tiangolo.com/tutorial/security/) and - in the - [FastAPI docs for OAuth2 scopes](https://fastapi.tiangolo.com/advanced/security/oauth2-scopes/). - - **Example** - - ```python - from typing import Annotated - - from fastapi import Depends, FastAPI - - from .db import User - from .security import get_current_active_user - - app = FastAPI() - - @app.get("/users/me/items/") - async def read_own_items( - current_user: Annotated[User, Security(get_current_active_user, scopes=["items"])] - ): - return [{"item_id": "Foo", "owner": current_user.username}] - ``` - """ - return params.Security(dependency=dependency, scopes=scopes, use_cache=use_cache) diff --git a/server/venv/Lib/site-packages/fastapi/params.py b/server/venv/Lib/site-packages/fastapi/params.py deleted file mode 100644 index b40944d..0000000 --- a/server/venv/Lib/site-packages/fastapi/params.py +++ /dev/null @@ -1,777 +0,0 @@ -import warnings -from enum import Enum -from typing import Any, Callable, Dict, List, Optional, Sequence, Union - -from fastapi.openapi.models import Example -from pydantic.fields import FieldInfo -from typing_extensions import Annotated, deprecated - -from ._compat import PYDANTIC_V2, Undefined - -_Unset: Any = Undefined - - -class ParamTypes(Enum): - query = "query" - header = "header" - path = "path" - cookie = "cookie" - - -class Param(FieldInfo): - in_: ParamTypes - - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Optional[bool] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - self.deprecated = deprecated - if example is not _Unset: - warnings.warn( - "`example` has been deprecated, please use `examples` instead", - category=DeprecationWarning, - stacklevel=4, - ) - self.example = example - self.include_in_schema = include_in_schema - self.openapi_examples = openapi_examples - kwargs = dict( - default=default, - default_factory=default_factory, - alias=alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - discriminator=discriminator, - multiple_of=multiple_of, - allow_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - **extra, - ) - if examples is not None: - kwargs["examples"] = examples - if regex is not None: - warnings.warn( - "`regex` has been deprecated, please use `pattern` instead", - category=DeprecationWarning, - stacklevel=4, - ) - current_json_schema_extra = json_schema_extra or extra - if PYDANTIC_V2: - kwargs.update( - { - "annotation": annotation, - "alias_priority": alias_priority, - "validation_alias": validation_alias, - "serialization_alias": serialization_alias, - "strict": strict, - "json_schema_extra": current_json_schema_extra, - } - ) - kwargs["pattern"] = pattern or regex - else: - kwargs["regex"] = pattern or regex - kwargs.update(**current_json_schema_extra) - use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset} - - super().__init__(**use_kwargs) - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.default})" - - -class Path(Param): - in_ = ParamTypes.path - - def __init__( - self, - default: Any = ..., - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
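
The `use_kwargs` dict comprehension at the end of `Param.__init__` above is a sentinel-filtering pattern: anything the caller left at `_Unset` is dropped before reaching `FieldInfo`, so Pydantic's own defaults still apply to untouched options. A standalone sketch of the same idea, with illustrative names:

```python
from typing import Any, Dict


class _UnsetType:
    def __repr__(self) -> str:
        return "_Unset"


_Unset: Any = _UnsetType()


def explicit_kwargs(**kwargs: Any) -> Dict[str, Any]:
    # Keep only arguments the caller actually supplied; options still at
    # the sentinel are omitted so downstream defaults take over.
    return {k: v for k, v in kwargs.items() if v is not _Unset}


print(explicit_kwargs(gt=0, strict=_Unset))  # {'gt': 0}
```
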
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Optional[bool] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - assert default is ..., "Path parameters cannot have a default value" - self.in_ = self.in_ - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class Query(Param): - in_ = ParamTypes.query - - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Optional[bool] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class Header(Param): - in_ = ParamTypes.header - - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - convert_underscores: bool = True, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
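
`Query` above adds no behavior of its own beyond `in_ = ParamTypes.query`; everything else is inherited from `Param`. The documented usage, as a brief sketch:

```python
from typing import Optional

from fastapi import FastAPI, Query

app = FastAPI()


@app.get("/items/")
async def read_items(q: Optional[str] = Query(default=None, max_length=50)):
    # `q` is optional; when present it is validated against max_length,
    # which Param.__init__ above forwards into the Pydantic FieldInfo.
    return {"q": q}
```
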
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Optional[bool] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - self.convert_underscores = convert_underscores - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class Cookie(Param): - in_ = ParamTypes.cookie - - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Optional[bool] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class Body(FieldInfo): - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - embed: bool = False, - media_type: str = "application/json", - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
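
`Header` is the one `Param` subclass carrying extra state: `convert_underscores` maps Pythonic parameter names onto HTTP header names. A small sketch combining it with `Cookie`:

```python
from typing import Optional

from fastapi import Cookie, FastAPI, Header

app = FastAPI()


@app.get("/client-info/")
async def client_info(
    user_agent: Optional[str] = Header(default=None),
    session_id: Optional[str] = Cookie(default=None),
):
    # With convert_underscores=True (the default), the parameter name
    # user_agent is matched against the User-Agent request header.
    return {"User-Agent": user_agent, "session_id": session_id}
```
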
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Optional[bool] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - self.embed = embed - self.media_type = media_type - self.deprecated = deprecated - if example is not _Unset: - warnings.warn( - "`example` has been deprecated, please use `examples` instead", - category=DeprecationWarning, - stacklevel=4, - ) - self.example = example - self.include_in_schema = include_in_schema - self.openapi_examples = openapi_examples - kwargs = dict( - default=default, - default_factory=default_factory, - alias=alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - discriminator=discriminator, - multiple_of=multiple_of, - allow_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - **extra, - ) - if examples is not None: - kwargs["examples"] = examples - if regex is not None: - warnings.warn( - "`regex` has been depreacated, please use `pattern` instead", - category=DeprecationWarning, - stacklevel=4, - ) - current_json_schema_extra = json_schema_extra or extra - if PYDANTIC_V2: - kwargs.update( - { - "annotation": annotation, - "alias_priority": alias_priority, - "validation_alias": validation_alias, - "serialization_alias": serialization_alias, - "strict": strict, - "json_schema_extra": current_json_schema_extra, - } - ) - kwargs["pattern"] = pattern or regex - else: - kwargs["regex"] = pattern or regex - kwargs.update(**current_json_schema_extra) - - use_kwargs = {k: v for k, v in kwargs.items() if v is not _Unset} - - super().__init__(**use_kwargs) - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.default})" - - -class Form(Body): - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - media_type: str = "application/x-www-form-urlencoded", - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Optional[bool] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - embed=True, - media_type=media_type, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class File(Form): - def __init__( - self, - default: Any = Undefined, - *, - default_factory: Union[Callable[[], Any], None] = _Unset, - annotation: Optional[Any] = None, - media_type: str = "multipart/form-data", - alias: Optional[str] = None, - alias_priority: Union[int, None] = _Unset, - # TODO: update when deprecating Pydantic v1, import these types - # validation_alias: str | AliasPath | AliasChoices | None - validation_alias: Union[str, None] = None, - serialization_alias: Union[str, None] = None, - title: Optional[str] = None, - description: Optional[str] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, - pattern: Optional[str] = None, - regex: Annotated[ - Optional[str], - deprecated( - "Deprecated in FastAPI 0.100.0 and Pydantic v2, use `pattern` instead." - ), - ] = None, - discriminator: Union[str, None] = None, - strict: Union[bool, None] = _Unset, - multiple_of: Union[float, None] = _Unset, - allow_inf_nan: Union[bool, None] = _Unset, - max_digits: Union[int, None] = _Unset, - decimal_places: Union[int, None] = _Unset, - examples: Optional[List[Any]] = None, - example: Annotated[ - Optional[Any], - deprecated( - "Deprecated in OpenAPI 3.1.0 that now uses JSON Schema 2020-12, " - "although still supported. Use examples instead." 
- ), - ] = _Unset, - openapi_examples: Optional[Dict[str, Example]] = None, - deprecated: Optional[bool] = None, - include_in_schema: bool = True, - json_schema_extra: Union[Dict[str, Any], None] = None, - **extra: Any, - ): - super().__init__( - default=default, - default_factory=default_factory, - annotation=annotation, - media_type=media_type, - alias=alias, - alias_priority=alias_priority, - validation_alias=validation_alias, - serialization_alias=serialization_alias, - title=title, - description=description, - gt=gt, - ge=ge, - lt=lt, - le=le, - min_length=min_length, - max_length=max_length, - pattern=pattern, - regex=regex, - discriminator=discriminator, - strict=strict, - multiple_of=multiple_of, - allow_inf_nan=allow_inf_nan, - max_digits=max_digits, - decimal_places=decimal_places, - deprecated=deprecated, - example=example, - examples=examples, - openapi_examples=openapi_examples, - include_in_schema=include_in_schema, - json_schema_extra=json_schema_extra, - **extra, - ) - - -class Depends: - def __init__( - self, dependency: Optional[Callable[..., Any]] = None, *, use_cache: bool = True - ): - self.dependency = dependency - self.use_cache = use_cache - - def __repr__(self) -> str: - attr = getattr(self.dependency, "__name__", type(self.dependency).__name__) - cache = "" if self.use_cache else ", use_cache=False" - return f"{self.__class__.__name__}({attr}{cache})" - - -class Security(Depends): - def __init__( - self, - dependency: Optional[Callable[..., Any]] = None, - *, - scopes: Optional[Sequence[str]] = None, - use_cache: bool = True, - ): - super().__init__(dependency=dependency, use_cache=use_cache) - self.scopes = scopes or [] diff --git a/server/venv/Lib/site-packages/fastapi/py.typed b/server/venv/Lib/site-packages/fastapi/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/fastapi/requests.py b/server/venv/Lib/site-packages/fastapi/requests.py deleted file mode 100644 index d16552c..0000000 --- a/server/venv/Lib/site-packages/fastapi/requests.py +++ /dev/null @@ -1,2 +0,0 @@ -from starlette.requests import HTTPConnection as HTTPConnection # noqa: F401 -from starlette.requests import Request as Request # noqa: F401 diff --git a/server/venv/Lib/site-packages/fastapi/responses.py b/server/venv/Lib/site-packages/fastapi/responses.py deleted file mode 100644 index 6c8db6f..0000000 --- a/server/venv/Lib/site-packages/fastapi/responses.py +++ /dev/null @@ -1,48 +0,0 @@ -from typing import Any - -from starlette.responses import FileResponse as FileResponse # noqa -from starlette.responses import HTMLResponse as HTMLResponse # noqa -from starlette.responses import JSONResponse as JSONResponse # noqa -from starlette.responses import PlainTextResponse as PlainTextResponse # noqa -from starlette.responses import RedirectResponse as RedirectResponse # noqa -from starlette.responses import Response as Response # noqa -from starlette.responses import StreamingResponse as StreamingResponse # noqa - -try: - import ujson -except ImportError: # pragma: nocover - ujson = None # type: ignore - - -try: - import orjson -except ImportError: # pragma: nocover - orjson = None # type: ignore - - -class UJSONResponse(JSONResponse): - """ - JSON response using the high-performance ujson library to serialize data to JSON. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/). 
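
Since `ujson` and `orjson` are optional imports here (falling back to `None`), the response classes below are opt-in. A sketch of selecting one per *path operation*, assuming `ujson` is installed:

```python
from fastapi import FastAPI
from fastapi.responses import UJSONResponse

app = FastAPI()


@app.get("/items/", response_class=UJSONResponse)
async def read_items():
    # The return value is serialized by UJSONResponse.render() below,
    # i.e. by ujson.dumps rather than the default json-based encoder.
    return [{"item_id": "Foo"}]
```
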
- """ - - def render(self, content: Any) -> bytes: - assert ujson is not None, "ujson must be installed to use UJSONResponse" - return ujson.dumps(content, ensure_ascii=False).encode("utf-8") - - -class ORJSONResponse(JSONResponse): - """ - JSON response using the high-performance orjson library to serialize data to JSON. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/). - """ - - def render(self, content: Any) -> bytes: - assert orjson is not None, "orjson must be installed to use ORJSONResponse" - return orjson.dumps( - content, option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SERIALIZE_NUMPY - ) diff --git a/server/venv/Lib/site-packages/fastapi/routing.py b/server/venv/Lib/site-packages/fastapi/routing.py deleted file mode 100644 index e33e1d8..0000000 --- a/server/venv/Lib/site-packages/fastapi/routing.py +++ /dev/null @@ -1,4369 +0,0 @@ -import asyncio -import dataclasses -import email.message -import inspect -import json -from contextlib import AsyncExitStack -from enum import Enum, IntEnum -from typing import ( - Any, - Callable, - Coroutine, - Dict, - List, - Optional, - Sequence, - Set, - Tuple, - Type, - Union, -) - -from fastapi import params -from fastapi._compat import ( - ModelField, - Undefined, - _get_model_config, - _model_dump, - _normalize_errors, - lenient_issubclass, -) -from fastapi.datastructures import Default, DefaultPlaceholder -from fastapi.dependencies.models import Dependant -from fastapi.dependencies.utils import ( - get_body_field, - get_dependant, - get_parameterless_sub_dependant, - get_typed_return_annotation, - solve_dependencies, -) -from fastapi.encoders import jsonable_encoder -from fastapi.exceptions import ( - FastAPIError, - RequestValidationError, - ResponseValidationError, - WebSocketRequestValidationError, -) -from fastapi.types import DecoratedCallable, IncEx -from fastapi.utils import ( - create_cloned_field, - create_response_field, - generate_unique_id, - get_value_or_default, - is_body_allowed_for_status_code, -) -from pydantic import BaseModel -from starlette import routing -from starlette.concurrency import run_in_threadpool -from starlette.exceptions import HTTPException -from starlette.requests import Request -from starlette.responses import JSONResponse, Response -from starlette.routing import ( - BaseRoute, - Match, - compile_path, - get_name, - request_response, - websocket_session, -) -from starlette.routing import Mount as Mount # noqa -from starlette.types import ASGIApp, Lifespan, Scope -from starlette.websockets import WebSocket -from typing_extensions import Annotated, Doc, deprecated # type: ignore [attr-defined] - - -def _prepare_response_content( - res: Any, - *, - exclude_unset: bool, - exclude_defaults: bool = False, - exclude_none: bool = False, -) -> Any: - if isinstance(res, BaseModel): - read_with_orm_mode = getattr(_get_model_config(res), "read_with_orm_mode", None) - if read_with_orm_mode: - # Let from_orm extract the data from this model instead of converting - # it now to a dict. - # Otherwise, there's no way to extract lazy data that requires attribute - # access instead of dict iteration, e.g. lazy relationships. 
- return res - return _model_dump( - res, - by_alias=True, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - elif isinstance(res, list): - return [ - _prepare_response_content( - item, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - for item in res - ] - elif isinstance(res, dict): - return { - k: _prepare_response_content( - v, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - for k, v in res.items() - } - elif dataclasses.is_dataclass(res): - return dataclasses.asdict(res) - return res - - -async def serialize_response( - *, - field: Optional[ModelField] = None, - response_content: Any, - include: Optional[IncEx] = None, - exclude: Optional[IncEx] = None, - by_alias: bool = True, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - is_coroutine: bool = True, -) -> Any: - if field: - errors = [] - if not hasattr(field, "serialize"): - # pydantic v1 - response_content = _prepare_response_content( - response_content, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - if is_coroutine: - value, errors_ = field.validate(response_content, {}, loc=("response",)) - else: - value, errors_ = await run_in_threadpool( - field.validate, response_content, {}, loc=("response",) - ) - if isinstance(errors_, list): - errors.extend(errors_) - elif errors_: - errors.append(errors_) - if errors: - raise ResponseValidationError( - errors=_normalize_errors(errors), body=response_content - ) - - if hasattr(field, "serialize"): - return field.serialize( - value, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - - return jsonable_encoder( - value, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - else: - return jsonable_encoder(response_content) - - -async def run_endpoint_function( - *, dependant: Dependant, values: Dict[str, Any], is_coroutine: bool -) -> Any: - # Only called by get_request_handler. Has been split into its own function to - # facilitate profiling endpoints, since inner functions are harder to profile. 
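- # (The split also keeps the sync/async dispatch below in one place:
- # coroutines are awaited directly, plain functions are sent to a
- # threadpool so they don't block the event loop.)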
- assert dependant.call is not None, "dependant.call must be a function" - - if is_coroutine: - return await dependant.call(**values) - else: - return await run_in_threadpool(dependant.call, **values) - - -def get_request_handler( - dependant: Dependant, - body_field: Optional[ModelField] = None, - status_code: Optional[int] = None, - response_class: Union[Type[Response], DefaultPlaceholder] = Default(JSONResponse), - response_field: Optional[ModelField] = None, - response_model_include: Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - dependency_overrides_provider: Optional[Any] = None, -) -> Callable[[Request], Coroutine[Any, Any, Response]]: - assert dependant.call is not None, "dependant.call must be a function" - is_coroutine = asyncio.iscoroutinefunction(dependant.call) - is_body_form = body_field and isinstance(body_field.field_info, params.Form) - if isinstance(response_class, DefaultPlaceholder): - actual_response_class: Type[Response] = response_class.value - else: - actual_response_class = response_class - - async def app(request: Request) -> Response: - try: - body: Any = None - if body_field: - if is_body_form: - body = await request.form() - stack = request.scope.get("fastapi_astack") - assert isinstance(stack, AsyncExitStack) - stack.push_async_callback(body.close) - else: - body_bytes = await request.body() - if body_bytes: - json_body: Any = Undefined - content_type_value = request.headers.get("content-type") - if not content_type_value: - json_body = await request.json() - else: - message = email.message.Message() - message["content-type"] = content_type_value - if message.get_content_maintype() == "application": - subtype = message.get_content_subtype() - if subtype == "json" or subtype.endswith("+json"): - json_body = await request.json() - if json_body != Undefined: - body = json_body - else: - body = body_bytes - except json.JSONDecodeError as e: - raise RequestValidationError( - [ - { - "type": "json_invalid", - "loc": ("body", e.pos), - "msg": "JSON decode error", - "input": {}, - "ctx": {"error": e.msg}, - } - ], - body=e.doc, - ) from e - except HTTPException: - raise - except Exception as e: - raise HTTPException( - status_code=400, detail="There was an error parsing the body" - ) from e - solved_result = await solve_dependencies( - request=request, - dependant=dependant, - body=body, - dependency_overrides_provider=dependency_overrides_provider, - ) - values, errors, background_tasks, sub_response, _ = solved_result - if errors: - raise RequestValidationError(_normalize_errors(errors), body=body) - else: - raw_response = await run_endpoint_function( - dependant=dependant, values=values, is_coroutine=is_coroutine - ) - - if isinstance(raw_response, Response): - if raw_response.background is None: - raw_response.background = background_tasks - return raw_response - response_args: Dict[str, Any] = {"background": background_tasks} - # If status_code was set, use it, otherwise use the default from the - # response class, in the case of redirect it's 307 - current_status_code = ( - status_code if status_code else sub_response.status_code - ) - if current_status_code is not None: - response_args["status_code"] = current_status_code - if sub_response.status_code: - response_args["status_code"] = sub_response.status_code - content = await serialize_response( - 
field=response_field, - response_content=raw_response, - include=response_model_include, - exclude=response_model_exclude, - by_alias=response_model_by_alias, - exclude_unset=response_model_exclude_unset, - exclude_defaults=response_model_exclude_defaults, - exclude_none=response_model_exclude_none, - is_coroutine=is_coroutine, - ) - response = actual_response_class(content, **response_args) - if not is_body_allowed_for_status_code(response.status_code): - response.body = b"" - response.headers.raw.extend(sub_response.headers.raw) - return response - - return app - - -def get_websocket_app( - dependant: Dependant, dependency_overrides_provider: Optional[Any] = None -) -> Callable[[WebSocket], Coroutine[Any, Any, Any]]: - async def app(websocket: WebSocket) -> None: - solved_result = await solve_dependencies( - request=websocket, - dependant=dependant, - dependency_overrides_provider=dependency_overrides_provider, - ) - values, errors, _, _2, _3 = solved_result - if errors: - raise WebSocketRequestValidationError(_normalize_errors(errors)) - assert dependant.call is not None, "dependant.call must be a function" - await dependant.call(**values) - - return app - - -class APIWebSocketRoute(routing.WebSocketRoute): - def __init__( - self, - path: str, - endpoint: Callable[..., Any], - *, - name: Optional[str] = None, - dependencies: Optional[Sequence[params.Depends]] = None, - dependency_overrides_provider: Optional[Any] = None, - ) -> None: - self.path = path - self.endpoint = endpoint - self.name = get_name(endpoint) if name is None else name - self.dependencies = list(dependencies or []) - self.path_regex, self.path_format, self.param_convertors = compile_path(path) - self.dependant = get_dependant(path=self.path_format, call=self.endpoint) - for depends in self.dependencies[::-1]: - self.dependant.dependencies.insert( - 0, - get_parameterless_sub_dependant(depends=depends, path=self.path_format), - ) - - self.app = websocket_session( - get_websocket_app( - dependant=self.dependant, - dependency_overrides_provider=dependency_overrides_provider, - ) - ) - - def matches(self, scope: Scope) -> Tuple[Match, Scope]: - match, child_scope = super().matches(scope) - if match != Match.NONE: - child_scope["route"] = self - return match, child_scope - - -class APIRoute(routing.Route): - def __init__( - self, - path: str, - endpoint: Callable[..., Any], - *, - response_model: Any = Default(None), - status_code: Optional[int] = None, - tags: Optional[List[Union[str, Enum]]] = None, - dependencies: Optional[Sequence[params.Depends]] = None, - summary: Optional[str] = None, - description: Optional[str] = None, - response_description: str = "Successful Response", - responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, - deprecated: Optional[bool] = None, - name: Optional[str] = None, - methods: Optional[Union[Set[str], List[str]]] = None, - operation_id: Optional[str] = None, - response_model_include: Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - include_in_schema: bool = True, - response_class: Union[Type[Response], DefaultPlaceholder] = Default( - JSONResponse - ), - dependency_overrides_provider: Optional[Any] = None, - callbacks: Optional[List[BaseRoute]] = None, - openapi_extra: Optional[Dict[str, Any]] = None, - generate_unique_id_function: Union[ - Callable[["APIRoute"], str], 
DefaultPlaceholder - ] = Default(generate_unique_id), - ) -> None: - self.path = path - self.endpoint = endpoint - if isinstance(response_model, DefaultPlaceholder): - return_annotation = get_typed_return_annotation(endpoint) - if lenient_issubclass(return_annotation, Response): - response_model = None - else: - response_model = return_annotation - self.response_model = response_model - self.summary = summary - self.response_description = response_description - self.deprecated = deprecated - self.operation_id = operation_id - self.response_model_include = response_model_include - self.response_model_exclude = response_model_exclude - self.response_model_by_alias = response_model_by_alias - self.response_model_exclude_unset = response_model_exclude_unset - self.response_model_exclude_defaults = response_model_exclude_defaults - self.response_model_exclude_none = response_model_exclude_none - self.include_in_schema = include_in_schema - self.response_class = response_class - self.dependency_overrides_provider = dependency_overrides_provider - self.callbacks = callbacks - self.openapi_extra = openapi_extra - self.generate_unique_id_function = generate_unique_id_function - self.tags = tags or [] - self.responses = responses or {} - self.name = get_name(endpoint) if name is None else name - self.path_regex, self.path_format, self.param_convertors = compile_path(path) - if methods is None: - methods = ["GET"] - self.methods: Set[str] = {method.upper() for method in methods} - if isinstance(generate_unique_id_function, DefaultPlaceholder): - current_generate_unique_id: Callable[ - ["APIRoute"], str - ] = generate_unique_id_function.value - else: - current_generate_unique_id = generate_unique_id_function - self.unique_id = self.operation_id or current_generate_unique_id(self) - # normalize enums e.g. http.HTTPStatus - if isinstance(status_code, IntEnum): - status_code = int(status_code) - self.status_code = status_code - if self.response_model: - assert is_body_allowed_for_status_code( - status_code - ), f"Status code {status_code} must not have a response body" - response_name = "Response_" + self.unique_id - self.response_field = create_response_field( - name=response_name, - type_=self.response_model, - mode="serialization", - ) - # Create a clone of the field, so that a Pydantic submodel is not returned - # as is just because it's an instance of a subclass of a more limited class - # e.g. UserInDB (containing hashed_password) could be a subclass of User - # that doesn't have the hashed_password. But because it's a subclass, it - # would pass the validation and be returned as is. - # By being a new field, no inheritance will be passed as is. A new model - # will always be created. 
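- # (Concretely: with response_model=User, returning a UserInDB instance
- # serializes only the fields declared on User, so hashed_password is
- # dropped instead of leaked.)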
- # TODO: remove when deprecating Pydantic v1 - self.secure_cloned_response_field: Optional[ - ModelField - ] = create_cloned_field(self.response_field) - else: - self.response_field = None # type: ignore - self.secure_cloned_response_field = None - self.dependencies = list(dependencies or []) - self.description = description or inspect.cleandoc(self.endpoint.__doc__ or "") - # if a "form feed" character (page break) is found in the description text, - # truncate description text to the content preceding the first "form feed" - self.description = self.description.split("\f")[0].strip() - response_fields = {} - for additional_status_code, response in self.responses.items(): - assert isinstance(response, dict), "An additional response must be a dict" - model = response.get("model") - if model: - assert is_body_allowed_for_status_code( - additional_status_code - ), f"Status code {additional_status_code} must not have a response body" - response_name = f"Response_{additional_status_code}_{self.unique_id}" - response_field = create_response_field(name=response_name, type_=model) - response_fields[additional_status_code] = response_field - if response_fields: - self.response_fields: Dict[Union[int, str], ModelField] = response_fields - else: - self.response_fields = {} - - assert callable(endpoint), "An endpoint must be a callable" - self.dependant = get_dependant(path=self.path_format, call=self.endpoint) - for depends in self.dependencies[::-1]: - self.dependant.dependencies.insert( - 0, - get_parameterless_sub_dependant(depends=depends, path=self.path_format), - ) - self.body_field = get_body_field(dependant=self.dependant, name=self.unique_id) - self.app = request_response(self.get_route_handler()) - - def get_route_handler(self) -> Callable[[Request], Coroutine[Any, Any, Response]]: - return get_request_handler( - dependant=self.dependant, - body_field=self.body_field, - status_code=self.status_code, - response_class=self.response_class, - response_field=self.secure_cloned_response_field, - response_model_include=self.response_model_include, - response_model_exclude=self.response_model_exclude, - response_model_by_alias=self.response_model_by_alias, - response_model_exclude_unset=self.response_model_exclude_unset, - response_model_exclude_defaults=self.response_model_exclude_defaults, - response_model_exclude_none=self.response_model_exclude_none, - dependency_overrides_provider=self.dependency_overrides_provider, - ) - - def matches(self, scope: Scope) -> Tuple[Match, Scope]: - match, child_scope = super().matches(scope) - if match != Match.NONE: - child_scope["route"] = self - return match, child_scope - - -class APIRouter(routing.Router): - """ - `APIRouter` class, used to group *path operations*, for example to structure - an app in multiple files. It would then be included in the `FastAPI` app, or - in another `APIRouter` (ultimately included in the app). - - Read more about it in the - [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/). 
- - ## Example - - ```python - from fastapi import APIRouter, FastAPI - - app = FastAPI() - router = APIRouter() - - - @router.get("/users/", tags=["users"]) - async def read_users(): - return [{"username": "Rick"}, {"username": "Morty"}] - - - app.include_router(router) - ``` - """ - - def __init__( - self, - *, - prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "", - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to all the *path operations* in this - router. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to all the - *path operations* in this router. - - Read more about it in the - [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). - """ - ), - ] = None, - default_response_class: Annotated[ - Type[Response], - Doc( - """ - The default response class to be used. - - Read more in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). - """ - ), - ] = Default(JSONResponse), - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses to be shown in OpenAPI. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). - - And in the - [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - OpenAPI callbacks that should apply to all *path operations* in this - router. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - routes: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - **Note**: you probably shouldn't use this parameter, it is inherited - from Starlette and supported for compatibility. - - In FastAPI, you normally would use the *path operation* decorators, - like: - - * `router.get()` - * `router.post()` - * etc. - - --- - - A list of routes to serve incoming HTTP and WebSocket requests. - """ - ), - deprecated( - """ - You normally wouldn't use this parameter with FastAPI, it is inherited - from Starlette and supported for compatibility. - - In FastAPI, you normally would use the *path operation methods*, - like `router.get()`, `router.post()`, etc. - """ - ), - ] = None, - redirect_slashes: Annotated[ - bool, - Doc( - """ - Whether to detect and redirect slashes in URLs when the client doesn't - use the same format. - """ - ), - ] = True, - default: Annotated[ - Optional[ASGIApp], - Doc( - """ - Default function handler for this router. Used to handle - 404 Not Found errors. 
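
The router-level parameters documented in this signature compose with every route added later (see `add_api_route` below, which merges tags, dependencies, and responses). A typical instantiation, with `get_token_header` as a hypothetical guard dependency:

```python
from fastapi import APIRouter, Depends, Header, HTTPException


async def get_token_header(x_token: str = Header()):
    # Hypothetical check applied to every path operation on this router.
    if x_token != "fake-super-secret-token":
        raise HTTPException(status_code=400, detail="X-Token header invalid")


router = APIRouter(
    prefix="/items",
    tags=["items"],
    dependencies=[Depends(get_token_header)],
    responses={404: {"description": "Not found"}},
)
```
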
- """ - ), - ] = None, - dependency_overrides_provider: Annotated[ - Optional[Any], - Doc( - """ - Only used internally by FastAPI to handle dependency overrides. - - You shouldn't need to use it. It normally points to the `FastAPI` app - object. - """ - ), - ] = None, - route_class: Annotated[ - Type[APIRoute], - Doc( - """ - Custom route (*path operation*) class to be used by this router. - - Read more about it in the - [FastAPI docs for Custom Request and APIRoute class](https://fastapi.tiangolo.com/how-to/custom-request-and-route/#custom-apiroute-class-in-a-router). - """ - ), - ] = APIRoute, - on_startup: Annotated[ - Optional[Sequence[Callable[[], Any]]], - Doc( - """ - A list of startup event handler functions. - - You should instead use the `lifespan` handlers. - - Read more in the [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - on_shutdown: Annotated[ - Optional[Sequence[Callable[[], Any]]], - Doc( - """ - A list of shutdown event handler functions. - - You should instead use the `lifespan` handlers. - - Read more in the - [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - # the generic to Lifespan[AppType] is the type of the top level application - # which the router cannot know statically, so we use typing.Any - lifespan: Annotated[ - Optional[Lifespan[Any]], - Doc( - """ - A `Lifespan` context manager handler. This replaces `startup` and - `shutdown` functions with a single context manager. - - Read more in the - [FastAPI docs for `lifespan`](https://fastapi.tiangolo.com/advanced/events/). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark all *path operations* in this router as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - To include (or not) all the *path operations* in this router in the - generated OpenAPI. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
- """ - ), - ] = Default(generate_unique_id), - ) -> None: - super().__init__( - routes=routes, - redirect_slashes=redirect_slashes, - default=default, - on_startup=on_startup, - on_shutdown=on_shutdown, - lifespan=lifespan, - ) - if prefix: - assert prefix.startswith("/"), "A path prefix must start with '/'" - assert not prefix.endswith( - "/" - ), "A path prefix must not end with '/', as the routes will start with '/'" - self.prefix = prefix - self.tags: List[Union[str, Enum]] = tags or [] - self.dependencies = list(dependencies or []) - self.deprecated = deprecated - self.include_in_schema = include_in_schema - self.responses = responses or {} - self.callbacks = callbacks or [] - self.dependency_overrides_provider = dependency_overrides_provider - self.route_class = route_class - self.default_response_class = default_response_class - self.generate_unique_id_function = generate_unique_id_function - - def route( - self, - path: str, - methods: Optional[List[str]] = None, - name: Optional[str] = None, - include_in_schema: bool = True, - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_route( - path, - func, - methods=methods, - name=name, - include_in_schema=include_in_schema, - ) - return func - - return decorator - - def add_api_route( - self, - path: str, - endpoint: Callable[..., Any], - *, - response_model: Any = Default(None), - status_code: Optional[int] = None, - tags: Optional[List[Union[str, Enum]]] = None, - dependencies: Optional[Sequence[params.Depends]] = None, - summary: Optional[str] = None, - description: Optional[str] = None, - response_description: str = "Successful Response", - responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, - deprecated: Optional[bool] = None, - methods: Optional[Union[Set[str], List[str]]] = None, - operation_id: Optional[str] = None, - response_model_include: Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - include_in_schema: bool = True, - response_class: Union[Type[Response], DefaultPlaceholder] = Default( - JSONResponse - ), - name: Optional[str] = None, - route_class_override: Optional[Type[APIRoute]] = None, - callbacks: Optional[List[BaseRoute]] = None, - openapi_extra: Optional[Dict[str, Any]] = None, - generate_unique_id_function: Union[ - Callable[[APIRoute], str], DefaultPlaceholder - ] = Default(generate_unique_id), - ) -> None: - route_class = route_class_override or self.route_class - responses = responses or {} - combined_responses = {**self.responses, **responses} - current_response_class = get_value_or_default( - response_class, self.default_response_class - ) - current_tags = self.tags.copy() - if tags: - current_tags.extend(tags) - current_dependencies = self.dependencies.copy() - if dependencies: - current_dependencies.extend(dependencies) - current_callbacks = self.callbacks.copy() - if callbacks: - current_callbacks.extend(callbacks) - current_generate_unique_id = get_value_or_default( - generate_unique_id_function, self.generate_unique_id_function - ) - route = route_class( - self.prefix + path, - endpoint=endpoint, - response_model=response_model, - status_code=status_code, - tags=current_tags, - dependencies=current_dependencies, - summary=summary, - description=description, - response_description=response_description, - 
responses=combined_responses, - deprecated=deprecated or self.deprecated, - methods=methods, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema and self.include_in_schema, - response_class=current_response_class, - name=name, - dependency_overrides_provider=self.dependency_overrides_provider, - callbacks=current_callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=current_generate_unique_id, - ) - self.routes.append(route) - - def api_route( - self, - path: str, - *, - response_model: Any = Default(None), - status_code: Optional[int] = None, - tags: Optional[List[Union[str, Enum]]] = None, - dependencies: Optional[Sequence[params.Depends]] = None, - summary: Optional[str] = None, - description: Optional[str] = None, - response_description: str = "Successful Response", - responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None, - deprecated: Optional[bool] = None, - methods: Optional[List[str]] = None, - operation_id: Optional[str] = None, - response_model_include: Optional[IncEx] = None, - response_model_exclude: Optional[IncEx] = None, - response_model_by_alias: bool = True, - response_model_exclude_unset: bool = False, - response_model_exclude_defaults: bool = False, - response_model_exclude_none: bool = False, - include_in_schema: bool = True, - response_class: Type[Response] = Default(JSONResponse), - name: Optional[str] = None, - callbacks: Optional[List[BaseRoute]] = None, - openapi_extra: Optional[Dict[str, Any]] = None, - generate_unique_id_function: Callable[[APIRoute], str] = Default( - generate_unique_id - ), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_api_route( - path, - func, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=methods, - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - return func - - return decorator - - def add_api_websocket_route( - self, - path: str, - endpoint: Callable[..., Any], - name: Optional[str] = None, - *, - dependencies: Optional[Sequence[params.Depends]] = None, - ) -> None: - current_dependencies = self.dependencies.copy() - if dependencies: - current_dependencies.extend(dependencies) - - route = APIWebSocketRoute( - self.prefix + path, - endpoint=endpoint, - name=name, - dependencies=current_dependencies, - dependency_overrides_provider=self.dependency_overrides_provider, - ) - self.routes.append(route) - - def websocket( - self, - path: Annotated[ - str, - Doc( - """ - WebSocket 
path. - """ - ), - ], - name: Annotated[ - Optional[str], - Doc( - """ - A name for the WebSocket. Only used internally. - """ - ), - ] = None, - *, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be used for this - WebSocket. - - Read more about it in the - [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). - """ - ), - ] = None, - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Decorate a WebSocket function. - - Read more about it in the - [FastAPI docs for WebSockets](https://fastapi.tiangolo.com/advanced/websockets/). - - ## Example - - ```python - from fastapi import APIRouter, FastAPI, WebSocket - - app = FastAPI() - router = APIRouter() - - @router.websocket("/ws") - async def websocket_endpoint(websocket: WebSocket): - await websocket.accept() - while True: - data = await websocket.receive_text() - await websocket.send_text(f"Message text was: {data}") - - app.include_router(router) - ``` - """ - - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_api_websocket_route( - path, func, name=name, dependencies=dependencies - ) - return func - - return decorator - - def websocket_route( - self, path: str, name: Union[str, None] = None - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_websocket_route(path, func, name=name) - return func - - return decorator - - def include_router( - self, - router: Annotated["APIRouter", Doc("The `APIRouter` to include.")], - *, - prefix: Annotated[str, Doc("An optional path prefix for the router.")] = "", - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to all the *path operations* in this - router. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to all the - *path operations* in this router. - - Read more about it in the - [FastAPI docs for Bigger Applications - Multiple Files](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies). - """ - ), - ] = None, - default_response_class: Annotated[ - Type[Response], - Doc( - """ - The default response class to be used. - - Read more in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#default-response-class). - """ - ), - ] = Default(JSONResponse), - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses to be shown in OpenAPI. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Additional Responses in OpenAPI](https://fastapi.tiangolo.com/advanced/additional-responses/). - - And in the - [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/#include-an-apirouter-with-a-custom-prefix-tags-responses-and-dependencies).
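A condensed sketch of how the `include_router` parameters documented above combine, loosely following the Bigger Applications tutorial; `get_token_header` and the token value are made up for illustration:

```python
from fastapi import APIRouter, Depends, FastAPI, Header, HTTPException

app = FastAPI()
router = APIRouter()


async def get_token_header(x_token: str = Header(...)):
    # Made-up dependency: reject requests without the expected header.
    if x_token != "fake-super-secret-token":
        raise HTTPException(status_code=400, detail="X-Token header invalid")


@router.get("/")
async def read_items():
    return [{"name": "Plumbus"}]


app.include_router(
    router,
    prefix="/items",  # must start with "/" and not end with "/"
    tags=["items"],
    dependencies=[Depends(get_token_header)],
    responses={404: {"description": "Not found"}},
)
```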
- """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - OpenAPI callbacks that should apply to all *path operations* in this - router. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark all *path operations* in this router as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include (or not) all the *path operations* in this router in the - generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = True, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> None: - """ - Include another `APIRouter` in the same current `APIRouter`. - - Read more about it in the - [FastAPI docs for Bigger Applications](https://fastapi.tiangolo.com/tutorial/bigger-applications/). - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - - app = FastAPI() - internal_router = APIRouter() - users_router = APIRouter() - - @users_router.get("/users/") - def read_users(): - return [{"name": "Rick"}, {"name": "Morty"}] - - internal_router.include_router(users_router) - app.include_router(internal_router) - ``` - """ - if prefix: - assert prefix.startswith("/"), "A path prefix must start with '/'" - assert not prefix.endswith( - "/" - ), "A path prefix must not end with '/', as the routes will start with '/'" - else: - for r in router.routes: - path = getattr(r, "path") # noqa: B009 - name = getattr(r, "name", "unknown") - if path is not None and not path: - raise FastAPIError( - f"Prefix and path cannot be both empty (path operation: {name})" - ) - if responses is None: - responses = {} - for route in router.routes: - if isinstance(route, APIRoute): - combined_responses = {**responses, **route.responses} - use_response_class = get_value_or_default( - route.response_class, - router.default_response_class, - default_response_class, - self.default_response_class, - ) - current_tags = [] - if tags: - current_tags.extend(tags) - if route.tags: - current_tags.extend(route.tags) - current_dependencies: List[params.Depends] = [] - if dependencies: - current_dependencies.extend(dependencies) - if route.dependencies: - current_dependencies.extend(route.dependencies) - current_callbacks = [] - if callbacks: - current_callbacks.extend(callbacks) - if route.callbacks: - current_callbacks.extend(route.callbacks) - current_generate_unique_id = get_value_or_default( - route.generate_unique_id_function, - router.generate_unique_id_function, - generate_unique_id_function, - self.generate_unique_id_function, - ) - self.add_api_route( - prefix + route.path, - route.endpoint, - 
response_model=route.response_model, - status_code=route.status_code, - tags=current_tags, - dependencies=current_dependencies, - summary=route.summary, - description=route.description, - response_description=route.response_description, - responses=combined_responses, - deprecated=route.deprecated or deprecated or self.deprecated, - methods=route.methods, - operation_id=route.operation_id, - response_model_include=route.response_model_include, - response_model_exclude=route.response_model_exclude, - response_model_by_alias=route.response_model_by_alias, - response_model_exclude_unset=route.response_model_exclude_unset, - response_model_exclude_defaults=route.response_model_exclude_defaults, - response_model_exclude_none=route.response_model_exclude_none, - include_in_schema=route.include_in_schema - and self.include_in_schema - and include_in_schema, - response_class=use_response_class, - name=route.name, - route_class_override=type(route), - callbacks=current_callbacks, - openapi_extra=route.openapi_extra, - generate_unique_id_function=current_generate_unique_id, - ) - elif isinstance(route, routing.Route): - methods = list(route.methods or []) - self.add_route( - prefix + route.path, - route.endpoint, - methods=methods, - include_in_schema=route.include_in_schema, - name=route.name, - ) - elif isinstance(route, APIWebSocketRoute): - current_dependencies = [] - if dependencies: - current_dependencies.extend(dependencies) - if route.dependencies: - current_dependencies.extend(route.dependencies) - self.add_api_websocket_route( - prefix + route.path, - route.endpoint, - dependencies=current_dependencies, - name=route.name, - ) - elif isinstance(route, routing.WebSocketRoute): - self.add_websocket_route( - prefix + route.path, route.endpoint, name=route.name - ) - for handler in router.on_startup: - self.add_event_handler("startup", handler) - for handler in router.on_shutdown: - self.add_event_handler("shutdown", handler) - - def get( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). 
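As a concrete illustration of the filtering described above for `response_model` (a sketch following the pattern in the FastAPI Response Model tutorial, not code from this file):

```python
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class UserIn(BaseModel):
    username: str
    password: str


class UserOut(BaseModel):
    username: str


@app.post("/user/", response_model=UserOut)
async def create_user(user: UserIn):
    # The returned object is filtered through UserOut, so `password`
    # never appears in the response JSON sent to the client.
    return user
```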
- """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). 
- """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). 
- """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP GET operation. - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - - app = FastAPI() - router = APIRouter() - - @router.get("/items/") - def read_items(): - return [{"name": "Empanada"}, {"name": "Arepa"}] - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["GET"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def put( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. 
- * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. 
- - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). 
- """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP PUT operation. 
- - ## Example - - ```python - from fastapi import APIRouter, FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - router = APIRouter() - - @router.put("/items/{item_id}") - def replace_item(item_id: str, item: Item): - return {"message": "Item replaced", "id": item_id} - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["PUT"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def post( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
- """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. 
- - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). 
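For the `callbacks` parameter above, a condensed sketch along the lines of the FastAPI OpenAPI Callbacks guide (the invoice models are invented for illustration; the callback *path operation* is documentation-only and is never executed):

```python
from typing import Optional

from fastapi import APIRouter, FastAPI
from pydantic import BaseModel

app = FastAPI()


class Invoice(BaseModel):
    id: str
    amount: float


class InvoiceEvent(BaseModel):
    description: str
    paid: bool


invoices_callback_router = APIRouter()


# Declares the request your API would send back to `callback_url`;
# it exists only so it shows up in the OpenAPI schema.
@invoices_callback_router.post("{$callback_url}/invoices/{$request.body.id}")
def invoice_notification(body: InvoiceEvent):
    pass


@app.post("/invoices/", callbacks=invoices_callback_router.routes)
def create_invoice(invoice: Invoice, callback_url: Optional[str] = None):
    return {"msg": "Invoice received"}
```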
- """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP POST operation. - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - router = APIRouter() - - @router.post("/items/") - def create_item(item: Item): - return {"message": "Item created"} - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["POST"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def delete( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. 
But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
- """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). 
- """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP DELETE operation. - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - - app = FastAPI() - router = APIRouter() - - @router.delete("/items/{item_id}") - def delete_item(item_id: str): - return {"message": "Item deleted"} - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["DELETE"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def options( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. 
So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. 
- - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. 
- - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP OPTIONS operation. 
- - ## Example - - ```python - from fastapi import APIRouter, FastAPI - - app = FastAPI() - router = APIRouter() - - @router.options("/items/") - def get_item_options(): - return {"additions": ["Aji", "Guacamole"]} - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["OPTIONS"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def head( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). 
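[Editor's note: a quick sketch of the `tags` parameter described above; the tag and paths are made up for illustration.]

```python
from fastapi import FastAPI

app = FastAPI()


# Both operations are grouped under the "items" tag in the OpenAPI UI at /docs.
@app.get("/items/", tags=["items"])
def read_items():
    return [{"name": "Foo"}]


@app.head("/items/", tags=["items"])
def head_items():
    return None
```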
- """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. 
- - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). 
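[Editor's note: a rough sketch of the `callbacks` parameter described above, mirroring the pattern from the OpenAPI Callbacks docs; the paths and names are illustrative only.]

```python
from fastapi import APIRouter, FastAPI
from pydantic import HttpUrl

app = FastAPI()
callback_router = APIRouter()


# Documents the request the *external* service is expected to receive; it is
# never executed by this application, only rendered into the OpenAPI schema.
@callback_router.post("{$callback_url}/events")
def receive_event():
    pass


@app.post("/subscribe", callbacks=callback_router.routes)
def subscribe(callback_url: HttpUrl):
    return {"status": "subscribed"}
```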
- """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP HEAD operation. - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - router = APIRouter() - - @router.head("/items/", status_code=204) - def get_items_headers(response: Response): - response.headers["X-Cat-Dog"] = "Alone in the world" - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["HEAD"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def patch( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. - - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. 
But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). 
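[Editor's note: a one-liner sketch of a hand-written `operation_id`, per the paragraph above; the ID shown is arbitrary and must stay unique across the whole API.]

```python
from fastapi import FastAPI

app = FastAPI()


# Generated clients will expose this operation as "update_item_partial"
# instead of an auto-generated identifier.
@app.patch("/items/{item_id}", operation_id="update_item_partial")
def update_item(item_id: str):
    return {"item_id": item_id}
```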
- """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). 
- """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP PATCH operation. - - ## Example - - ```python - from fastapi import APIRouter, FastAPI - from pydantic import BaseModel - - class Item(BaseModel): - name: str - description: str | None = None - - app = FastAPI() - router = APIRouter() - - @router.patch("/items/") - def update_item(item: Item): - return {"message": "Item updated in place"} - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["PATCH"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - def trace( - self, - path: Annotated[ - str, - Doc( - """ - The URL path to be used for this *path operation*. - - For example, in `http://example.com/items`, the path is `/items`. - """ - ), - ], - *, - response_model: Annotated[ - Any, - Doc( - """ - The type to use for the response. 
- - It could be any valid Pydantic *field* type. So, it doesn't have to - be a Pydantic model, it could be other things, like a `list`, `dict`, - etc. - - It will be used for: - - * Documentation: the generated OpenAPI (and the UI at `/docs`) will - show it as the response (JSON Schema). - * Serialization: you could return an arbitrary object and the - `response_model` would be used to serialize that object into the - corresponding JSON. - * Filtering: the JSON sent to the client will only contain the data - (fields) defined in the `response_model`. If you returned an object - that contains an attribute `password` but the `response_model` does - not include that field, the JSON sent to the client would not have - that `password`. - * Validation: whatever you return will be serialized with the - `response_model`, converting any data as necessary to generate the - corresponding JSON. But if the data in the object returned is not - valid, that would mean a violation of the contract with the client, - so it's an error from the API developer. So, FastAPI will raise an - error and return a 500 error code (Internal Server Error). - - Read more about it in the - [FastAPI docs for Response Model](https://fastapi.tiangolo.com/tutorial/response-model/). - """ - ), - ] = Default(None), - status_code: Annotated[ - Optional[int], - Doc( - """ - The default status code to be used for the response. - - You could override the status code by returning a response directly. - - Read more about it in the - [FastAPI docs for Response Status Code](https://fastapi.tiangolo.com/tutorial/response-status-code/). - """ - ), - ] = None, - tags: Annotated[ - Optional[List[Union[str, Enum]]], - Doc( - """ - A list of tags to be applied to the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/#tags). - """ - ), - ] = None, - dependencies: Annotated[ - Optional[Sequence[params.Depends]], - Doc( - """ - A list of dependencies (using `Depends()`) to be applied to the - *path operation*. - - Read more about it in the - [FastAPI docs for Dependencies in path operation decorators](https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-in-path-operation-decorators/). - """ - ), - ] = None, - summary: Annotated[ - Optional[str], - Doc( - """ - A summary for the *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - A description for the *path operation*. - - If not provided, it will be extracted automatically from the docstring - of the *path operation function*. - - It can contain Markdown. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Path Operation Configuration](https://fastapi.tiangolo.com/tutorial/path-operation-configuration/). - """ - ), - ] = None, - response_description: Annotated[ - str, - Doc( - """ - The description for the default response. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). 
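[Editor's note: a small sketch of how `summary`, the docstring-derived `description`, and `response_description` interact, per the parameter docs above; path and text are illustrative.]

```python
from fastapi import FastAPI

app = FastAPI()


@app.get(
    "/items/{item_id}",
    summary="Read a single item",
    response_description="The requested item",
)
def read_item(item_id: str):
    """
    Fetch one item by its ID.

    Because no explicit `description` is passed, this docstring (Markdown
    allowed) becomes the operation description in the OpenAPI schema.
    """
    return {"item_id": item_id}
```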
- """ - ), - ] = "Successful Response", - responses: Annotated[ - Optional[Dict[Union[int, str], Dict[str, Any]]], - Doc( - """ - Additional responses that could be returned by this *path operation*. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - deprecated: Annotated[ - Optional[bool], - Doc( - """ - Mark this *path operation* as deprecated. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - operation_id: Annotated[ - Optional[str], - Doc( - """ - Custom operation ID to be used by this *path operation*. - - By default, it is generated automatically. - - If you provide a custom operation ID, you need to make sure it is - unique for the whole API. - - You can customize the - operation ID generation with the parameter - `generate_unique_id_function` in the `FastAPI` class. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = None, - response_model_include: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to include only certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_exclude: Annotated[ - Optional[IncEx], - Doc( - """ - Configuration passed to Pydantic to exclude certain fields in the - response data. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = None, - response_model_by_alias: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response model - should be serialized by alias when an alias is used. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_include-and-response_model_exclude). - """ - ), - ] = True, - response_model_exclude_unset: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that were not set and - have their default values. This is different from - `response_model_exclude_defaults` in that if the fields are set, - they will be included in the response, even if the value is the same - as the default. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). - """ - ), - ] = False, - response_model_exclude_defaults: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data - should have all the fields, including the ones that have the same value - as the default. This is different from `response_model_exclude_unset` - in that if the fields are set but contain the same default values, - they will be excluded from the response. - - When `True`, default values are omitted from the response. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#use-the-response_model_exclude_unset-parameter). 
- """ - ), - ] = False, - response_model_exclude_none: Annotated[ - bool, - Doc( - """ - Configuration passed to Pydantic to define if the response data should - exclude fields set to `None`. - - This is much simpler (less smart) than `response_model_exclude_unset` - and `response_model_exclude_defaults`. You probably want to use one of - those two instead of this one, as those allow returning `None` values - when it makes sense. - - Read more about it in the - [FastAPI docs for Response Model - Return Type](https://fastapi.tiangolo.com/tutorial/response-model/#response_model_exclude_none). - """ - ), - ] = False, - include_in_schema: Annotated[ - bool, - Doc( - """ - Include this *path operation* in the generated OpenAPI schema. - - This affects the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for Query Parameters and String Validations](https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#exclude-from-openapi). - """ - ), - ] = True, - response_class: Annotated[ - Type[Response], - Doc( - """ - Response class to be used for this *path operation*. - - This will not be used if you return a response directly. - - Read more about it in the - [FastAPI docs for Custom Response - HTML, Stream, File, others](https://fastapi.tiangolo.com/advanced/custom-response/#redirectresponse). - """ - ), - ] = Default(JSONResponse), - name: Annotated[ - Optional[str], - Doc( - """ - Name for this *path operation*. Only used internally. - """ - ), - ] = None, - callbacks: Annotated[ - Optional[List[BaseRoute]], - Doc( - """ - List of *path operations* that will be used as OpenAPI callbacks. - - This is only for OpenAPI documentation, the callbacks won't be used - directly. - - It will be added to the generated OpenAPI (e.g. visible at `/docs`). - - Read more about it in the - [FastAPI docs for OpenAPI Callbacks](https://fastapi.tiangolo.com/advanced/openapi-callbacks/). - """ - ), - ] = None, - openapi_extra: Annotated[ - Optional[Dict[str, Any]], - Doc( - """ - Extra metadata to be included in the OpenAPI schema for this *path - operation*. - - Read more about it in the - [FastAPI docs for Path Operation Advanced Configuration](https://fastapi.tiangolo.com/advanced/path-operation-advanced-configuration/#custom-openapi-path-operation-schema). - """ - ), - ] = None, - generate_unique_id_function: Annotated[ - Callable[[APIRoute], str], - Doc( - """ - Customize the function used to generate unique IDs for the *path - operations* shown in the generated OpenAPI. - - This is particularly useful when automatically generating clients or - SDKs for your API. - - Read more about it in the - [FastAPI docs about how to Generate Clients](https://fastapi.tiangolo.com/advanced/generate-clients/#custom-generate-unique-id-function). - """ - ), - ] = Default(generate_unique_id), - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add a *path operation* using an HTTP TRACE operation. 
- - ## Example - - ```python - from fastapi import APIRouter, FastAPI - - app = FastAPI() - router = APIRouter() - - @router.trace("/items/{item_id}") - def trace_item(item_id: str): - return None - - app.include_router(router) - ``` - """ - return self.api_route( - path=path, - response_model=response_model, - status_code=status_code, - tags=tags, - dependencies=dependencies, - summary=summary, - description=description, - response_description=response_description, - responses=responses, - deprecated=deprecated, - methods=["TRACE"], - operation_id=operation_id, - response_model_include=response_model_include, - response_model_exclude=response_model_exclude, - response_model_by_alias=response_model_by_alias, - response_model_exclude_unset=response_model_exclude_unset, - response_model_exclude_defaults=response_model_exclude_defaults, - response_model_exclude_none=response_model_exclude_none, - include_in_schema=include_in_schema, - response_class=response_class, - name=name, - callbacks=callbacks, - openapi_extra=openapi_extra, - generate_unique_id_function=generate_unique_id_function, - ) - - @deprecated( - """ - on_event is deprecated, use lifespan event handlers instead. - - Read more about it in the - [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/). - """ - ) - def on_event( - self, - event_type: Annotated[ - str, - Doc( - """ - The type of event. `startup` or `shutdown`. - """ - ), - ], - ) -> Callable[[DecoratedCallable], DecoratedCallable]: - """ - Add an event handler for the router. - - `on_event` is deprecated, use `lifespan` event handlers instead. - - Read more about it in the - [FastAPI docs for Lifespan Events](https://fastapi.tiangolo.com/advanced/events/#alternative-events-deprecated). 
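[Editor's note: since `on_event` is deprecated in favor of lifespan handlers, a minimal sketch of the replacement pattern from the Lifespan Events docs may help; the startup/shutdown bodies are placeholders.]

```python
from contextlib import asynccontextmanager

from fastapi import FastAPI


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Code before the yield runs once, before the app starts serving requests.
    print("starting up")
    yield
    # Code after the yield runs once, during shutdown.
    print("shutting down")


app = FastAPI(lifespan=lifespan)
```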
- """ - - def decorator(func: DecoratedCallable) -> DecoratedCallable: - self.add_event_handler(event_type, func) - return func - - return decorator diff --git a/server/venv/Lib/site-packages/fastapi/security/__init__.py b/server/venv/Lib/site-packages/fastapi/security/__init__.py deleted file mode 100644 index 3aa6bf2..0000000 --- a/server/venv/Lib/site-packages/fastapi/security/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from .api_key import APIKeyCookie as APIKeyCookie -from .api_key import APIKeyHeader as APIKeyHeader -from .api_key import APIKeyQuery as APIKeyQuery -from .http import HTTPAuthorizationCredentials as HTTPAuthorizationCredentials -from .http import HTTPBasic as HTTPBasic -from .http import HTTPBasicCredentials as HTTPBasicCredentials -from .http import HTTPBearer as HTTPBearer -from .http import HTTPDigest as HTTPDigest -from .oauth2 import OAuth2 as OAuth2 -from .oauth2 import OAuth2AuthorizationCodeBearer as OAuth2AuthorizationCodeBearer -from .oauth2 import OAuth2PasswordBearer as OAuth2PasswordBearer -from .oauth2 import OAuth2PasswordRequestForm as OAuth2PasswordRequestForm -from .oauth2 import OAuth2PasswordRequestFormStrict as OAuth2PasswordRequestFormStrict -from .oauth2 import SecurityScopes as SecurityScopes -from .open_id_connect_url import OpenIdConnect as OpenIdConnect diff --git a/server/venv/Lib/site-packages/fastapi/security/api_key.py b/server/venv/Lib/site-packages/fastapi/security/api_key.py deleted file mode 100644 index b1a6b4f..0000000 --- a/server/venv/Lib/site-packages/fastapi/security/api_key.py +++ /dev/null @@ -1,301 +0,0 @@ -from typing import Optional - -from fastapi.openapi.models import APIKey, APIKeyIn -from fastapi.security.base import SecurityBase -from starlette.exceptions import HTTPException -from starlette.requests import Request -from starlette.status import HTTP_403_FORBIDDEN -from typing_extensions import Annotated, Doc # type: ignore [attr-defined] - - -class APIKeyBase(SecurityBase): - pass - - -class APIKeyQuery(APIKeyBase): - """ - API key authentication using a query parameter. - - This defines the name of the query parameter that should be provided in the request - with the API key and integrates that into the OpenAPI documentation. It extracts - the key value sent in the query parameter automatically and provides it as the - dependency result. But it doesn't define how to send that API key to the client. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be a string containing the key value. - - ## Example - - ```python - from fastapi import Depends, FastAPI - from fastapi.security import APIKeyQuery - - app = FastAPI() - - query_scheme = APIKeyQuery(name="api_key") - - - @app.get("/items/") - async def read_items(api_key: str = Depends(query_scheme)): - return {"api_key": api_key} - ``` - """ - - def __init__( - self, - *, - name: Annotated[ - str, - Doc("Query parameter name."), - ], - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the query parameter is not provided, `APIKeyQuery` will - automatically cancel the request and sebd the client an error. 
- - If `auto_error` is set to `False`, when the query parameter is not - available, instead of erroring out, the dependency result will be - `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in a query - parameter or in an HTTP Bearer token). - """ - ), - ] = True, - ): - self.model: APIKey = APIKey( - **{"in": APIKeyIn.query}, # type: ignore[arg-type] - name=name, - description=description, - ) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__(self, request: Request) -> Optional[str]: - api_key = request.query_params.get(self.model.name) - if not api_key: - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - return api_key - - -class APIKeyHeader(APIKeyBase): - """ - API key authentication using a header. - - This defines the name of the header that should be provided in the request with - the API key and integrates that into the OpenAPI documentation. It extracts - the key value sent in the header automatically and provides it as the dependency - result. But it doesn't define how to send that key to the client. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be a string containing the key value. - - ## Example - - ```python - from fastapi import Depends, FastAPI - from fastapi.security import APIKeyHeader - - app = FastAPI() - - header_scheme = APIKeyHeader(name="x-key") - - - @app.get("/items/") - async def read_items(key: str = Depends(header_scheme)): - return {"key": key} - ``` - """ - - def __init__( - self, - *, - name: Annotated[str, Doc("Header name.")], - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the header is not provided, `APIKeyHeader` will - automatically cancel the request and send the client an error. - - If `auto_error` is set to `False`, when the header is not available, - instead of erroring out, the dependency result will be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in a header or - in an HTTP Bearer token). - """ - ), - ] = True, - ): - self.model: APIKey = APIKey( - **{"in": APIKeyIn.header}, # type: ignore[arg-type] - name=name, - description=description, - ) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__(self, request: Request) -> Optional[str]: - api_key = request.headers.get(self.model.name) - if not api_key: - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - return api_key - - -class APIKeyCookie(APIKeyBase): - """ - API key authentication using a cookie. - - This defines the name of the cookie that should be provided in the request with - the API key and integrates that into the OpenAPI documentation. 
It extracts - the key value sent in the cookie automatically and provides it as the dependency - result. But it doesn't define how to set that cookie. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be a string containing the key value. - - ## Example - - ```python - from fastapi import Depends, FastAPI - from fastapi.security import APIKeyCookie - - app = FastAPI() - - cookie_scheme = APIKeyCookie(name="session") - - - @app.get("/items/") - async def read_items(session: str = Depends(cookie_scheme)): - return {"session": session} - ``` - """ - - def __init__( - self, - *, - name: Annotated[str, Doc("Cookie name.")], - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the cookie is not provided, `APIKeyCookie` will - automatically cancel the request and send the client an error. - - If `auto_error` is set to `False`, when the cookie is not available, - instead of erroring out, the dependency result will be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in a cookie or - in an HTTP Bearer token). - """ - ), - ] = True, - ): - self.model: APIKey = APIKey( - **{"in": APIKeyIn.cookie}, # type: ignore[arg-type] - name=name, - description=description, - ) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__(self, request: Request) -> Optional[str]: - api_key = request.cookies.get(self.model.name) - if not api_key: - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - return api_key diff --git a/server/venv/Lib/site-packages/fastapi/security/base.py b/server/venv/Lib/site-packages/fastapi/security/base.py deleted file mode 100644 index c43555d..0000000 --- a/server/venv/Lib/site-packages/fastapi/security/base.py +++ /dev/null @@ -1,6 +0,0 @@ -from fastapi.openapi.models import SecurityBase as SecurityBaseModel - - -class SecurityBase: - model: SecurityBaseModel - scheme_name: str diff --git a/server/venv/Lib/site-packages/fastapi/security/http.py b/server/venv/Lib/site-packages/fastapi/security/http.py deleted file mode 100644 index 3627777..0000000 --- a/server/venv/Lib/site-packages/fastapi/security/http.py +++ /dev/null @@ -1,420 +0,0 @@ -import binascii -from base64 import b64decode -from typing import Optional - -from fastapi.exceptions import HTTPException -from fastapi.openapi.models import HTTPBase as HTTPBaseModel -from fastapi.openapi.models import HTTPBearer as HTTPBearerModel -from fastapi.security.base import SecurityBase -from fastapi.security.utils import get_authorization_scheme_param -from pydantic import BaseModel -from starlette.requests import Request -from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN -from typing_extensions import Annotated, Doc # type: ignore [attr-defined] - - -class HTTPBasicCredentials(BaseModel): - """ - The HTTP Basic credentials given as the result of using `HTTPBasic` in a - dependency.
- - Read more about it in the - [FastAPI docs for HTTP Basic Auth](https://fastapi.tiangolo.com/advanced/security/http-basic-auth/). - """ - - username: Annotated[str, Doc("The HTTP Basic username.")] - password: Annotated[str, Doc("The HTTP Basic password.")] - - -class HTTPAuthorizationCredentials(BaseModel): - """ - The HTTP authorization credentials in the result of using `HTTPBearer` or - `HTTPDigest` in a dependency. - - The HTTP authorization header value is split by the first space. - - The first part is the `scheme`, the second part is the `credentials`. - - For example, in an HTTP Bearer token scheme, the client will send a header - like: - - ``` - Authorization: Bearer deadbeef12346 - ``` - - In this case: - - * `scheme` will have the value `"Bearer"` - * `credentials` will have the value `"deadbeef12346"` - """ - - scheme: Annotated[ - str, - Doc( - """ - The HTTP authorization scheme extracted from the header value. - """ - ), - ] - credentials: Annotated[ - str, - Doc( - """ - The HTTP authorization credentials extracted from the header value. - """ - ), - ] - - -class HTTPBase(SecurityBase): - def __init__( - self, - *, - scheme: str, - scheme_name: Optional[str] = None, - description: Optional[str] = None, - auto_error: bool = True, - ): - self.model = HTTPBaseModel(scheme=scheme, description=description) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__( - self, request: Request - ) -> Optional[HTTPAuthorizationCredentials]: - authorization = request.headers.get("Authorization") - scheme, credentials = get_authorization_scheme_param(authorization) - if not (authorization and scheme and credentials): - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) - - -class HTTPBasic(HTTPBase): - """ - HTTP Basic authentication. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be an `HTTPBasicCredentials` object containing the - `username` and the `password`. - - Read more about it in the - [FastAPI docs for HTTP Basic Auth](https://fastapi.tiangolo.com/advanced/security/http-basic-auth/). - - ## Example - - ```python - from typing import Annotated - - from fastapi import Depends, FastAPI - from fastapi.security import HTTPBasic, HTTPBasicCredentials - - app = FastAPI() - - security = HTTPBasic() - - - @app.get("/users/me") - def read_current_user(credentials: Annotated[HTTPBasicCredentials, Depends(security)]): - return {"username": credentials.username, "password": credentials.password} - ``` - """ - - def __init__( - self, - *, - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - realm: Annotated[ - Optional[str], - Doc( - """ - HTTP Basic authentication realm. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the HTTP Basic authentication is not provided (a - header), `HTTPBasic` will automatically cancel the request and send the - client an error. 
- - If `auto_error` is set to `False`, when the HTTP Basic authentication - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in HTTP Basic - authentication or in an HTTP Bearer token). - """ - ), - ] = True, - ): - self.model = HTTPBaseModel(scheme="basic", description=description) - self.scheme_name = scheme_name or self.__class__.__name__ - self.realm = realm - self.auto_error = auto_error - - async def __call__( # type: ignore - self, request: Request - ) -> Optional[HTTPBasicCredentials]: - authorization = request.headers.get("Authorization") - scheme, param = get_authorization_scheme_param(authorization) - if self.realm: - unauthorized_headers = {"WWW-Authenticate": f'Basic realm="{self.realm}"'} - else: - unauthorized_headers = {"WWW-Authenticate": "Basic"} - if not authorization or scheme.lower() != "basic": - if self.auto_error: - raise HTTPException( - status_code=HTTP_401_UNAUTHORIZED, - detail="Not authenticated", - headers=unauthorized_headers, - ) - else: - return None - invalid_user_credentials_exc = HTTPException( - status_code=HTTP_401_UNAUTHORIZED, - detail="Invalid authentication credentials", - headers=unauthorized_headers, - ) - try: - data = b64decode(param).decode("ascii") - except (ValueError, UnicodeDecodeError, binascii.Error): - raise invalid_user_credentials_exc - username, separator, password = data.partition(":") - if not separator: - raise invalid_user_credentials_exc - return HTTPBasicCredentials(username=username, password=password) - - -class HTTPBearer(HTTPBase): - """ - HTTP Bearer token authentication. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be an `HTTPAuthorizationCredentials` object containing - the `scheme` and the `credentials`. - - ## Example - - ```python - from typing import Annotated - - from fastapi import Depends, FastAPI - from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer - - app = FastAPI() - - security = HTTPBearer() - - - @app.get("/users/me") - def read_current_user( - credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)] - ): - return {"scheme": credentials.scheme, "credentials": credentials.credentials} - ``` - """ - - def __init__( - self, - *, - bearerFormat: Annotated[Optional[str], Doc("Bearer token format.")] = None, - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the HTTP Bearer token is not provided (in an - `Authorization` header), `HTTPBearer` will automatically cancel the - request and send the client an error. - - If `auto_error` is set to `False`, when the HTTP Bearer token - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in an HTTP - Bearer token or in a cookie).
- """ - ), - ] = True, - ): - self.model = HTTPBearerModel(bearerFormat=bearerFormat, description=description) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__( - self, request: Request - ) -> Optional[HTTPAuthorizationCredentials]: - authorization = request.headers.get("Authorization") - scheme, credentials = get_authorization_scheme_param(authorization) - if not (authorization and scheme and credentials): - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - if scheme.lower() != "bearer": - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, - detail="Invalid authentication credentials", - ) - else: - return None - return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) - - -class HTTPDigest(HTTPBase): - """ - HTTP Digest authentication. - - ## Usage - - Create an instance object and use that object as the dependency in `Depends()`. - - The dependency result will be an `HTTPAuthorizationCredentials` object containing - the `scheme` and the `credentials`. - - ## Example - - ```python - from typing import Annotated - - from fastapi import Depends, FastAPI - from fastapi.security import HTTPAuthorizationCredentials, HTTPDigest - - app = FastAPI() - - security = HTTPDigest() - - - @app.get("/users/me") - def read_current_user( - credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)] - ): - return {"scheme": credentials.scheme, "credentials": credentials.credentials} - ``` - """ - - def __init__( - self, - *, - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if the HTTP Digest not provided, `HTTPDigest` will - automatically cancel the request and send the client an error. - - If `auto_error` is set to `False`, when the HTTP Digest is not - available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, in HTTP - Digest or in a cookie). 
- """ - ), - ] = True, - ): - self.model = HTTPBaseModel(scheme="digest", description=description) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__( - self, request: Request - ) -> Optional[HTTPAuthorizationCredentials]: - authorization = request.headers.get("Authorization") - scheme, credentials = get_authorization_scheme_param(authorization) - if not (authorization and scheme and credentials): - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - if scheme.lower() != "digest": - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, - detail="Invalid authentication credentials", - ) - return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials) diff --git a/server/venv/Lib/site-packages/fastapi/security/oauth2.py b/server/venv/Lib/site-packages/fastapi/security/oauth2.py deleted file mode 100644 index d427783..0000000 --- a/server/venv/Lib/site-packages/fastapi/security/oauth2.py +++ /dev/null @@ -1,640 +0,0 @@ -from typing import Any, Dict, List, Optional, Union, cast - -from fastapi.exceptions import HTTPException -from fastapi.openapi.models import OAuth2 as OAuth2Model -from fastapi.openapi.models import OAuthFlows as OAuthFlowsModel -from fastapi.param_functions import Form -from fastapi.security.base import SecurityBase -from fastapi.security.utils import get_authorization_scheme_param -from starlette.requests import Request -from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN - -# TODO: import from typing when deprecating Python 3.9 -from typing_extensions import Annotated, Doc # type: ignore [attr-defined] - - -class OAuth2PasswordRequestForm: - """ - This is a dependency class to collect the `username` and `password` as form data - for an OAuth2 password flow. - - The OAuth2 specification dictates that for a password flow the data should be - collected using form data (instead of JSON) and that it should have the specific - fields `username` and `password`. - - All the initialization parameters are extracted from the request. - - Read more about it in the - [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/). - - ## Example - - ```python - from typing import Annotated - - from fastapi import Depends, FastAPI - from fastapi.security import OAuth2PasswordRequestForm - - app = FastAPI() - - - @app.post("/login") - def login(form_data: Annotated[OAuth2PasswordRequestForm, Depends()]): - data = {} - data["scopes"] = [] - for scope in form_data.scopes: - data["scopes"].append(scope) - if form_data.client_id: - data["client_id"] = form_data.client_id - if form_data.client_secret: - data["client_secret"] = form_data.client_secret - return data - ``` - - Note that for OAuth2 the scope `items:read` is a single scope in an opaque string. - You could have custom internal logic to separate it by colon caracters (`:`) or - similar, and get the two parts `items` and `read`. Many applications do that to - group and organize permisions, you could do it as well in your application, just - know that that it is application specific, it's not part of the specification. - """ - - def __init__( - self, - *, - grant_type: Annotated[ - Union[str, None], - Form(pattern="password"), - Doc( - """ - The OAuth2 spec says it is required and MUST be the fixed string - "password". Nevertheless, this dependency class is permissive and - allows not passing it. 
If you want to enforce it, use instead the - `OAuth2PasswordRequestFormStrict` dependency. - """ - ), - ] = None, - username: Annotated[ - str, - Form(), - Doc( - """ - `username` string. The OAuth2 spec requires the exact field name - `username`. - """ - ), - ], - password: Annotated[ - str, - Form(), - Doc( - """ - `password` string. The OAuth2 spec requires the exact field name - `password`. - """ - ), - ], - scope: Annotated[ - str, - Form(), - Doc( - """ - A single string that actually contains several scopes separated by spaces. Each - scope is also a string. - - For example, a single string with: - - ```python - "items:read items:write users:read profile openid" - ``` - - would represent the scopes: - - * `items:read` - * `items:write` - * `users:read` - * `profile` - * `openid` - """ - ), - ] = "", - client_id: Annotated[ - Union[str, None], - Form(), - Doc( - """ - If there's a `client_id`, it can be sent as part of the form fields. - But the OAuth2 specification recommends sending the `client_id` and - `client_secret` (if any) using HTTP Basic auth. - """ - ), - ] = None, - client_secret: Annotated[ - Union[str, None], - Form(), - Doc( - """ - If there's a `client_secret` (and a `client_id`), they can be sent - as part of the form fields. But the OAuth2 specification recommends - sending the `client_id` and `client_secret` (if any) using HTTP Basic - auth. - """ - ), - ] = None, - ): - self.grant_type = grant_type - self.username = username - self.password = password - self.scopes = scope.split() - self.client_id = client_id - self.client_secret = client_secret - - -class OAuth2PasswordRequestFormStrict(OAuth2PasswordRequestForm): - """ - This is a dependency class to collect the `username` and `password` as form data - for an OAuth2 password flow. - - The OAuth2 specification dictates that for a password flow the data should be - collected using form data (instead of JSON) and that it should have the specific - fields `username` and `password`. - - All the initialization parameters are extracted from the request. - - The only difference between `OAuth2PasswordRequestFormStrict` and - `OAuth2PasswordRequestForm` is that `OAuth2PasswordRequestFormStrict` requires the - client to send the form field `grant_type` with the value `"password"`, which - is required in the OAuth2 specification (seemingly for no particular reason), - while for `OAuth2PasswordRequestForm` `grant_type` is optional. - - Read more about it in the - [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/). - - ## Example - - ```python - from typing import Annotated - - from fastapi import Depends, FastAPI - from fastapi.security import OAuth2PasswordRequestFormStrict - - app = FastAPI() - - - @app.post("/login") - def login(form_data: Annotated[OAuth2PasswordRequestFormStrict, Depends()]): - data = {} - data["scopes"] = [] - for scope in form_data.scopes: - data["scopes"].append(scope) - if form_data.client_id: - data["client_id"] = form_data.client_id - if form_data.client_secret: - data["client_secret"] = form_data.client_secret - return data - ``` - - Note that for OAuth2 the scope `items:read` is a single scope in an opaque string. - You could have custom internal logic to separate it by colon characters (`:`) or - similar, and get the two parts `items` and `read`.
Many applications do that to - group and organize permissions; you could do the same in your application, just - know that it is application specific and not part of the specification. - - - grant_type: the OAuth2 spec says it is required and MUST be the fixed string "password". - This dependency is strict about it. If you want to be permissive, use instead the - OAuth2PasswordRequestForm dependency class. - username: username string. The OAuth2 spec requires the exact field name "username". - password: password string. The OAuth2 spec requires the exact field name "password". - scope: Optional string. Several scopes (each one a string) separated by spaces. E.g. - "items:read items:write users:read profile openid" - client_id: optional string. OAuth2 recommends sending the client_id and client_secret (if any) - using HTTP Basic auth, as: client_id:client_secret - client_secret: optional string. OAuth2 recommends sending the client_id and client_secret (if any) - using HTTP Basic auth, as: client_id:client_secret - """ - - def __init__( - self, - grant_type: Annotated[ - str, - Form(pattern="password"), - Doc( - """ - The OAuth2 spec says it is required and MUST be the fixed string - "password". This dependency is strict about it. If you want to be - permissive, use instead the `OAuth2PasswordRequestForm` dependency - class. - """ - ), - ], - username: Annotated[ - str, - Form(), - Doc( - """ - `username` string. The OAuth2 spec requires the exact field name - `username`. - """ - ), - ], - password: Annotated[ - str, - Form(), - Doc( - """ - `password` string. The OAuth2 spec requires the exact field name - `password`. - """ - ), - ], - scope: Annotated[ - str, - Form(), - Doc( - """ - A single string that actually contains several scopes separated by spaces. Each - scope is also a string. - - For example, a single string with: - - ```python - "items:read items:write users:read profile openid" - ``` - - would represent the scopes: - - * `items:read` - * `items:write` - * `users:read` - * `profile` - * `openid` - """ - ), - ] = "", - client_id: Annotated[ - Union[str, None], - Form(), - Doc( - """ - If there's a `client_id`, it can be sent as part of the form fields. - But the OAuth2 specification recommends sending the `client_id` and - `client_secret` (if any) using HTTP Basic auth. - """ - ), - ] = None, - client_secret: Annotated[ - Union[str, None], - Form(), - Doc( - """ - If there's a `client_secret` (and a `client_id`), they can be sent - as part of the form fields. But the OAuth2 specification recommends - sending the `client_id` and `client_secret` (if any) using HTTP Basic - auth. - """ - ), - ] = None, - ): - super().__init__( - grant_type=grant_type, - username=username, - password=password, - scope=scope, - client_id=client_id, - client_secret=client_secret, - ) - - -class OAuth2(SecurityBase): - """ - This is the base class for OAuth2 authentication, an instance of it would be used - as a dependency. All other OAuth2 classes inherit from it and customize it for - each OAuth2 flow. - - You normally would not create a new class inheriting from it but use one of the - existing subclasses, and maybe compose them if you want to support multiple flows. - - Read more about it in the - [FastAPI docs for Security](https://fastapi.tiangolo.com/tutorial/security/). - """ - - def __init__( - self, - *, - flows: Annotated[ - Union[OAuthFlowsModel, Dict[str, Dict[str, Any]]], - Doc( - """ - The dictionary of OAuth2 flows.
- """ - ), - ] = OAuthFlowsModel(), - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if no HTTP Auhtorization header is provided, required for - OAuth2 authentication, it will automatically cancel the request and - send the client an error. - - If `auto_error` is set to `False`, when the HTTP Authorization header - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, with OAuth2 - or in a cookie). - """ - ), - ] = True, - ): - self.model = OAuth2Model( - flows=cast(OAuthFlowsModel, flows), description=description - ) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__(self, request: Request) -> Optional[str]: - authorization = request.headers.get("Authorization") - if not authorization: - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - return authorization - - -class OAuth2PasswordBearer(OAuth2): - """ - OAuth2 flow for authentication using a bearer token obtained with a password. - An instance of it would be used as a dependency. - - Read more about it in the - [FastAPI docs for Simple OAuth2 with Password and Bearer](https://fastapi.tiangolo.com/tutorial/security/simple-oauth2/). - """ - - def __init__( - self, - tokenUrl: Annotated[ - str, - Doc( - """ - The URL to obtain the OAuth2 token. This would be the *path operation* - that has `OAuth2PasswordRequestForm` as a dependency. - """ - ), - ], - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - scopes: Annotated[ - Optional[Dict[str, str]], - Doc( - """ - The OAuth2 scopes that would be required by the *path operations* that - use this dependency. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if no HTTP Auhtorization header is provided, required for - OAuth2 authentication, it will automatically cancel the request and - send the client an error. - - If `auto_error` is set to `False`, when the HTTP Authorization header - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, with OAuth2 - or in a cookie). 
- """ - ), - ] = True, - ): - if not scopes: - scopes = {} - flows = OAuthFlowsModel( - password=cast(Any, {"tokenUrl": tokenUrl, "scopes": scopes}) - ) - super().__init__( - flows=flows, - scheme_name=scheme_name, - description=description, - auto_error=auto_error, - ) - - async def __call__(self, request: Request) -> Optional[str]: - authorization = request.headers.get("Authorization") - scheme, param = get_authorization_scheme_param(authorization) - if not authorization or scheme.lower() != "bearer": - if self.auto_error: - raise HTTPException( - status_code=HTTP_401_UNAUTHORIZED, - detail="Not authenticated", - headers={"WWW-Authenticate": "Bearer"}, - ) - else: - return None - return param - - -class OAuth2AuthorizationCodeBearer(OAuth2): - """ - OAuth2 flow for authentication using a bearer token obtained with an OAuth2 code - flow. An instance of it would be used as a dependency. - """ - - def __init__( - self, - authorizationUrl: str, - tokenUrl: Annotated[ - str, - Doc( - """ - The URL to obtain the OAuth2 token. - """ - ), - ], - refreshUrl: Annotated[ - Optional[str], - Doc( - """ - The URL to refresh the token and obtain a new one. - """ - ), - ] = None, - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - scopes: Annotated[ - Optional[Dict[str, str]], - Doc( - """ - The OAuth2 scopes that would be required by the *path operations* that - use this dependency. - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if no HTTP Auhtorization header is provided, required for - OAuth2 authentication, it will automatically cancel the request and - send the client an error. - - If `auto_error` is set to `False`, when the HTTP Authorization header - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, with OAuth2 - or in a cookie). - """ - ), - ] = True, - ): - if not scopes: - scopes = {} - flows = OAuthFlowsModel( - authorizationCode=cast( - Any, - { - "authorizationUrl": authorizationUrl, - "tokenUrl": tokenUrl, - "refreshUrl": refreshUrl, - "scopes": scopes, - }, - ) - ) - super().__init__( - flows=flows, - scheme_name=scheme_name, - description=description, - auto_error=auto_error, - ) - - async def __call__(self, request: Request) -> Optional[str]: - authorization = request.headers.get("Authorization") - scheme, param = get_authorization_scheme_param(authorization) - if not authorization or scheme.lower() != "bearer": - if self.auto_error: - raise HTTPException( - status_code=HTTP_401_UNAUTHORIZED, - detail="Not authenticated", - headers={"WWW-Authenticate": "Bearer"}, - ) - else: - return None # pragma: nocover - return param - - -class SecurityScopes: - """ - This is a special class that you can define in a parameter in a dependency to - obtain the OAuth2 scopes required by all the dependencies in the same chain. - - This way, multiple dependencies can have different scopes, even when used in the - same *path operation*. And with this, you can access all the scopes required in - all those dependencies in a single place. 
- - Read more about it in the - [FastAPI docs for OAuth2 scopes](https://fastapi.tiangolo.com/advanced/security/oauth2-scopes/). - """ - - def __init__( - self, - scopes: Annotated[ - Optional[List[str]], - Doc( - """ - This will be filled by FastAPI. - """ - ), - ] = None, - ): - self.scopes: Annotated[ - List[str], - Doc( - """ - The list of all the scopes required by dependencies. - """ - ), - ] = ( - scopes or [] - ) - self.scope_str: Annotated[ - str, - Doc( - """ - All the scopes required by all the dependencies in a single string - separated by spaces, as defined in the OAuth2 specification. - """ - ), - ] = " ".join(self.scopes) diff --git a/server/venv/Lib/site-packages/fastapi/security/open_id_connect_url.py b/server/venv/Lib/site-packages/fastapi/security/open_id_connect_url.py deleted file mode 100644 index c612b47..0000000 --- a/server/venv/Lib/site-packages/fastapi/security/open_id_connect_url.py +++ /dev/null @@ -1,84 +0,0 @@ -from typing import Optional - -from fastapi.openapi.models import OpenIdConnect as OpenIdConnectModel -from fastapi.security.base import SecurityBase -from starlette.exceptions import HTTPException -from starlette.requests import Request -from starlette.status import HTTP_403_FORBIDDEN -from typing_extensions import Annotated, Doc # type: ignore [attr-defined] - - -class OpenIdConnect(SecurityBase): - """ - OpenID Connect authentication class. An instance of it would be used as a - dependency. - """ - - def __init__( - self, - *, - openIdConnectUrl: Annotated[ - str, - Doc( - """ - The OpenID Connect URL. - """ - ), - ], - scheme_name: Annotated[ - Optional[str], - Doc( - """ - Security scheme name. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - description: Annotated[ - Optional[str], - Doc( - """ - Security scheme description. - - It will be included in the generated OpenAPI (e.g. visible at `/docs`). - """ - ), - ] = None, - auto_error: Annotated[ - bool, - Doc( - """ - By default, if no HTTP Authorization header is provided (required for - OpenID Connect authentication), it will automatically cancel the request - and send the client an error. - - If `auto_error` is set to `False`, when the HTTP Authorization header - is not available, instead of erroring out, the dependency result will - be `None`. - - This is useful when you want to have optional authentication. - - It is also useful when you want to have authentication that can be - provided in one of multiple optional ways (for example, with OpenID - Connect or in a cookie).
- """ - ), - ] = True, - ): - self.model = OpenIdConnectModel( - openIdConnectUrl=openIdConnectUrl, description=description - ) - self.scheme_name = scheme_name or self.__class__.__name__ - self.auto_error = auto_error - - async def __call__(self, request: Request) -> Optional[str]: - authorization = request.headers.get("Authorization") - if not authorization: - if self.auto_error: - raise HTTPException( - status_code=HTTP_403_FORBIDDEN, detail="Not authenticated" - ) - else: - return None - return authorization diff --git a/server/venv/Lib/site-packages/fastapi/security/utils.py b/server/venv/Lib/site-packages/fastapi/security/utils.py deleted file mode 100644 index fa7a450..0000000 --- a/server/venv/Lib/site-packages/fastapi/security/utils.py +++ /dev/null @@ -1,10 +0,0 @@ -from typing import Optional, Tuple - - -def get_authorization_scheme_param( - authorization_header_value: Optional[str], -) -> Tuple[str, str]: - if not authorization_header_value: - return "", "" - scheme, _, param = authorization_header_value.partition(" ") - return scheme, param diff --git a/server/venv/Lib/site-packages/fastapi/staticfiles.py b/server/venv/Lib/site-packages/fastapi/staticfiles.py deleted file mode 100644 index 299015d..0000000 --- a/server/venv/Lib/site-packages/fastapi/staticfiles.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.staticfiles import StaticFiles as StaticFiles # noqa diff --git a/server/venv/Lib/site-packages/fastapi/templating.py b/server/venv/Lib/site-packages/fastapi/templating.py deleted file mode 100644 index 0cb8684..0000000 --- a/server/venv/Lib/site-packages/fastapi/templating.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.templating import Jinja2Templates as Jinja2Templates # noqa diff --git a/server/venv/Lib/site-packages/fastapi/testclient.py b/server/venv/Lib/site-packages/fastapi/testclient.py deleted file mode 100644 index 4012406..0000000 --- a/server/venv/Lib/site-packages/fastapi/testclient.py +++ /dev/null @@ -1 +0,0 @@ -from starlette.testclient import TestClient as TestClient # noqa diff --git a/server/venv/Lib/site-packages/fastapi/types.py b/server/venv/Lib/site-packages/fastapi/types.py deleted file mode 100644 index 7adf565..0000000 --- a/server/venv/Lib/site-packages/fastapi/types.py +++ /dev/null @@ -1,11 +0,0 @@ -import types -from enum import Enum -from typing import Any, Callable, Dict, Set, Type, TypeVar, Union - -from pydantic import BaseModel - -DecoratedCallable = TypeVar("DecoratedCallable", bound=Callable[..., Any]) -UnionType = getattr(types, "UnionType", Union) -NoneType = getattr(types, "UnionType", None) -ModelNameMap = Dict[Union[Type[BaseModel], Type[Enum]], str] -IncEx = Union[Set[int], Set[str], Dict[int, Any], Dict[str, Any]] diff --git a/server/venv/Lib/site-packages/fastapi/utils.py b/server/venv/Lib/site-packages/fastapi/utils.py deleted file mode 100644 index 267d64c..0000000 --- a/server/venv/Lib/site-packages/fastapi/utils.py +++ /dev/null @@ -1,228 +0,0 @@ -import re -import warnings -from dataclasses import is_dataclass -from typing import ( - TYPE_CHECKING, - Any, - Dict, - MutableMapping, - Optional, - Set, - Type, - Union, - cast, -) -from weakref import WeakKeyDictionary - -import fastapi -from fastapi._compat import ( - PYDANTIC_V2, - BaseConfig, - ModelField, - PydanticSchemaGenerationError, - Undefined, - UndefinedType, - Validator, - lenient_issubclass, -) -from fastapi.datastructures import DefaultPlaceholder, DefaultType -from pydantic import BaseModel, create_model -from pydantic.fields import FieldInfo -from 
typing_extensions import Literal - -if TYPE_CHECKING: # pragma: nocover - from .routing import APIRoute - -# Cache for `create_cloned_field` -_CLONED_TYPES_CACHE: MutableMapping[ - Type[BaseModel], Type[BaseModel] -] = WeakKeyDictionary() - - -def is_body_allowed_for_status_code(status_code: Union[int, str, None]) -> bool: - if status_code is None: - return True - # Ref: https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.0.md#patterned-fields-1 - if status_code in { - "default", - "1XX", - "2XX", - "3XX", - "4XX", - "5XX", - }: - return True - current_status_code = int(status_code) - return not (current_status_code < 200 or current_status_code in {204, 304}) - - -def get_path_param_names(path: str) -> Set[str]: - return set(re.findall("{(.*?)}", path)) - - -def create_response_field( - name: str, - type_: Type[Any], - class_validators: Optional[Dict[str, Validator]] = None, - default: Optional[Any] = Undefined, - required: Union[bool, UndefinedType] = Undefined, - model_config: Type[BaseConfig] = BaseConfig, - field_info: Optional[FieldInfo] = None, - alias: Optional[str] = None, - mode: Literal["validation", "serialization"] = "validation", -) -> ModelField: - """ - Create a new response field. Raises if type_ is invalid. - """ - class_validators = class_validators or {} - if PYDANTIC_V2: - field_info = field_info or FieldInfo( - annotation=type_, default=default, alias=alias - ) - else: - field_info = field_info or FieldInfo() - kwargs = {"name": name, "field_info": field_info} - if PYDANTIC_V2: - kwargs.update({"mode": mode}) - else: - kwargs.update( - { - "type_": type_, - "class_validators": class_validators, - "default": default, - "required": required, - "model_config": model_config, - "alias": alias, - } - ) - try: - return ModelField(**kwargs) # type: ignore[arg-type] - except (RuntimeError, PydanticSchemaGenerationError): - raise fastapi.exceptions.FastAPIError( - "Invalid args for response field! Hint: " - f"check that {type_} is a valid Pydantic field type. " - "If you are using a return type annotation that is not a valid Pydantic " - "field (e.g. Union[Response, dict, None]) you can disable generating the " - "response model from the type annotation with the path operation decorator " - "parameter response_model=None. 
Read more: " - "https://fastapi.tiangolo.com/tutorial/response-model/" - ) from None - - -def create_cloned_field( - field: ModelField, - *, - cloned_types: Optional[MutableMapping[Type[BaseModel], Type[BaseModel]]] = None, -) -> ModelField: - if PYDANTIC_V2: - return field - # cloned_types caches already cloned types to support recursive models and improve - # performance by avoiding unecessary cloning - if cloned_types is None: - cloned_types = _CLONED_TYPES_CACHE - - original_type = field.type_ - if is_dataclass(original_type) and hasattr(original_type, "__pydantic_model__"): - original_type = original_type.__pydantic_model__ - use_type = original_type - if lenient_issubclass(original_type, BaseModel): - original_type = cast(Type[BaseModel], original_type) - use_type = cloned_types.get(original_type) - if use_type is None: - use_type = create_model(original_type.__name__, __base__=original_type) - cloned_types[original_type] = use_type - for f in original_type.__fields__.values(): - use_type.__fields__[f.name] = create_cloned_field( - f, cloned_types=cloned_types - ) - new_field = create_response_field(name=field.name, type_=use_type) - new_field.has_alias = field.has_alias # type: ignore[attr-defined] - new_field.alias = field.alias # type: ignore[misc] - new_field.class_validators = field.class_validators # type: ignore[attr-defined] - new_field.default = field.default # type: ignore[misc] - new_field.required = field.required # type: ignore[misc] - new_field.model_config = field.model_config # type: ignore[attr-defined] - new_field.field_info = field.field_info - new_field.allow_none = field.allow_none # type: ignore[attr-defined] - new_field.validate_always = field.validate_always # type: ignore[attr-defined] - if field.sub_fields: # type: ignore[attr-defined] - new_field.sub_fields = [ # type: ignore[attr-defined] - create_cloned_field(sub_field, cloned_types=cloned_types) - for sub_field in field.sub_fields # type: ignore[attr-defined] - ] - if field.key_field: # type: ignore[attr-defined] - new_field.key_field = create_cloned_field( # type: ignore[attr-defined] - field.key_field, cloned_types=cloned_types # type: ignore[attr-defined] - ) - new_field.validators = field.validators # type: ignore[attr-defined] - new_field.pre_validators = field.pre_validators # type: ignore[attr-defined] - new_field.post_validators = field.post_validators # type: ignore[attr-defined] - new_field.parse_json = field.parse_json # type: ignore[attr-defined] - new_field.shape = field.shape # type: ignore[attr-defined] - new_field.populate_validators() # type: ignore[attr-defined] - return new_field - - -def generate_operation_id_for_path( - *, name: str, path: str, method: str -) -> str: # pragma: nocover - warnings.warn( - "fastapi.utils.generate_operation_id_for_path() was deprecated, " - "it is not used internally, and will be removed soon", - DeprecationWarning, - stacklevel=2, - ) - operation_id = name + path - operation_id = re.sub(r"\W", "_", operation_id) - operation_id = operation_id + "_" + method.lower() - return operation_id - - -def generate_unique_id(route: "APIRoute") -> str: - operation_id = route.name + route.path_format - operation_id = re.sub(r"\W", "_", operation_id) - assert route.methods - operation_id = operation_id + "_" + list(route.methods)[0].lower() - return operation_id - - -def deep_dict_update(main_dict: Dict[Any, Any], update_dict: Dict[Any, Any]) -> None: - for key, value in update_dict.items(): - if ( - key in main_dict - and isinstance(main_dict[key], dict) - and 
isinstance(value, dict) - ): - deep_dict_update(main_dict[key], value) - elif ( - key in main_dict - and isinstance(main_dict[key], list) - and isinstance(update_dict[key], list) - ): - main_dict[key] = main_dict[key] + update_dict[key] - else: - main_dict[key] = value - - -def get_value_or_default( - first_item: Union[DefaultPlaceholder, DefaultType], - *extra_items: Union[DefaultPlaceholder, DefaultType], -) -> Union[DefaultPlaceholder, DefaultType]: - """ - Pass items or `DefaultPlaceholder`s by descending priority. - - The first one to _not_ be a `DefaultPlaceholder` will be returned. - - Otherwise, the first item (a `DefaultPlaceholder`) will be returned. - """ - items = (first_item,) + extra_items - for item in items: - if not isinstance(item, DefaultPlaceholder): - return item - return first_item - - -def match_pydantic_error_url(error_type: str) -> Any: - from dirty_equals import IsStr - - return IsStr(regex=rf"^https://errors\.pydantic\.dev/.*/v/{error_type}") diff --git a/server/venv/Lib/site-packages/fastapi/websockets.py b/server/venv/Lib/site-packages/fastapi/websockets.py deleted file mode 100644 index 55a4ac4..0000000 --- a/server/venv/Lib/site-packages/fastapi/websockets.py +++ /dev/null @@ -1,3 +0,0 @@ -from starlette.websockets import WebSocket as WebSocket # noqa -from starlette.websockets import WebSocketDisconnect as WebSocketDisconnect # noqa -from starlette.websockets import WebSocketState as WebSocketState # noqa diff --git a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/INSTALLER b/server/venv/Lib/site-packages/h11-0.14.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/LICENSE.txt b/server/venv/Lib/site-packages/h11-0.14.0.dist-info/LICENSE.txt deleted file mode 100644 index 8f080ea..0000000 --- a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 Nathaniel J. Smith and other contributors - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
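As an editorial aside, here is a small usage sketch of the two `fastapi.utils` helpers deleted a few hunks above, `deep_dict_update` and `get_value_or_default`. It is illustrative only, not part of the deleted sources, and assumes the same FastAPI release this venv vendored is importable (the `Default` factory from `fastapi.datastructures` wraps a value in a `DefaultPlaceholder`):

```python
# Illustrative sketch, assuming the vendored FastAPI release is importable.
from fastapi.datastructures import Default
from fastapi.utils import deep_dict_update, get_value_or_default

# deep_dict_update merges nested dicts recursively, concatenates lists,
# and overwrites everything else in place; FastAPI uses it to layer
# user-supplied OpenAPI extras onto the generated schema.
schema = {"info": {"title": "API"}, "tags": [{"name": "items"}]}
deep_dict_update(schema, {"info": {"version": "1.0"}, "tags": [{"name": "users"}]})
assert schema == {
    "info": {"title": "API", "version": "1.0"},
    "tags": [{"name": "items"}, {"name": "users"}],
}

# get_value_or_default returns the first argument that is not a
# DefaultPlaceholder, so a router-level setting overrides the app default.
app_default = Default("application/json")
assert get_value_or_default(app_default, "text/html") == "text/html"
```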
diff --git a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/METADATA b/server/venv/Lib/site-packages/h11-0.14.0.dist-info/METADATA deleted file mode 100644 index cf12a82..0000000 --- a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/METADATA +++ /dev/null @@ -1,193 +0,0 @@ -Metadata-Version: 2.1 -Name: h11 -Version: 0.14.0 -Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 -Home-page: https://github.com/python-hyper/h11 -Author: Nathaniel J. Smith -Author-email: njs@pobox.com -License: MIT -Classifier: Development Status :: 3 - Alpha -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Topic :: Internet :: WWW/HTTP -Classifier: Topic :: System :: Networking -Requires-Python: >=3.7 -License-File: LICENSE.txt -Requires-Dist: typing-extensions ; python_version < "3.8" - -h11 -=== - -.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master - :target: https://travis-ci.org/python-hyper/h11 - :alt: Automated test status - -.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg - :target: https://codecov.io/gh/python-hyper/h11 - :alt: Test coverage - -.. image:: https://readthedocs.org/projects/h11/badge/?version=latest - :target: http://h11.readthedocs.io/en/latest/?badge=latest - :alt: Documentation Status - -This is a little HTTP/1.1 library written from scratch in Python, -heavily inspired by `hyper-h2 `_. - -It's a "bring-your-own-I/O" library; h11 contains no IO code -whatsoever. This means you can hook h11 up to your favorite network -API, and that could be anything you want: synchronous, threaded, -asynchronous, or your own implementation of `RFC 6214 -`_ -- h11 won't judge you. -(Compare this to the current state of the art, where every time a `new -network API `_ comes along then someone -gets to start over reimplementing the entire HTTP protocol from -scratch.) Cory Benfield made an `excellent blog post describing the -benefits of this approach -`_, or if you like video -then here's his `PyCon 2016 talk on the same theme -`_. - -This also means that h11 is not immediately useful out of the box: -it's a toolkit for building programs that speak HTTP, not something -that could directly replace ``requests`` or ``twisted.web`` or -whatever. But h11 makes it much easier to implement something like -``requests`` or ``twisted.web``. - -At a high level, working with h11 goes like this: - -1) First, create an ``h11.Connection`` object to track the state of a - single HTTP/1.1 connection. - -2) When you read data off the network, pass it to - ``conn.receive_data(...)``; you'll get back a list of objects - representing high-level HTTP "events". - -3) When you want to send a high-level HTTP event, create the - corresponding "event" object and pass it to ``conn.send(...)``; - this will give you back some bytes that you can then push out - through the network. 
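As an aside on the three-step loop described above: the sketch below shows it as a minimal blocking client against the h11 0.14 API, where `conn.send(event)` produces bytes to write yourself and events are pulled one at a time with `conn.next_event()` after buffering raw bytes via `conn.receive_data()`. The host, port, and buffer size are placeholders:

```python
# Minimal illustrative h11 client, assuming h11 0.14; host/port are placeholders.
import socket

import h11

conn = h11.Connection(our_role=h11.CLIENT)
sock = socket.create_connection(("example.com", 80))

# Step 3: h11 turns event objects into bytes; pushing them out is our job.
for event in [
    h11.Request(method="GET", target="/", headers=[("Host", "example.com")]),
    h11.EndOfMessage(),
]:
    sock.sendall(conn.send(event))

# Step 2: feed raw bytes in, pull high-level events back out.
while True:
    event = conn.next_event()
    if event is h11.NEED_DATA:
        conn.receive_data(sock.recv(4096))  # b"" here would signal EOF
    elif isinstance(event, h11.EndOfMessage):
        break
    else:
        print(event)  # an h11.Response, then zero or more h11.Data chunks
```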
- -For example, a client might instantiate and then send a -``h11.Request`` object, then zero or more ``h11.Data`` objects for the -request body (e.g., if this is a POST), and then a -``h11.EndOfMessage`` to indicate the end of the message. Then the -server would then send back a ``h11.Response``, some ``h11.Data``, and -its own ``h11.EndOfMessage``. If either side violates the protocol, -you'll get a ``h11.ProtocolError`` exception. - -h11 is suitable for implementing both servers and clients, and has a -pleasantly symmetric API: the events you send as a client are exactly -the ones that you receive as a server and vice-versa. - -`Here's an example of a tiny HTTP client -`_ - -It also has `a fine manual `_. - -FAQ ---- - -*Whyyyyy?* - -I wanted to play with HTTP in `Curio -`__ and `Trio -`__, which at the time didn't have any -HTTP libraries. So I thought, no big deal, Python has, like, a dozen -different implementations of HTTP, surely I can find one that's -reusable. I didn't find one, but I did find Cory's call-to-arms -blog-post. So I figured, well, fine, if I have to implement HTTP from -scratch, at least I can make sure no-one *else* has to ever again. - -*Should I use it?* - -Maybe. You should be aware that it's a very young project. But, it's -feature complete and has an exhaustive test-suite and complete docs, -so the next step is for people to try using it and see how it goes -:-). If you do then please let us know -- if nothing else we'll want -to talk to you before making any incompatible changes! - -*What are the features/limitations?* - -Roughly speaking, it's trying to be a robust, complete, and non-hacky -implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: -HTTP/1.1 Message Syntax and Routing -`_. That is, it mostly focuses on -implementing HTTP at the level of taking bytes on and off the wire, -and the headers related to that, and tries to be anal about spec -conformance. It doesn't know about higher-level concerns like URL -routing, conditional GETs, cross-origin cookie policies, or content -negotiation. But it does know how to take care of framing, -cross-version differences in keep-alive handling, and the "obsolete -line folding" rule, so you can focus your energies on the hard / -interesting parts for your application, and it tries to support the -full specification in the sense that any useful HTTP/1.1 conformant -application should be able to use h11. - -It's pure Python, and has no dependencies outside of the standard -library. - -It has a test suite with 100.0% coverage for both statements and -branches. - -Currently it supports Python 3 (testing on 3.7-3.10) and PyPy 3. -The last Python 2-compatible version was h11 0.11.x. -(Originally it had a Cython wrapper for `http-parser -`_ and a beautiful nested state -machine implemented with ``yield from`` to postprocess the output. But -I had to take these out -- the new *parser* needs fewer lines-of-code -than the old *parser wrapper*, is written in pure Python, uses no -exotic language syntax, and has more features. It's sad, really; that -old state machine was really slick. I just need a few sentences here -to mourn that.) - -I don't know how fast it is. I haven't benchmarked or profiled it yet, -so it's probably got a few pointless hot spots, and I've been trying -to err on the side of simplicity and robustness instead of -micro-optimization. 
But at the architectural level I tried hard to -avoid fundamentally bad decisions, e.g., I believe that all the -parsing algorithms remain linear-time even in the face of pathological -input like slowloris, and there are no byte-by-byte loops. (I also -believe that it maintains bounded memory usage in the face of -arbitrary/pathological input.) - -The whole library is ~800 lines-of-code. You can read and understand -the whole thing in less than an hour. Most of the energy invested in -this so far has been spent on trying to keep things simple by -minimizing special-cases and ad hoc state manipulation; even though it -is now quite small and simple, I'm still annoyed that I haven't -figured out how to make it even smaller and simpler. (Unfortunately, -HTTP does not lend itself to simplicity.) - -The API is ~feature complete and I don't expect the general outlines -to change much, but you can't judge an API's ergonomics until you -actually document and use it, so I'd expect some changes in the -details. - -*How do I try it?* - -.. code-block:: sh - - $ pip install h11 - $ git clone git@github.com:python-hyper/h11 - $ cd h11/examples - $ python basic-client.py - -and go from there. - -*License?* - -MIT - -*Code of conduct?* - -Contributors are requested to follow our `code of conduct -`_ in -all project spaces. diff --git a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/RECORD b/server/venv/Lib/site-packages/h11-0.14.0.dist-info/RECORD deleted file mode 100644 index 2296d70..0000000 --- a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/RECORD +++ /dev/null @@ -1,53 +0,0 @@ -h11-0.14.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -h11-0.14.0.dist-info/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 -h11-0.14.0.dist-info/METADATA,sha256=B7pZ0m7WBXNs17vl6hUH9bJTL9s37DaGvY31w7jNxSg,8175 -h11-0.14.0.dist-info/RECORD,, -h11-0.14.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -h11-0.14.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -h11-0.14.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 -h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 -h11/__pycache__/__init__.cpython-311.pyc,, -h11/__pycache__/_abnf.cpython-311.pyc,, -h11/__pycache__/_connection.cpython-311.pyc,, -h11/__pycache__/_events.cpython-311.pyc,, -h11/__pycache__/_headers.cpython-311.pyc,, -h11/__pycache__/_readers.cpython-311.pyc,, -h11/__pycache__/_receivebuffer.cpython-311.pyc,, -h11/__pycache__/_state.cpython-311.pyc,, -h11/__pycache__/_util.cpython-311.pyc,, -h11/__pycache__/_version.cpython-311.pyc,, -h11/__pycache__/_writers.cpython-311.pyc,, -h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815 -h11/_connection.py,sha256=eS2sorMD0zKLCFiB9lW9W9F_Nzny2tjHa4e6s1ujr1c,26539 -h11/_events.py,sha256=LEfuvg1AbhHaVRwxCd0I-pFn9-ezUOaoL8o2Kvy1PBA,11816 -h11/_headers.py,sha256=RqB8cd8CN0blYPzcLe5qeCh-phv6D1U_CHj4hs67lgQ,10230 -h11/_readers.py,sha256=EbSed0jzwVUiD1nOPAeUcVE4Flf3wXkxfb8c06-OTBM,8383 -h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 -h11/_state.py,sha256=k1VL6SDbaPkSrZ-49ewCXDpuiUS69_46YhbWjuV1qEY,13300 -h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 -h11/_version.py,sha256=LVyTdiZRzIIEv79UyOgbM5iUrJUllEzlCWaJEYBY1zc,686 -h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081 -h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 
-h11/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -h11/tests/__pycache__/__init__.cpython-311.pyc,, -h11/tests/__pycache__/helpers.cpython-311.pyc,, -h11/tests/__pycache__/test_against_stdlib_http.cpython-311.pyc,, -h11/tests/__pycache__/test_connection.cpython-311.pyc,, -h11/tests/__pycache__/test_events.cpython-311.pyc,, -h11/tests/__pycache__/test_headers.cpython-311.pyc,, -h11/tests/__pycache__/test_helpers.cpython-311.pyc,, -h11/tests/__pycache__/test_io.cpython-311.pyc,, -h11/tests/__pycache__/test_receivebuffer.cpython-311.pyc,, -h11/tests/__pycache__/test_state.cpython-311.pyc,, -h11/tests/__pycache__/test_util.cpython-311.pyc,, -h11/tests/data/test-file,sha256=ZJ03Rqs98oJw29OHzJg7LlMzyGQaRAY0r3AqBeM2wVU,65 -h11/tests/helpers.py,sha256=a1EVG_p7xU4wRsa3tMPTRxuaKCmretok9sxXWvqfmQA,3355 -h11/tests/test_against_stdlib_http.py,sha256=cojCHgHXFQ8gWhNlEEwl3trmOpN-5uDukRoHnElqo3A,3995 -h11/tests/test_connection.py,sha256=ZbPLDPclKvjgjAhgk-WlCPBaf17c4XUIV2tpaW08jOI,38720 -h11/tests/test_events.py,sha256=LPVLbcV-NvPNK9fW3rraR6Bdpz1hAlsWubMtNaJ5gHg,4657 -h11/tests/test_headers.py,sha256=qd8T1Zenuz5GbD6wklSJ5G8VS7trrYgMV0jT-SMvqg8,5612 -h11/tests/test_helpers.py,sha256=kAo0CEM4LGqmyyP2ZFmhsyq3UFJqoFfAbzu3hbWreRM,794 -h11/tests/test_io.py,sha256=uCZVnjarkRBkudfC1ij-KSCQ71XWJhnkgkgWWkKgYPQ,16386 -h11/tests/test_receivebuffer.py,sha256=3jGbeJM36Akqg_pAhPb7XzIn2NS6RhPg-Ryg8Eu6ytk,3454 -h11/tests/test_state.py,sha256=rqll9WqFsJPE0zSrtCn9LH659mPKsDeXZ-DwXwleuBQ,8928 -h11/tests/test_util.py,sha256=VO5L4nSFe4pgtSwKuv6u_6l0H7UeizF5WKuHTWreg70,2970 diff --git a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/REQUESTED b/server/venv/Lib/site-packages/h11-0.14.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/WHEEL b/server/venv/Lib/site-packages/h11-0.14.0.dist-info/WHEEL deleted file mode 100644 index 5bad85f..0000000 --- a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/top_level.txt b/server/venv/Lib/site-packages/h11-0.14.0.dist-info/top_level.txt deleted file mode 100644 index 0d24def..0000000 --- a/server/venv/Lib/site-packages/h11-0.14.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -h11 diff --git a/server/venv/Lib/site-packages/h11/__init__.py b/server/venv/Lib/site-packages/h11/__init__.py deleted file mode 100644 index 989e92c..0000000 --- a/server/venv/Lib/site-packages/h11/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), -# containing no networking code at all, loosely modelled on hyper-h2's generic -# implementation of HTTP/2 (and in particular the h2.connection.H2Connection -# class). There's still a bunch of subtle details you need to get right if you -# want to make this actually useful, because it doesn't implement all the -# semantics to check that what you're asking to write to the wire is sensible, -# but at least it gets you out of dealing with the wire itself. 
- -from h11._connection import Connection, NEED_DATA, PAUSED -from h11._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from h11._state import ( - CLIENT, - CLOSED, - DONE, - ERROR, - IDLE, - MIGHT_SWITCH_PROTOCOL, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, - SWITCHED_PROTOCOL, -) -from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError -from h11._version import __version__ - -PRODUCT_ID = "python-h11/" + __version__ - - -__all__ = ( - "Connection", - "NEED_DATA", - "PAUSED", - "ConnectionClosed", - "Data", - "EndOfMessage", - "Event", - "InformationalResponse", - "Request", - "Response", - "CLIENT", - "CLOSED", - "DONE", - "ERROR", - "IDLE", - "MUST_CLOSE", - "SEND_BODY", - "SEND_RESPONSE", - "SERVER", - "SWITCHED_PROTOCOL", - "ProtocolError", - "LocalProtocolError", - "RemoteProtocolError", -) diff --git a/server/venv/Lib/site-packages/h11/_abnf.py b/server/venv/Lib/site-packages/h11/_abnf.py deleted file mode 100644 index 933587f..0000000 --- a/server/venv/Lib/site-packages/h11/_abnf.py +++ /dev/null @@ -1,132 +0,0 @@ -# We use native strings for all the re patterns, to take advantage of string -# formatting, and then convert to bytestrings when compiling the final re -# objects. - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace -# OWS = *( SP / HTAB ) -# ; optional whitespace -OWS = r"[ \t]*" - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators -# token = 1*tchar -# -# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" -# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" -# / DIGIT / ALPHA -# ; any VCHAR, except delimiters -token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields -# field-name = token -field_name = token - -# The standard says: -# -# field-value = *( field-content / obs-fold ) -# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] -# field-vchar = VCHAR / obs-text -# obs-fold = CRLF 1*( SP / HTAB ) -# ; obsolete line folding -# ; see Section 3.2.4 -# -# https://tools.ietf.org/html/rfc5234#appendix-B.1 -# -# VCHAR = %x21-7E -# ; visible (printing) characters -# -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string -# obs-text = %x80-FF -# -# However, the standard definition of field-content is WRONG! It disallows -# fields containing a single visible character surrounded by whitespace, -# e.g. "foo a bar". -# -# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 -# -# So our definition of field_content attempts to fix it up... -# -# Also, we allow lots of control characters, because apparently people assume -# that they're legal in practice (e.g., google analytics makes cookies with -# \x01 in them!): -# https://github.com/python-hyper/h11/issues/57 -# We still don't allow NUL or whitespace, because those are often treated as -# meta-characters and letting them through can lead to nasty issues like SSRF. -vchar = r"[\x21-\x7e]" -vchar_or_obs_text = r"[^\x00\s]" -field_vchar = vchar_or_obs_text -field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) - -# We handle obs-fold at a different level, and our fixed-up field_content -# already grows to swallow the whole value, so ? 
instead of * -field_value = r"({field_content})?".format(**globals()) - -# header-field = field-name ":" OWS field-value OWS -header_field = ( - r"(?P<field_name>{field_name})" - r":" - r"{OWS}" - r"(?P<field_value>{field_value})" - r"{OWS}".format(**globals()) -) - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line -# -# request-line = method SP request-target SP HTTP-version CRLF -# method = token -# HTTP-version = HTTP-name "/" DIGIT "." DIGIT -# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive -# -# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full -# URL, host+port (for connect), or even "*", but in any case we are guaranteed -# that it consists of the visible printing characters. -method = token -request_target = r"{vchar}+".format(**globals()) -http_version = r"HTTP/(?P<http_version>[0-9]\.[0-9])" -request_line = ( - r"(?P<method>{method})" - r" " - r"(?P<target>{request_target})" - r" " - r"{http_version}".format(**globals()) -) - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line -# -# status-line = HTTP-version SP status-code SP reason-phrase CRLF -# status-code = 3DIGIT -# reason-phrase = *( HTAB / SP / VCHAR / obs-text ) -status_code = r"[0-9]{3}" -reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals()) -status_line = ( - r"{http_version}" - r" " - r"(?P<status_code>{status_code})" - # However, there are apparently a few too many servers out there that just - # leave out the reason phrase: - # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036 - # https://github.com/seanmonstar/httparse/issues/29 - # so make it optional. ?: is a non-capturing group. - r"(?: (?P<reason>{reason_phrase}))?".format(**globals()) -) - -HEXDIG = r"[0-9A-Fa-f]" -# Actually -# -# chunk-size = 1*HEXDIG -# -# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20 -chunk_size = r"({HEXDIG}){{1,20}}".format(**globals()) -# Actually -# -# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) -# -# but we aren't parsing the things so we don't really care. -chunk_ext = r";.*" -chunk_header = ( - r"(?P<chunk_size>{chunk_size})" - r"(?P<chunk_ext>{chunk_ext})?" - r"{OWS}\r\n".format( - **globals() - ) # Even though the specification does not allow for extra whitespace, - # we are lenient with trailing whitespace because some servers in the wild use it. -) diff --git a/server/venv/Lib/site-packages/h11/_connection.py b/server/venv/Lib/site-packages/h11/_connection.py deleted file mode 100644 index d175270..0000000 --- a/server/venv/Lib/site-packages/h11/_connection.py +++ /dev/null @@ -1,633 +0,0 @@ -# This contains the main Connection class. Everything in h11 revolves around -# this. -from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union - -from ._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from ._headers import get_comma_header, has_expect_100_continue, set_comma_header -from ._readers import READERS, ReadersType -from ._receivebuffer import ReceiveBuffer -from ._state import ( - _SWITCH_CONNECT, - _SWITCH_UPGRADE, - CLIENT, - ConnectionState, - DONE, - ERROR, - MIGHT_SWITCH_PROTOCOL, - SEND_BODY, - SERVER, - SWITCHED_PROTOCOL, -) -from ._util import ( # Import the internal things we need - LocalProtocolError, - RemoteProtocolError, - Sentinel, -) -from ._writers import WRITERS, WritersType - -# Everything in __all__ gets re-exported as part of the h11 public API.
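# The NEED_DATA and PAUSED sentinels defined just below are what receive
# loops switch on; roughly (an illustrative sketch, with "sock" and
# "handle" assumed to be supplied by the caller):
#
#     while True:
#         event = conn.next_event()
#         if event is NEED_DATA:
#             conn.receive_data(sock.recv(4096))
#         elif event is PAUSED:
#             break  # wait until start_next_cycle() re-arms the connection
#         else:
#             handle(event)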
-__all__ = ["Connection", "NEED_DATA", "PAUSED"] - - -class NEED_DATA(Sentinel, metaclass=Sentinel): - pass - - -class PAUSED(Sentinel, metaclass=Sentinel): - pass - - -# If we ever have this much buffered without it making a complete parseable -# event, we error out. The only time we really buffer is when reading the -# request/response line + headers together, so this is effectively the limit on -# the size of that. -# -# Some precedents for defaults: -# - node.js: 80 * 1024 -# - tomcat: 8 * 1024 -# - IIS: 16 * 1024 -# - Apache: <8 KiB per line> -DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 - -# RFC 7230's rules for connection lifecycles: -# - If either side says they want to close the connection, then the connection -# must close. -# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close -# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive -# (and even this is a mess -- e.g. if you're implementing a proxy then -# sending Connection: keep-alive is forbidden). -# -# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So -# our rule is: -# - If someone says Connection: close, we will close -# - If someone uses HTTP/1.0, we will close. -def _keep_alive(event: Union[Request, Response]) -> bool: - connection = get_comma_header(event.headers, b"connection") - if b"close" in connection: - return False - if getattr(event, "http_version", b"1.1") < b"1.1": - return False - return True - - -def _body_framing( - request_method: bytes, event: Union[Request, Response] -) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: - # Called when we enter SEND_BODY to figure out framing information for - # this body. - # - # These are the only two events that can trigger a SEND_BODY state: - assert type(event) in (Request, Response) - # Returns one of: - # - # ("content-length", count) - # ("chunked", ()) - # ("http/1.0", ()) - # - # which are (lookup key, *args) for constructing body reader/writer - # objects. - # - # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 - # - # Step 1: some responses always have an empty body, regardless of what the - # headers say. - if type(event) is Response: - if ( - event.status_code in (204, 304) - or request_method == b"HEAD" - or (request_method == b"CONNECT" and 200 <= event.status_code < 300) - ): - return ("content-length", (0,)) - # Section 3.3.3 also lists another case -- responses with status_code - # < 200. For us these are InformationalResponses, not Responses, so - # they can't get into this function in the first place. - assert event.status_code >= 200 - - # Step 2: check for Transfer-Encoding (T-E beats C-L): - transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") - if transfer_encodings: - assert transfer_encodings == [b"chunked"] - return ("chunked", ()) - - # Step 3: check for Content-Length - content_lengths = get_comma_header(event.headers, b"content-length") - if content_lengths: - return ("content-length", (int(content_lengths[0]),)) - - # Step 4: no applicable headers; fallback/default depends on type - if type(event) is Request: - return ("content-length", (0,)) - else: - return ("http/1.0", ()) - - -################################################################ -# -# The main Connection class -# -################################################################ - - -class Connection: - """An object encapsulating the state of an HTTP connection. - - Args: - our_role: If you're implementing a client, pass :data:`h11.CLIENT`. 
If - you're implementing a server, pass :data:`h11.SERVER`. - - max_incomplete_event_size (int): - The maximum number of bytes we're willing to buffer of an - incomplete event. In practice this mostly sets a limit on the - maximum size of the request/response line + headers. If this is - exceeded, then :meth:`next_event` will raise - :exc:`RemoteProtocolError`. - - """ - - def __init__( - self, - our_role: Type[Sentinel], - max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, - ) -> None: - self._max_incomplete_event_size = max_incomplete_event_size - # State and role tracking - if our_role not in (CLIENT, SERVER): - raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role)) - self.our_role = our_role - self.their_role: Type[Sentinel] - if our_role is CLIENT: - self.their_role = SERVER - else: - self.their_role = CLIENT - self._cstate = ConnectionState() - - # Callables for converting data->events or vice-versa given the - # current state - self._writer = self._get_io_object(self.our_role, None, WRITERS) - self._reader = self._get_io_object(self.their_role, None, READERS) - - # Holds any unprocessed received data - self._receive_buffer = ReceiveBuffer() - # If this is true, then it indicates that the incoming connection was - # closed *after* the end of whatever's in self._receive_buffer: - self._receive_buffer_closed = False - - # Extra bits of state that don't fit into the state machine. - # - # These two are only used to interpret framing headers for figuring - # out how to read/write response bodies. their_http_version is also - # made available as a convenient public API. - self.their_http_version: Optional[bytes] = None - self._request_method: Optional[bytes] = None - # This is pure flow-control and doesn't at all affect the set of legal - # transitions, so no need to bother ConnectionState with it: - self.client_is_waiting_for_100_continue = False - - @property - def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]: - """A dictionary like:: - - {CLIENT: <client state>, SERVER: <server state>} - - See :ref:`state-machine` for details. - - """ - return dict(self._cstate.states) - - @property - def our_state(self) -> Type[Sentinel]: - """The current state of whichever role we are playing. See - :ref:`state-machine` for details. - """ - return self._cstate.states[self.our_role] - - @property - def their_state(self) -> Type[Sentinel]: - """The current state of whichever role we are NOT playing. See - :ref:`state-machine` for details. - """ - return self._cstate.states[self.their_role] - - @property - def they_are_waiting_for_100_continue(self) -> bool: - return self.their_role is CLIENT and self.client_is_waiting_for_100_continue - - def start_next_cycle(self) -> None: - """Attempt to reset our connection state for a new request/response - cycle. - - If both client and server are in :data:`DONE` state, then resets them - both to :data:`IDLE` state in preparation for a new request/response - cycle on this same connection. Otherwise, raises a - :exc:`LocalProtocolError`. - - See :ref:`keepalive-and-pipelining`.
- - """ - old_states = dict(self._cstate.states) - self._cstate.start_next_cycle() - self._request_method = None - # self.their_http_version gets left alone, since it presumably lasts - # beyond a single request/response cycle - assert not self.client_is_waiting_for_100_continue - self._respond_to_state_changes(old_states) - - def _process_error(self, role: Type[Sentinel]) -> None: - old_states = dict(self._cstate.states) - self._cstate.process_error(role) - self._respond_to_state_changes(old_states) - - def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: - if type(event) is InformationalResponse and event.status_code == 101: - return _SWITCH_UPGRADE - if type(event) is Response: - if ( - _SWITCH_CONNECT in self._cstate.pending_switch_proposals - and 200 <= event.status_code < 300 - ): - return _SWITCH_CONNECT - return None - - # All events go through here - def _process_event(self, role: Type[Sentinel], event: Event) -> None: - # First, pass the event through the state machine to make sure it - # succeeds. - old_states = dict(self._cstate.states) - if role is CLIENT and type(event) is Request: - if event.method == b"CONNECT": - self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) - if get_comma_header(event.headers, b"upgrade"): - self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) - server_switch_event = None - if role is SERVER: - server_switch_event = self._server_switch_event(event) - self._cstate.process_event(role, type(event), server_switch_event) - - # Then perform the updates triggered by it. - - if type(event) is Request: - self._request_method = event.method - - if role is self.their_role and type(event) in ( - Request, - Response, - InformationalResponse, - ): - event = cast(Union[Request, Response, InformationalResponse], event) - self.their_http_version = event.http_version - - # Keep alive handling - # - # RFC 7230 doesn't really say what one should do if Connection: close - # shows up on a 1xx InformationalResponse. I think the idea is that - # this is not supposed to happen. In any case, if it does happen, we - # ignore it. - if type(event) in (Request, Response) and not _keep_alive( - cast(Union[Request, Response], event) - ): - self._cstate.process_keep_alive_disabled() - - # 100-continue - if type(event) is Request and has_expect_100_continue(event): - self.client_is_waiting_for_100_continue = True - if type(event) in (InformationalResponse, Response): - self.client_is_waiting_for_100_continue = False - if role is CLIENT and type(event) in (Data, EndOfMessage): - self.client_is_waiting_for_100_continue = False - - self._respond_to_state_changes(old_states, event) - - def _get_io_object( - self, - role: Type[Sentinel], - event: Optional[Event], - io_dict: Union[ReadersType, WritersType], - ) -> Optional[Callable[..., Any]]: - # event may be None; it's only used when entering SEND_BODY - state = self._cstate.states[role] - if state is SEND_BODY: - # Special case: the io_dict has a dict of reader/writer factories - # that depend on the request/response framing. - framing_type, args = _body_framing( - cast(bytes, self._request_method), cast(Union[Request, Response], event) - ) - return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] - else: - # General case: the io_dict just has the appropriate reader/writer - # for this state - return io_dict.get((role, state)) # type: ignore[return-value] - - # This must be called after any action that might have caused - # self._cstate.states to change. 
- def _respond_to_state_changes( - self, - old_states: Dict[Type[Sentinel], Type[Sentinel]], - event: Optional[Event] = None, - ) -> None: - # Update reader/writer - if self.our_state != old_states[self.our_role]: - self._writer = self._get_io_object(self.our_role, event, WRITERS) - if self.their_state != old_states[self.their_role]: - self._reader = self._get_io_object(self.their_role, event, READERS) - - @property - def trailing_data(self) -> Tuple[bytes, bool]: - """Data that has been received, but not yet processed, represented as - a tuple with two elements, where the first is a byte-string containing - the unprocessed data itself, and the second is a bool that is True if - the receive connection was closed. - - See :ref:`switching-protocols` for discussion of why you'd want this. - """ - return (bytes(self._receive_buffer), self._receive_buffer_closed) - - def receive_data(self, data: bytes) -> None: - """Add data to our internal receive buffer. - - This does not actually do any processing on the data, just stores - it. To trigger processing, you have to call :meth:`next_event`. - - Args: - data (:term:`bytes-like object`): - The new data that was just received. - - Special case: If *data* is an empty byte-string like ``b""``, - then this indicates that the remote side has closed the - connection (end of file). Normally this is convenient, because - standard Python APIs like :meth:`file.read` or - :meth:`socket.recv` use ``b""`` to indicate end-of-file, while - other failures to read are indicated using other mechanisms - like raising :exc:`TimeoutError`. When using such an API you - can just blindly pass through whatever you get from ``read`` - to :meth:`receive_data`, and everything will work. - - But, if you have an API where reading an empty string is a - valid non-EOF condition, then you need to be aware of this and - make sure to check for such strings and avoid passing them to - :meth:`receive_data`. - - Returns: - Nothing, but after calling this you should call :meth:`next_event` - to parse the newly received data. - - Raises: - RuntimeError: - Raised if you pass an empty *data*, indicating EOF, and then - pass a non-empty *data*, indicating more data that somehow - arrived after the EOF. - - (Calling ``receive_data(b"")`` multiple times is fine, - and equivalent to calling it once.) - - """ - if data: - if self._receive_buffer_closed: - raise RuntimeError("received close, then received more data?") - self._receive_buffer += data - else: - self._receive_buffer_closed = True - - def _extract_next_receive_event( - self, - ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: - state = self.their_state - # We don't pause immediately when they enter DONE, because even in - # DONE state we can still process a ConnectionClosed() event. But - # if we have data in our buffer, then we definitely aren't getting - # a ConnectionClosed() immediately and we need to pause. - if state is DONE and self._receive_buffer: - return PAUSED - if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: - return PAUSED - assert self._reader is not None - event = self._reader(self._receive_buffer) - if event is None: - if not self._receive_buffer and self._receive_buffer_closed: - # In some unusual cases (basically just HTTP/1.0 bodies), EOF - # triggers an actual protocol event; in that case, we want to - # return that event, and then the state will change and we'll - # get called again to generate the actual ConnectionClosed(). 
- if hasattr(self._reader, "read_eof"): - event = self._reader.read_eof() # type: ignore[attr-defined] - else: - event = ConnectionClosed() - if event is None: - event = NEED_DATA - return event # type: ignore[no-any-return] - - def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: - """Parse the next event out of our receive buffer, update our internal - state, and return it. - - This is a mutating operation -- think of it like calling :func:`next` - on an iterator. - - Returns: - : One of three things: - - 1) An event object -- see :ref:`events`. - - 2) The special constant :data:`NEED_DATA`, which indicates that - you need to read more data from your socket and pass it to - :meth:`receive_data` before this method will be able to return - any more events. - - 3) The special constant :data:`PAUSED`, which indicates that we - are not in a state where we can process incoming data (usually - because the peer has finished their part of the current - request/response cycle, and you have not yet called - :meth:`start_next_cycle`). See :ref:`flow-control` for details. - - Raises: - RemoteProtocolError: - The peer has misbehaved. You should close the connection - (possibly after sending some kind of 4xx response). - - Once this method returns :class:`ConnectionClosed` once, then all - subsequent calls will also return :class:`ConnectionClosed`. - - If this method raises any exception besides :exc:`RemoteProtocolError` - then that's a bug -- if it happens please file a bug report! - - If this method raises any exception then it also sets - :attr:`Connection.their_state` to :data:`ERROR` -- see - :ref:`error-handling` for discussion. - - """ - - if self.their_state is ERROR: - raise RemoteProtocolError("Can't receive data when peer state is ERROR") - try: - event = self._extract_next_receive_event() - if event not in [NEED_DATA, PAUSED]: - self._process_event(self.their_role, cast(Event, event)) - if event is NEED_DATA: - if len(self._receive_buffer) > self._max_incomplete_event_size: - # 431 is "Request header fields too large" which is pretty - # much the only situation where we can get here - raise RemoteProtocolError( - "Receive buffer too long", error_status_hint=431 - ) - if self._receive_buffer_closed: - # We're still trying to complete some event, but that's - # never going to happen because no more data is coming - raise RemoteProtocolError("peer unexpectedly closed connection") - return event - except BaseException as exc: - self._process_error(self.their_role) - if isinstance(exc, LocalProtocolError): - exc._reraise_as_remote_protocol_error() - else: - raise - - def send(self, event: Event) -> Optional[bytes]: - """Convert a high-level event into bytes that can be sent to the peer, - while updating our internal state machine. - - Args: - event: The :ref:`event ` to send. - - Returns: - If ``type(event) is ConnectionClosed``, then returns - ``None``. Otherwise, returns a :term:`bytes-like object`. - - Raises: - LocalProtocolError: - Sending this event at this time would violate our - understanding of the HTTP/1.1 protocol. - - If this method raises any exception then it also sets - :attr:`Connection.our_state` to :data:`ERROR` -- see - :ref:`error-handling` for discussion. 
- - """ - data_list = self.send_with_data_passthrough(event) - if data_list is None: - return None - else: - return b"".join(data_list) - - def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: - """Identical to :meth:`send`, except that in situations where - :meth:`send` returns a single :term:`bytes-like object`, this instead - returns a list of them -- and when sending a :class:`Data` event, this - list is guaranteed to contain the exact object you passed in as - :attr:`Data.data`. See :ref:`sendfile` for discussion. - - """ - if self.our_state is ERROR: - raise LocalProtocolError("Can't send data when our state is ERROR") - try: - if type(event) is Response: - event = self._clean_up_response_headers_for_sending(event) - # We want to call _process_event before calling the writer, - # because if someone tries to do something invalid then this will - # give a sensible error message, while our writers all just assume - # they will only receive valid events. But, _process_event might - # change self._writer. So we have to do a little dance: - writer = self._writer - self._process_event(self.our_role, event) - if type(event) is ConnectionClosed: - return None - else: - # In any situation where writer is None, process_event should - # have raised ProtocolError - assert writer is not None - data_list: List[bytes] = [] - writer(event, data_list.append) - return data_list - except: - self._process_error(self.our_role) - raise - - def send_failed(self) -> None: - """Notify the state machine that we failed to send the data it gave - us. - - This causes :attr:`Connection.our_state` to immediately become - :data:`ERROR` -- see :ref:`error-handling` for discussion. - - """ - self._process_error(self.our_role) - - # When sending a Response, we take responsibility for a few things: - # - # - Sometimes you MUST set Connection: close. We take care of those - # times. (You can also set it yourself if you want, and if you do then - # we'll respect that and close the connection at the right time. But you - # don't have to worry about that unless you want to.) - # - # - The user has to set Content-Length if they want it. Otherwise, for - # responses that have bodies (e.g. not HEAD), then we will automatically - # select the right mechanism for streaming a body of unknown length, - # which depends on depending on the peer's HTTP version. - # - # This function's *only* responsibility is making sure headers are set up - # right -- everything downstream just looks at the headers. There are no - # side channels. - def _clean_up_response_headers_for_sending(self, response: Response) -> Response: - assert type(response) is Response - - headers = response.headers - need_close = False - - # HEAD requests need some special handling: they always act like they - # have Content-Length: 0, and that's how _body_framing treats - # them. But their headers are supposed to match what we would send if - # the request was a GET. (Technically there is one deviation allowed: - # we're allowed to leave out the framing headers -- see - # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as - # easy to get them right.) - method_for_choosing_headers = cast(bytes, self._request_method) - if method_for_choosing_headers == b"HEAD": - method_for_choosing_headers = b"GET" - framing_type, _ = _body_framing(method_for_choosing_headers, response) - if framing_type in ("chunked", "http/1.0"): - # This response has a body of unknown length. 
- # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked - # If our peer is HTTP/1.0, we use no framing headers, and close the - # connection afterwards. - # - # Make sure to clear Content-Length (in principle user could have - # set both and then we ignored Content-Length b/c - # Transfer-Encoding overwrote it -- this would be naughty of them, - # but the HTTP spec says that if our peer does this then we have - # to fix it instead of erroring out, so we'll accord the user the - # same respect). - headers = set_comma_header(headers, b"content-length", []) - if self.their_http_version is None or self.their_http_version < b"1.1": - # Either we never got a valid request and are sending back an - # error (their_http_version is None), so we assume the worst; - # or else we did get a valid HTTP/1.0 request, so we know that - # they don't understand chunked encoding. - headers = set_comma_header(headers, b"transfer-encoding", []) - # This is actually redundant ATM, since currently we - # unconditionally disable keep-alive when talking to HTTP/1.0 - # peers. But let's be defensive just in case we add - # Connection: keep-alive support later: - if self._request_method != b"HEAD": - need_close = True - else: - headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) - - if not self._cstate.keep_alive or need_close: - # Make sure Connection: close is set - connection = set(get_comma_header(headers, b"connection")) - connection.discard(b"keep-alive") - connection.add(b"close") - headers = set_comma_header(headers, b"connection", sorted(connection)) - - return Response( - headers=headers, - status_code=response.status_code, - http_version=response.http_version, - reason=response.reason, - ) diff --git a/server/venv/Lib/site-packages/h11/_events.py b/server/venv/Lib/site-packages/h11/_events.py deleted file mode 100644 index 075bf8a..0000000 --- a/server/venv/Lib/site-packages/h11/_events.py +++ /dev/null @@ -1,369 +0,0 @@ -# High level events that make up HTTP/1.1 conversations. Loosely inspired by -# the corresponding events in hyper-h2: -# -# http://python-hyper.org/h2/en/stable/api.html#events -# -# Don't subclass these. Stuff will break. - -import re -from abc import ABC -from dataclasses import dataclass, field -from typing import Any, cast, Dict, List, Tuple, Union - -from ._abnf import method, request_target -from ._headers import Headers, normalize_and_validate -from ._util import bytesify, LocalProtocolError, validate - -# Everything in __all__ gets re-exported as part of the h11 public API. -__all__ = [ - "Event", - "Request", - "InformationalResponse", - "Response", - "Data", - "EndOfMessage", - "ConnectionClosed", -] - -method_re = re.compile(method.encode("ascii")) -request_target_re = re.compile(request_target.encode("ascii")) - - -class Event(ABC): - """ - Base class for h11 events. - """ - - __slots__ = () - - -@dataclass(init=False, frozen=True) -class Request(Event): - """The beginning of an HTTP request. - - Fields: - - .. attribute:: method - - An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte - string. :term:`Bytes-like objects <bytes-like object>` and native - strings containing only ascii characters will be automatically - converted to byte strings. - - .. attribute:: target - - The target of an HTTP request, e.g. ``b"/index.html"``, or one of the - more exotic formats described in `RFC 7230, section 5.3 - <https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte - string. :term:`Bytes-like objects <bytes-like object>` and native - strings containing only ascii characters will be automatically - converted to byte strings.
- - .. attribute:: headers - - Request headers, represented as a list of (name, value) pairs. See - :ref:`the header normalization rules ` for details. - - .. attribute:: http_version - - The HTTP protocol version, represented as a byte string like - ``b"1.1"``. See :ref:`the HTTP version normalization rules - ` for details. - - """ - - __slots__ = ("method", "headers", "target", "http_version") - - method: bytes - headers: Headers - target: bytes - http_version: bytes - - def __init__( - self, - *, - method: Union[bytes, str], - headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], - target: Union[bytes, str], - http_version: Union[bytes, str] = b"1.1", - _parsed: bool = False, - ) -> None: - super().__init__() - if isinstance(headers, Headers): - object.__setattr__(self, "headers", headers) - else: - object.__setattr__( - self, "headers", normalize_and_validate(headers, _parsed=_parsed) - ) - if not _parsed: - object.__setattr__(self, "method", bytesify(method)) - object.__setattr__(self, "target", bytesify(target)) - object.__setattr__(self, "http_version", bytesify(http_version)) - else: - object.__setattr__(self, "method", method) - object.__setattr__(self, "target", target) - object.__setattr__(self, "http_version", http_version) - - # "A server MUST respond with a 400 (Bad Request) status code to any - # HTTP/1.1 request message that lacks a Host header field and to any - # request message that contains more than one Host header field or a - # Host header field with an invalid field-value." - # -- https://tools.ietf.org/html/rfc7230#section-5.4 - host_count = 0 - for name, value in self.headers: - if name == b"host": - host_count += 1 - if self.http_version == b"1.1" and host_count == 0: - raise LocalProtocolError("Missing mandatory Host: header") - if host_count > 1: - raise LocalProtocolError("Found multiple Host: headers") - - validate(method_re, self.method, "Illegal method characters") - validate(request_target_re, self.target, "Illegal target characters") - - # This is an unhashable type. - __hash__ = None # type: ignore - - -@dataclass(init=False, frozen=True) -class _ResponseBase(Event): - __slots__ = ("headers", "http_version", "reason", "status_code") - - headers: Headers - http_version: bytes - reason: bytes - status_code: int - - def __init__( - self, - *, - headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]], - status_code: int, - http_version: Union[bytes, str] = b"1.1", - reason: Union[bytes, str] = b"", - _parsed: bool = False, - ) -> None: - super().__init__() - if isinstance(headers, Headers): - object.__setattr__(self, "headers", headers) - else: - object.__setattr__( - self, "headers", normalize_and_validate(headers, _parsed=_parsed) - ) - if not _parsed: - object.__setattr__(self, "reason", bytesify(reason)) - object.__setattr__(self, "http_version", bytesify(http_version)) - if not isinstance(status_code, int): - raise LocalProtocolError("status code must be integer") - # Because IntEnum objects are instances of int, but aren't - # duck-compatible (sigh), see gh-72. - object.__setattr__(self, "status_code", int(status_code)) - else: - object.__setattr__(self, "reason", reason) - object.__setattr__(self, "http_version", http_version) - object.__setattr__(self, "status_code", status_code) - - self.__post_init__() - - def __post_init__(self) -> None: - pass - - # This is an unhashable type. 
- __hash__ = None # type: ignore - - -@dataclass(init=False, frozen=True) -class InformationalResponse(_ResponseBase): - """An HTTP informational response. - - Fields: - - .. attribute:: status_code - - The status code of this response, as an integer. For an - :class:`InformationalResponse`, this is always in the range [100, - 200). - - .. attribute:: headers - - Request headers, represented as a list of (name, value) pairs. See - :ref:`the header normalization rules ` for - details. - - .. attribute:: http_version - - The HTTP protocol version, represented as a byte string like - ``b"1.1"``. See :ref:`the HTTP version normalization rules - ` for details. - - .. attribute:: reason - - The reason phrase of this response, as a byte string. For example: - ``b"OK"``, or ``b"Not Found"``. - - """ - - def __post_init__(self) -> None: - if not (100 <= self.status_code < 200): - raise LocalProtocolError( - "InformationalResponse status_code should be in range " - "[100, 200), not {}".format(self.status_code) - ) - - # This is an unhashable type. - __hash__ = None # type: ignore - - -@dataclass(init=False, frozen=True) -class Response(_ResponseBase): - """The beginning of an HTTP response. - - Fields: - - .. attribute:: status_code - - The status code of this response, as an integer. For a - :class:`Response`, this is always in the range [200, - 1000). - - .. attribute:: headers - - Request headers, represented as a list of (name, value) pairs. See - :ref:`the header normalization rules ` for details. - - .. attribute:: http_version - - The HTTP protocol version, represented as a byte string like - ``b"1.1"``. See :ref:`the HTTP version normalization rules - ` for details. - - .. attribute:: reason - - The reason phrase of this response, as a byte string. For example: - ``b"OK"``, or ``b"Not Found"``. - - """ - - def __post_init__(self) -> None: - if not (200 <= self.status_code < 1000): - raise LocalProtocolError( - "Response status_code should be in range [200, 1000), not {}".format( - self.status_code - ) - ) - - # This is an unhashable type. - __hash__ = None # type: ignore - - -@dataclass(init=False, frozen=True) -class Data(Event): - """Part of an HTTP message body. - - Fields: - - .. attribute:: data - - A :term:`bytes-like object` containing part of a message body. Or, if - using the ``combine=False`` argument to :meth:`Connection.send`, then - any object that your socket writing code knows what to do with, and for - which calling :func:`len` returns the number of bytes that will be - written -- see :ref:`sendfile` for details. - - .. attribute:: chunk_start - - A marker that indicates whether this data object is from the start of a - chunked transfer encoding chunk. This field is ignored when a Data - event is provided to :meth:`Connection.send`: it is only valid on - events emitted from :meth:`Connection.next_event`. You probably - shouldn't use this attribute at all; see - :ref:`chunk-delimiters-are-bad` for details. - - .. attribute:: chunk_end - - A marker that indicates whether this data object is the last for a - given chunked transfer encoding chunk. This field is ignored when - a Data event is provided to :meth:`Connection.send`: it is only valid - on events emitted from :meth:`Connection.next_event`. You probably - shouldn't use this attribute at all; see - :ref:`chunk-delimiters-are-bad` for details.
- - """ - - __slots__ = ("data", "chunk_start", "chunk_end") - - data: bytes - chunk_start: bool - chunk_end: bool - - def __init__( - self, data: bytes, chunk_start: bool = False, chunk_end: bool = False - ) -> None: - object.__setattr__(self, "data", data) - object.__setattr__(self, "chunk_start", chunk_start) - object.__setattr__(self, "chunk_end", chunk_end) - - # This is an unhashable type. - __hash__ = None # type: ignore - - -# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that -# are forbidden to be sent in a trailer, since processing them as if they were -# present in the header section might bypass external security filters." -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part -# Unfortunately, the list of forbidden fields is long and vague :-/ -@dataclass(init=False, frozen=True) -class EndOfMessage(Event): - """The end of an HTTP message. - - Fields: - - .. attribute:: headers - - Default value: ``[]`` - - Any trailing headers attached to this message, represented as a list of - (name, value) pairs. See :ref:`the header normalization rules - ` for details. - - Must be empty unless ``Transfer-Encoding: chunked`` is in use. - - """ - - __slots__ = ("headers",) - - headers: Headers - - def __init__( - self, - *, - headers: Union[ - Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None - ] = None, - _parsed: bool = False, - ) -> None: - super().__init__() - if headers is None: - headers = Headers([]) - elif not isinstance(headers, Headers): - headers = normalize_and_validate(headers, _parsed=_parsed) - - object.__setattr__(self, "headers", headers) - - # This is an unhashable type. - __hash__ = None # type: ignore - - -@dataclass(frozen=True) -class ConnectionClosed(Event): - """This event indicates that the sender has closed their outgoing - connection. - - Note that this does not necessarily mean that they can't *receive* further - data, because TCP connections are composed to two one-way channels which - can be closed independently. See :ref:`closing` for details. - - No fields. - """ - - pass diff --git a/server/venv/Lib/site-packages/h11/_headers.py b/server/venv/Lib/site-packages/h11/_headers.py deleted file mode 100644 index b97d020..0000000 --- a/server/venv/Lib/site-packages/h11/_headers.py +++ /dev/null @@ -1,278 +0,0 @@ -import re -from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union - -from ._abnf import field_name, field_value -from ._util import bytesify, LocalProtocolError, validate - -if TYPE_CHECKING: - from ._events import Request - -try: - from typing import Literal -except ImportError: - from typing_extensions import Literal # type: ignore - - -# Facts -# ----- -# -# Headers are: -# keys: case-insensitive ascii -# values: mixture of ascii and raw bytes -# -# "Historically, HTTP has allowed field content with text in the ISO-8859-1 -# charset [ISO-8859-1], supporting other charsets only through use of -# [RFC2047] encoding. In practice, most HTTP header field values use only a -# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD -# limit their field values to US-ASCII octets. A recipient SHOULD treat other -# octets in field content (obs-text) as opaque data." 
-# And it deprecates all non-ascii values -# -# Leading/trailing whitespace in header names is forbidden -# -# Values get leading/trailing whitespace stripped -# -# Content-Disposition actually needs to contain unicode semantically; to -# accomplish this it has a terrifically weird way of encoding the filename -# itself as ascii (and even this still has lots of cross-browser -# incompatibilities) -# -# Order is important: -# "a proxy MUST NOT change the order of these field values when forwarding a -# message" -# (and there are several headers where the order indicates a preference) -# -# Multiple occurrences of the same header: -# "A sender MUST NOT generate multiple header fields with the same field name -# in a message unless either the entire field value for that header field is -# defined as a comma-separated list [or the header is Set-Cookie which gets a -# special exception]" - RFC 7230. (cookies are in RFC 6265) -# -# So every header aside from Set-Cookie can be merged by b", ".join if it -# occurs repeatedly. But, of course, they can't necessarily be split by -# .split(b","), because quoting. -# -# Given all this mess (case insensitive, duplicates allowed, order is -# important, ...), there doesn't appear to be any standard way to handle -# headers in Python -- they're almost like dicts, but... actually just -# aren't. For now we punt and just use a super simple representation: headers -# are a list of pairs -# -# [(name1, value1), (name2, value2), ...] -# -# where all entries are bytestrings, names are lowercase and have no -# leading/trailing whitespace, and values are bytestrings with no -# leading/trailing whitespace. Searching and updating are done via naive O(n) -# methods. -# -# Maybe a dict-of-lists would be better? - -_content_length_re = re.compile(rb"[0-9]+") -_field_name_re = re.compile(field_name.encode("ascii")) -_field_value_re = re.compile(field_value.encode("ascii")) - - -class Headers(Sequence[Tuple[bytes, bytes]]): - """ - A list-like interface that allows iterating over headers as byte-pairs - of (lowercased-name, value). - - Internally we actually store the representation as three-tuples, - including both the raw original casing, in order to preserve casing - over-the-wire, and the lowercased name, for case-insensitive comparisons.
- - r = Request( - method="GET", - target="/", - headers=[("Host", "example.org"), ("Connection", "keep-alive")], - http_version="1.1", - ) - assert r.headers == [ - (b"host", b"example.org"), - (b"connection", b"keep-alive") - ] - assert r.headers.raw_items() == [ - (b"Host", b"example.org"), - (b"Connection", b"keep-alive") - ] - """ - - __slots__ = "_full_items" - - def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None: - self._full_items = full_items - - def __bool__(self) -> bool: - return bool(self._full_items) - - def __eq__(self, other: object) -> bool: - return list(self) == list(other) # type: ignore - - def __len__(self) -> int: - return len(self._full_items) - - def __repr__(self) -> str: - return "<Headers(%s)>" % repr(list(self)) - - def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override] - _, name, value = self._full_items[idx] - return (name, value) - - def raw_items(self) -> List[Tuple[bytes, bytes]]: - return [(raw_name, value) for raw_name, _, value in self._full_items] - - -HeaderTypes = Union[ - List[Tuple[bytes, bytes]], - List[Tuple[bytes, str]], - List[Tuple[str, bytes]], - List[Tuple[str, str]], -] - - -@overload -def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers: - ... - - -@overload -def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers: - ... - - -@overload -def normalize_and_validate( - headers: Union[Headers, HeaderTypes], _parsed: bool = False -) -> Headers: - ... - - -def normalize_and_validate( - headers: Union[Headers, HeaderTypes], _parsed: bool = False -) -> Headers: - new_headers = [] - seen_content_length = None - saw_transfer_encoding = False - for name, value in headers: - # For headers coming out of the parser, we can safely skip some steps, - # because it always returns bytes and has already run these regexes - # over the data: - if not _parsed: - name = bytesify(name) - value = bytesify(value) - validate(_field_name_re, name, "Illegal header name {!r}", name) - validate(_field_value_re, value, "Illegal header value {!r}", value) - assert isinstance(name, bytes) - assert isinstance(value, bytes) - - raw_name = name - name = name.lower() - if name == b"content-length": - lengths = {length.strip() for length in value.split(b",")} - if len(lengths) != 1: - raise LocalProtocolError("conflicting Content-Length headers") - value = lengths.pop() - validate(_content_length_re, value, "bad Content-Length") - if seen_content_length is None: - seen_content_length = value - new_headers.append((raw_name, name, value)) - elif seen_content_length != value: - raise LocalProtocolError("conflicting Content-Length headers") - elif name == b"transfer-encoding": - # "A server that receives a request message with a transfer coding - # it does not understand SHOULD respond with 501 (Not - # Implemented)."
- # https://tools.ietf.org/html/rfc7230#section-3.3.1 - if saw_transfer_encoding: - raise LocalProtocolError( - "multiple Transfer-Encoding headers", error_status_hint=501 - ) - # "All transfer-coding names are case-insensitive" - # -- https://tools.ietf.org/html/rfc7230#section-4 - value = value.lower() - if value != b"chunked": - raise LocalProtocolError( - "Only Transfer-Encoding: chunked is supported", - error_status_hint=501, - ) - saw_transfer_encoding = True - new_headers.append((raw_name, name, value)) - else: - new_headers.append((raw_name, name, value)) - return Headers(new_headers) - - -def get_comma_header(headers: Headers, name: bytes) -> List[bytes]: - # Should only be used for headers whose value is a list of - # comma-separated, case-insensitive values. - # - # The header name `name` is expected to be lower-case bytes. - # - # Connection: meets these criteria (including case insensitivity). - # - # Content-Length: technically is just a single value (1*DIGIT), but the - # standard makes reference to implementations that do multiple values, and - # using this doesn't hurt. Ditto, case insensitivity doesn't hurt things either - # way. - # - # Transfer-Encoding: is more complex (allows for quoted strings), so - # splitting on , is actually wrong. For example, this is legal: - # - # Transfer-Encoding: foo; options="1,2", chunked - # - # and should be parsed as - # - # foo; options="1,2" - # chunked - # - # but this naive function will parse it as - # - # foo; options="1 - # 2" - # chunked - # - # However, this is okay because the only thing we are going to do with - # any Transfer-Encoding is reject ones that aren't just "chunked", so - # both of these will be treated the same anyway. - # - # Expect: the only legal value is the literal string - # "100-continue". Splitting on commas is harmless. Case insensitive. - # - out: List[bytes] = [] - for _, found_name, found_raw_value in headers._full_items: - if found_name == name: - found_raw_value = found_raw_value.lower() - for found_split_value in found_raw_value.split(b","): - found_split_value = found_split_value.strip() - if found_split_value: - out.append(found_split_value) - return out - - -def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers: - # The header name `name` is expected to be lower-case bytes. - # - # Note that when we store the header we use title casing for the header - # names, in order to match the conventional HTTP header style. - # - # Simply calling `.title()` is a blunt approach, but it's correct - # here given the cases where we're using `set_comma_header`... - # - # Connection, Content-Length, Transfer-Encoding. - new_headers: List[Tuple[bytes, bytes]] = [] - for found_raw_name, found_name, found_raw_value in headers._full_items: - if found_name != name: - new_headers.append((found_raw_name, found_raw_value)) - for new_value in new_values: - new_headers.append((name.title(), new_value)) - return normalize_and_validate(new_headers) - - -def has_expect_100_continue(request: "Request") -> bool: - # https://tools.ietf.org/html/rfc7231#section-5.1.1 - # "A server that receives a 100-continue expectation in an HTTP/1.0 request - # MUST ignore that expectation."
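    # So for an HTTP/1.0 peer this returns False unconditionally; otherwise
    # we just look for the literal token, i.e. a request carrying
    # "Expect: 100-continue" is the only thing that makes this return True.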
- if request.http_version < b"1.1": - return False - expect = get_comma_header(request.headers, b"expect") - return b"100-continue" in expect diff --git a/server/venv/Lib/site-packages/h11/_readers.py b/server/venv/Lib/site-packages/h11/_readers.py deleted file mode 100644 index 08a9574..0000000 --- a/server/venv/Lib/site-packages/h11/_readers.py +++ /dev/null @@ -1,247 +0,0 @@ -# Code to read HTTP data -# -# Strategy: each reader is a callable which takes a ReceiveBuffer object, and -# either: -# 1) consumes some of it and returns an Event -# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() -# and it might raise a LocalProtocolError, so simpler just to always use -# this) -# 3) returns None, meaning "I need more data" -# -# If they have a .read_eof attribute, then this will be called if an EOF is -# received -- but this is optional. Either way, the actual ConnectionClosed -# event will be generated afterwards. -# -# READERS is a dict describing how to pick a reader. It maps states to either: -# - a reader -# - or, for body readers, a dict of per-framing reader factories - -import re -from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union - -from ._abnf import chunk_header, header_field, request_line, status_line -from ._events import Data, EndOfMessage, InformationalResponse, Request, Response -from ._receivebuffer import ReceiveBuffer -from ._state import ( - CLIENT, - CLOSED, - DONE, - IDLE, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, -) -from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate - -__all__ = ["READERS"] - -header_field_re = re.compile(header_field.encode("ascii")) -obs_fold_re = re.compile(rb"[ \t]+") - - -def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: - it = iter(lines) - last: Optional[bytes] = None - for line in it: - match = obs_fold_re.match(line) - if match: - if last is None: - raise LocalProtocolError("continuation line at start of headers") - if not isinstance(last, bytearray): - # Cast to a mutable type, avoiding copy on append to ensure O(n) time - last = bytearray(last) - last += b" " - last += line[match.end() :] - else: - if last is not None: - yield last - last = line - if last is not None: - yield last - - -def _decode_header_lines( - lines: Iterable[bytes], -) -> Iterable[Tuple[bytes, bytes]]: - for line in _obsolete_line_fold(lines): - matches = validate(header_field_re, line, "illegal header line: {!r}", line) - yield (matches["field_name"], matches["field_value"]) - - -request_line_re = re.compile(request_line.encode("ascii")) - - -def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: - lines = buf.maybe_extract_lines() - if lines is None: - if buf.is_next_line_obviously_invalid_request_line(): - raise LocalProtocolError("illegal request line") - return None - if not lines: - raise LocalProtocolError("no request line received") - matches = validate( - request_line_re, lines[0], "illegal request line: {!r}", lines[0] - ) - return Request( - headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches - ) - - -status_line_re = re.compile(status_line.encode("ascii")) - - -def maybe_read_from_SEND_RESPONSE_server( - buf: ReceiveBuffer, -) -> Union[InformationalResponse, Response, None]: - lines = buf.maybe_extract_lines() - if lines is None: - if buf.is_next_line_obviously_invalid_request_line(): - raise LocalProtocolError("illegal request line") - return None - if not lines: - raise LocalProtocolError("no response 
line received") - matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) - http_version = ( - b"1.1" if matches["http_version"] is None else matches["http_version"] - ) - reason = b"" if matches["reason"] is None else matches["reason"] - status_code = int(matches["status_code"]) - class_: Union[Type[InformationalResponse], Type[Response]] = ( - InformationalResponse if status_code < 200 else Response - ) - return class_( - headers=list(_decode_header_lines(lines[1:])), - _parsed=True, - status_code=status_code, - reason=reason, - http_version=http_version, - ) - - -class ContentLengthReader: - def __init__(self, length: int) -> None: - self._length = length - self._remaining = length - - def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: - if self._remaining == 0: - return EndOfMessage() - data = buf.maybe_extract_at_most(self._remaining) - if data is None: - return None - self._remaining -= len(data) - return Data(data=data) - - def read_eof(self) -> NoReturn: - raise RemoteProtocolError( - "peer closed connection without sending complete message body " - "(received {} bytes, expected {})".format( - self._length - self._remaining, self._length - ) - ) - - -chunk_header_re = re.compile(chunk_header.encode("ascii")) - - -class ChunkedReader: - def __init__(self) -> None: - self._bytes_in_chunk = 0 - # After reading a chunk, we have to throw away the trailing \r\n; if - # this is >0 then we discard that many bytes before resuming regular - # de-chunkification. - self._bytes_to_discard = 0 - self._reading_trailer = False - - def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: - if self._reading_trailer: - lines = buf.maybe_extract_lines() - if lines is None: - return None - return EndOfMessage(headers=list(_decode_header_lines(lines))) - if self._bytes_to_discard > 0: - data = buf.maybe_extract_at_most(self._bytes_to_discard) - if data is None: - return None - self._bytes_to_discard -= len(data) - if self._bytes_to_discard > 0: - return None - # else, fall through and read some more - assert self._bytes_to_discard == 0 - if self._bytes_in_chunk == 0: - # We need to refill our chunk count - chunk_header = buf.maybe_extract_next_line() - if chunk_header is None: - return None - matches = validate( - chunk_header_re, - chunk_header, - "illegal chunk header: {!r}", - chunk_header, - ) - # XX FIXME: we discard chunk extensions. Does anyone care? 
- self._bytes_in_chunk = int(matches["chunk_size"], base=16) - if self._bytes_in_chunk == 0: - self._reading_trailer = True - return self(buf) - chunk_start = True - else: - chunk_start = False - assert self._bytes_in_chunk > 0 - data = buf.maybe_extract_at_most(self._bytes_in_chunk) - if data is None: - return None - self._bytes_in_chunk -= len(data) - if self._bytes_in_chunk == 0: - self._bytes_to_discard = 2 - chunk_end = True - else: - chunk_end = False - return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) - - def read_eof(self) -> NoReturn: - raise RemoteProtocolError( - "peer closed connection without sending complete message body " - "(incomplete chunked read)" - ) - - -class Http10Reader: - def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: - data = buf.maybe_extract_at_most(999999999) - if data is None: - return None - return Data(data=data) - - def read_eof(self) -> EndOfMessage: - return EndOfMessage() - - -def expect_nothing(buf: ReceiveBuffer) -> None: - if buf: - raise LocalProtocolError("Got data when expecting EOF") - return None - - -ReadersType = Dict[ - Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]], - Union[Callable[..., Any], Dict[str, Callable[..., Any]]], -] - -READERS: ReadersType = { - (CLIENT, IDLE): maybe_read_from_IDLE_client, - (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, - (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, - (CLIENT, DONE): expect_nothing, - (CLIENT, MUST_CLOSE): expect_nothing, - (CLIENT, CLOSED): expect_nothing, - (SERVER, DONE): expect_nothing, - (SERVER, MUST_CLOSE): expect_nothing, - (SERVER, CLOSED): expect_nothing, - SEND_BODY: { - "chunked": ChunkedReader, - "content-length": ContentLengthReader, - "http/1.0": Http10Reader, - }, -} diff --git a/server/venv/Lib/site-packages/h11/_receivebuffer.py b/server/venv/Lib/site-packages/h11/_receivebuffer.py deleted file mode 100644 index e5c4e08..0000000 --- a/server/venv/Lib/site-packages/h11/_receivebuffer.py +++ /dev/null @@ -1,153 +0,0 @@ -import re -import sys -from typing import List, Optional, Union - -__all__ = ["ReceiveBuffer"] - - -# Operations we want to support: -# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), -# or wait until there is one -# - read at-most-N bytes -# Goals: -# - on average, do this fast -# - worst case, do this in O(n) where n is the number of bytes processed -# Plan: -# - store bytearray, offset, how far we've searched for a separator token -# - use the how-far-we've-searched data to avoid rescanning -# - while doing a stream of uninterrupted processing, advance offset instead -# of constantly copying -# WARNING: -# - I haven't benchmarked or profiled any of this yet. -# -# Note that starting in Python 3.4, deleting the initial n bytes from a -# bytearray is amortized O(n), thanks to some excellent work by Antoine -# Martin: -# -# https://bugs.python.org/issue19087 -# -# This means that if we only supported 3.4+, we could get rid of the code here -# involving self._start and self.compress, because it's doing exactly the same -# thing that bytearray now does internally. -# -# BUT unfortunately, we still support 2.7, and reading short segments out of a -# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually -# delete this code. 
Yet: -# -# https://pythonclock.org/ -# -# (Two things to double-check first though: make sure PyPy also has the -# optimization, and benchmark to make sure it's a win, since we do have a -# slightly clever thing where we delay calling compress() until we've -# processed a whole event, which could in theory be slightly more efficient -# than the internal bytearray support.) -blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) - - -class ReceiveBuffer: - def __init__(self) -> None: - self._data = bytearray() - self._next_line_search = 0 - self._multiple_lines_search = 0 - - def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": - self._data += byteslike - return self - - def __bool__(self) -> bool: - return bool(len(self)) - - def __len__(self) -> int: - return len(self._data) - - # for @property unprocessed_data - def __bytes__(self) -> bytes: - return bytes(self._data) - - def _extract(self, count: int) -> bytearray: - # extracting an initial slice of the data buffer and return it - out = self._data[:count] - del self._data[:count] - - self._next_line_search = 0 - self._multiple_lines_search = 0 - - return out - - def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: - """ - Extract a fixed number of bytes from the buffer. - """ - out = self._data[:count] - if not out: - return None - - return self._extract(count) - - def maybe_extract_next_line(self) -> Optional[bytearray]: - """ - Extract the first line, if it is completed in the buffer. - """ - # Only search in buffer space that we've not already looked at. - search_start_index = max(0, self._next_line_search - 1) - partial_idx = self._data.find(b"\r\n", search_start_index) - - if partial_idx == -1: - self._next_line_search = len(self._data) - return None - - # + 2 is to compensate len(b"\r\n") - idx = partial_idx + 2 - - return self._extract(idx) - - def maybe_extract_lines(self) -> Optional[List[bytearray]]: - """ - Extract everything up to the first blank line, and return a list of lines. - """ - # Handle the case where we have an immediate empty line. - if self._data[:1] == b"\n": - self._extract(1) - return [] - - if self._data[:2] == b"\r\n": - self._extract(2) - return [] - - # Only search in buffer space that we've not already looked at. - match = blank_line_regex.search(self._data, self._multiple_lines_search) - if match is None: - self._multiple_lines_search = max(0, len(self._data) - 2) - return None - - # Truncate the buffer and return it. - idx = match.span(0)[-1] - out = self._extract(idx) - lines = out.split(b"\n") - - for line in lines: - if line.endswith(b"\r"): - del line[-1] - - assert lines[-2] == lines[-1] == b"" - - del lines[-2:] - - return lines - - # In theory we should wait until `\r\n` before starting to validate - # incoming data. However it's interesting to detect (very) invalid data - # early given they might not even contain `\r\n` at all (hence only - # timeout will get rid of them). - # This is not a 100% effective detection but more of a cheap sanity check - # allowing for early abort in some useful cases. - # This is especially interesting when peer is messing up with HTTPS and - # sent us a TLS stream where we were expecting plain HTTP given all - # versions of TLS so far start handshake with a 0x16 message type code. 
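# For illustration: a TLS ClientHello record begins with the handshake
# content-type byte 0x16, and 0x16 < 0x21, so the check below rejects an
# accidental TLS stream on its very first byte rather than waiting for a
# timeout to fire.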
- def is_next_line_obviously_invalid_request_line(self) -> bool: - try: - # HTTP header line must not contain non-printable characters - # and should not start with a space - return self._data[0] < 0x21 - except IndexError: - return False diff --git a/server/venv/Lib/site-packages/h11/_state.py b/server/venv/Lib/site-packages/h11/_state.py deleted file mode 100644 index 3593430..0000000 --- a/server/venv/Lib/site-packages/h11/_state.py +++ /dev/null @@ -1,367 +0,0 @@ -################################################################ -# The core state machine -################################################################ -# -# Rule 1: everything that affects the state machine and state transitions must -# live here in this file. As much as possible goes into the table-based -# representation, but for the bits that don't quite fit, the actual code and -# state must nonetheless live here. -# -# Rule 2: this file does not know about what role we're playing; it only knows -# about HTTP request/response cycles in the abstract. This ensures that we -# don't cheat and apply different rules to local and remote parties. -# -# -# Theory of operation -# =================== -# -# Possibly the simplest way to think about this is that we actually have 5 -# different state machines here. Yes, 5. These are: -# -# 1) The client state, with its complicated automaton (see the docs) -# 2) The server state, with its complicated automaton (see the docs) -# 3) The keep-alive state, with possible states {True, False} -# 4) The SWITCH_CONNECT state, with possible states {False, True} -# 5) The SWITCH_UPGRADE state, with possible states {False, True} -# -# For (3)-(5), the first state listed is the initial state. -# -# (1)-(3) are stored explicitly in member variables. The last -# two are stored implicitly in the pending_switch_proposals set as: -# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) -# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) -# -# And each of these machines has two different kinds of transitions: -# -# a) Event-triggered -# b) State-triggered -# -# Event triggered is the obvious thing that you'd think it is: some event -# happens, and if it's the right event at the right time then a transition -# happens. But there are somewhat complicated rules for which machines can -# "see" which events. (As a rule of thumb, if a machine "sees" an event, this -# means two things: the event can affect the machine, and if the machine is -# not in a state where it expects that event then it's an error.) These rules -# are: -# -# 1) The client machine sees all h11.events objects emitted by the client. -# -# 2) The server machine sees all h11.events objects emitted by the server. -# -# It also sees the client's Request event. -# -# And sometimes, server events are annotated with a _SWITCH_* event. For -# example, we can have a (Response, _SWITCH_CONNECT) event, which is -# different from a regular Response event. -# -# 3) The keep-alive machine sees the process_keep_alive_disabled() event -# (which is derived from Request/Response events), and this event -# transitions it from True -> False, or from False -> False. There's no way -# to transition back. -# -# 4&5) The _SWITCH_* machines transition from False->True when we get a -# Request that proposes the relevant type of switch (via -# process_client_switch_proposals), and they go from True->False when we -# get a Response that has no _SWITCH_* annotation. -# -# So that's event-triggered transitions. 
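# Concretely, in terms of the tables defined below: a client Request in
# IDLE is looked up as EVENT_TRIGGERED_TRANSITIONS[CLIENT][IDLE][Request]
# and moves the client to SEND_BODY, while the server machine (which
# also sees the client's Request) moves IDLE -> SEND_RESPONSE via its
# special (Request, CLIENT) key.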
-# -# State-triggered transitions are less standard. What they do here is couple -# the machines together. The way this works is, when certain *joint* -# configurations of states are achieved, then we automatically transition to a -# new *joint* state. So, for example, if we're ever in a joint state with -# -# client: DONE -# keep-alive: False -# -# then the client state immediately transitions to: -# -# client: MUST_CLOSE -# -# This is fundamentally different from an event-based transition, because it -# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state -# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive -# transitioned True -> False. Either way, once this precondition is satisfied, -# this transition is immediately triggered. -# -# What if two conflicting state-based transitions get enabled at the same -# time? In practice there's only one case where this arises (client DONE -> -# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by -# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. -# -# Implementation -# -------------- -# -# The event-triggered transitions for the server and client machines are all -# stored explicitly in a table. Ditto for the state-triggered transitions that -# involve just the server and client state. -# -# The transitions for the other machines, and the state-triggered transitions -# that involve the other machines, are written out as explicit Python code. -# -# It'd be nice if there were some cleaner way to do all this. This isn't -# *too* terrible, but I feel like it could probably be better. -# -# WARNING -# ------- -# -# The script that generates the state machine diagrams for the docs knows how -# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS -# tables. But it can't automatically read the transitions that are written -# directly in Python code. So if you touch those, you need to also update the -# script to keep it in sync! -from typing import cast, Dict, Optional, Set, Tuple, Type, Union - -from ._events import * -from ._util import LocalProtocolError, Sentinel - -# Everything in __all__ gets re-exported as part of the h11 public API. 
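# Note that every role and state below is a Sentinel *type* (see _util),
# so code compares them by identity, e.g. "conn.our_state is DONE", and
# they double as hashable keys in the transition tables.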
-__all__ = [ - "CLIENT", - "SERVER", - "IDLE", - "SEND_RESPONSE", - "SEND_BODY", - "DONE", - "MUST_CLOSE", - "CLOSED", - "MIGHT_SWITCH_PROTOCOL", - "SWITCHED_PROTOCOL", - "ERROR", -] - - -class CLIENT(Sentinel, metaclass=Sentinel): - pass - - -class SERVER(Sentinel, metaclass=Sentinel): - pass - - -# States -class IDLE(Sentinel, metaclass=Sentinel): - pass - - -class SEND_RESPONSE(Sentinel, metaclass=Sentinel): - pass - - -class SEND_BODY(Sentinel, metaclass=Sentinel): - pass - - -class DONE(Sentinel, metaclass=Sentinel): - pass - - -class MUST_CLOSE(Sentinel, metaclass=Sentinel): - pass - - -class CLOSED(Sentinel, metaclass=Sentinel): - pass - - -class ERROR(Sentinel, metaclass=Sentinel): - pass - - -# Switch types -class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): - pass - - -class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): - pass - - -class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): - pass - - -class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): - pass - - -EventTransitionType = Dict[ - Type[Sentinel], - Dict[ - Type[Sentinel], - Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]], - ], -] - -EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { - CLIENT: { - IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, - SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, - DONE: {ConnectionClosed: CLOSED}, - MUST_CLOSE: {ConnectionClosed: CLOSED}, - CLOSED: {ConnectionClosed: CLOSED}, - MIGHT_SWITCH_PROTOCOL: {}, - SWITCHED_PROTOCOL: {}, - ERROR: {}, - }, - SERVER: { - IDLE: { - ConnectionClosed: CLOSED, - Response: SEND_BODY, - # Special case: server sees client Request events, in this form - (Request, CLIENT): SEND_RESPONSE, - }, - SEND_RESPONSE: { - InformationalResponse: SEND_RESPONSE, - Response: SEND_BODY, - (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, - (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, - }, - SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, - DONE: {ConnectionClosed: CLOSED}, - MUST_CLOSE: {ConnectionClosed: CLOSED}, - CLOSED: {ConnectionClosed: CLOSED}, - SWITCHED_PROTOCOL: {}, - ERROR: {}, - }, -} - -StateTransitionType = Dict[ - Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]] -] - -# NB: there are also some special-case state-triggered transitions hard-coded -# into _fire_state_triggered_transitions below. -STATE_TRIGGERED_TRANSITIONS: StateTransitionType = { - # (Client state, Server state) -> new states - # Protocol negotiation - (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, - # Socket shutdown - (CLOSED, DONE): {SERVER: MUST_CLOSE}, - (CLOSED, IDLE): {SERVER: MUST_CLOSE}, - (ERROR, DONE): {SERVER: MUST_CLOSE}, - (DONE, CLOSED): {CLIENT: MUST_CLOSE}, - (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, - (DONE, ERROR): {CLIENT: MUST_CLOSE}, -} - - -class ConnectionState: - def __init__(self) -> None: - # Extra bits of state that don't quite fit into the state model. - - # If this is False then it enables the automatic DONE -> MUST_CLOSE - # transition. Don't set this directly; call .keep_alive_disabled() - self.keep_alive = True - - # This is a subset of {UPGRADE, CONNECT}, containing the proposals - # made by the client for switching protocols. 
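# For example, a CONNECT request proposes _SWITCH_CONNECT, and a request
# carrying an Upgrade header proposes _SWITCH_UPGRADE.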
- self.pending_switch_proposals: Set[Type[Sentinel]] = set() - - self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE} - - def process_error(self, role: Type[Sentinel]) -> None: - self.states[role] = ERROR - self._fire_state_triggered_transitions() - - def process_keep_alive_disabled(self) -> None: - self.keep_alive = False - self._fire_state_triggered_transitions() - - def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None: - self.pending_switch_proposals.add(switch_event) - self._fire_state_triggered_transitions() - - def process_event( - self, - role: Type[Sentinel], - event_type: Type[Event], - server_switch_event: Optional[Type[Sentinel]] = None, - ) -> None: - _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type - if server_switch_event is not None: - assert role is SERVER - if server_switch_event not in self.pending_switch_proposals: - raise LocalProtocolError( - "Received server {} event without a pending proposal".format( - server_switch_event - ) - ) - _event_type = (event_type, server_switch_event) - if server_switch_event is None and _event_type is Response: - self.pending_switch_proposals = set() - self._fire_event_triggered_transitions(role, _event_type) - # Special case: the server state does get to see Request - # events. - if _event_type is Request: - assert role is CLIENT - self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) - self._fire_state_triggered_transitions() - - def _fire_event_triggered_transitions( - self, - role: Type[Sentinel], - event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], - ) -> None: - state = self.states[role] - try: - new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] - except KeyError: - event_type = cast(Type[Event], event_type) - raise LocalProtocolError( - "can't handle event type {} when role={} and state={}".format( - event_type.__name__, role, self.states[role] - ) - ) from None - self.states[role] = new_state - - def _fire_state_triggered_transitions(self) -> None: - # We apply these rules repeatedly until converging on a fixed point - while True: - start_states = dict(self.states) - - # It could happen that both these special-case transitions are - # enabled at the same time: - # - # DONE -> MIGHT_SWITCH_PROTOCOL - # DONE -> MUST_CLOSE - # - # For example, this will always be true of a HTTP/1.0 client - # requesting CONNECT. If this happens, the protocol switch takes - # priority. From there the client will either go to - # SWITCHED_PROTOCOL, in which case it's none of our business when - # they close the connection, or else the server will deny the - # request, in which case the client will go back to DONE and then - # from there to MUST_CLOSE. - if self.pending_switch_proposals: - if self.states[CLIENT] is DONE: - self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL - - if not self.pending_switch_proposals: - if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: - self.states[CLIENT] = DONE - - if not self.keep_alive: - for role in (CLIENT, SERVER): - if self.states[role] is DONE: - self.states[role] = MUST_CLOSE - - # Tabular state-triggered transitions - joint_state = (self.states[CLIENT], self.states[SERVER]) - changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) - self.states.update(changes) - - if self.states == start_states: - # Fixed point reached - return - - def start_next_cycle(self) -> None: - if self.states != {CLIENT: DONE, SERVER: DONE}: - raise LocalProtocolError( - "not in a reusable state. 
self.states={}".format(self.states) - ) - # Can't reach DONE/DONE with any of these active, but still, let's be - # sure. - assert self.keep_alive - assert not self.pending_switch_proposals - self.states = {CLIENT: IDLE, SERVER: IDLE} diff --git a/server/venv/Lib/site-packages/h11/_util.py b/server/venv/Lib/site-packages/h11/_util.py deleted file mode 100644 index 6718445..0000000 --- a/server/venv/Lib/site-packages/h11/_util.py +++ /dev/null @@ -1,135 +0,0 @@ -from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union - -__all__ = [ - "ProtocolError", - "LocalProtocolError", - "RemoteProtocolError", - "validate", - "bytesify", -] - - -class ProtocolError(Exception): - """Exception indicating a violation of the HTTP/1.1 protocol. - - This as an abstract base class, with two concrete base classes: - :exc:`LocalProtocolError`, which indicates that you tried to do something - that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which - indicates that the remote peer tried to do something that HTTP/1.1 says is - illegal. See :ref:`error-handling` for details. - - In addition to the normal :exc:`Exception` features, it has one attribute: - - .. attribute:: error_status_hint - - This gives a suggestion as to what status code a server might use if - this error occurred as part of a request. - - For a :exc:`RemoteProtocolError`, this is useful as a suggestion for - how you might want to respond to a misbehaving peer, if you're - implementing a server. - - For a :exc:`LocalProtocolError`, this can be taken as a suggestion for - how your peer might have responded to *you* if h11 had allowed you to - continue. - - The default is 400 Bad Request, a generic catch-all for protocol - violations. - - """ - - def __init__(self, msg: str, error_status_hint: int = 400) -> None: - if type(self) is ProtocolError: - raise TypeError("tried to directly instantiate ProtocolError") - Exception.__init__(self, msg) - self.error_status_hint = error_status_hint - - -# Strategy: there are a number of public APIs where a LocalProtocolError can -# be raised (send(), all the different event constructors, ...), and only one -# public API where RemoteProtocolError can be raised -# (receive_data()). Therefore we always raise LocalProtocolError internally, -# and then receive_data will translate this into a RemoteProtocolError. -# -# Internally: -# LocalProtocolError is the generic "ProtocolError". -# Externally: -# LocalProtocolError is for local errors and RemoteProtocolError is for -# remote errors. -class LocalProtocolError(ProtocolError): - def _reraise_as_remote_protocol_error(self) -> NoReturn: - # After catching a LocalProtocolError, use this method to re-raise it - # as a RemoteProtocolError. This method must be called from inside an - # except: block. - # - # An easy way to get an equivalent RemoteProtocolError is just to - # modify 'self' in place. - self.__class__ = RemoteProtocolError # type: ignore - # But the re-raising is somewhat non-trivial -- you might think that - # now that we've modified the in-flight exception object, that just - # doing 'raise' to re-raise it would be enough. But it turns out that - # this doesn't work, because Python tracks the exception type - # (exc_info[0]) separately from the exception object (exc_info[1]), - # and we only modified the latter. So we really do need to re-raise - # the new type explicitly. 
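# (As a sketch, this is roughly how Connection.next_event uses it when
# parsing received bytes fails:
#
#     try:
#         ...  # parse what the peer sent
#     except LocalProtocolError as exc:
#         exc._reraise_as_remote_protocol_error()
#
# so the caller sees a RemoteProtocolError for the peer's mistake.)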
- # On py3, the traceback is part of the exception object, so our - # in-place modification preserved it and we can just re-raise: - raise self - - -class RemoteProtocolError(ProtocolError): - pass - - -def validate( - regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any -) -> Dict[str, bytes]: - match = regex.fullmatch(data) - if not match: - if format_args: - msg = msg.format(*format_args) - raise LocalProtocolError(msg) - return match.groupdict() - - -# Sentinel values -# -# - Inherit identity-based comparison and hashing from object -# - Have a nice repr -# - Have a *bonus property*: type(sentinel) is sentinel -# -# The bonus property is useful if you want to take the return value from -# next_event() and do some sort of dispatch based on type(event). - -_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel") - - -class Sentinel(type): - def __new__( - cls: Type[_T_Sentinel], - name: str, - bases: Tuple[type, ...], - namespace: Dict[str, Any], - **kwds: Any - ) -> _T_Sentinel: - assert bases == (Sentinel,) - v = super().__new__(cls, name, bases, namespace, **kwds) - v.__class__ = v # type: ignore - return v - - def __repr__(self) -> str: - return self.__name__ - - -# Used for methods, request targets, HTTP versions, header names, and header -# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always -# returns bytes. -def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes: - # Fast-path: - if type(s) is bytes: - return s - if isinstance(s, str): - s = s.encode("ascii") - if isinstance(s, int): - raise TypeError("expected bytes-like object, not int") - return bytes(s) diff --git a/server/venv/Lib/site-packages/h11/_version.py b/server/venv/Lib/site-packages/h11/_version.py deleted file mode 100644 index 4c89113..0000000 --- a/server/venv/Lib/site-packages/h11/_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# This file must be kept very simple, because it is consumed from several -# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc. - -# We use a simple scheme: -# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev -# where the +dev versions are never released into the wild, they're just what -# we stick into the VCS in between releases. -# -# This is compatible with PEP 440: -# http://legacy.python.org/dev/peps/pep-0440/ -# via the use of the "local suffix" "+dev", which is disallowed on index -# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we -# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before* -# 1.0.0.) - -__version__ = "0.14.0" diff --git a/server/venv/Lib/site-packages/h11/_writers.py b/server/venv/Lib/site-packages/h11/_writers.py deleted file mode 100644 index 939cdb9..0000000 --- a/server/venv/Lib/site-packages/h11/_writers.py +++ /dev/null @@ -1,145 +0,0 @@ -# Code to read HTTP data -# -# Strategy: each writer takes an event + a write-some-bytes function, which is -# calls. -# -# WRITERS is a dict describing how to pick a reader. 
It maps states to either: -# - a writer -# - or, for body writers, a dict of framin-dependent writer factories - -from typing import Any, Callable, Dict, List, Tuple, Type, Union - -from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response -from ._headers import Headers -from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER -from ._util import LocalProtocolError, Sentinel - -__all__ = ["WRITERS"] - -Writer = Callable[[bytes], Any] - - -def write_headers(headers: Headers, write: Writer) -> None: - # "Since the Host field-value is critical information for handling a - # request, a user agent SHOULD generate Host as the first header field - # following the request-line." - RFC 7230 - raw_items = headers._full_items - for raw_name, name, value in raw_items: - if name == b"host": - write(b"%s: %s\r\n" % (raw_name, value)) - for raw_name, name, value in raw_items: - if name != b"host": - write(b"%s: %s\r\n" % (raw_name, value)) - write(b"\r\n") - - -def write_request(request: Request, write: Writer) -> None: - if request.http_version != b"1.1": - raise LocalProtocolError("I only send HTTP/1.1") - write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target)) - write_headers(request.headers, write) - - -# Shared between InformationalResponse and Response -def write_any_response( - response: Union[InformationalResponse, Response], write: Writer -) -> None: - if response.http_version != b"1.1": - raise LocalProtocolError("I only send HTTP/1.1") - status_bytes = str(response.status_code).encode("ascii") - # We don't bother sending ascii status messages like "OK"; they're - # optional and ignored by the protocol. (But the space after the numeric - # status code is mandatory.) - # - # XX FIXME: could at least make an effort to pull out the status message - # from stdlib's http.HTTPStatus table. Or maybe just steal their enums - # (either by import or copy/paste). We already accept them as status codes - # since they're of type IntEnum < int. - write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason)) - write_headers(response.headers, write) - - -class BodyWriter: - def __call__(self, event: Event, write: Writer) -> None: - if type(event) is Data: - self.send_data(event.data, write) - elif type(event) is EndOfMessage: - self.send_eom(event.headers, write) - else: # pragma: no cover - assert False - - def send_data(self, data: bytes, write: Writer) -> None: - pass - - def send_eom(self, headers: Headers, write: Writer) -> None: - pass - - -# -# These are all careful not to do anything to 'data' except call len(data) and -# write(data). This allows us to transparently pass-through funny objects, -# like placeholder objects referring to files on disk that will be sent via -# sendfile(2). -# -class ContentLengthWriter(BodyWriter): - def __init__(self, length: int) -> None: - self._length = length - - def send_data(self, data: bytes, write: Writer) -> None: - self._length -= len(data) - if self._length < 0: - raise LocalProtocolError("Too much data for declared Content-Length") - write(data) - - def send_eom(self, headers: Headers, write: Writer) -> None: - if self._length != 0: - raise LocalProtocolError("Too little data for declared Content-Length") - if headers: - raise LocalProtocolError("Content-Length and trailers don't mix") - - -class ChunkedWriter(BodyWriter): - def send_data(self, data: bytes, write: Writer) -> None: - # if we encoded 0-length data in the naive way, it would look like an - # end-of-message. 
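# (An empty chunk would be written as b"0\r\n\r\n", which is exactly the
# end-of-body marker that send_eom produces below, so zero-length Data
# events are skipped instead of being encoded.)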
- if not data: - return - write(b"%x\r\n" % len(data)) - write(data) - write(b"\r\n") - - def send_eom(self, headers: Headers, write: Writer) -> None: - write(b"0\r\n") - write_headers(headers, write) - - -class Http10Writer(BodyWriter): - def send_data(self, data: bytes, write: Writer) -> None: - write(data) - - def send_eom(self, headers: Headers, write: Writer) -> None: - if headers: - raise LocalProtocolError("can't send trailers to HTTP/1.0 client") - # no need to close the socket ourselves, that will be taken care of by - # Connection: close machinery - - -WritersType = Dict[ - Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]], - Union[ - Dict[str, Type[BodyWriter]], - Callable[[Union[InformationalResponse, Response], Writer], None], - Callable[[Request, Writer], None], - ], -] - -WRITERS: WritersType = { - (CLIENT, IDLE): write_request, - (SERVER, IDLE): write_any_response, - (SERVER, SEND_RESPONSE): write_any_response, - SEND_BODY: { - "chunked": ChunkedWriter, - "content-length": ContentLengthWriter, - "http/1.0": Http10Writer, - }, -} diff --git a/server/venv/Lib/site-packages/h11/py.typed b/server/venv/Lib/site-packages/h11/py.typed deleted file mode 100644 index f5642f7..0000000 --- a/server/venv/Lib/site-packages/h11/py.typed +++ /dev/null @@ -1 +0,0 @@ -Marker diff --git a/server/venv/Lib/site-packages/h11/tests/__init__.py b/server/venv/Lib/site-packages/h11/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/h11/tests/data/test-file b/server/venv/Lib/site-packages/h11/tests/data/test-file deleted file mode 100644 index d0be0a6..0000000 --- a/server/venv/Lib/site-packages/h11/tests/data/test-file +++ /dev/null @@ -1 +0,0 @@ -92b12bc045050b55b848d37167a1a63947c364579889ce1d39788e45e9fac9e5 diff --git a/server/venv/Lib/site-packages/h11/tests/helpers.py b/server/venv/Lib/site-packages/h11/tests/helpers.py deleted file mode 100644 index 571be44..0000000 --- a/server/venv/Lib/site-packages/h11/tests/helpers.py +++ /dev/null @@ -1,101 +0,0 @@ -from typing import cast, List, Type, Union, ValuesView - -from .._connection import Connection, NEED_DATA, PAUSED -from .._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .._state import CLIENT, CLOSED, DONE, MUST_CLOSE, SERVER -from .._util import Sentinel - -try: - from typing import Literal -except ImportError: - from typing_extensions import Literal # type: ignore - - -def get_all_events(conn: Connection) -> List[Event]: - got_events = [] - while True: - event = conn.next_event() - if event in (NEED_DATA, PAUSED): - break - event = cast(Event, event) - got_events.append(event) - if type(event) is ConnectionClosed: - break - return got_events - - -def receive_and_get(conn: Connection, data: bytes) -> List[Event]: - conn.receive_data(data) - return get_all_events(conn) - - -# Merges adjacent Data events, converts payloads to bytestrings, and removes -# chunk boundaries. 
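# For example, [Data(data=b"hel", chunk_start=True), Data(data=b"lo",
# chunk_end=True)] normalizes to the single event
# Data(data=b"hello", chunk_start=False, chunk_end=False).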
-def normalize_data_events(in_events: List[Event]) -> List[Event]: - out_events: List[Event] = [] - for event in in_events: - if type(event) is Data: - event = Data(data=bytes(event.data), chunk_start=False, chunk_end=False) - if out_events and type(out_events[-1]) is type(event) is Data: - out_events[-1] = Data( - data=out_events[-1].data + event.data, - chunk_start=out_events[-1].chunk_start, - chunk_end=out_events[-1].chunk_end, - ) - else: - out_events.append(event) - return out_events - - -# Given that we want to write tests that push some events through a Connection -# and check that its state updates appropriately... we might as make a habit -# of pushing them through two Connections with a fake network link in -# between. -class ConnectionPair: - def __init__(self) -> None: - self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)} - self.other = {CLIENT: SERVER, SERVER: CLIENT} - - @property - def conns(self) -> ValuesView[Connection]: - return self.conn.values() - - # expect="match" if expect=send_events; expect=[...] to say what expected - def send( - self, - role: Type[Sentinel], - send_events: Union[List[Event], Event], - expect: Union[List[Event], Event, Literal["match"]] = "match", - ) -> bytes: - if not isinstance(send_events, list): - send_events = [send_events] - data = b"" - closed = False - for send_event in send_events: - new_data = self.conn[role].send(send_event) - if new_data is None: - closed = True - else: - data += new_data - # send uses b"" to mean b"", and None to mean closed - # receive uses b"" to mean closed, and None to mean "try again" - # so we have to translate between the two conventions - if data: - self.conn[self.other[role]].receive_data(data) - if closed: - self.conn[self.other[role]].receive_data(b"") - got_events = get_all_events(self.conn[self.other[role]]) - if expect == "match": - expect = send_events - if not isinstance(expect, list): - expect = [expect] - assert got_events == expect - return data diff --git a/server/venv/Lib/site-packages/h11/tests/test_against_stdlib_http.py b/server/venv/Lib/site-packages/h11/tests/test_against_stdlib_http.py deleted file mode 100644 index d2ee131..0000000 --- a/server/venv/Lib/site-packages/h11/tests/test_against_stdlib_http.py +++ /dev/null @@ -1,115 +0,0 @@ -import json -import os.path -import socket -import socketserver -import threading -from contextlib import closing, contextmanager -from http.server import SimpleHTTPRequestHandler -from typing import Callable, Generator -from urllib.request import urlopen - -import h11 - - -@contextmanager -def socket_server( - handler: Callable[..., socketserver.BaseRequestHandler] -) -> Generator[socketserver.TCPServer, None, None]: - httpd = socketserver.TCPServer(("127.0.0.1", 0), handler) - thread = threading.Thread( - target=httpd.serve_forever, kwargs={"poll_interval": 0.01} - ) - thread.daemon = True - try: - thread.start() - yield httpd - finally: - httpd.shutdown() - - -test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file") -with open(test_file_path, "rb") as f: - test_file_data = f.read() - - -class SingleMindedRequestHandler(SimpleHTTPRequestHandler): - def translate_path(self, path: str) -> str: - return test_file_path - - -def test_h11_as_client() -> None: - with socket_server(SingleMindedRequestHandler) as httpd: - with closing(socket.create_connection(httpd.server_address)) as s: - c = h11.Connection(h11.CLIENT) - - s.sendall( - c.send( # type: ignore[arg-type] - h11.Request( - method="GET", target="/foo", 
headers=[("Host", "localhost")] - ) - ) - ) - s.sendall(c.send(h11.EndOfMessage())) # type: ignore[arg-type] - - data = bytearray() - while True: - event = c.next_event() - print(event) - if event is h11.NEED_DATA: - # Use a small read buffer to make things more challenging - # and exercise more paths :-) - c.receive_data(s.recv(10)) - continue - if type(event) is h11.Response: - assert event.status_code == 200 - if type(event) is h11.Data: - data += event.data - if type(event) is h11.EndOfMessage: - break - assert bytes(data) == test_file_data - - -class H11RequestHandler(socketserver.BaseRequestHandler): - def handle(self) -> None: - with closing(self.request) as s: - c = h11.Connection(h11.SERVER) - request = None - while True: - event = c.next_event() - if event is h11.NEED_DATA: - # Use a small read buffer to make things more challenging - # and exercise more paths :-) - c.receive_data(s.recv(10)) - continue - if type(event) is h11.Request: - request = event - if type(event) is h11.EndOfMessage: - break - assert request is not None - info = json.dumps( - { - "method": request.method.decode("ascii"), - "target": request.target.decode("ascii"), - "headers": { - name.decode("ascii"): value.decode("ascii") - for (name, value) in request.headers - }, - } - ) - s.sendall(c.send(h11.Response(status_code=200, headers=[]))) # type: ignore[arg-type] - s.sendall(c.send(h11.Data(data=info.encode("ascii")))) - s.sendall(c.send(h11.EndOfMessage())) - - -def test_h11_as_server() -> None: - with socket_server(H11RequestHandler) as httpd: - host, port = httpd.server_address - url = "http://{}:{}/some-path".format(host, port) - with closing(urlopen(url)) as f: - assert f.getcode() == 200 - data = f.read() - info = json.loads(data.decode("ascii")) - print(info) - assert info["method"] == "GET" - assert info["target"] == "/some-path" - assert "urllib" in info["headers"]["user-agent"] diff --git a/server/venv/Lib/site-packages/h11/tests/test_connection.py b/server/venv/Lib/site-packages/h11/tests/test_connection.py deleted file mode 100644 index 73a27b9..0000000 --- a/server/venv/Lib/site-packages/h11/tests/test_connection.py +++ /dev/null @@ -1,1122 +0,0 @@ -from typing import Any, cast, Dict, List, Optional, Tuple, Type - -import pytest - -from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED -from .._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .._state import ( - CLIENT, - CLOSED, - DONE, - ERROR, - IDLE, - MIGHT_SWITCH_PROTOCOL, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, - SWITCHED_PROTOCOL, -) -from .._util import LocalProtocolError, RemoteProtocolError, Sentinel -from .helpers import ConnectionPair, get_all_events, receive_and_get - - -def test__keep_alive() -> None: - assert _keep_alive( - Request(method="GET", target="/", headers=[("Host", "Example.com")]) - ) - assert not _keep_alive( - Request( - method="GET", - target="/", - headers=[("Host", "Example.com"), ("Connection", "close")], - ) - ) - assert not _keep_alive( - Request( - method="GET", - target="/", - headers=[("Host", "Example.com"), ("Connection", "a, b, cLOse, foo")], - ) - ) - assert not _keep_alive( - Request(method="GET", target="/", headers=[], http_version="1.0") # type: ignore[arg-type] - ) - - assert _keep_alive(Response(status_code=200, headers=[])) # type: ignore[arg-type] - assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")])) - assert not _keep_alive( - 
Response(status_code=200, headers=[("Connection", "a, b, cLOse, foo")]) - ) - assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) # type: ignore[arg-type] - - -def test__body_framing() -> None: - def headers(cl: Optional[int], te: bool) -> List[Tuple[str, str]]: - headers = [] - if cl is not None: - headers.append(("Content-Length", str(cl))) - if te: - headers.append(("Transfer-Encoding", "chunked")) - return headers - - def resp( - status_code: int = 200, cl: Optional[int] = None, te: bool = False - ) -> Response: - return Response(status_code=status_code, headers=headers(cl, te)) - - def req(cl: Optional[int] = None, te: bool = False) -> Request: - h = headers(cl, te) - h += [("Host", "example.com")] - return Request(method="GET", target="/", headers=h) - - # Special cases where the headers are ignored: - for kwargs in [{}, {"cl": 100}, {"te": True}, {"cl": 100, "te": True}]: - kwargs = cast(Dict[str, Any], kwargs) - for meth, r in [ - (b"HEAD", resp(**kwargs)), - (b"GET", resp(status_code=204, **kwargs)), - (b"GET", resp(status_code=304, **kwargs)), - ]: - assert _body_framing(meth, r) == ("content-length", (0,)) - - # Transfer-encoding - for kwargs in [{"te": True}, {"cl": 100, "te": True}]: - kwargs = cast(Dict[str, Any], kwargs) - for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: # type: ignore - assert _body_framing(meth, r) == ("chunked", ()) - - # Content-Length - for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: # type: ignore - assert _body_framing(meth, r) == ("content-length", (100,)) - - # No headers - assert _body_framing(None, req()) == ("content-length", (0,)) # type: ignore - assert _body_framing(b"GET", resp()) == ("http/1.0", ()) - - -def test_Connection_basics_and_content_length() -> None: - with pytest.raises(ValueError): - Connection("CLIENT") # type: ignore - - p = ConnectionPair() - assert p.conn[CLIENT].our_role is CLIENT - assert p.conn[CLIENT].their_role is SERVER - assert p.conn[SERVER].our_role is SERVER - assert p.conn[SERVER].their_role is CLIENT - - data = p.send( - CLIENT, - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Content-Length", "10")], - ), - ) - assert data == ( - b"GET / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 10\r\n\r\n" - ) - - for conn in p.conns: - assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - assert p.conn[CLIENT].our_state is SEND_BODY - assert p.conn[CLIENT].their_state is SEND_RESPONSE - assert p.conn[SERVER].our_state is SEND_RESPONSE - assert p.conn[SERVER].their_state is SEND_BODY - - assert p.conn[CLIENT].their_http_version is None - assert p.conn[SERVER].their_http_version == b"1.1" - - data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] - assert data == b"HTTP/1.1 100 \r\n\r\n" - - data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")])) - assert data == b"HTTP/1.1 200 \r\nContent-Length: 11\r\n\r\n" - - for conn in p.conns: - assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} - - assert p.conn[CLIENT].their_http_version == b"1.1" - assert p.conn[SERVER].their_http_version == b"1.1" - - data = p.send(CLIENT, Data(data=b"12345")) - assert data == b"12345" - data = p.send( - CLIENT, Data(data=b"67890"), expect=[Data(data=b"67890"), EndOfMessage()] - ) - assert data == b"67890" - data = p.send(CLIENT, EndOfMessage(), expect=[]) - assert data == b"" - - for conn in p.conns: - assert conn.states == {CLIENT: DONE, SERVER: 
SEND_BODY} - - data = p.send(SERVER, Data(data=b"1234567890")) - assert data == b"1234567890" - data = p.send(SERVER, Data(data=b"1"), expect=[Data(data=b"1"), EndOfMessage()]) - assert data == b"1" - data = p.send(SERVER, EndOfMessage(), expect=[]) - assert data == b"" - - for conn in p.conns: - assert conn.states == {CLIENT: DONE, SERVER: DONE} - - -def test_chunked() -> None: - p = ConnectionPair() - - p.send( - CLIENT, - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], - ), - ) - data = p.send(CLIENT, Data(data=b"1234567890", chunk_start=True, chunk_end=True)) - assert data == b"a\r\n1234567890\r\n" - data = p.send(CLIENT, Data(data=b"abcde", chunk_start=True, chunk_end=True)) - assert data == b"5\r\nabcde\r\n" - data = p.send(CLIENT, Data(data=b""), expect=[]) - assert data == b"" - data = p.send(CLIENT, EndOfMessage(headers=[("hello", "there")])) - assert data == b"0\r\nhello: there\r\n\r\n" - - p.send( - SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) - ) - p.send(SERVER, Data(data=b"54321", chunk_start=True, chunk_end=True)) - p.send(SERVER, Data(data=b"12345", chunk_start=True, chunk_end=True)) - p.send(SERVER, EndOfMessage()) - - for conn in p.conns: - assert conn.states == {CLIENT: DONE, SERVER: DONE} - - -def test_chunk_boundaries() -> None: - conn = Connection(our_role=SERVER) - - request = ( - b"POST / HTTP/1.1\r\n" - b"Host: example.com\r\n" - b"Transfer-Encoding: chunked\r\n" - b"\r\n" - ) - conn.receive_data(request) - assert conn.next_event() == Request( - method="POST", - target="/", - headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], - ) - assert conn.next_event() is NEED_DATA - - conn.receive_data(b"5\r\nhello\r\n") - assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) - - conn.receive_data(b"5\r\nhel") - assert conn.next_event() == Data(data=b"hel", chunk_start=True, chunk_end=False) - - conn.receive_data(b"l") - assert conn.next_event() == Data(data=b"l", chunk_start=False, chunk_end=False) - - conn.receive_data(b"o\r\n") - assert conn.next_event() == Data(data=b"o", chunk_start=False, chunk_end=True) - - conn.receive_data(b"5\r\nhello") - assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) - - conn.receive_data(b"\r\n") - assert conn.next_event() == NEED_DATA - - conn.receive_data(b"0\r\n\r\n") - assert conn.next_event() == EndOfMessage() - - -def test_client_talking_to_http10_server() -> None: - c = Connection(CLIENT) - c.send(Request(method="GET", target="/", headers=[("Host", "example.com")])) - c.send(EndOfMessage()) - assert c.our_state is DONE - # No content-length, so Http10 framing for body - assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [ - Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") # type: ignore[arg-type] - ] - assert c.our_state is MUST_CLOSE - assert receive_and_get(c, b"12345") == [Data(data=b"12345")] - assert receive_and_get(c, b"67890") == [Data(data=b"67890")] - assert receive_and_get(c, b"") == [EndOfMessage(), ConnectionClosed()] - assert c.their_state is CLOSED - - -def test_server_talking_to_http10_client() -> None: - c = Connection(SERVER) - # No content-length, so no body - # NB: no host header - assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [ - Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] - EndOfMessage(), - ] - assert c.their_state is MUST_CLOSE - - # We automatically 
Connection: close back at them - assert ( - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" - ) - - assert c.send(Data(data=b"12345")) == b"12345" - assert c.send(EndOfMessage()) == b"" - assert c.our_state is MUST_CLOSE - - # Check that it works if they do send Content-Length - c = Connection(SERVER) - # NB: no host header - assert receive_and_get(c, b"POST / HTTP/1.0\r\nContent-Length: 10\r\n\r\n1") == [ - Request( - method="POST", - target="/", - headers=[("Content-Length", "10")], - http_version="1.0", - ), - Data(data=b"1"), - ] - assert receive_and_get(c, b"234567890") == [Data(data=b"234567890"), EndOfMessage()] - assert c.their_state is MUST_CLOSE - assert receive_and_get(c, b"") == [ConnectionClosed()] - - -def test_automatic_transfer_encoding_in_response() -> None: - # Check that in responses, the user can specify either Transfer-Encoding: - # chunked or no framing at all, and in both cases we automatically select - # the right option depending on whether the peer speaks HTTP/1.0 or - # HTTP/1.1 - for user_headers in [ - [("Transfer-Encoding", "chunked")], - [], - # In fact, this even works if Content-Length is set, - # because if both are set then Transfer-Encoding wins - [("Transfer-Encoding", "chunked"), ("Content-Length", "100")], - ]: - user_headers = cast(List[Tuple[str, str]], user_headers) - p = ConnectionPair() - p.send( - CLIENT, - [ - Request(method="GET", target="/", headers=[("Host", "example.com")]), - EndOfMessage(), - ], - ) - # When speaking to HTTP/1.1 client, all of the above cases get - # normalized to Transfer-Encoding: chunked - p.send( - SERVER, - Response(status_code=200, headers=user_headers), - expect=Response( - status_code=200, headers=[("Transfer-Encoding", "chunked")] - ), - ) - - # When speaking to HTTP/1.0 client, all of the above cases get - # normalized to no-framing-headers - c = Connection(SERVER) - receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") - assert ( - c.send(Response(status_code=200, headers=user_headers)) - == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" - ) - assert c.send(Data(data=b"12345")) == b"12345" - - -def test_automagic_connection_close_handling() -> None: - p = ConnectionPair() - # If the user explicitly sets Connection: close, then we notice and - # respect it - p.send( - CLIENT, - [ - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Connection", "close")], - ), - EndOfMessage(), - ], - ) - for conn in p.conns: - assert conn.states[CLIENT] is MUST_CLOSE - # And if the client sets it, the server automatically echoes it back - p.send( - SERVER, - # no header here... 
- [Response(status_code=204, headers=[]), EndOfMessage()], # type: ignore[arg-type] - # ...but oh look, it arrived anyway - expect=[ - Response(status_code=204, headers=[("connection", "close")]), - EndOfMessage(), - ], - ) - for conn in p.conns: - assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} - - -def test_100_continue() -> None: - def setup() -> ConnectionPair: - p = ConnectionPair() - p.send( - CLIENT, - Request( - method="GET", - target="/", - headers=[ - ("Host", "example.com"), - ("Content-Length", "100"), - ("Expect", "100-continue"), - ], - ), - ) - for conn in p.conns: - assert conn.client_is_waiting_for_100_continue - assert not p.conn[CLIENT].they_are_waiting_for_100_continue - assert p.conn[SERVER].they_are_waiting_for_100_continue - return p - - # Disabled by 100 Continue - p = setup() - p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] - for conn in p.conns: - assert not conn.client_is_waiting_for_100_continue - assert not conn.they_are_waiting_for_100_continue - - # Disabled by a real response - p = setup() - p.send( - SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) - ) - for conn in p.conns: - assert not conn.client_is_waiting_for_100_continue - assert not conn.they_are_waiting_for_100_continue - - # Disabled by the client going ahead and sending stuff anyway - p = setup() - p.send(CLIENT, Data(data=b"12345")) - for conn in p.conns: - assert not conn.client_is_waiting_for_100_continue - assert not conn.they_are_waiting_for_100_continue - - -def test_max_incomplete_event_size_countermeasure() -> None: - # Infinitely long headers are definitely not okay - c = Connection(SERVER) - c.receive_data(b"GET / HTTP/1.0\r\nEndless: ") - assert c.next_event() is NEED_DATA - with pytest.raises(RemoteProtocolError): - while True: - c.receive_data(b"a" * 1024) - c.next_event() - - # Checking that the same header is accepted / rejected depending on the - # max_incomplete_event_size setting: - c = Connection(SERVER, max_incomplete_event_size=5000) - c.receive_data(b"GET / HTTP/1.0\r\nBig: ") - c.receive_data(b"a" * 4000) - c.receive_data(b"\r\n\r\n") - assert get_all_events(c) == [ - Request( - method="GET", target="/", http_version="1.0", headers=[("big", "a" * 4000)] - ), - EndOfMessage(), - ] - - c = Connection(SERVER, max_incomplete_event_size=4000) - c.receive_data(b"GET / HTTP/1.0\r\nBig: ") - c.receive_data(b"a" * 4000) - with pytest.raises(RemoteProtocolError): - c.next_event() - - # Temporarily exceeding the size limit is fine, as long as its done with - # complete events: - c = Connection(SERVER, max_incomplete_event_size=5000) - c.receive_data(b"GET / HTTP/1.0\r\nContent-Length: 10000") - c.receive_data(b"\r\n\r\n" + b"a" * 10000) - assert get_all_events(c) == [ - Request( - method="GET", - target="/", - http_version="1.0", - headers=[("Content-Length", "10000")], - ), - Data(data=b"a" * 10000), - EndOfMessage(), - ] - - c = Connection(SERVER, max_incomplete_event_size=100) - # Two pipelined requests to create a way-too-big receive buffer... 
but - # it's fine because we're not checking - c.receive_data( - b"GET /1 HTTP/1.1\r\nHost: a\r\n\r\n" - b"GET /2 HTTP/1.1\r\nHost: b\r\n\r\n" + b"X" * 1000 - ) - assert get_all_events(c) == [ - Request(method="GET", target="/1", headers=[("host", "a")]), - EndOfMessage(), - ] - # Even more data comes in, still no problem - c.receive_data(b"X" * 1000) - # We can respond and reuse to get the second pipelined request - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - c.start_next_cycle() - assert get_all_events(c) == [ - Request(method="GET", target="/2", headers=[("host", "b")]), - EndOfMessage(), - ] - # But once we unpause and try to read the next message, and find that it's - # incomplete and the buffer is *still* way too large, then *that's* a - # problem: - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - c.start_next_cycle() - with pytest.raises(RemoteProtocolError): - c.next_event() - - -def test_reuse_simple() -> None: - p = ConnectionPair() - p.send( - CLIENT, - [Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()], - ) - p.send( - SERVER, - [ - Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), - EndOfMessage(), - ], - ) - for conn in p.conns: - assert conn.states == {CLIENT: DONE, SERVER: DONE} - conn.start_next_cycle() - - p.send( - CLIENT, - [ - Request(method="DELETE", target="/foo", headers=[("Host", "a")]), - EndOfMessage(), - ], - ) - p.send( - SERVER, - [ - Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), - EndOfMessage(), - ], - ) - - -def test_pipelining() -> None: - # Client doesn't support pipelining, so we have to do this by hand - c = Connection(SERVER) - assert c.next_event() is NEED_DATA - # 3 requests all bunched up - c.receive_data( - b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" - b"12345" - b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" - b"67890" - b"GET /3 HTTP/1.1\r\nHost: a.com\r\n\r\n" - ) - assert get_all_events(c) == [ - Request( - method="GET", - target="/1", - headers=[("Host", "a.com"), ("Content-Length", "5")], - ), - Data(data=b"12345"), - EndOfMessage(), - ] - assert c.their_state is DONE - assert c.our_state is SEND_RESPONSE - - assert c.next_event() is PAUSED - - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - assert c.their_state is DONE - assert c.our_state is DONE - - c.start_next_cycle() - - assert get_all_events(c) == [ - Request( - method="GET", - target="/2", - headers=[("Host", "a.com"), ("Content-Length", "5")], - ), - Data(data=b"67890"), - EndOfMessage(), - ] - assert c.next_event() is PAUSED - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - c.start_next_cycle() - - assert get_all_events(c) == [ - Request(method="GET", target="/3", headers=[("Host", "a.com")]), - EndOfMessage(), - ] - # Doesn't pause this time, no trailing data - assert c.next_event() is NEED_DATA - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - - # Arrival of more data triggers pause - assert c.next_event() is NEED_DATA - c.receive_data(b"SADF") - assert c.next_event() is PAUSED - assert c.trailing_data == (b"SADF", False) - # If EOF arrives while paused, we don't see that either: - c.receive_data(b"") - assert c.trailing_data == (b"SADF", True) - assert c.next_event() is PAUSED - c.receive_data(b"") - assert c.next_event() 
is PAUSED - # Can't call receive_data with non-empty buf after closing it - with pytest.raises(RuntimeError): - c.receive_data(b"FDSA") - - -def test_protocol_switch() -> None: - for (req, deny, accept) in [ - ( - Request( - method="CONNECT", - target="example.com:443", - headers=[("Host", "foo"), ("Content-Length", "1")], - ), - Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), - Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), - ), - ( - Request( - method="GET", - target="/", - headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], - ), - Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), - InformationalResponse(status_code=101, headers=[("Upgrade", "a")]), - ), - ( - Request( - method="CONNECT", - target="example.com:443", - headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], - ), - Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), - # Accept CONNECT, not upgrade - Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), - ), - ( - Request( - method="CONNECT", - target="example.com:443", - headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], - ), - Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), - # Accept Upgrade, not CONNECT - InformationalResponse(status_code=101, headers=[("Upgrade", "b")]), - ), - ]: - - def setup() -> ConnectionPair: - p = ConnectionPair() - p.send(CLIENT, req) - # No switch-related state change stuff yet; the client has to - # finish the request before that kicks in - for conn in p.conns: - assert conn.states[CLIENT] is SEND_BODY - p.send(CLIENT, [Data(data=b"1"), EndOfMessage()]) - for conn in p.conns: - assert conn.states[CLIENT] is MIGHT_SWITCH_PROTOCOL - assert p.conn[SERVER].next_event() is PAUSED - return p - - # Test deny case - p = setup() - p.send(SERVER, deny) - for conn in p.conns: - assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} - p.send(SERVER, EndOfMessage()) - # Check that re-use is still allowed after a denial - for conn in p.conns: - conn.start_next_cycle() - - # Test accept case - p = setup() - p.send(SERVER, accept) - for conn in p.conns: - assert conn.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} - conn.receive_data(b"123") - assert conn.next_event() is PAUSED - conn.receive_data(b"456") - assert conn.next_event() is PAUSED - assert conn.trailing_data == (b"123456", False) - - # Pausing in might-switch, then recovery - # (weird artificial case where the trailing data actually is valid - # HTTP for some reason, because this makes it easier to test the state - # logic) - p = setup() - sc = p.conn[SERVER] - sc.receive_data(b"GET / HTTP/1.0\r\n\r\n") - assert sc.next_event() is PAUSED - assert sc.trailing_data == (b"GET / HTTP/1.0\r\n\r\n", False) - sc.send(deny) - assert sc.next_event() is PAUSED - sc.send(EndOfMessage()) - sc.start_next_cycle() - assert get_all_events(sc) == [ - Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] - EndOfMessage(), - ] - - # When we're DONE, have no trailing data, and the connection gets - # closed, we report ConnectionClosed(). When we're in might-switch or - # switched, we don't. 
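# (That is, ConnectionClosed() is only reported when closing is a legal
# next step; once a protocol switch is pending or complete, any further
# bytes, including EOF, belong to the new protocol, so h11 just returns
# PAUSED and leaves them in trailing_data.)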
- p = setup() - sc = p.conn[SERVER] - sc.receive_data(b"") - assert sc.next_event() is PAUSED - assert sc.trailing_data == (b"", True) - p.send(SERVER, accept) - assert sc.next_event() is PAUSED - - p = setup() - sc = p.conn[SERVER] - sc.receive_data(b"") - assert sc.next_event() is PAUSED - sc.send(deny) - assert sc.next_event() == ConnectionClosed() - - # You can't send after switching protocols, or while waiting for a - # protocol switch - p = setup() - with pytest.raises(LocalProtocolError): - p.conn[CLIENT].send( - Request(method="GET", target="/", headers=[("Host", "a")]) - ) - p = setup() - p.send(SERVER, accept) - with pytest.raises(LocalProtocolError): - p.conn[SERVER].send(Data(data=b"123")) - - -def test_close_simple() -> None: - # Just immediately closing a new connection without anything having - # happened yet. - for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]: - - def setup() -> ConnectionPair: - p = ConnectionPair() - p.send(who_shot_first, ConnectionClosed()) - for conn in p.conns: - assert conn.states == { - who_shot_first: CLOSED, - who_shot_second: MUST_CLOSE, - } - return p - - # You can keep putting b"" into a closed connection, and you keep - # getting ConnectionClosed() out: - p = setup() - assert p.conn[who_shot_second].next_event() == ConnectionClosed() - assert p.conn[who_shot_second].next_event() == ConnectionClosed() - p.conn[who_shot_second].receive_data(b"") - assert p.conn[who_shot_second].next_event() == ConnectionClosed() - # Second party can close... - p = setup() - p.send(who_shot_second, ConnectionClosed()) - for conn in p.conns: - assert conn.our_state is CLOSED - assert conn.their_state is CLOSED - # But trying to receive new data on a closed connection is a - # RuntimeError (not ProtocolError, because the problem here isn't - # violation of HTTP, it's violation of physics) - p = setup() - with pytest.raises(RuntimeError): - p.conn[who_shot_second].receive_data(b"123") - # And receiving new data on a MUST_CLOSE connection is a ProtocolError - p = setup() - p.conn[who_shot_first].receive_data(b"GET") - with pytest.raises(RemoteProtocolError): - p.conn[who_shot_first].next_event() - - -def test_close_different_states() -> None: - req = [ - Request(method="GET", target="/foo", headers=[("Host", "a")]), - EndOfMessage(), - ] - resp = [ - Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), - EndOfMessage(), - ] - - # Client before request - p = ConnectionPair() - p.send(CLIENT, ConnectionClosed()) - for conn in p.conns: - assert conn.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} - - # Client after request - p = ConnectionPair() - p.send(CLIENT, req) - p.send(CLIENT, ConnectionClosed()) - for conn in p.conns: - assert conn.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} - - # Server after request -> not allowed - p = ConnectionPair() - p.send(CLIENT, req) - with pytest.raises(LocalProtocolError): - p.conn[SERVER].send(ConnectionClosed()) - p.conn[CLIENT].receive_data(b"") - with pytest.raises(RemoteProtocolError): - p.conn[CLIENT].next_event() - - # Server after response - p = ConnectionPair() - p.send(CLIENT, req) - p.send(SERVER, resp) - p.send(SERVER, ConnectionClosed()) - for conn in p.conns: - assert conn.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} - - # Both after closing (ConnectionClosed() is idempotent) - p = ConnectionPair() - p.send(CLIENT, req) - p.send(SERVER, resp) - p.send(CLIENT, ConnectionClosed()) - p.send(SERVER, ConnectionClosed()) - p.send(CLIENT, ConnectionClosed()) - 
p.send(SERVER, ConnectionClosed()) - - # In the middle of sending -> not allowed - p = ConnectionPair() - p.send( - CLIENT, - Request( - method="GET", target="/", headers=[("Host", "a"), ("Content-Length", "10")] - ), - ) - with pytest.raises(LocalProtocolError): - p.conn[CLIENT].send(ConnectionClosed()) - p.conn[SERVER].receive_data(b"") - with pytest.raises(RemoteProtocolError): - p.conn[SERVER].next_event() - - -# Receive several requests and then client shuts down their side of the -# connection; we can respond to each -def test_pipelined_close() -> None: - c = Connection(SERVER) - # 2 requests then a close - c.receive_data( - b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" - b"12345" - b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" - b"67890" - ) - c.receive_data(b"") - assert get_all_events(c) == [ - Request( - method="GET", - target="/1", - headers=[("host", "a.com"), ("content-length", "5")], - ), - Data(data=b"12345"), - EndOfMessage(), - ] - assert c.states[CLIENT] is DONE - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - assert c.states[SERVER] is DONE - c.start_next_cycle() - assert get_all_events(c) == [ - Request( - method="GET", - target="/2", - headers=[("host", "a.com"), ("content-length", "5")], - ), - Data(data=b"67890"), - EndOfMessage(), - ConnectionClosed(), - ] - assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} - c.send(ConnectionClosed()) - assert c.states == {CLIENT: CLOSED, SERVER: CLOSED} - - -def test_sendfile() -> None: - class SendfilePlaceholder: - def __len__(self) -> int: - return 10 - - placeholder = SendfilePlaceholder() - - def setup( - header: Tuple[str, str], http_version: str - ) -> Tuple[Connection, Optional[List[bytes]]]: - c = Connection(SERVER) - receive_and_get( - c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii") - ) - headers = [] - if header: - headers.append(header) - c.send(Response(status_code=200, headers=headers)) - return c, c.send_with_data_passthrough(Data(data=placeholder)) # type: ignore - - c, data = setup(("Content-Length", "10"), "1.1") - assert data == [placeholder] # type: ignore - # Raises an error if the connection object doesn't think we've sent - # exactly 10 bytes - c.send(EndOfMessage()) - - _, data = setup(("Transfer-Encoding", "chunked"), "1.1") - assert placeholder in data # type: ignore - data[data.index(placeholder)] = b"x" * 10 # type: ignore - assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" # type: ignore - - c, data = setup(None, "1.0") # type: ignore - assert data == [placeholder] # type: ignore - assert c.our_state is SEND_BODY - - -def test_errors() -> None: - # After a receive error, you can't receive - for role in [CLIENT, SERVER]: - c = Connection(our_role=role) - c.receive_data(b"gibberish\r\n\r\n") - with pytest.raises(RemoteProtocolError): - c.next_event() - # Now any attempt to receive continues to raise - assert c.their_state is ERROR - assert c.our_state is not ERROR - print(c._cstate.states) - with pytest.raises(RemoteProtocolError): - c.next_event() - # But we can still yell at the client for sending us gibberish - if role is SERVER: - assert ( - c.send(Response(status_code=400, headers=[])) # type: ignore[arg-type] - == b"HTTP/1.1 400 \r\nConnection: close\r\n\r\n" - ) - - # After an error sending, you can no longer send - # (This is 
especially important for things like content-length errors, - # where there's complex internal state being modified) - def conn(role: Type[Sentinel]) -> Connection: - c = Connection(our_role=role) - if role is SERVER: - # Put it into the state where it *could* send a response... - receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") - assert c.our_state is SEND_RESPONSE - return c - - for role in [CLIENT, SERVER]: - if role is CLIENT: - # This HTTP/1.0 request won't be detected as bad until after we go - # through the state machine and hit the writing code - good = Request(method="GET", target="/", headers=[("Host", "example.com")]) - bad = Request( - method="GET", - target="/", - headers=[("Host", "example.com")], - http_version="1.0", - ) - elif role is SERVER: - good = Response(status_code=200, headers=[]) # type: ignore[arg-type,assignment] - bad = Response(status_code=200, headers=[], http_version="1.0") # type: ignore[arg-type,assignment] - # Make sure 'good' actually is good - c = conn(role) - c.send(good) - assert c.our_state is not ERROR - # Do that again, but this time sending 'bad' first - c = conn(role) - with pytest.raises(LocalProtocolError): - c.send(bad) - assert c.our_state is ERROR - assert c.their_state is not ERROR - # Now 'good' is not so good - with pytest.raises(LocalProtocolError): - c.send(good) - - # And check send_failed() too - c = conn(role) - c.send_failed() - assert c.our_state is ERROR - assert c.their_state is not ERROR - # This is idempotent - c.send_failed() - assert c.our_state is ERROR - assert c.their_state is not ERROR - - -def test_idle_receive_nothing() -> None: - # At one point this incorrectly raised an error - for role in [CLIENT, SERVER]: - c = Connection(role) - assert c.next_event() is NEED_DATA - - -def test_connection_drop() -> None: - c = Connection(SERVER) - c.receive_data(b"GET /") - assert c.next_event() is NEED_DATA - c.receive_data(b"") - with pytest.raises(RemoteProtocolError): - c.next_event() - - -def test_408_request_timeout() -> None: - # Should be able to send this spontaneously as a server without seeing - # anything from client - p = ConnectionPair() - p.send(SERVER, Response(status_code=408, headers=[(b"connection", b"close")])) - - -# This used to raise IndexError -def test_empty_request() -> None: - c = Connection(SERVER) - c.receive_data(b"\r\n") - with pytest.raises(RemoteProtocolError): - c.next_event() - - -# This used to raise IndexError -def test_empty_response() -> None: - c = Connection(CLIENT) - c.send(Request(method="GET", target="/", headers=[("Host", "a")])) - c.receive_data(b"\r\n") - with pytest.raises(RemoteProtocolError): - c.next_event() - - -@pytest.mark.parametrize( - "data", - [ - b"\x00", - b"\x20", - b"\x16\x03\x01\x00\xa5", # Typical start of a TLS Client Hello - ], -) -def test_early_detection_of_invalid_request(data: bytes) -> None: - c = Connection(SERVER) - # Early detection should occur before even receiving a `\r\n` - c.receive_data(data) - with pytest.raises(RemoteProtocolError): - c.next_event() - - -@pytest.mark.parametrize( - "data", - [ - b"\x00", - b"\x20", - b"\x16\x03\x03\x00\x31", # Typical start of a TLS Server Hello - ], -) -def test_early_detection_of_invalid_response(data: bytes) -> None: - c = Connection(CLIENT) - # Early detection should occur before even receiving a `\r\n` - c.receive_data(data) - with pytest.raises(RemoteProtocolError): - c.next_event() - - -# This used to give different headers for HEAD and GET. 
-# The correct way to handle HEAD is to put whatever headers we *would* have -# put if it were a GET -- even though we know that for HEAD, those headers -# will be ignored. -def test_HEAD_framing_headers() -> None: - def setup(method: bytes, http_version: bytes) -> Connection: - c = Connection(SERVER) - c.receive_data( - method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n" - ) - assert type(c.next_event()) is Request - assert type(c.next_event()) is EndOfMessage - return c - - for method in [b"GET", b"HEAD"]: - # No Content-Length, HTTP/1.1 peer, should use chunked - c = setup(method, b"1.1") - assert ( - c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] - b"Transfer-Encoding: chunked\r\n\r\n" - ) - - # No Content-Length, HTTP/1.0 peer, frame with connection: close - c = setup(method, b"1.0") - assert ( - c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] - b"Connection: close\r\n\r\n" - ) - - # Content-Length + Transfer-Encoding, TE wins - c = setup(method, b"1.1") - assert ( - c.send( - Response( - status_code=200, - headers=[ - ("Content-Length", "100"), - ("Transfer-Encoding", "chunked"), - ], - ) - ) - == b"HTTP/1.1 200 \r\n" - b"Transfer-Encoding: chunked\r\n\r\n" - ) - - -def test_special_exceptions_for_lost_connection_in_message_body() -> None: - c = Connection(SERVER) - c.receive_data( - b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n" - ) - assert type(c.next_event()) is Request - assert c.next_event() is NEED_DATA - c.receive_data(b"12345") - assert c.next_event() == Data(data=b"12345") - c.receive_data(b"") - with pytest.raises(RemoteProtocolError) as excinfo: - c.next_event() - assert "received 5 bytes" in str(excinfo.value) - assert "expected 100" in str(excinfo.value) - - c = Connection(SERVER) - c.receive_data( - b"POST / HTTP/1.1\r\n" - b"Host: example.com\r\n" - b"Transfer-Encoding: chunked\r\n\r\n" - ) - assert type(c.next_event()) is Request - assert c.next_event() is NEED_DATA - c.receive_data(b"8\r\n012345") - assert c.next_event().data == b"012345" # type: ignore - c.receive_data(b"") - with pytest.raises(RemoteProtocolError) as excinfo: - c.next_event() - assert "incomplete chunked read" in str(excinfo.value) diff --git a/server/venv/Lib/site-packages/h11/tests/test_events.py b/server/venv/Lib/site-packages/h11/tests/test_events.py deleted file mode 100644 index bc6c313..0000000 --- a/server/venv/Lib/site-packages/h11/tests/test_events.py +++ /dev/null @@ -1,150 +0,0 @@ -from http import HTTPStatus - -import pytest - -from .. 
import _events -from .._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .._util import LocalProtocolError - - -def test_events() -> None: - with pytest.raises(LocalProtocolError): - # Missing Host: - req = Request( - method="GET", target="/", headers=[("a", "b")], http_version="1.1" - ) - # But this is okay (HTTP/1.0) - req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0") - # fields are normalized - assert req.method == b"GET" - assert req.target == b"/" - assert req.headers == [(b"a", b"b")] - assert req.http_version == b"1.0" - - # This is also okay -- has a Host (with weird capitalization, which is ok) - req = Request( - method="GET", - target="/", - headers=[("a", "b"), ("hOSt", "example.com")], - http_version="1.1", - ) - # we normalize header capitalization - assert req.headers == [(b"a", b"b"), (b"host", b"example.com")] - - # Multiple host is bad too - with pytest.raises(LocalProtocolError): - req = Request( - method="GET", - target="/", - headers=[("Host", "a"), ("Host", "a")], - http_version="1.1", - ) - # Even for HTTP/1.0 - with pytest.raises(LocalProtocolError): - req = Request( - method="GET", - target="/", - headers=[("Host", "a"), ("Host", "a")], - http_version="1.0", - ) - - # Header values are validated - for bad_char in "\x00\r\n\f\v": - with pytest.raises(LocalProtocolError): - req = Request( - method="GET", - target="/", - headers=[("Host", "a"), ("Foo", "asd" + bad_char)], - http_version="1.0", - ) - - # But for compatibility we allow non-whitespace control characters, even - # though they're forbidden by the spec. - Request( - method="GET", - target="/", - headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")], - http_version="1.0", - ) - - # Request target is validated - for bad_byte in b"\x00\x20\x7f\xee": - target = bytearray(b"/") - target.append(bad_byte) - with pytest.raises(LocalProtocolError): - Request( - method="GET", target=target, headers=[("Host", "a")], http_version="1.1" - ) - - # Request method is validated - with pytest.raises(LocalProtocolError): - Request( - method="GET / HTTP/1.1", - target=target, - headers=[("Host", "a")], - http_version="1.1", - ) - - ir = InformationalResponse(status_code=100, headers=[("Host", "a")]) - assert ir.status_code == 100 - assert ir.headers == [(b"host", b"a")] - assert ir.http_version == b"1.1" - - with pytest.raises(LocalProtocolError): - InformationalResponse(status_code=200, headers=[("Host", "a")]) - - resp = Response(status_code=204, headers=[], http_version="1.0") # type: ignore[arg-type] - assert resp.status_code == 204 - assert resp.headers == [] - assert resp.http_version == b"1.0" - - with pytest.raises(LocalProtocolError): - resp = Response(status_code=100, headers=[], http_version="1.0") # type: ignore[arg-type] - - with pytest.raises(LocalProtocolError): - Response(status_code="100", headers=[], http_version="1.0") # type: ignore[arg-type] - - with pytest.raises(LocalProtocolError): - InformationalResponse(status_code=b"100", headers=[], http_version="1.0") # type: ignore[arg-type] - - d = Data(data=b"asdf") - assert d.data == b"asdf" - - eom = EndOfMessage() - assert eom.headers == [] - - cc = ConnectionClosed() - assert repr(cc) == "ConnectionClosed()" - - -def test_intenum_status_code() -> None: - # https://github.com/python-hyper/h11/issues/72 - - r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") # type: ignore[arg-type] - assert r.status_code == HTTPStatus.OK - assert 
type(r.status_code) is not type(HTTPStatus.OK) - assert type(r.status_code) is int - - -def test_header_casing() -> None: - r = Request( - method="GET", - target="/", - headers=[("Host", "example.org"), ("Connection", "keep-alive")], - http_version="1.1", - ) - assert len(r.headers) == 2 - assert r.headers[0] == (b"host", b"example.org") - assert r.headers == [(b"host", b"example.org"), (b"connection", b"keep-alive")] - assert r.headers.raw_items() == [ - (b"Host", b"example.org"), - (b"Connection", b"keep-alive"), - ] diff --git a/server/venv/Lib/site-packages/h11/tests/test_headers.py b/server/venv/Lib/site-packages/h11/tests/test_headers.py deleted file mode 100644 index ba53d08..0000000 --- a/server/venv/Lib/site-packages/h11/tests/test_headers.py +++ /dev/null @@ -1,157 +0,0 @@ -import pytest - -from .._events import Request -from .._headers import ( - get_comma_header, - has_expect_100_continue, - Headers, - normalize_and_validate, - set_comma_header, -) -from .._util import LocalProtocolError - - -def test_normalize_and_validate() -> None: - assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")] - assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")] - - # no leading/trailing whitespace in names - with pytest.raises(LocalProtocolError): - normalize_and_validate([(b"foo ", "bar")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([(b" foo", "bar")]) - - # no weird characters in names - with pytest.raises(LocalProtocolError) as excinfo: - normalize_and_validate([(b"foo bar", b"baz")]) - assert "foo bar" in str(excinfo.value) - with pytest.raises(LocalProtocolError): - normalize_and_validate([(b"foo\x00bar", b"baz")]) - # Not even 8-bit characters: - with pytest.raises(LocalProtocolError): - normalize_and_validate([(b"foo\xffbar", b"baz")]) - # And not even the control characters we allow in values: - with pytest.raises(LocalProtocolError): - normalize_and_validate([(b"foo\x01bar", b"baz")]) - - # no return or NUL characters in values - with pytest.raises(LocalProtocolError) as excinfo: - normalize_and_validate([("foo", "bar\rbaz")]) - assert "bar\\rbaz" in str(excinfo.value) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", "bar\nbaz")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", "bar\x00baz")]) - # no leading/trailing whitespace - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", "barbaz ")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", " barbaz")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", "barbaz\t")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", "\tbarbaz")]) - - # content-length - assert normalize_and_validate([("Content-Length", "1")]) == [ - (b"content-length", b"1") - ] - with pytest.raises(LocalProtocolError): - normalize_and_validate([("Content-Length", "asdf")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("Content-Length", "1x")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("Content-Length", "1"), ("Content-Length", "2")]) - assert normalize_and_validate( - [("Content-Length", "0"), ("Content-Length", "0")] - ) == [(b"content-length", b"0")] - assert normalize_and_validate([("Content-Length", "0 , 0")]) == [ - (b"content-length", b"0") - ] - with pytest.raises(LocalProtocolError): - normalize_and_validate( - [("Content-Length", "1"), ("Content-Length", "1"), ("Content-Length", "2")] - ) - with 
pytest.raises(LocalProtocolError): - normalize_and_validate([("Content-Length", "1 , 1,2")]) - - # transfer-encoding - assert normalize_and_validate([("Transfer-Encoding", "chunked")]) == [ - (b"transfer-encoding", b"chunked") - ] - assert normalize_and_validate([("Transfer-Encoding", "cHuNkEd")]) == [ - (b"transfer-encoding", b"chunked") - ] - with pytest.raises(LocalProtocolError) as excinfo: - normalize_and_validate([("Transfer-Encoding", "gzip")]) - assert excinfo.value.error_status_hint == 501 # Not Implemented - with pytest.raises(LocalProtocolError) as excinfo: - normalize_and_validate( - [("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "gzip")] - ) - assert excinfo.value.error_status_hint == 501 # Not Implemented - - -def test_get_set_comma_header() -> None: - headers = normalize_and_validate( - [ - ("Connection", "close"), - ("whatever", "something"), - ("connectiON", "fOo,, , BAR"), - ] - ) - - assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"] - - headers = set_comma_header(headers, b"newthing", ["a", "b"]) # type: ignore - - with pytest.raises(LocalProtocolError): - set_comma_header(headers, b"newthing", [" a", "b"]) # type: ignore - - assert headers == [ - (b"connection", b"close"), - (b"whatever", b"something"), - (b"connection", b"fOo,, , BAR"), - (b"newthing", b"a"), - (b"newthing", b"b"), - ] - - headers = set_comma_header(headers, b"whatever", ["different thing"]) # type: ignore - - assert headers == [ - (b"connection", b"close"), - (b"connection", b"fOo,, , BAR"), - (b"newthing", b"a"), - (b"newthing", b"b"), - (b"whatever", b"different thing"), - ] - - -def test_has_100_continue() -> None: - assert has_expect_100_continue( - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Expect", "100-continue")], - ) - ) - assert not has_expect_100_continue( - Request(method="GET", target="/", headers=[("Host", "example.com")]) - ) - # Case insensitive - assert has_expect_100_continue( - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Expect", "100-Continue")], - ) - ) - # Doesn't work in HTTP/1.0 - assert not has_expect_100_continue( - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Expect", "100-continue")], - http_version="1.0", - ) - ) diff --git a/server/venv/Lib/site-packages/h11/tests/test_helpers.py b/server/venv/Lib/site-packages/h11/tests/test_helpers.py deleted file mode 100644 index c329c76..0000000 --- a/server/venv/Lib/site-packages/h11/tests/test_helpers.py +++ /dev/null @@ -1,32 +0,0 @@ -from .._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .helpers import normalize_data_events - - -def test_normalize_data_events() -> None: - assert normalize_data_events( - [ - Data(data=bytearray(b"1")), - Data(data=b"2"), - Response(status_code=200, headers=[]), # type: ignore[arg-type] - Data(data=b"3"), - Data(data=b"4"), - EndOfMessage(), - Data(data=b"5"), - Data(data=b"6"), - Data(data=b"7"), - ] - ) == [ - Data(data=b"12"), - Response(status_code=200, headers=[]), # type: ignore[arg-type] - Data(data=b"34"), - EndOfMessage(), - Data(data=b"567"), - ] diff --git a/server/venv/Lib/site-packages/h11/tests/test_io.py b/server/venv/Lib/site-packages/h11/tests/test_io.py deleted file mode 100644 index 2b47c0e..0000000 --- a/server/venv/Lib/site-packages/h11/tests/test_io.py +++ /dev/null @@ -1,572 +0,0 @@ -from typing import Any, Callable, Generator, List - -import pytest - -from 
.._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .._headers import Headers, normalize_and_validate -from .._readers import ( - _obsolete_line_fold, - ChunkedReader, - ContentLengthReader, - Http10Reader, - READERS, -) -from .._receivebuffer import ReceiveBuffer -from .._state import ( - CLIENT, - CLOSED, - DONE, - IDLE, - MIGHT_SWITCH_PROTOCOL, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, - SWITCHED_PROTOCOL, -) -from .._util import LocalProtocolError -from .._writers import ( - ChunkedWriter, - ContentLengthWriter, - Http10Writer, - write_any_response, - write_headers, - write_request, - WRITERS, -) -from .helpers import normalize_data_events - -SIMPLE_CASES = [ - ( - (CLIENT, IDLE), - Request( - method="GET", - target="/a", - headers=[("Host", "foo"), ("Connection", "close")], - ), - b"GET /a HTTP/1.1\r\nHost: foo\r\nConnection: close\r\n\r\n", - ), - ( - (SERVER, SEND_RESPONSE), - Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"), - b"HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", - ), - ( - (SERVER, SEND_RESPONSE), - Response(status_code=200, headers=[], reason=b"OK"), # type: ignore[arg-type] - b"HTTP/1.1 200 OK\r\n\r\n", - ), - ( - (SERVER, SEND_RESPONSE), - InformationalResponse( - status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade" - ), - b"HTTP/1.1 101 Upgrade\r\nUpgrade: websocket\r\n\r\n", - ), - ( - (SERVER, SEND_RESPONSE), - InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), # type: ignore[arg-type] - b"HTTP/1.1 101 Upgrade\r\n\r\n", - ), -] - - -def dowrite(writer: Callable[..., None], obj: Any) -> bytes: - got_list: List[bytes] = [] - writer(obj, got_list.append) - return b"".join(got_list) - - -def tw(writer: Any, obj: Any, expected: Any) -> None: - got = dowrite(writer, obj) - assert got == expected - - -def makebuf(data: bytes) -> ReceiveBuffer: - buf = ReceiveBuffer() - buf += data - return buf - - -def tr(reader: Any, data: bytes, expected: Any) -> None: - def check(got: Any) -> None: - assert got == expected - # Headers should always be returned as bytes, not e.g. 
bytearray - # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478 - for name, value in getattr(got, "headers", []): - assert type(name) is bytes - assert type(value) is bytes - - # Simple: consume whole thing - buf = makebuf(data) - check(reader(buf)) - assert not buf - - # Incrementally growing buffer - buf = ReceiveBuffer() - for i in range(len(data)): - assert reader(buf) is None - buf += data[i : i + 1] - check(reader(buf)) - - # Trailing data - buf = makebuf(data) - buf += b"trailing" - check(reader(buf)) - assert bytes(buf) == b"trailing" - - -def test_writers_simple() -> None: - for ((role, state), event, binary) in SIMPLE_CASES: - tw(WRITERS[role, state], event, binary) - - -def test_readers_simple() -> None: - for ((role, state), event, binary) in SIMPLE_CASES: - tr(READERS[role, state], binary, event) - - -def test_writers_unusual() -> None: - # Simple test of the write_headers utility routine - tw( - write_headers, - normalize_and_validate([("foo", "bar"), ("baz", "quux")]), - b"foo: bar\r\nbaz: quux\r\n\r\n", - ) - tw(write_headers, Headers([]), b"\r\n") - - # We understand HTTP/1.0, but we don't speak it - with pytest.raises(LocalProtocolError): - tw( - write_request, - Request( - method="GET", - target="/", - headers=[("Host", "foo"), ("Connection", "close")], - http_version="1.0", - ), - None, - ) - with pytest.raises(LocalProtocolError): - tw( - write_any_response, - Response( - status_code=200, headers=[("Connection", "close")], http_version="1.0" - ), - None, - ) - - -def test_readers_unusual() -> None: - # Reading HTTP/1.0 - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n", - Request( - method="HEAD", - target="/foo", - headers=[("Some", "header")], - http_version="1.0", - ), - ) - - # check no-headers, since it's only legal with HTTP/1.0 - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.0\r\n\r\n", - Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), # type: ignore[arg-type] - ) - - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n", - Response( - status_code=200, - headers=[("Some", "header")], - http_version="1.0", - reason=b"OK", - ), - ) - - # single-character header values (actually disallowed by the ABNF in RFC - # 7230 -- this is a bug in the standard that we originally copied...) 
- tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n", - Response( - status_code=200, - headers=[("Foo", "a a a a a")], - http_version="1.0", - reason=b"OK", - ), - ) - - # Empty headers -- also legal - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n", - Response( - status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" - ), - ) - - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n", - Response( - status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" - ), - ) - - # Tolerate broken servers that leave off the response code - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n", - Response( - status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b"" - ), - ) - - # Tolerate header line endings (\r\n and \n) - # \n\r\n between headers and body - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.1 200 OK\r\nSomeHeader: val\n\r\n", - Response( - status_code=200, - headers=[("SomeHeader", "val")], - http_version="1.1", - reason="OK", - ), - ) - - # delimited only with \n - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.1 200 OK\nSomeHeader1: val1\nSomeHeader2: val2\n\n", - Response( - status_code=200, - headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], - http_version="1.1", - reason="OK", - ), - ) - - # mixed \r\n and \n - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.1 200 OK\r\nSomeHeader1: val1\nSomeHeader2: val2\n\r\n", - Response( - status_code=200, - headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], - http_version="1.1", - reason="OK", - ), - ) - - # obsolete line folding - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1\r\n" - b"Host: example.com\r\n" - b"Some: multi-line\r\n" - b" header\r\n" - b"\tnonsense\r\n" - b" \t \t\tI guess\r\n" - b"Connection: close\r\n" - b"More-nonsense: in the\r\n" - b" last header \r\n\r\n", - Request( - method="HEAD", - target="/foo", - headers=[ - ("Host", "example.com"), - ("Some", "multi-line header nonsense I guess"), - ("Connection", "close"), - ("More-nonsense", "in the last header"), - ], - ), - ) - - with pytest.raises(LocalProtocolError): - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n", - None, - ) - - with pytest.raises(LocalProtocolError): - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n", - None, - ) - with pytest.raises(LocalProtocolError): - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", - None, - ) - with pytest.raises(LocalProtocolError): - tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None) - - -def test__obsolete_line_fold_bytes() -> None: - # _obsolete_line_fold has a defensive cast to bytearray, which is - # necessary to protect against O(n^2) behavior in case anyone ever passes - # in regular bytestrings... but right now we never pass in regular - # bytestrings. so this test just exists to get some coverage on that - # defensive cast.
- assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [ - b"aaa", - bytearray(b"bbb ccc"), - b"ddd", - ] - - -def _run_reader_iter( - reader: Any, buf: bytes, do_eof: bool -) -> Generator[Any, None, None]: - while True: - event = reader(buf) - if event is None: - break - yield event - # body readers have undefined behavior after returning EndOfMessage, - # because this changes the state so they don't get called again - if type(event) is EndOfMessage: - break - if do_eof: - assert not buf - yield reader.read_eof() - - -def _run_reader(*args: Any) -> List[Event]: - events = list(_run_reader_iter(*args)) - return normalize_data_events(events) - - -def t_body_reader(thunk: Any, data: bytes, expected: Any, do_eof: bool = False) -> None: - # Simple: consume whole thing - print("Test 1") - buf = makebuf(data) - assert _run_reader(thunk(), buf, do_eof) == expected - - # Incrementally growing buffer - print("Test 2") - reader = thunk() - buf = ReceiveBuffer() - events = [] - for i in range(len(data)): - events += _run_reader(reader, buf, False) - buf += data[i : i + 1] - events += _run_reader(reader, buf, do_eof) - assert normalize_data_events(events) == expected - - is_complete = any(type(event) is EndOfMessage for event in expected) - if is_complete and not do_eof: - buf = makebuf(data + b"trailing") - assert _run_reader(thunk(), buf, False) == expected - - -def test_ContentLengthReader() -> None: - t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()]) - - t_body_reader( - lambda: ContentLengthReader(10), - b"0123456789", - [Data(data=b"0123456789"), EndOfMessage()], - ) - - -def test_Http10Reader() -> None: - t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True) - t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False) - t_body_reader( - Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True - ) - - -def test_ChunkedReader() -> None: - t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()]) - - t_body_reader( - ChunkedReader, - b"0\r\nSome: header\r\n\r\n", - [EndOfMessage(headers=[("Some", "header")])], - ) - - t_body_reader( - ChunkedReader, - b"5\r\n01234\r\n" - + b"10\r\n0123456789abcdef\r\n" - + b"0\r\n" - + b"Some: header\r\n\r\n", - [ - Data(data=b"012340123456789abcdef"), - EndOfMessage(headers=[("Some", "header")]), - ], - ) - - t_body_reader( - ChunkedReader, - b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n", - [Data(data=b"012340123456789abcdef"), EndOfMessage()], - ) - - # handles upper and lowercase hex - t_body_reader( - ChunkedReader, - b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n", - [Data(data=b"x" * 0xAA), EndOfMessage()], - ) - - # refuses arbitrarily long chunk integers - with pytest.raises(LocalProtocolError): - # Technically this is legal HTTP/1.1, but we refuse to process chunk - # sizes that don't fit into 20 characters of hex - t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")]) - - # refuses garbage in the chunk count - with pytest.raises(LocalProtocolError): - t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None) - - # handles (and discards) "chunk extensions" omg wtf - t_body_reader( - ChunkedReader, - b"5; hello=there\r\n" - + b"xxxxx" - + b"\r\n" - + b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n', - [Data(data=b"xxxxx"), EndOfMessage()], - ) - - t_body_reader( - ChunkedReader, - b"5 \r\n01234\r\n" + b"0\r\n\r\n", - [Data(data=b"01234"), EndOfMessage()], - ) - - -def test_ContentLengthWriter() -> None: - w = ContentLengthWriter(5) - 
assert dowrite(w, Data(data=b"123")) == b"123" - assert dowrite(w, Data(data=b"45")) == b"45" - assert dowrite(w, EndOfMessage()) == b"" - - w = ContentLengthWriter(5) - with pytest.raises(LocalProtocolError): - dowrite(w, Data(data=b"123456")) - - w = ContentLengthWriter(5) - dowrite(w, Data(data=b"123")) - with pytest.raises(LocalProtocolError): - dowrite(w, Data(data=b"456")) - - w = ContentLengthWriter(5) - dowrite(w, Data(data=b"123")) - with pytest.raises(LocalProtocolError): - dowrite(w, EndOfMessage()) - - w = ContentLengthWriter(5) - assert dowrite(w, Data(data=b"123")) == b"123" - assert dowrite(w, Data(data=b"45")) == b"45" - with pytest.raises(LocalProtocolError): - dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) - - -def test_ChunkedWriter() -> None: - w = ChunkedWriter() - assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n" - assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n" - - assert dowrite(w, Data(data=b"")) == b"" - - assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n" - - assert ( - dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")])) - == b"0\r\nEtag: asdf\r\na: b\r\n\r\n" - ) - - -def test_Http10Writer() -> None: - w = Http10Writer() - assert dowrite(w, Data(data=b"1234")) == b"1234" - assert dowrite(w, EndOfMessage()) == b"" - - with pytest.raises(LocalProtocolError): - dowrite(w, EndOfMessage(headers=[("Etag", "asdf")])) - - -def test_reject_garbage_after_response_line() -> None: - with pytest.raises(LocalProtocolError): - tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None) - - -def test_reject_garbage_after_request_line() -> None: - with pytest.raises(LocalProtocolError): - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n", - None, - ) - - -def test_reject_garbage_in_header_line() -> None: - with pytest.raises(LocalProtocolError): - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n", - None, - ) - - -def test_reject_non_vchar_in_path() -> None: - for bad_char in b"\x00\x20\x7f\xee": - message = bytearray(b"HEAD /") - message.append(bad_char) - message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n") - with pytest.raises(LocalProtocolError): - tr(READERS[CLIENT, IDLE], message, None) - - -# https://github.com/python-hyper/h11/issues/57 -def test_allow_some_garbage_in_cookies() -> None: - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1\r\n" - b"Host: foo\r\n" - b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n" - b"\r\n", - Request( - method="HEAD", - target="/foo", - headers=[ - ("Host", "foo"), - ("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"), - ], - ), - ) - - -def test_host_comes_first() -> None: - tw( - write_headers, - normalize_and_validate([("foo", "bar"), ("Host", "example.com")]), - b"Host: example.com\r\nfoo: bar\r\n\r\n", - ) diff --git a/server/venv/Lib/site-packages/h11/tests/test_receivebuffer.py b/server/venv/Lib/site-packages/h11/tests/test_receivebuffer.py deleted file mode 100644 index 21a3870..0000000 --- a/server/venv/Lib/site-packages/h11/tests/test_receivebuffer.py +++ /dev/null @@ -1,135 +0,0 @@ -import re -from typing import Tuple - -import pytest - -from .._receivebuffer import ReceiveBuffer - - -def test_receivebuffer() -> None: - b = ReceiveBuffer() - assert not b - assert len(b) == 0 - assert bytes(b) == b"" - - b += b"123" - assert b - assert len(b) == 3 - assert bytes(b) == b"123" - - assert bytes(b) == b"123" - - assert b.maybe_extract_at_most(2) == b"12" - assert b - assert
len(b) == 1 - assert bytes(b) == b"3" - - assert bytes(b) == b"3" - - assert b.maybe_extract_at_most(10) == b"3" - assert bytes(b) == b"" - - assert b.maybe_extract_at_most(10) is None - assert not b - - ################################################################ - # maybe_extract_until_next - ################################################################ - - b += b"123\n456\r\n789\r\n" - - assert b.maybe_extract_next_line() == b"123\n456\r\n" - assert bytes(b) == b"789\r\n" - - assert b.maybe_extract_next_line() == b"789\r\n" - assert bytes(b) == b"" - - b += b"12\r" - assert b.maybe_extract_next_line() is None - assert bytes(b) == b"12\r" - - b += b"345\n\r" - assert b.maybe_extract_next_line() is None - assert bytes(b) == b"12\r345\n\r" - - # here we stopped at the middle of b"\r\n" delimiter - - b += b"\n6789aaa123\r\n" - assert b.maybe_extract_next_line() == b"12\r345\n\r\n" - assert b.maybe_extract_next_line() == b"6789aaa123\r\n" - assert b.maybe_extract_next_line() is None - assert bytes(b) == b"" - - ################################################################ - # maybe_extract_lines - ################################################################ - - b += b"123\r\na: b\r\nfoo:bar\r\n\r\ntrailing" - lines = b.maybe_extract_lines() - assert lines == [b"123", b"a: b", b"foo:bar"] - assert bytes(b) == b"trailing" - - assert b.maybe_extract_lines() is None - - b += b"\r\n\r" - assert b.maybe_extract_lines() is None - - assert b.maybe_extract_at_most(100) == b"trailing\r\n\r" - assert not b - - # Empty body case (as happens at the end of chunked encoding if there are - # no trailing headers, e.g.) - b += b"\r\ntrailing" - assert b.maybe_extract_lines() == [] - assert bytes(b) == b"trailing" - - -@pytest.mark.parametrize( - "data", - [ - pytest.param( - ( - b"HTTP/1.1 200 OK\r\n", - b"Content-type: text/plain\r\n", - b"Connection: close\r\n", - b"\r\n", - b"Some body", - ), - id="with_crlf_delimiter", - ), - pytest.param( - ( - b"HTTP/1.1 200 OK\n", - b"Content-type: text/plain\n", - b"Connection: close\n", - b"\n", - b"Some body", - ), - id="with_lf_only_delimiter", - ), - pytest.param( - ( - b"HTTP/1.1 200 OK\n", - b"Content-type: text/plain\r\n", - b"Connection: close\n", - b"\n", - b"Some body", - ), - id="with_mixed_crlf_and_lf", - ), - ], -) -def test_receivebuffer_for_invalid_delimiter(data: Tuple[bytes]) -> None: - b = ReceiveBuffer() - - for line in data: - b += line - - lines = b.maybe_extract_lines() - - assert lines == [ - b"HTTP/1.1 200 OK", - b"Content-type: text/plain", - b"Connection: close", - ] - assert bytes(b) == b"Some body" diff --git a/server/venv/Lib/site-packages/h11/tests/test_state.py b/server/venv/Lib/site-packages/h11/tests/test_state.py deleted file mode 100644 index bc974e6..0000000 --- a/server/venv/Lib/site-packages/h11/tests/test_state.py +++ /dev/null @@ -1,271 +0,0 @@ -import pytest - -from .._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .._state import ( - _SWITCH_CONNECT, - _SWITCH_UPGRADE, - CLIENT, - CLOSED, - ConnectionState, - DONE, - IDLE, - MIGHT_SWITCH_PROTOCOL, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, - SWITCHED_PROTOCOL, -) -from .._util import LocalProtocolError - - -def test_ConnectionState() -> None: - cs = ConnectionState() - - # Basic event-triggered transitions - - assert cs.states == {CLIENT: IDLE, SERVER: IDLE} - - cs.process_event(CLIENT, Request) - # The SERVER-Request special case: - assert cs.states == {CLIENT: SEND_BODY, 
SERVER: SEND_RESPONSE} - - # Illegal transitions raise an error and nothing happens - with pytest.raises(LocalProtocolError): - cs.process_event(CLIENT, Request) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - - cs.process_event(SERVER, InformationalResponse) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - - cs.process_event(SERVER, Response) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} - - cs.process_event(CLIENT, EndOfMessage) - cs.process_event(SERVER, EndOfMessage) - assert cs.states == {CLIENT: DONE, SERVER: DONE} - - # State-triggered transition - - cs.process_event(SERVER, ConnectionClosed) - assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} - - -def test_ConnectionState_keep_alive() -> None: - # keep_alive = False - cs = ConnectionState() - cs.process_event(CLIENT, Request) - cs.process_keep_alive_disabled() - cs.process_event(CLIENT, EndOfMessage) - assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE} - - cs.process_event(SERVER, Response) - cs.process_event(SERVER, EndOfMessage) - assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} - - -def test_ConnectionState_keep_alive_in_DONE() -> None: - # Check that if keep_alive is disabled when the CLIENT is already in DONE, - # then this is sufficient to immediately trigger the DONE -> MUST_CLOSE - # transition - cs = ConnectionState() - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - assert cs.states[CLIENT] is DONE - cs.process_keep_alive_disabled() - assert cs.states[CLIENT] is MUST_CLOSE - - -def test_ConnectionState_switch_denied() -> None: - for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE): - for deny_early in (True, False): - cs = ConnectionState() - cs.process_client_switch_proposal(switch_type) - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, Data) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - - assert switch_type in cs.pending_switch_proposals - - if deny_early: - # before client reaches DONE - cs.process_event(SERVER, Response) - assert not cs.pending_switch_proposals - - cs.process_event(CLIENT, EndOfMessage) - - if deny_early: - assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} - else: - assert cs.states == { - CLIENT: MIGHT_SWITCH_PROTOCOL, - SERVER: SEND_RESPONSE, - } - - cs.process_event(SERVER, InformationalResponse) - assert cs.states == { - CLIENT: MIGHT_SWITCH_PROTOCOL, - SERVER: SEND_RESPONSE, - } - - cs.process_event(SERVER, Response) - assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} - assert not cs.pending_switch_proposals - - -_response_type_for_switch = { - _SWITCH_UPGRADE: InformationalResponse, - _SWITCH_CONNECT: Response, - None: Response, -} - - -def test_ConnectionState_protocol_switch_accepted() -> None: - for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]: - cs = ConnectionState() - cs.process_client_switch_proposal(switch_event) - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, Data) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - - cs.process_event(CLIENT, EndOfMessage) - assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} - - cs.process_event(SERVER, InformationalResponse) - assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} - - cs.process_event(SERVER, _response_type_for_switch[switch_event], switch_event) - assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} - - -def test_ConnectionState_double_protocol_switch() -> None: - # CONNECT + 
Upgrade is legal! Very silly, but legal. So we support - # it. Because sometimes doing the silly thing is easier than not. - for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]: - cs = ConnectionState() - cs.process_client_switch_proposal(_SWITCH_UPGRADE) - cs.process_client_switch_proposal(_SWITCH_CONNECT) - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} - cs.process_event( - SERVER, _response_type_for_switch[server_switch], server_switch - ) - if server_switch is None: - assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} - else: - assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} - - -def test_ConnectionState_inconsistent_protocol_switch() -> None: - for client_switches, server_switch in [ - ([], _SWITCH_CONNECT), - ([], _SWITCH_UPGRADE), - ([_SWITCH_UPGRADE], _SWITCH_CONNECT), - ([_SWITCH_CONNECT], _SWITCH_UPGRADE), - ]: - cs = ConnectionState() - for client_switch in client_switches: # type: ignore[attr-defined] - cs.process_client_switch_proposal(client_switch) - cs.process_event(CLIENT, Request) - with pytest.raises(LocalProtocolError): - cs.process_event(SERVER, Response, server_switch) - - -def test_ConnectionState_keepalive_protocol_switch_interaction() -> None: - # keep_alive=False + pending_switch_proposals - cs = ConnectionState() - cs.process_client_switch_proposal(_SWITCH_UPGRADE) - cs.process_event(CLIENT, Request) - cs.process_keep_alive_disabled() - cs.process_event(CLIENT, Data) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - - # the protocol switch "wins" - cs.process_event(CLIENT, EndOfMessage) - assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} - - # but when the server denies the request, keep_alive comes back into play - cs.process_event(SERVER, Response) - assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY} - - -def test_ConnectionState_reuse() -> None: - cs = ConnectionState() - - with pytest.raises(LocalProtocolError): - cs.start_next_cycle() - - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - - with pytest.raises(LocalProtocolError): - cs.start_next_cycle() - - cs.process_event(SERVER, Response) - cs.process_event(SERVER, EndOfMessage) - - cs.start_next_cycle() - assert cs.states == {CLIENT: IDLE, SERVER: IDLE} - - # No keepalive - - cs.process_event(CLIENT, Request) - cs.process_keep_alive_disabled() - cs.process_event(CLIENT, EndOfMessage) - cs.process_event(SERVER, Response) - cs.process_event(SERVER, EndOfMessage) - - with pytest.raises(LocalProtocolError): - cs.start_next_cycle() - - # One side closed - - cs = ConnectionState() - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - cs.process_event(CLIENT, ConnectionClosed) - cs.process_event(SERVER, Response) - cs.process_event(SERVER, EndOfMessage) - - with pytest.raises(LocalProtocolError): - cs.start_next_cycle() - - # Successful protocol switch - - cs = ConnectionState() - cs.process_client_switch_proposal(_SWITCH_UPGRADE) - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE) - - with pytest.raises(LocalProtocolError): - cs.start_next_cycle() - - # Failed protocol switch - - cs = ConnectionState() - cs.process_client_switch_proposal(_SWITCH_UPGRADE) - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - cs.process_event(SERVER, Response) -
cs.process_event(SERVER, EndOfMessage) - - cs.start_next_cycle() - assert cs.states == {CLIENT: IDLE, SERVER: IDLE} - - -def test_server_request_is_illegal() -> None: - # There used to be a bug in how we handled the Request special case that - # made this allowed... - cs = ConnectionState() - with pytest.raises(LocalProtocolError): - cs.process_event(SERVER, Request) diff --git a/server/venv/Lib/site-packages/h11/tests/test_util.py b/server/venv/Lib/site-packages/h11/tests/test_util.py deleted file mode 100644 index 79bc095..0000000 --- a/server/venv/Lib/site-packages/h11/tests/test_util.py +++ /dev/null @@ -1,112 +0,0 @@ -import re -import sys -import traceback -from typing import NoReturn - -import pytest - -from .._util import ( - bytesify, - LocalProtocolError, - ProtocolError, - RemoteProtocolError, - Sentinel, - validate, -) - - -def test_ProtocolError() -> None: - with pytest.raises(TypeError): - ProtocolError("abstract base class") - - -def test_LocalProtocolError() -> None: - try: - raise LocalProtocolError("foo") - except LocalProtocolError as e: - assert str(e) == "foo" - assert e.error_status_hint == 400 - - try: - raise LocalProtocolError("foo", error_status_hint=418) - except LocalProtocolError as e: - assert str(e) == "foo" - assert e.error_status_hint == 418 - - def thunk() -> NoReturn: - raise LocalProtocolError("a", error_status_hint=420) - - try: - try: - thunk() - except LocalProtocolError as exc1: - orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) - exc1._reraise_as_remote_protocol_error() - except RemoteProtocolError as exc2: - assert type(exc2) is RemoteProtocolError - assert exc2.args == ("a",) - assert exc2.error_status_hint == 420 - new_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) - assert new_traceback.endswith(orig_traceback) - - -def test_validate() -> None: - my_re = re.compile(rb"(?P[0-9]+)\.(?P[0-9]+)") - with pytest.raises(LocalProtocolError): - validate(my_re, b"0.") - - groups = validate(my_re, b"0.1") - assert groups == {"group1": b"0", "group2": b"1"} - - # successful partial matches are an error - must match whole string - with pytest.raises(LocalProtocolError): - validate(my_re, b"0.1xx") - with pytest.raises(LocalProtocolError): - validate(my_re, b"0.1\n") - - -def test_validate_formatting() -> None: - my_re = re.compile(rb"foo") - - with pytest.raises(LocalProtocolError) as excinfo: - validate(my_re, b"", "oops") - assert "oops" in str(excinfo.value) - - with pytest.raises(LocalProtocolError) as excinfo: - validate(my_re, b"", "oops {}") - assert "oops {}" in str(excinfo.value) - - with pytest.raises(LocalProtocolError) as excinfo: - validate(my_re, b"", "oops {} xx", 10) - assert "oops 10 xx" in str(excinfo.value) - - -def test_make_sentinel() -> None: - class S(Sentinel, metaclass=Sentinel): - pass - - assert repr(S) == "S" - assert S == S - assert type(S).__name__ == "S" - assert S in {S} - assert type(S) is S - - class S2(Sentinel, metaclass=Sentinel): - pass - - assert repr(S2) == "S2" - assert S != S2 - assert S not in {S2} - assert type(S) is not type(S2) - - -def test_bytesify() -> None: - assert bytesify(b"123") == b"123" - assert bytesify(bytearray(b"123")) == b"123" - assert bytesify("123") == b"123" - - with pytest.raises(UnicodeEncodeError): - bytesify("\u1234") - - with pytest.raises(TypeError): - bytesify(10) diff --git a/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/INSTALLER b/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- 
a/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/METADATA b/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/METADATA deleted file mode 100644 index 3776738..0000000 --- a/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/METADATA +++ /dev/null @@ -1,547 +0,0 @@ -Metadata-Version: 2.1 -Name: httpcore -Version: 0.18.0 -Summary: A minimal low-level HTTP client. -Project-URL: Documentation, https://www.encode.io/httpcore -Project-URL: Homepage, https://www.encode.io/httpcore/ -Project-URL: Source, https://github.com/encode/httpcore -Author-email: Tom Christie -License-Expression: BSD-3-Clause -License-File: LICENSE.md -Classifier: Development Status :: 3 - Alpha -Classifier: Environment :: Web Environment -Classifier: Framework :: AsyncIO -Classifier: Framework :: Trio -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Topic :: Internet :: WWW/HTTP -Requires-Python: >=3.8 -Requires-Dist: anyio<5.0,>=3.0 -Requires-Dist: certifi -Requires-Dist: h11<0.15,>=0.13 -Requires-Dist: sniffio==1.* -Provides-Extra: http2 -Requires-Dist: h2<5,>=3; extra == 'http2' -Provides-Extra: socks -Requires-Dist: socksio==1.*; extra == 'socks' -Description-Content-Type: text/markdown - -# HTTP Core - -[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions) -[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/) - -> *Do one thing, and do it well.* - -The HTTP Core package provides a minimal low-level HTTP client, which does -one thing only. Sending HTTP requests. - -It does not provide any high level model abstractions over the API, -does not handle redirects, multipart uploads, building authentication headers, -transparent HTTP caching, URL parsing, session cookie handling, -content or charset decoding, handling JSON, environment based configuration -defaults, or any of that Jazz. - -Some things HTTP Core does do: - -* Sending HTTP requests. -* Thread-safe / task-safe connection pooling. -* HTTP(S) proxy & SOCKS proxy support. -* Supports HTTP/1.1 and HTTP/2. -* Provides both sync and async interfaces. -* Async backend support for `asyncio` and `trio`. - -## Requirements - -Python 3.8+ - -## Installation - -For HTTP/1.1 only support, install with: - -```shell -$ pip install httpcore -``` - -For HTTP/1.1 and HTTP/2 support, install with: - -```shell -$ pip install httpcore[http2] -``` - -For SOCKS proxy support, install with: - -```shell -$ pip install httpcore[socks] -``` - -# Sending requests - -Send an HTTP request: - -```python -import httpcore - -response = httpcore.request("GET", "https://www.example.com/") - -print(response) -# -print(response.status) -# 200 -print(response.headers) -# [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...] -print(response.content) -# b'\n\n\nExample Domain\n\n\n ...' -``` - -The top-level `httpcore.request()` function is provided for convenience. 
In practice whenever you're working with `httpcore` you'll want to use the connection pooling functionality that it provides. - -```python -import httpcore - -http = httpcore.ConnectionPool() -response = http.request("GET", "https://www.example.com/") -``` - -Once you're ready to get going, [head over to the documentation](https://www.encode.io/httpcore/). - -## Motivation - -You *probably* don't want to be using HTTP Core directly. It might make sense if -you're writing something like a proxy service in Python, and you just want -something at the lowest possible level, but more typically you'll want to use -a higher level client library, such as `httpx`. - -The motivation for `httpcore` is: - -* To provide a reusable low-level client library, that other packages can then build on top of. -* To provide a *really clear interface split* between the networking code and client logic, - so that each is easier to understand and reason about in isolation. -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - -## 0.18.0 (September 8th, 2023) - -- Add support for HTTPS proxies. (#745, #786) -- Drop Python 3.7 support. (#727) -- Handle `sni_hostname` extension with SOCKS proxy. (#774) -- Handle HTTP/1.1 half-closed connections gracefully. (#641) -- Change the type of `Extensions` from `Mapping[Str, Any]` to `MutableMapping[Str, Any]`. (#762) - -## 0.17.3 (July 5th, 2023) - -- Support async cancellations, ensuring that the connection pool is left in a clean state when cancellations occur. (#726) - -- The networking backend interface has [been added to the public API](https://www.encode.io/httpcore/network-backends). Some classes which were previously private implementation details are now part of the top-level public API. (#699) -- Graceful handling of HTTP/2 GoAway frames, with requests being transparently retried on a new connection. (#730) -- Add exceptions when a synchronous `trace callback` is passed to an asynchronous request or an asynchronous `trace callback` is passed to a synchronous request. (#717) -- Drop Python 3.7 support. (#727) - -## 0.17.2 (May 23rd, 2023) - -- Add `socket_options` argument to `ConnectionPool` and `HTTPProxy` classes. (#668) -- Improve logging with per-module logger names. (#690) -- Add `sni_hostname` request extension. (#696) -- Resolve race condition during import of `anyio` package. (#692) -- Enable TCP_NODELAY for all synchronous sockets. (#651) - -## 0.17.1 (May 17th, 2023) - -- If 'retries' is set, then allow retries if an SSL handshake error occurs. (#669) -- Improve correctness of tracebacks on network exceptions, by raising properly chained exceptions. (#678) -- Prevent connection-hanging behaviour when HTTP/2 connections are closed by a server-sent 'GoAway' frame. (#679) -- Fix edge-case exception when removing requests from the connection pool. (#680) -- Fix pool timeout edge-case. (#688) - -## 0.17.0 (March 16th, 2023) - -- Add DEBUG level logging. (#648) -- Respect HTTP/2 max concurrent streams when settings updates are sent by server. (#652) -- Increase the allowable HTTP header size to 100kB. (#647) -- Add `retries` option to SOCKS proxy classes. (#643) - -## 0.16.3 (December 20th, 2022) - -- Allow `ws` and `wss` schemes. Allows us to properly support websocket upgrade connections. (#625) -- Forwarding HTTP proxies use a connection-per-remote-host. Required by some proxy implementations.
(#637) -- Don't raise `RuntimeError` when closing a connection pool with active connections. Removes some error cases when cancellations are used. (#631) -- Lazy import `anyio`, so that it's no longer a hard dependency, and isn't imported if unused. (#639) - -## 0.16.2 (November 25th, 2022) - -- Revert 'Fix async cancellation behaviour', which introduced race conditions. (#627) -- Raise `RuntimeError` if attempting to use UNIX domain sockets on Windows. (#619) - -## 0.16.1 (November 17th, 2022) - -- Fix HTTP/1.1 interim informational responses, such as "100 Continue". (#605) - -## 0.16.0 (October 11th, 2022) - -- Support HTTP/1.1 informational responses. (#581) -- Fix async cancellation behaviour. (#580) -- Support `h11` 0.14. (#579) - -## 0.15.0 (May 17th, 2022) - -- Drop Python 3.6 support. (#535) -- Ensure HTTP proxy CONNECT requests include `timeout` configuration. (#506) -- Switch to explicit `typing.Optional` for type hints. (#513) -- For `trio` map OSError exceptions to `ConnectError`. (#543) - -## 0.14.7 (February 4th, 2022) - -- Requests which raise a PoolTimeout need to be removed from the pool queue. (#502) -- Fix AttributeError that happened when a Socks5Connection was terminated. (#501) - -## 0.14.6 (February 1st, 2022) - -- Fix SOCKS support for `http://` URLs. (#492) -- Resolve race condition around exceptions during streaming a response. (#491) - -## 0.14.5 (January 18th, 2022) - -- SOCKS proxy support. (#478) -- Add proxy_auth argument to HTTPProxy. (#481) -- Improve error message on 'RemoteProtocolError' exception when server disconnects without sending a response. (#479) - -## 0.14.4 (January 5th, 2022) - -- Support HTTP/2 on HTTPS tunnelling proxies. (#468) -- Fix proxy headers missing on HTTP forwarding. (#456) -- Only instantiate SSL context if required. (#457) -- More robust HTTP/2 handling. (#253, #439, #440, #441) - -## 0.14.3 (November 17th, 2021) - -- Fix race condition when removing closed connections from the pool. (#437) - -## 0.14.2 (November 16th, 2021) - -- Failed connections no longer remain in the pool. (Pull #433) - -## 0.14.1 (November 12th, 2021) - -- `max_connections` becomes optional. (Pull #429) -- `certifi` is now included in the install dependencies. (Pull #428) -- `h2` is now strictly optional. (Pull #428) - -## 0.14.0 (November 11th, 2021) - -The 0.14 release is a complete reworking of `httpcore`, comprehensively addressing some underlying issues in the connection pooling, as well as substantially redesigning the API to be more user friendly. - -Some of the lower-level API design also makes the components more easily testable in isolation, and the package now has 100% test coverage. - -See [discussion #419](https://github.com/encode/httpcore/discussions/419) for a little more background. - -There are some other neat bits in there too, such as the "trace" extension, which gives a hook into inspecting the internal events that occur during the request/response cycle. This extension is needed for the HTTPX CLI, in order to... - -* Log the point at which the connection is established, and the IP/port on which it is made. -* Determine if the outgoing request should log as HTTP/1.1 or HTTP/2, rather than having to assume it's HTTP/2 if the --http2 flag was passed. (Which may not actually be true.) -* Log SSL version info / certificate info. - -Note that `curio` support is not currently available in 0.14.0. If you're using `httpcore` with `curio` please get in touch, so we can assess if we ought to prioritize it as a feature or not.
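As a concrete illustration of the "trace" extension mentioned above, here is a minimal sketch against the current top-level API; the callback name `log_event` is our own choice, while `extensions={"trace": ...}` and the `(event_name, info)` callback shape follow the documented mechanism:

```python
import httpcore

# Called once per internal event (connection setup, send, receive, ...),
# with a dotted event name and a dict of event-specific details.
def log_event(event_name, info):
    print(event_name, info)

# Hook the callback into the request/response cycle via the extension.
response = httpcore.request(
    "GET",
    "https://www.example.com/",
    extensions={"trace": log_event},
)
print(response.status)
```

Note that for a synchronous request the trace callback must itself be synchronous, and for an async request it must be an async callable, as the 0.17.3 entry below enforces.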
-
-## 0.13.7 (September 13th, 2021)
-
-- Fix broken error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #403)
-
-## 0.13.6 (June 15th, 2021)
-
-### Fixed
-
-- Close sockets when read or write timeouts occur. (Pull #365)
-
-## 0.13.5 (June 14th, 2021)
-
-### Fixed
-
-- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362)
-
-## 0.13.4 (June 9th, 2021)
-
-### Added
-
-- Improved error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #354)
-
-### Fixed
-
-- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511).
-
-## 0.13.3 (May 6th, 2021)
-
-### Added
-
-- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333)
-
-### Fixed
-
-- Handle cases where environment does not provide `select.poll` support. (Pull #331)
-
-## 0.13.2 (April 29th, 2021)
-
-### Added
-
-- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313)
-
-## 0.13.1 (April 28th, 2021)
-
-### Fixed
-
-- More resilient testing for closed connections. (Pull #311)
-- Don't raise exceptions on ungraceful connection closes. (Pull #310)
-
-## 0.13.0 (April 21st, 2021)
-
-The 0.13 release updates the core API in order to match the HTTPX Transport API,
-introduced in HTTPX 0.18 onwards.
-
-An example of making requests with the new interface is:
-
-```python
-with httpcore.SyncConnectionPool() as http:
-    status_code, headers, stream, extensions = http.handle_request(
-        method=b'GET',
-        url=(b'https', b'example.org', 443, b'/'),
-        headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')],
-        stream=httpcore.ByteStream(b''),
-        extensions={}
-    )
-    body = stream.read()
-    print(status_code, body)
-```
-
-### Changed
-
-- The `.request()` method is now `.handle_request()`. (Pull #296)
-- The `.arequest()` method is now `.handle_async_request()`. (Pull #296)
-- The `headers` argument is no longer optional. (Pull #296)
-- The `stream` argument is no longer optional. (Pull #296)
-- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296)
-- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296)
-- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296)
-- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296)
-
-### Added
-
-- Streams now support a `.read()` interface. (Pull #296)
-
-### Fixed
-
-- Task cancellation no longer leaks connections from the connection pool. (Pull #305)
-
-## 0.12.3 (December 7th, 2020)
-
-### Fixed
-
-- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167)
-- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236)
-- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243)
-- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244)
-- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249)
-
-## 0.12.2 (November 20th, 2020)
-
-### Fixed
-
-- Properly wrap connect errors on the asyncio backend. (Pull #235)
-- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237)
-
-## 0.12.1 (November 7th, 2020)
-
-### Added
-
-- Add connect retries.
(Pull #221)
-
-### Fixed
-
-- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185)
-- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223)
-- Properly wrap OS errors when using `trio`. (Pull #225)
-
-## 0.12.0 (October 6th, 2020)
-
-### Changed
-
-- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104)
-
-### Added
-
-- Add Python 3.9 to officially supported versions.
-
-### Fixed
-
-- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201)
-
-## 0.11.1 (September 28th, 2020)
-
-### Fixed
-
-- Add await to async semaphore release() coroutine. (#197)
-- Drop incorrect curio classifier. (#192)
-
-## 0.11.0 (September 22nd, 2020)
-
-The Transport API with 0.11.0 has a couple of significant changes.
-
-Firstly we've changed the request interface in order to allow extensions, which will later enable us to support features
-such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections.
-
-The interface changes from:
-
-```python
-def request(method, url, headers, stream, timeout):
-    return (http_version, status_code, reason, headers, stream)
-```
-
-To instead include an optional dictionary of extensions on the request and response:
-
-```python
-def request(method, url, headers, stream, ext):
-    return (status_code, headers, stream, ext)
-```
-
-Having an open-ended extensions point will allow us to later add support for various optional features that wouldn't otherwise be supported without these API changes.
-
-In particular:
-
-* Trailing headers support.
-* HTTP/2 Server Push.
-* sendfile.
-* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming.
-* Exposing debug information out of the API, including template name, template context.
-
-Currently extensions are limited to:
-
-* request: `timeout` - Optional. Timeout dictionary.
-* response: `http_version` - Optional. Include the HTTP version used on the response.
-* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*.
-
-See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this.
-
-Secondly, the async version of `request` is now namespaced as `arequest`.
-
-This allows concrete transports to support both sync and async implementations on the same class.
-
-### Added
-
-- Add curio support. (Pull #168)
-- Add anyio support, with `backend="anyio"`. (Pull #169)
-
-### Changed
-
-- Update the Transport API to use 'ext' for optional extensions. (Pull #190)
-- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189)
-
-## 0.10.2 (August 20th, 2020)
-
-### Added
-
-- Added Unix Domain Socket support. (Pull #139)
-
-### Fixed
-
-- Always include the port on proxy CONNECT requests. (Pull #154)
-- Fix `max_keepalive_connections` configuration. (Pull #153)
-- Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164)
-
-## 0.10.1 (August 7th, 2020)
-
-- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes.
-
-## 0.10.0 (August 7th, 2020)
-
-The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional.
-
-Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support.
-
-### Added
-
-- HTTP/2 support becomes optional. (Pull #121, #130)
-- Add `local_address=...` support. (Pull #100, #134)
-- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteStream` and `SyncByteStream` classes are now pure interface classes. (#133)
-- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129)
-- Add `UnsupportedProtocol` exception. (Pull #128)
-- Add `.get_connection_info()` method. (Pull #102, #137)
-- Add better TRACE logs. (Pull #101)
-
-### Changed
-
-- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140)
-
-### Fixed
-
-- Improve handling of server disconnects. (Pull #112)
-
-## 0.9.1 (May 27th, 2020)
-
-### Fixed
-
-- Proper host resolution for sync case, including IPv6 support. (Pull #97)
-- Close outstanding connections when connection pool is closed. (Pull #98)
-
-## 0.9.0 (May 21st, 2020)
-
-### Changed
-
-- URL port becomes an `Optional[int]` instead of `int`. (Pull #92)
-
-### Fixed
-
-- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90)
-- Remove incorrect debug log. (Pull #83)
-
-## 0.8.4 (May 11th, 2020)
-
-### Added
-
-- Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables
-and TRACE level logging. (Pull #79)
-
-### Fixed
-
-- Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81)
-
-## 0.8.3 (May 6th, 2020)
-
-### Fixed
-
-- Include `Host` and `Accept` headers on proxy "CONNECT" requests.
-- De-duplicate any headers also contained in proxy_headers.
-- HTTP/2 flag not being passed down to proxy connections.
-
-## 0.8.2 (May 3rd, 2020)
-
-### Fixed
-
-- Fix connections using proxy forwarding requests not being added to the
-connection pool properly. (Pull #70)
-
-## 0.8.1 (April 30th, 2020)
-
-### Changed
-
-- Allow inheritance of both `httpcore.AsyncByteStream` and `httpcore.SyncByteStream` without type conflicts.
-
-## 0.8.0 (April 30th, 2020)
-
-### Fixed
-
-- Fixed tunnel proxy support.
-
-### Added
-
-- New `TimeoutException` base class.
-
-## 0.7.0 (March 5th, 2020)
-
-- First integration with HTTPX.
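
Tying the optional HTTP/2 support described in the 0.10.0 notes above to the current API, here is a minimal sketch using the 0.18-era interface. It assumes the `h2` extra is installed via `pip install httpcore[http2]`, and the printed value depends on what ALPN actually negotiates with the server.

```python
import httpcore

# http2=True permits "h2" during ALPN negotiation; HTTP/1.1 remains the
# fallback unless http1=False is also passed (prior-knowledge HTTP/2).
with httpcore.ConnectionPool(http2=True) as pool:
    response = pool.request("GET", "https://www.example.com/")
    # The "http_version" response extension reports the negotiated protocol.
    print(response.extensions.get("http_version"))  # b"HTTP/2" or b"HTTP/1.1"
```
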
diff --git a/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/RECORD b/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/RECORD deleted file mode 100644 index 2ff5f56..0000000 --- a/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/RECORD +++ /dev/null @@ -1,69 +0,0 @@ -httpcore-0.18.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -httpcore-0.18.0.dist-info/METADATA,sha256=HYLl3BBm0kYpuqAPEqFCT0ErYv1rHgvxkY4-EuwPlEY,18914 -httpcore-0.18.0.dist-info/RECORD,, -httpcore-0.18.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -httpcore-0.18.0.dist-info/WHEEL,sha256=9QBuHhg6FNW7lppboF2vKVbCGTVzsFykgRQjjlajrhA,87 -httpcore-0.18.0.dist-info/licenses/LICENSE.md,sha256=_ctZFUx0y6uhahEkL3dAvqnyPW_rVUeRfYxflKgDkqU,1518 -httpcore/__init__.py,sha256=VfaTITS2e1pevgXk6fF0cZSH3f4YUt_iXqGYSsbyyY0,3338 -httpcore/__pycache__/__init__.cpython-311.pyc,, -httpcore/__pycache__/_api.cpython-311.pyc,, -httpcore/__pycache__/_exceptions.cpython-311.pyc,, -httpcore/__pycache__/_models.cpython-311.pyc,, -httpcore/__pycache__/_ssl.cpython-311.pyc,, -httpcore/__pycache__/_synchronization.cpython-311.pyc,, -httpcore/__pycache__/_trace.cpython-311.pyc,, -httpcore/__pycache__/_utils.cpython-311.pyc,, -httpcore/_api.py,sha256=IBR18qZQ8ETcghJXC1Gd-30WuKYRS0EyF2eC80_OBQ8,3167 -httpcore/_async/__init__.py,sha256=EWdl2v4thnAHzJpqjU4h2a8DUiGAvNiWrkii9pfhTf0,1221 -httpcore/_async/__pycache__/__init__.cpython-311.pyc,, -httpcore/_async/__pycache__/connection.cpython-311.pyc,, -httpcore/_async/__pycache__/connection_pool.cpython-311.pyc,, -httpcore/_async/__pycache__/http11.cpython-311.pyc,, -httpcore/_async/__pycache__/http2.cpython-311.pyc,, -httpcore/_async/__pycache__/http_proxy.cpython-311.pyc,, -httpcore/_async/__pycache__/interfaces.cpython-311.pyc,, -httpcore/_async/__pycache__/socks_proxy.cpython-311.pyc,, -httpcore/_async/connection.py,sha256=klHOqiHVo4TUC_X9IsKEnfMHKQcH-WajYAkszBuLXOA,8619 -httpcore/_async/connection_pool.py,sha256=hj1viqcWZivNmoRu-QZjyuOvAFx3-Ae2rMpuK6OZhEM,14305 -httpcore/_async/http11.py,sha256=t7I91I77Zh06UVopMfqvrGskFdpwnQLo5kt04qTCI7U,12441 -httpcore/_async/http2.py,sha256=KXwWZxZ-43vxIWzr1aTLErhaCodDzFr-XAvzc4fUb10,23879 -httpcore/_async/http_proxy.py,sha256=hl4t-PahlAuCGtKNYRx4LSgjx1ZuspE9oDBaL6BOess,14851 -httpcore/_async/interfaces.py,sha256=J2iq9rs7x3nKS6iCfntjHY0Woast6V_HuXuE8rs3HmA,4486 -httpcore/_async/socks_proxy.py,sha256=q4B3QCcBnKeBP_3TkNc6mudszFQ9Ji8pxYPsPeepHPI,13922 -httpcore/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -httpcore/_backends/__pycache__/__init__.cpython-311.pyc,, -httpcore/_backends/__pycache__/anyio.cpython-311.pyc,, -httpcore/_backends/__pycache__/auto.cpython-311.pyc,, -httpcore/_backends/__pycache__/base.cpython-311.pyc,, -httpcore/_backends/__pycache__/mock.cpython-311.pyc,, -httpcore/_backends/__pycache__/sync.cpython-311.pyc,, -httpcore/_backends/__pycache__/trio.cpython-311.pyc,, -httpcore/_backends/anyio.py,sha256=mU8gtunBSLxESGkU0Iy1ZMgumDlAeMkwBjFE3kZiCnc,5208 -httpcore/_backends/auto.py,sha256=8r0ipGxSwXoCb_xKQAyRwL1UzfXVbO4Ee2y8vYQv3Ic,1654 -httpcore/_backends/base.py,sha256=Qsb8b_PSiVP1ldHHGXHxQzJ1Qlzj2r8KR9KQeANkSbE,3218 -httpcore/_backends/mock.py,sha256=S4IADhC6kE22ge_jR_WHlEUkD6QAsXnwz26DSWZLcG4,4179 -httpcore/_backends/sync.py,sha256=z4emZ__8qOAWewBtpkkl3gkpR210RN1l3J8Nud0kZc8,8347 -httpcore/_backends/trio.py,sha256=INOeHEkA8pO6AsSqjColWcayM0FQSyGi1hpaQghjrCs,6078 -httpcore/_exceptions.py,sha256=7zb3KNiG0qmfUNIdFgdaUSbn2Pu3oztghi6Vg7i-LJU,1185 
-httpcore/_models.py,sha256=GTqsbLHxsd_lx0cvtgUBf7OltodKHjIrNKs-DbSc67k,16370 -httpcore/_ssl.py,sha256=srqmSNU4iOUvWF-SrJvb8G_YEbHFELOXQOwdDIBTS9c,187 -httpcore/_sync/__init__.py,sha256=JBDIgXt5la1LCJ1sLQeKhjKFpLnpNr8Svs6z2ni3fgg,1141 -httpcore/_sync/__pycache__/__init__.cpython-311.pyc,, -httpcore/_sync/__pycache__/connection.cpython-311.pyc,, -httpcore/_sync/__pycache__/connection_pool.cpython-311.pyc,, -httpcore/_sync/__pycache__/http11.cpython-311.pyc,, -httpcore/_sync/__pycache__/http2.cpython-311.pyc,, -httpcore/_sync/__pycache__/http_proxy.cpython-311.pyc,, -httpcore/_sync/__pycache__/interfaces.cpython-311.pyc,, -httpcore/_sync/__pycache__/socks_proxy.cpython-311.pyc,, -httpcore/_sync/connection.py,sha256=luocU3Tv3jlvOCaro33xrHZAwBkpn991LWdL7BC8Bkg,8408 -httpcore/_sync/connection_pool.py,sha256=1iwYLdiq3pi9LBvpMZ8O8gWdb56qqPlm6rp35zeORBQ,13928 -httpcore/_sync/http11.py,sha256=1VRRlpqQgIKjxt9xQAeUbDH1Mq3280pC1AU_gwu19VQ,12102 -httpcore/_sync/http2.py,sha256=lkpHesGkrwzIA4oHLyClJf5IAwRLcaAFMnmffAahAK4,23343 -httpcore/_sync/http_proxy.py,sha256=82oin8vjt2a7YmmVvz7sXEZSBuajK-mHDF-EwnR_pJ0,14613 -httpcore/_sync/interfaces.py,sha256=EM4PTf-rgkclzisFcrTyx1G8FwraoffE8rbckOznX_o,4365 -httpcore/_sync/socks_proxy.py,sha256=T13QSceeEAg1PM9Yh7Nk-DoqI28TIUqDS-9O3OSC9Uc,13707 -httpcore/_synchronization.py,sha256=_d_vHqylvzm1Jh58_0G7i-1VwCg3Gu39Cgd4nWASvP0,8751 -httpcore/_trace.py,sha256=akf5PsWVq3rZjqmXniomU59OY37K7JHoeNDCQ4GU84E,3954 -httpcore/_utils.py,sha256=9QPh5ib4JilWX4dBCC_XO6wdBY4b0kbUGgfV3QfBANc,1525 -httpcore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/REQUESTED b/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/WHEEL b/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/WHEEL deleted file mode 100644 index ba1a8af..0000000 --- a/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.18.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/licenses/LICENSE.md b/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/licenses/LICENSE.md deleted file mode 100644 index 311b2b5..0000000 --- a/server/venv/Lib/site-packages/httpcore-0.18.0.dist-info/licenses/LICENSE.md +++ /dev/null @@ -1,27 +0,0 @@ -Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/server/venv/Lib/site-packages/httpcore/__init__.py b/server/venv/Lib/site-packages/httpcore/__init__.py deleted file mode 100644 index 65abe97..0000000 --- a/server/venv/Lib/site-packages/httpcore/__init__.py +++ /dev/null @@ -1,139 +0,0 @@ -from ._api import request, stream -from ._async import ( - AsyncConnectionInterface, - AsyncConnectionPool, - AsyncHTTP2Connection, - AsyncHTTP11Connection, - AsyncHTTPConnection, - AsyncHTTPProxy, - AsyncSOCKSProxy, -) -from ._backends.base import ( - SOCKET_OPTION, - AsyncNetworkBackend, - AsyncNetworkStream, - NetworkBackend, - NetworkStream, -) -from ._backends.mock import AsyncMockBackend, AsyncMockStream, MockBackend, MockStream -from ._backends.sync import SyncBackend -from ._exceptions import ( - ConnectError, - ConnectionNotAvailable, - ConnectTimeout, - LocalProtocolError, - NetworkError, - PoolTimeout, - ProtocolError, - ProxyError, - ReadError, - ReadTimeout, - RemoteProtocolError, - TimeoutException, - UnsupportedProtocol, - WriteError, - WriteTimeout, -) -from ._models import URL, Origin, Request, Response -from ._ssl import default_ssl_context -from ._sync import ( - ConnectionInterface, - ConnectionPool, - HTTP2Connection, - HTTP11Connection, - HTTPConnection, - HTTPProxy, - SOCKSProxy, -) - -# The 'httpcore.AnyIOBackend' class is conditional on 'anyio' being installed. -try: - from ._backends.anyio import AnyIOBackend -except ImportError: # pragma: nocover - - class AnyIOBackend: # type: ignore - def __init__(self, *args, **kwargs): # type: ignore - msg = ( - "Attempted to use 'httpcore.AnyIOBackend' but 'anyio' is not installed." - ) - raise RuntimeError(msg) - - -# The 'httpcore.TrioBackend' class is conditional on 'trio' being installed. -try: - from ._backends.trio import TrioBackend -except ImportError: # pragma: nocover - - class TrioBackend: # type: ignore - def __init__(self, *args, **kwargs): # type: ignore - msg = "Attempted to use 'httpcore.TrioBackend' but 'trio' is not installed." 
- raise RuntimeError(msg)
-
-
-__all__ = [
- # top-level requests
- "request",
- "stream",
- # models
- "Origin",
- "URL",
- "Request",
- "Response",
- # async
- "AsyncHTTPConnection",
- "AsyncConnectionPool",
- "AsyncHTTPProxy",
- "AsyncHTTP11Connection",
- "AsyncHTTP2Connection",
- "AsyncConnectionInterface",
- "AsyncSOCKSProxy",
- # sync
- "HTTPConnection",
- "ConnectionPool",
- "HTTPProxy",
- "HTTP11Connection",
- "HTTP2Connection",
- "ConnectionInterface",
- "SOCKSProxy",
- # network backends, implementations
- "SyncBackend",
- "AnyIOBackend",
- "TrioBackend",
- # network backends, mock implementations
- "AsyncMockBackend",
- "AsyncMockStream",
- "MockBackend",
- "MockStream",
- # network backends, interface
- "AsyncNetworkStream",
- "AsyncNetworkBackend",
- "NetworkStream",
- "NetworkBackend",
- # util
- "default_ssl_context",
- "SOCKET_OPTION",
- # exceptions
- "ConnectionNotAvailable",
- "ProxyError",
- "ProtocolError",
- "LocalProtocolError",
- "RemoteProtocolError",
- "UnsupportedProtocol",
- "TimeoutException",
- "PoolTimeout",
- "ConnectTimeout",
- "ReadTimeout",
- "WriteTimeout",
- "NetworkError",
- "ConnectError",
- "ReadError",
- "WriteError",
-]
-
-__version__ = "0.18.0"
-
-
-__locals = locals()
-for __name in __all__:
- if not __name.startswith("__"):
- setattr(__locals[__name], "__module__", "httpcore") # noqa
diff --git a/server/venv/Lib/site-packages/httpcore/_api.py b/server/venv/Lib/site-packages/httpcore/_api.py deleted file mode 100644 index 854235f..0000000 --- a/server/venv/Lib/site-packages/httpcore/_api.py +++ /dev/null @@ -1,92 +0,0 @@
-from contextlib import contextmanager
-from typing import Iterator, Optional, Union
-
-from ._models import URL, Extensions, HeaderTypes, Response
-from ._sync.connection_pool import ConnectionPool
-
-
-def request(
- method: Union[bytes, str],
- url: Union[URL, bytes, str],
- *,
- headers: HeaderTypes = None,
- content: Union[bytes, Iterator[bytes], None] = None,
- extensions: Optional[Extensions] = None,
-) -> Response:
- """
- Sends an HTTP request, returning the response.
-
- ```
- response = httpcore.request("GET", "https://www.example.com/")
- ```
-
- Arguments:
- method: The HTTP method for the request. Typically one of `"GET"`,
- `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`.
- url: The URL of the HTTP request. Either as an instance of `httpcore.URL`,
- or as str/bytes.
- headers: The HTTP request headers. Either as a dictionary of str/bytes,
- or as a list of two-tuples of str/bytes.
- content: The content of the request body. Either as bytes,
- or as a bytes iterator.
- extensions: A dictionary of optional extra information included on the request.
- Possible keys include `"timeout"`.
-
- Returns:
- An instance of `httpcore.Response`.
- """
- with ConnectionPool() as pool:
- return pool.request(
- method=method,
- url=url,
- headers=headers,
- content=content,
- extensions=extensions,
- )
-
-
-@contextmanager
-def stream(
- method: Union[bytes, str],
- url: Union[URL, bytes, str],
- *,
- headers: HeaderTypes = None,
- content: Union[bytes, Iterator[bytes], None] = None,
- extensions: Optional[Extensions] = None,
-) -> Iterator[Response]:
- """
- Sends an HTTP request, returning the response within a context manager.
-
- ```
- with httpcore.stream("GET", "https://www.example.com/") as response:
- ...
- ```
-
- When using the `stream()` function, the body of the response will not be
- automatically read.
If you want to access the response body you should
- either use `content = response.read()`, or `for chunk in response.iter_stream()`.
-
- Arguments:
- method: The HTTP method for the request. Typically one of `"GET"`,
- `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`.
- url: The URL of the HTTP request. Either as an instance of `httpcore.URL`,
- or as str/bytes.
- headers: The HTTP request headers. Either as a dictionary of str/bytes,
- or as a list of two-tuples of str/bytes.
- content: The content of the request body. Either as bytes,
- or as a bytes iterator.
- extensions: A dictionary of optional extra information included on the request.
- Possible keys include `"timeout"`.
-
- Returns:
- An instance of `httpcore.Response`.
- """
- with ConnectionPool() as pool:
- with pool.stream(
- method=method,
- url=url,
- headers=headers,
- content=content,
- extensions=extensions,
- ) as response:
- yield response
diff --git a/server/venv/Lib/site-packages/httpcore/_async/__init__.py b/server/venv/Lib/site-packages/httpcore/_async/__init__.py deleted file mode 100644 index 88dc7f0..0000000 --- a/server/venv/Lib/site-packages/httpcore/_async/__init__.py +++ /dev/null @@ -1,39 +0,0 @@
-from .connection import AsyncHTTPConnection
-from .connection_pool import AsyncConnectionPool
-from .http11 import AsyncHTTP11Connection
-from .http_proxy import AsyncHTTPProxy
-from .interfaces import AsyncConnectionInterface
-
-try:
- from .http2 import AsyncHTTP2Connection
-except ImportError: # pragma: nocover
-
- class AsyncHTTP2Connection: # type: ignore
- def __init__(self, *args, **kwargs) -> None: # type: ignore
- raise RuntimeError(
- "Attempted to use http2 support, but the `h2` package is not "
- "installed. Use 'pip install httpcore[http2]'."
- )
-
-
-try:
- from .socks_proxy import AsyncSOCKSProxy
-except ImportError: # pragma: nocover
-
- class AsyncSOCKSProxy: # type: ignore
- def __init__(self, *args, **kwargs) -> None: # type: ignore
- raise RuntimeError(
- "Attempted to use SOCKS support, but the `socksio` package is not "
- "installed. Use 'pip install httpcore[socks]'."
- )
-
-
-__all__ = [
- "AsyncHTTPConnection",
- "AsyncConnectionPool",
- "AsyncHTTPProxy",
- "AsyncHTTP11Connection",
- "AsyncHTTP2Connection",
- "AsyncConnectionInterface",
- "AsyncSOCKSProxy",
-]
diff --git a/server/venv/Lib/site-packages/httpcore/_async/connection.py b/server/venv/Lib/site-packages/httpcore/_async/connection.py deleted file mode 100644 index 45ee22a..0000000 --- a/server/venv/Lib/site-packages/httpcore/_async/connection.py +++ /dev/null @@ -1,222 +0,0 @@
-import itertools
-import logging
-import ssl
-from types import TracebackType
-from typing import Iterable, Iterator, Optional, Type
-
-from .._backends.auto import AutoBackend
-from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream
-from .._exceptions import ConnectError, ConnectionNotAvailable, ConnectTimeout
-from .._models import Origin, Request, Response
-from .._ssl import default_ssl_context
-from .._synchronization import AsyncLock
-from .._trace import Trace
-from .http11 import AsyncHTTP11Connection
-from .interfaces import AsyncConnectionInterface
-
-RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc.
-
-
-logger = logging.getLogger("httpcore.connection")
-
-
-def exponential_backoff(factor: float) -> Iterator[float]:
- """
- Generate a geometric sequence that has a ratio of 2 and starts with 0.
- - For example: - - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` - - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` - """ - yield 0 - for n in itertools.count(): - yield factor * 2**n - - -class AsyncHTTPConnection(AsyncConnectionInterface): - def __init__( - self, - origin: Origin, - ssl_context: Optional[ssl.SSLContext] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - self._origin = origin - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._retries = retries - self._local_address = local_address - self._uds = uds - - self._network_backend: AsyncNetworkBackend = ( - AutoBackend() if network_backend is None else network_backend - ) - self._connection: Optional[AsyncConnectionInterface] = None - self._connect_failed: bool = False - self._request_lock = AsyncLock() - self._socket_options = socket_options - - async def handle_async_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection to {self._origin}" - ) - - async with self._request_lock: - if self._connection is None: - try: - stream = await self._connect(request) - - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import AsyncHTTP2Connection - - self._connection = AsyncHTTP2Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = AsyncHTTP11Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - except Exception as exc: - self._connect_failed = True - raise exc - elif not self._connection.is_available(): - raise ConnectionNotAvailable() - - return await self._connection.handle_async_request(request) - - async def _connect(self, request: Request) -> AsyncNetworkStream: - timeouts = request.extensions.get("timeout", {}) - sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - retries_left = self._retries - delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) - - while True: - try: - if self._uds is None: - kwargs = { - "host": self._origin.host.decode("ascii"), - "port": self._origin.port, - "local_address": self._local_address, - "timeout": timeout, - "socket_options": self._socket_options, - } - async with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = await self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - else: - kwargs = { - "path": self._uds, - "timeout": timeout, - "socket_options": self._socket_options, - } - async with Trace( - "connect_unix_socket", logger, request, kwargs - ) as trace: - stream = await self._network_backend.connect_unix_socket( - **kwargs - ) - trace.return_value = stream - - if self._origin.scheme == b"https": - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - 
"ssl_context": ssl_context, - "server_hostname": sni_hostname - or self._origin.host.decode("ascii"), - "timeout": timeout, - } - async with Trace("start_tls", logger, request, kwargs) as trace: - stream = await stream.start_tls(**kwargs) - trace.return_value = stream - return stream - except (ConnectError, ConnectTimeout): - if retries_left <= 0: - raise - retries_left -= 1 - delay = next(delays) - async with Trace("retry", logger, request, kwargs) as trace: - await self._network_backend.sleep(delay) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - async def aclose(self) -> None: - if self._connection is not None: - async with Trace("close", logger, None, {}): - await self._connection.aclose() - - def is_available(self) -> bool: - if self._connection is None: - # If HTTP/2 support is enabled, and the resulting connection could - # end up as HTTP/2 then we should indicate the connection as being - # available to service multiple requests. - return ( - self._http2 - and (self._origin.scheme == b"https" or not self._http1) - and not self._connect_failed - ) - return self._connection.is_available() - - def has_expired(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.has_expired() - - def is_idle(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_idle() - - def is_closed(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_closed() - - def info(self) -> str: - if self._connection is None: - return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" - return self._connection.info() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
- - async def __aenter__(self) -> "AsyncHTTPConnection": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - await self.aclose() diff --git a/server/venv/Lib/site-packages/httpcore/_async/connection_pool.py b/server/venv/Lib/site-packages/httpcore/_async/connection_pool.py deleted file mode 100644 index ddc0510..0000000 --- a/server/venv/Lib/site-packages/httpcore/_async/connection_pool.py +++ /dev/null @@ -1,356 +0,0 @@ -import ssl -import sys -from types import TracebackType -from typing import AsyncIterable, AsyncIterator, Iterable, List, Optional, Type - -from .._backends.auto import AutoBackend -from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend -from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol -from .._models import Origin, Request, Response -from .._synchronization import AsyncEvent, AsyncLock, AsyncShieldCancellation -from .connection import AsyncHTTPConnection -from .interfaces import AsyncConnectionInterface, AsyncRequestInterface - - -class RequestStatus: - def __init__(self, request: Request): - self.request = request - self.connection: Optional[AsyncConnectionInterface] = None - self._connection_acquired = AsyncEvent() - - def set_connection(self, connection: AsyncConnectionInterface) -> None: - assert self.connection is None - self.connection = connection - self._connection_acquired.set() - - def unset_connection(self) -> None: - assert self.connection is not None - self.connection = None - self._connection_acquired = AsyncEvent() - - async def wait_for_connection( - self, timeout: Optional[float] = None - ) -> AsyncConnectionInterface: - if self.connection is None: - await self._connection_acquired.wait(timeout=timeout) - assert self.connection is not None - return self.connection - - -class AsyncConnectionPool(AsyncRequestInterface): - """ - A connection pool for making HTTP requests. - """ - - def __init__( - self, - ssl_context: Optional[ssl.SSLContext] = None, - max_connections: Optional[int] = 10, - max_keepalive_connections: Optional[int] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish a - connection. - local_address: Local address to connect from. 
Can also be used to connect
- using a particular address family. Using `local_address="0.0.0.0"`
- will connect using an `AF_INET` address (IPv4), while using
- `local_address="::"` will connect using an `AF_INET6` address (IPv6).
- uds: Path to a Unix Domain Socket to use instead of TCP sockets.
- network_backend: A backend instance to use for handling network I/O.
- socket_options: Socket options to set on the TCP socket when the
- connection is established.
- """
- self._ssl_context = ssl_context
-
- self._max_connections = (
- sys.maxsize if max_connections is None else max_connections
- )
- self._max_keepalive_connections = (
- sys.maxsize
- if max_keepalive_connections is None
- else max_keepalive_connections
- )
- self._max_keepalive_connections = min(
- self._max_connections, self._max_keepalive_connections
- )
-
- self._keepalive_expiry = keepalive_expiry
- self._http1 = http1
- self._http2 = http2
- self._retries = retries
- self._local_address = local_address
- self._uds = uds
-
- self._pool: List[AsyncConnectionInterface] = []
- self._requests: List[RequestStatus] = []
- self._pool_lock = AsyncLock()
- self._network_backend = (
- AutoBackend() if network_backend is None else network_backend
- )
- self._socket_options = socket_options
-
- def create_connection(self, origin: Origin) -> AsyncConnectionInterface:
- return AsyncHTTPConnection(
- origin=origin,
- ssl_context=self._ssl_context,
- keepalive_expiry=self._keepalive_expiry,
- http1=self._http1,
- http2=self._http2,
- retries=self._retries,
- local_address=self._local_address,
- uds=self._uds,
- network_backend=self._network_backend,
- socket_options=self._socket_options,
- )
-
- @property
- def connections(self) -> List[AsyncConnectionInterface]:
- """
- Return a list of the connections currently in the pool.
-
- For example:
-
- ```python
- >>> pool.connections
- [
- <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>,
- <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>,
- <AsyncHTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>,
- ]
- ```
- """
- return list(self._pool)
-
- async def _attempt_to_acquire_connection(self, status: RequestStatus) -> bool:
- """
- Attempt to provide a connection that can handle the given origin.
- """
- origin = status.request.url.origin
-
- # If there are queued requests in front of us, then don't acquire a
- # connection. We handle requests strictly in order.
- waiting = [s for s in self._requests if s.connection is None]
- if waiting and waiting[0] is not status:
- return False
-
- # Reuse an existing connection if one is currently available.
- for idx, connection in enumerate(self._pool):
- if connection.can_handle_request(origin) and connection.is_available():
- self._pool.pop(idx)
- self._pool.insert(0, connection)
- status.set_connection(connection)
- return True
-
- # If the pool is currently full, attempt to close one idle connection.
- if len(self._pool) >= self._max_connections:
- for idx, connection in reversed(list(enumerate(self._pool))):
- if connection.is_idle():
- await connection.aclose()
- self._pool.pop(idx)
- break
-
- # If the pool is still full, then we cannot acquire a connection.
- if len(self._pool) >= self._max_connections:
- return False
-
- # Otherwise create a new connection.
- connection = self.create_connection(origin)
- self._pool.insert(0, connection)
- status.set_connection(connection)
- return True
-
- async def _close_expired_connections(self) -> None:
- """
- Clean up the connection pool by closing off any connections that have expired.
- """
- # Close any connections that have expired their keep-alive time.
- for idx, connection in reversed(list(enumerate(self._pool))): - if connection.has_expired(): - await connection.aclose() - self._pool.pop(idx) - - # If the pool size exceeds the maximum number of allowed keep-alive connections, - # then close off idle connections as required. - pool_size = len(self._pool) - for idx, connection in reversed(list(enumerate(self._pool))): - if connection.is_idle() and pool_size > self._max_keepalive_connections: - await connection.aclose() - self._pool.pop(idx) - pool_size -= 1 - - async def handle_async_request(self, request: Request) -> Response: - """ - Send an HTTP request, and return an HTTP response. - - This is the core implementation that is called into by `.request()` or `.stream()`. - """ - scheme = request.url.scheme.decode() - if scheme == "": - raise UnsupportedProtocol( - "Request URL is missing an 'http://' or 'https://' protocol." - ) - if scheme not in ("http", "https", "ws", "wss"): - raise UnsupportedProtocol( - f"Request URL has an unsupported protocol '{scheme}://'." - ) - - status = RequestStatus(request) - - async with self._pool_lock: - self._requests.append(status) - await self._close_expired_connections() - await self._attempt_to_acquire_connection(status) - - while True: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("pool", None) - try: - connection = await status.wait_for_connection(timeout=timeout) - except BaseException as exc: - # If we timeout here, or if the task is cancelled, then make - # sure to remove the request from the queue before bubbling - # up the exception. - async with self._pool_lock: - # Ensure only remove when task exists. - if status in self._requests: - self._requests.remove(status) - raise exc - - try: - response = await connection.handle_async_request(request) - except ConnectionNotAvailable: - # The ConnectionNotAvailable exception is a special case, that - # indicates we need to retry the request on a new connection. - # - # The most common case where this can occur is when multiple - # requests are queued waiting for a single connection, which - # might end up as an HTTP/2 connection, but which actually ends - # up as HTTP/1.1. - async with self._pool_lock: - # Maintain our position in the request queue, but reset the - # status so that the request becomes queued again. - status.unset_connection() - await self._attempt_to_acquire_connection(status) - except BaseException as exc: - with AsyncShieldCancellation(): - await self.response_closed(status) - raise exc - else: - break - - # When we return the response, we wrap the stream in a special class - # that handles notifying the connection pool once the response - # has been released. - assert isinstance(response.stream, AsyncIterable) - return Response( - status=response.status, - headers=response.headers, - content=ConnectionPoolByteStream(response.stream, self, status), - extensions=response.extensions, - ) - - async def response_closed(self, status: RequestStatus) -> None: - """ - This method acts as a callback once the request/response cycle is complete. - - It is called into from the `ConnectionPoolByteStream.aclose()` method. - """ - assert status.connection is not None - connection = status.connection - - async with self._pool_lock: - # Update the state of the connection pool. 
- if status in self._requests:
- self._requests.remove(status)
-
- if connection.is_closed() and connection in self._pool:
- self._pool.remove(connection)
-
- # Since we've had a response closed, it's possible we'll now be able
- # to service one or more requests that are currently pending.
- for status in self._requests:
- if status.connection is None:
- acquired = await self._attempt_to_acquire_connection(status)
- # If we could not acquire a connection for a queued request
- # then we don't need to check any more requests that are
- # queued later behind it.
- if not acquired:
- break
-
- # Housekeeping.
- await self._close_expired_connections()
-
- async def aclose(self) -> None:
- """
- Close any connections in the pool.
- """
- async with self._pool_lock:
- for connection in self._pool:
- await connection.aclose()
- self._pool = []
- self._requests = []
-
- async def __aenter__(self) -> "AsyncConnectionPool":
- return self
-
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]] = None,
- exc_value: Optional[BaseException] = None,
- traceback: Optional[TracebackType] = None,
- ) -> None:
- await self.aclose()
-
-
-class ConnectionPoolByteStream:
- """
- A wrapper around the response byte stream, that additionally handles
- notifying the connection pool when the response has been closed.
- """
-
- def __init__(
- self,
- stream: AsyncIterable[bytes],
- pool: AsyncConnectionPool,
- status: RequestStatus,
- ) -> None:
- self._stream = stream
- self._pool = pool
- self._status = status
-
- async def __aiter__(self) -> AsyncIterator[bytes]:
- async for part in self._stream:
- yield part
-
- async def aclose(self) -> None:
- try:
- if hasattr(self._stream, "aclose"):
- await self._stream.aclose()
- finally:
- with AsyncShieldCancellation():
- await self._pool.response_closed(self._status)
diff --git a/server/venv/Lib/site-packages/httpcore/_async/http11.py b/server/venv/Lib/site-packages/httpcore/_async/http11.py deleted file mode 100644 index 32fa3a6..0000000 --- a/server/venv/Lib/site-packages/httpcore/_async/http11.py +++ /dev/null @@ -1,343 +0,0 @@
-import enum
-import logging
-import time
-from types import TracebackType
-from typing import (
- AsyncIterable,
- AsyncIterator,
- List,
- Optional,
- Tuple,
- Type,
- Union,
- cast,
-)
-
-import h11
-
-from .._backends.base import AsyncNetworkStream
-from .._exceptions import (
- ConnectionNotAvailable,
- LocalProtocolError,
- RemoteProtocolError,
- WriteError,
- map_exceptions,
-)
-from .._models import Origin, Request, Response
-from .._synchronization import AsyncLock, AsyncShieldCancellation
-from .._trace import Trace
-from .interfaces import AsyncConnectionInterface
-
-logger = logging.getLogger("httpcore.http11")
-
-
-# A subset of `h11.Event` types supported by `_send_event`
-H11SendEvent = Union[
- h11.Request,
- h11.Data,
- h11.EndOfMessage,
-]
-
-
-class HTTPConnectionState(enum.IntEnum):
- NEW = 0
- ACTIVE = 1
- IDLE = 2
- CLOSED = 3
-
-
-class AsyncHTTP11Connection(AsyncConnectionInterface):
- READ_NUM_BYTES = 64 * 1024
- MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024
-
- def __init__(
- self,
- origin: Origin,
- stream: AsyncNetworkStream,
- keepalive_expiry: Optional[float] = None,
- ) -> None:
- self._origin = origin
- self._network_stream = stream
- self._keepalive_expiry: Optional[float] = keepalive_expiry
- self._expire_at: Optional[float] = None
- self._state = HTTPConnectionState.NEW
- self._state_lock = AsyncLock()
- self._request_count = 0
- self._h11_state = h11.Connection(
- our_role=h11.CLIENT,
max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
- )
-
- async def handle_async_request(self, request: Request) -> Response:
- if not self.can_handle_request(request.url.origin):
- raise RuntimeError(
- f"Attempted to send request to {request.url.origin} on connection "
- f"to {self._origin}"
- )
-
- async with self._state_lock:
- if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE):
- self._request_count += 1
- self._state = HTTPConnectionState.ACTIVE
- self._expire_at = None
- else:
- raise ConnectionNotAvailable()
-
- try:
- kwargs = {"request": request}
- try:
- async with Trace(
- "send_request_headers", logger, request, kwargs
- ) as trace:
- await self._send_request_headers(**kwargs)
- async with Trace("send_request_body", logger, request, kwargs) as trace:
- await self._send_request_body(**kwargs)
- except WriteError:
- # If we get a write error while we're writing the request,
- # then we suppress this error and move on to attempting to
- # read the response. Servers can sometimes close the request
- # pre-emptively and then respond with a well-formed HTTP
- # error response.
- pass
-
- async with Trace(
- "receive_response_headers", logger, request, kwargs
- ) as trace:
- (
- http_version,
- status,
- reason_phrase,
- headers,
- ) = await self._receive_response_headers(**kwargs)
- trace.return_value = (
- http_version,
- status,
- reason_phrase,
- headers,
- )
-
- return Response(
- status=status,
- headers=headers,
- content=HTTP11ConnectionByteStream(self, request),
- extensions={
- "http_version": http_version,
- "reason_phrase": reason_phrase,
- "network_stream": self._network_stream,
- },
- )
- except BaseException as exc:
- with AsyncShieldCancellation():
- async with Trace("response_closed", logger, request) as trace:
- await self._response_closed()
- raise exc
-
- # Sending the request...
-
- async def _send_request_headers(self, request: Request) -> None:
- timeouts = request.extensions.get("timeout", {})
- timeout = timeouts.get("write", None)
-
- with map_exceptions({h11.LocalProtocolError: LocalProtocolError}):
- event = h11.Request(
- method=request.method,
- target=request.url.target,
- headers=request.headers,
- )
- await self._send_event(event, timeout=timeout)
-
- async def _send_request_body(self, request: Request) -> None:
- timeouts = request.extensions.get("timeout", {})
- timeout = timeouts.get("write", None)
-
- assert isinstance(request.stream, AsyncIterable)
- async for chunk in request.stream:
- event = h11.Data(data=chunk)
- await self._send_event(event, timeout=timeout)
-
- await self._send_event(h11.EndOfMessage(), timeout=timeout)
-
- async def _send_event(
- self, event: h11.Event, timeout: Optional[float] = None
- ) -> None:
- bytes_to_send = self._h11_state.send(event)
- if bytes_to_send is not None:
- await self._network_stream.write(bytes_to_send, timeout=timeout)
-
- # Receiving the response...
-
- async def _receive_response_headers(
- self, request: Request
- ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]:
- timeouts = request.extensions.get("timeout", {})
- timeout = timeouts.get("read", None)
-
- while True:
- event = await self._receive_event(timeout=timeout)
- if isinstance(event, h11.Response):
- break
- if (
- isinstance(event, h11.InformationalResponse)
- and event.status_code == 101
- ):
- break
-
- http_version = b"HTTP/" + event.http_version
-
- # h11 version 0.11+ supports a `raw_items` interface to get the
- # raw header casing, rather than the enforced lowercase headers.
- headers = event.headers.raw_items()
-
- return http_version, event.status_code, event.reason, headers
-
- async def _receive_response_body(self, request: Request) -> AsyncIterator[bytes]:
- timeouts = request.extensions.get("timeout", {})
- timeout = timeouts.get("read", None)
-
- while True:
- event = await self._receive_event(timeout=timeout)
- if isinstance(event, h11.Data):
- yield bytes(event.data)
- elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)):
- break
-
- async def _receive_event(
- self, timeout: Optional[float] = None
- ) -> Union[h11.Event, Type[h11.PAUSED]]:
- while True:
- with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
- event = self._h11_state.next_event()
-
- if event is h11.NEED_DATA:
- data = await self._network_stream.read(
- self.READ_NUM_BYTES, timeout=timeout
- )
-
- # If we feed this case through h11 we'll raise an exception like:
- #
- # httpcore.RemoteProtocolError: can't handle event type
- # ConnectionClosed when role=SERVER and state=SEND_RESPONSE
- #
- # Which is accurate, but not very informative from an end-user
- # perspective. Instead we handle this case distinctly and treat
- # it as a ConnectError.
- if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE:
- msg = "Server disconnected without sending a response."
- raise RemoteProtocolError(msg)
-
- self._h11_state.receive_data(data)
- else:
- # mypy fails to narrow the type in the if statement above
- return cast(Union[h11.Event, Type[h11.PAUSED]], event)
-
- async def _response_closed(self) -> None:
- async with self._state_lock:
- if (
- self._h11_state.our_state is h11.DONE
- and self._h11_state.their_state is h11.DONE
- ):
- self._state = HTTPConnectionState.IDLE
- self._h11_state.start_next_cycle()
- if self._keepalive_expiry is not None:
- now = time.monotonic()
- self._expire_at = now + self._keepalive_expiry
- else:
- await self.aclose()
-
- # Once the connection is no longer required...
-
- async def aclose(self) -> None:
- # Note that this method unilaterally closes the connection, and does
- # not have any kind of locking in place around it.
- self._state = HTTPConnectionState.CLOSED
- await self._network_stream.aclose()
-
- # The AsyncConnectionInterface methods provide information about the state of
- # the connection, allowing for a connection pooling implementation to
- # determine when to reuse and when to close the connection...
-
- def can_handle_request(self, origin: Origin) -> bool:
- return origin == self._origin
-
- def is_available(self) -> bool:
- # Note that HTTP/1.1 connections in the "NEW" state are not treated as
- # being "available". The control flow which created the connection will
- # be able to send an outgoing request, but the connection will not be
- # acquired from the connection pool for any other request.
- return self._state == HTTPConnectionState.IDLE
-
- def has_expired(self) -> bool:
- now = time.monotonic()
- keepalive_expired = self._expire_at is not None and now > self._expire_at
-
- # If the HTTP connection is idle but the socket is readable, then the
- # only valid state is that the socket is about to return b"", indicating
- # a server-initiated disconnect.
- server_disconnected = ( - self._state == HTTPConnectionState.IDLE - and self._network_stream.get_extra_info("is_readable") - ) - - return keepalive_expired or server_disconnected - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/1.1, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. - - async def __aenter__(self) -> "AsyncHTTP11Connection": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - await self.aclose() - - -class HTTP11ConnectionByteStream: - def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: - self._connection = connection - self._request = request - self._closed = False - - async def __aiter__(self) -> AsyncIterator[bytes]: - kwargs = {"request": self._request} - try: - async with Trace("receive_response_body", logger, self._request, kwargs): - async for chunk in self._connection._receive_response_body(**kwargs): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. 
- with AsyncShieldCancellation(): - await self.aclose() - raise exc - - async def aclose(self) -> None: - if not self._closed: - self._closed = True - async with Trace("response_closed", logger, self._request): - await self._connection._response_closed() diff --git a/server/venv/Lib/site-packages/httpcore/_async/http2.py b/server/venv/Lib/site-packages/httpcore/_async/http2.py deleted file mode 100644 index 8dc776f..0000000 --- a/server/venv/Lib/site-packages/httpcore/_async/http2.py +++ /dev/null @@ -1,589 +0,0 @@ -import enum -import logging -import time -import types -import typing - -import h2.config -import h2.connection -import h2.events -import h2.exceptions -import h2.settings - -from .._backends.base import AsyncNetworkStream -from .._exceptions import ( - ConnectionNotAvailable, - LocalProtocolError, - RemoteProtocolError, -) -from .._models import Origin, Request, Response -from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation -from .._trace import Trace -from .interfaces import AsyncConnectionInterface - -logger = logging.getLogger("httpcore.http2") - - -def has_body_headers(request: Request) -> bool: - return any( - k.lower() == b"content-length" or k.lower() == b"transfer-encoding" - for k, v in request.headers - ) - - -class HTTPConnectionState(enum.IntEnum): - ACTIVE = 1 - IDLE = 2 - CLOSED = 3 - - -class AsyncHTTP2Connection(AsyncConnectionInterface): - READ_NUM_BYTES = 64 * 1024 - CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) - - def __init__( - self, - origin: Origin, - stream: AsyncNetworkStream, - keepalive_expiry: typing.Optional[float] = None, - ): - self._origin = origin - self._network_stream = stream - self._keepalive_expiry: typing.Optional[float] = keepalive_expiry - self._h2_state = h2.connection.H2Connection(config=self.CONFIG) - self._state = HTTPConnectionState.IDLE - self._expire_at: typing.Optional[float] = None - self._request_count = 0 - self._init_lock = AsyncLock() - self._state_lock = AsyncLock() - self._read_lock = AsyncLock() - self._write_lock = AsyncLock() - self._sent_connection_init = False - self._used_all_stream_ids = False - self._connection_error = False - - # Mapping from stream ID to response stream events. - self._events: typing.Dict[ - int, - typing.Union[ - h2.events.ResponseReceived, - h2.events.DataReceived, - h2.events.StreamEnded, - h2.events.StreamReset, - ], - ] = {} - - # Connection terminated events are stored as state since - # we need to handle them for all streams. - self._connection_terminated: typing.Optional[ - h2.events.ConnectionTerminated - ] = None - - self._read_exception: typing.Optional[Exception] = None - self._write_exception: typing.Optional[Exception] = None - - async def handle_async_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - # This cannot occur in normal operation, since the connection pool - # will only send requests on connections that handle them. - # It's in place simply for resilience as a guard against incorrect - # usage, for anyone working directly with httpcore connections. 
- raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection " - f"to {self._origin}" - ) - - async with self._state_lock: - if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): - self._request_count += 1 - self._expire_at = None - self._state = HTTPConnectionState.ACTIVE - else: - raise ConnectionNotAvailable() - - async with self._init_lock: - if not self._sent_connection_init: - try: - kwargs = {"request": request} - async with Trace("send_connection_init", logger, request, kwargs): - await self._send_connection_init(**kwargs) - except BaseException as exc: - with AsyncShieldCancellation(): - await self.aclose() - raise exc - - self._sent_connection_init = True - - # Initially start with just 1 until the remote server provides - # its max_concurrent_streams value - self._max_streams = 1 - - local_settings_max_streams = ( - self._h2_state.local_settings.max_concurrent_streams - ) - self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) - - for _ in range(local_settings_max_streams - self._max_streams): - await self._max_streams_semaphore.acquire() - - await self._max_streams_semaphore.acquire() - - try: - stream_id = self._h2_state.get_next_available_stream_id() - self._events[stream_id] = [] - except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover - self._used_all_stream_ids = True - self._request_count -= 1 - raise ConnectionNotAvailable() - - try: - kwargs = {"request": request, "stream_id": stream_id} - async with Trace("send_request_headers", logger, request, kwargs): - await self._send_request_headers(request=request, stream_id=stream_id) - async with Trace("send_request_body", logger, request, kwargs): - await self._send_request_body(request=request, stream_id=stream_id) - async with Trace( - "receive_response_headers", logger, request, kwargs - ) as trace: - status, headers = await self._receive_response( - request=request, stream_id=stream_id - ) - trace.return_value = (status, headers) - - return Response( - status=status, - headers=headers, - content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), - extensions={ - "http_version": b"HTTP/2", - "network_stream": self._network_stream, - "stream_id": stream_id, - }, - ) - except BaseException as exc: # noqa: PIE786 - with AsyncShieldCancellation(): - kwargs = {"stream_id": stream_id} - async with Trace("response_closed", logger, request, kwargs): - await self._response_closed(stream_id=stream_id) - - if isinstance(exc, h2.exceptions.ProtocolError): - # One case where h2 can raise a protocol error is when a - # closed frame has been seen by the state machine. - # - # This happens when one stream is reading, and encounters - # a GOAWAY event. Other flows of control may then raise - # a protocol error at any point they interact with the 'h2_state'. - # - # In this case we'll have stored the event, and should raise - # it as a RemoteProtocolError. - if self._connection_terminated: # pragma: nocover - raise RemoteProtocolError(self._connection_terminated) - # If h2 raises a protocol error in some other state then we - # must somehow have made a protocol violation. - raise LocalProtocolError(exc) # pragma: nocover - - raise exc - - async def _send_connection_init(self, request: Request) -> None: - """ - The HTTP/2 connection requires some initial setup before we can start - using individual request/response streams on it. 
- """ - # Need to set these manually here instead of manipulating via - # __setitem__() otherwise the H2Connection will emit SettingsUpdate - # frames in addition to sending the undesired defaults. - self._h2_state.local_settings = h2.settings.Settings( - client=True, - initial_values={ - # Disable PUSH_PROMISE frames from the server since we don't do anything - # with them for now. Maybe when we support caching? - h2.settings.SettingCodes.ENABLE_PUSH: 0, - # These two are taken from h2 for safe defaults - h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, - h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, - }, - ) - - # Some websites (*cough* Yahoo *cough*) balk at this setting being - # present in the initial handshake since it's not defined in the original - # RFC despite the RFC mandating ignoring settings you don't know about. - del self._h2_state.local_settings[ - h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL - ] - - self._h2_state.initiate_connection() - self._h2_state.increment_flow_control_window(2**24) - await self._write_outgoing_data(request) - - # Sending the request... - - async def _send_request_headers(self, request: Request, stream_id: int) -> None: - """ - Send the request headers to a given stream ID. - """ - end_stream = not has_body_headers(request) - - # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. - # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require - # HTTP/1.1 style headers, and map them appropriately if we end up on - # an HTTP/2 connection. - authority = [v for k, v in request.headers if k.lower() == b"host"][0] - - headers = [ - (b":method", request.method), - (b":authority", authority), - (b":scheme", request.url.scheme), - (b":path", request.url.target), - ] + [ - (k.lower(), v) - for k, v in request.headers - if k.lower() - not in ( - b"host", - b"transfer-encoding", - ) - ] - - self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) - self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) - await self._write_outgoing_data(request) - - async def _send_request_body(self, request: Request, stream_id: int) -> None: - """ - Iterate over the request body sending it to a given stream ID. - """ - if not has_body_headers(request): - return - - assert isinstance(request.stream, typing.AsyncIterable) - async for data in request.stream: - await self._send_stream_data(request, stream_id, data) - await self._send_end_stream(request, stream_id) - - async def _send_stream_data( - self, request: Request, stream_id: int, data: bytes - ) -> None: - """ - Send a single chunk of data in one or more data frames. - """ - while data: - max_flow = await self._wait_for_outgoing_flow(request, stream_id) - chunk_size = min(len(data), max_flow) - chunk, data = data[:chunk_size], data[chunk_size:] - self._h2_state.send_data(stream_id, chunk) - await self._write_outgoing_data(request) - - async def _send_end_stream(self, request: Request, stream_id: int) -> None: - """ - Send an empty data frame on on a given stream ID with the END_STREAM flag set. - """ - self._h2_state.end_stream(stream_id) - await self._write_outgoing_data(request) - - # Receiving the response... - - async def _receive_response( - self, request: Request, stream_id: int - ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: - """ - Return the response status code and headers for a given stream ID. 
- """ - while True: - event = await self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.ResponseReceived): - break - - status_code = 200 - headers = [] - for k, v in event.headers: - if k == b":status": - status_code = int(v.decode("ascii", errors="ignore")) - elif not k.startswith(b":"): - headers.append((k, v)) - - return (status_code, headers) - - async def _receive_response_body( - self, request: Request, stream_id: int - ) -> typing.AsyncIterator[bytes]: - """ - Iterator that returns the bytes of the response body for a given stream ID. - """ - while True: - event = await self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.DataReceived): - amount = event.flow_controlled_length - self._h2_state.acknowledge_received_data(amount, stream_id) - await self._write_outgoing_data(request) - yield event.data - elif isinstance(event, h2.events.StreamEnded): - break - - async def _receive_stream_event( - self, request: Request, stream_id: int - ) -> typing.Union[ - h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded - ]: - """ - Return the next available event for a given stream ID. - - Will read more data from the network if required. - """ - while not self._events.get(stream_id): - await self._receive_events(request, stream_id) - event = self._events[stream_id].pop(0) - if isinstance(event, h2.events.StreamReset): - raise RemoteProtocolError(event) - return event - - async def _receive_events( - self, request: Request, stream_id: typing.Optional[int] = None - ) -> None: - """ - Read some data from the network until we see one or more events - for a given stream ID. - """ - async with self._read_lock: - if self._connection_terminated is not None: - last_stream_id = self._connection_terminated.last_stream_id - if stream_id and last_stream_id and stream_id > last_stream_id: - self._request_count -= 1 - raise ConnectionNotAvailable() - raise RemoteProtocolError(self._connection_terminated) - - # This conditional is a bit icky. We don't want to block reading if we've - # actually got an event to return for a given stream. We need to do that - # check *within* the atomic read lock. Though it also need to be optional, - # because when we call it from `_wait_for_outgoing_flow` we *do* want to - # block until we've available flow control, event when we have events - # pending for the stream ID we're attempting to send on. 
- if stream_id is None or not self._events.get(stream_id): - events = await self._read_incoming_data(request) - for event in events: - if isinstance(event, h2.events.RemoteSettingsChanged): - async with Trace( - "receive_remote_settings", logger, request - ) as trace: - await self._receive_remote_settings_change(event) - trace.return_value = event - - elif isinstance( - event, - ( - h2.events.ResponseReceived, - h2.events.DataReceived, - h2.events.StreamEnded, - h2.events.StreamReset, - ), - ): - if event.stream_id in self._events: - self._events[event.stream_id].append(event) - - elif isinstance(event, h2.events.ConnectionTerminated): - self._connection_terminated = event - - await self._write_outgoing_data(request) - - async def _receive_remote_settings_change(self, event: h2.events.Event) -> None: - max_concurrent_streams = event.changed_settings.get( - h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS - ) - if max_concurrent_streams: - new_max_streams = min( - max_concurrent_streams.new_value, - self._h2_state.local_settings.max_concurrent_streams, - ) - if new_max_streams and new_max_streams != self._max_streams: - while new_max_streams > self._max_streams: - await self._max_streams_semaphore.release() - self._max_streams += 1 - while new_max_streams < self._max_streams: - await self._max_streams_semaphore.acquire() - self._max_streams -= 1 - - async def _response_closed(self, stream_id: int) -> None: - await self._max_streams_semaphore.release() - del self._events[stream_id] - async with self._state_lock: - if self._connection_terminated and not self._events: - await self.aclose() - - elif self._state == HTTPConnectionState.ACTIVE and not self._events: - self._state = HTTPConnectionState.IDLE - if self._keepalive_expiry is not None: - now = time.monotonic() - self._expire_at = now + self._keepalive_expiry - if self._used_all_stream_ids: # pragma: nocover - await self.aclose() - - async def aclose(self) -> None: - # Note that this method unilaterally closes the connection, and does - # not have any kind of locking in place around it. - self._h2_state.close_connection() - self._state = HTTPConnectionState.CLOSED - await self._network_stream.aclose() - - # Wrappers around network read/write operations... - - async def _read_incoming_data( - self, request: Request - ) -> typing.List[h2.events.Event]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - if self._read_exception is not None: - raise self._read_exception # pragma: nocover - - try: - data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) - if data == b"": - raise RemoteProtocolError("Server disconnected") - except Exception as exc: - # If we get a network error we should: - # - # 1. Save the exception and just raise it immediately on any future reads. - # (For example, this means that a single read timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. 
- self._read_exception = exc - self._connection_error = True - raise exc - - events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) - - return events - - async def _write_outgoing_data(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - async with self._write_lock: - data_to_send = self._h2_state.data_to_send() - - if self._write_exception is not None: - raise self._write_exception # pragma: nocover - - try: - await self._network_stream.write(data_to_send, timeout) - except Exception as exc: # pragma: nocover - # If we get a network error we should: - # - # 1. Save the exception and just raise it immediately on any future write. - # (For example, this means that a single write timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. - self._write_exception = exc - self._connection_error = True - raise exc - - # Flow control... - - async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: - """ - Returns the maximum allowable outgoing flow for a given stream. - - If the allowable flow is zero, then waits on the network until - WindowUpdated frames have increased the flow rate. - https://tools.ietf.org/html/rfc7540#section-6.9 - """ - local_flow: int = self._h2_state.local_flow_control_window(stream_id) - max_frame_size: int = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - while flow == 0: - await self._receive_events(request) - local_flow = self._h2_state.local_flow_control_window(stream_id) - max_frame_size = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - return flow - - # Interface for connection pooling... - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def is_available(self) -> bool: - return ( - self._state != HTTPConnectionState.CLOSED - and not self._connection_error - and not self._used_all_stream_ids - and not ( - self._h2_state.state_machine.state - == h2.connection.ConnectionState.CLOSED - ) - ) - - def has_expired(self) -> bool: - now = time.monotonic() - return self._expire_at is not None and now > self._expire_at - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/2, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
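    # A usage sketch (editorial addition, not upstream code): assuming an
    # already-established `AsyncNetworkStream` called `stream` and a running
    # event loop, a single connection can be driven directly, e.g. in tests,
    # via the context-manager support defined below:
    #
    #     origin = Origin(b"https", b"example.com", 443)
    #     async with AsyncHTTP2Connection(origin=origin, stream=stream) as conn:
    #         response = await conn.request(b"GET", b"https://example.com/")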
- - async def __aenter__(self) -> "AsyncHTTP2Connection": - return self - - async def __aexit__( - self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[types.TracebackType] = None, - ) -> None: - await self.aclose() - - -class HTTP2ConnectionByteStream: - def __init__( - self, connection: AsyncHTTP2Connection, request: Request, stream_id: int - ) -> None: - self._connection = connection - self._request = request - self._stream_id = stream_id - self._closed = False - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - kwargs = {"request": self._request, "stream_id": self._stream_id} - try: - async with Trace("receive_response_body", logger, self._request, kwargs): - async for chunk in self._connection._receive_response_body( - request=self._request, stream_id=self._stream_id - ): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. - with AsyncShieldCancellation(): - await self.aclose() - raise exc - - async def aclose(self) -> None: - if not self._closed: - self._closed = True - kwargs = {"stream_id": self._stream_id} - async with Trace("response_closed", logger, self._request, kwargs): - await self._connection._response_closed(stream_id=self._stream_id) diff --git a/server/venv/Lib/site-packages/httpcore/_async/http_proxy.py b/server/venv/Lib/site-packages/httpcore/_async/http_proxy.py deleted file mode 100644 index 4aa7d87..0000000 --- a/server/venv/Lib/site-packages/httpcore/_async/http_proxy.py +++ /dev/null @@ -1,368 +0,0 @@ -import logging -import ssl -from base64 import b64encode -from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union - -from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend -from .._exceptions import ProxyError -from .._models import ( - URL, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, -) -from .._ssl import default_ssl_context -from .._synchronization import AsyncLock -from .._trace import Trace -from .connection import AsyncHTTPConnection -from .connection_pool import AsyncConnectionPool -from .http11 import AsyncHTTP11Connection -from .interfaces import AsyncConnectionInterface - -HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] -HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] - - -logger = logging.getLogger("httpcore.proxy") - - -def merge_headers( - default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, - override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, -) -> List[Tuple[bytes, bytes]]: - """ - Append default_headers and override_headers, de-duplicating if a key exists - in both cases. - """ - default_headers = [] if default_headers is None else list(default_headers) - override_headers = [] if override_headers is None else list(override_headers) - has_override = set(key.lower() for key, value in override_headers) - default_headers = [ - (key, value) - for key, value in default_headers - if key.lower() not in has_override - ] - return default_headers + override_headers - - -def build_auth_header(username: bytes, password: bytes) -> bytes: - userpass = username + b":" + password - return b"Basic " + b64encode(userpass) - - -class AsyncHTTPProxy(AsyncConnectionPool): - """ - A connection pool that sends requests via an HTTP proxy. 
- """ - - def __init__( - self, - proxy_url: Union[URL, bytes, str], - proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, - proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, - ssl_context: Optional[ssl.SSLContext] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - max_connections: Optional[int] = 10, - max_keepalive_connections: Optional[int] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - proxy_url: The URL to use when connecting to the proxy server. - For example `"http://127.0.0.1:8080/"`. - proxy_auth: Any proxy authentication as a two-tuple of - (username, password). May be either bytes or ascii-only str. - proxy_headers: Any HTTP headers to use for the proxy requests. - For example `{"Proxy-Authorization": "Basic :"}`. - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish - a connection. - local_address: Local address to connect from. Can also be used to - connect using a particular address family. Using - `local_address="0.0.0.0"` will connect using an `AF_INET` address - (IPv4), while using `local_address="::"` will connect using an - `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. 
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - local_address=local_address, - uds=uds, - socket_options=socket_options, - ) - - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if ( - self._proxy_url.scheme == b"http" and proxy_ssl_context is not None - ): # pragma: no cover - raise RuntimeError( - "The `proxy_ssl_context` argument is not allowed for the http scheme" - ) - - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - if proxy_auth is not None: - username = enforce_bytes(proxy_auth[0], name="proxy_auth") - password = enforce_bytes(proxy_auth[1], name="proxy_auth") - authorization = build_auth_header(username, password) - self._proxy_headers = [ - (b"Proxy-Authorization", authorization) - ] + self._proxy_headers - - def create_connection(self, origin: Origin) -> AsyncConnectionInterface: - if origin.scheme == b"http": - return AsyncForwardHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - keepalive_expiry=self._keepalive_expiry, - network_backend=self._network_backend, - proxy_ssl_context=self._proxy_ssl_context, - ) - return AsyncTunnelHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - ssl_context=self._ssl_context, - proxy_ssl_context=self._proxy_ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class AsyncForwardHTTPConnection(AsyncConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, - keepalive_expiry: Optional[float] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - ) -> None: - self._connection = AsyncHTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._remote_origin = remote_origin - - async def handle_async_request(self, request: Request) -> Response: - headers = merge_headers(self._proxy_headers, request.headers) - url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=bytes(request.url), - ) - proxy_request = Request( - method=request.method, - url=url, - headers=headers, - content=request.stream, - extensions=request.extensions, - ) - return await self._connection.handle_async_request(proxy_request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - async def aclose(self) -> None: - await self._connection.aclose() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return self._connection.is_idle() - - def is_closed(self) -> 
bool: - return self._connection.is_closed() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" - - -class AsyncTunnelHTTPConnection(AsyncConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - ssl_context: Optional[ssl.SSLContext] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - self._connection: AsyncConnectionInterface = AsyncHTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._connect_lock = AsyncLock() - self._connected = False - - async def handle_async_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("connect", None) - - async with self._connect_lock: - if not self._connected: - target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) - - connect_url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=target, - ) - connect_headers = merge_headers( - [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers - ) - connect_request = Request( - method=b"CONNECT", - url=connect_url, - headers=connect_headers, - extensions=request.extensions, - ) - connect_response = await self._connection.handle_async_request( - connect_request - ) - - if connect_response.status < 200 or connect_response.status > 299: - reason_bytes = connect_response.extensions.get("reason_phrase", b"") - reason_str = reason_bytes.decode("ascii", errors="ignore") - msg = "%d %s" % (connect_response.status, reason_str) - await self._connection.aclose() - raise ProxyError(msg) - - stream = connect_response.extensions["network_stream"] - - # Upgrade the stream to SSL - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - async with Trace("start_tls", logger, request, kwargs) as trace: - stream = await stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if we should be using HTTP/1.1 or HTTP/2 - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - - # Create the HTTP/1.1 or HTTP/2 connection - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import AsyncHTTP2Connection - - self._connection = AsyncHTTP2Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = AsyncHTTP11Connection( - origin=self._remote_origin, - stream=stream, - 
keepalive_expiry=self._keepalive_expiry, - ) - - self._connected = True - return await self._connection.handle_async_request(request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - async def aclose(self) -> None: - await self._connection.aclose() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return self._connection.is_idle() - - def is_closed(self) -> bool: - return self._connection.is_closed() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/server/venv/Lib/site-packages/httpcore/_async/interfaces.py b/server/venv/Lib/site-packages/httpcore/_async/interfaces.py deleted file mode 100644 index c998dd2..0000000 --- a/server/venv/Lib/site-packages/httpcore/_async/interfaces.py +++ /dev/null @@ -1,135 +0,0 @@ -from contextlib import asynccontextmanager -from typing import AsyncIterator, Optional, Union - -from .._models import ( - URL, - Extensions, - HeaderTypes, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, - include_request_headers, -) - - -class AsyncRequestInterface: - async def request( - self, - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, AsyncIterator[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> Response: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. - headers = include_request_headers(headers, url=url, content=content) - - request = Request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - response = await self.handle_async_request(request) - try: - await response.aread() - finally: - await response.aclose() - return response - - @asynccontextmanager - async def stream( - self, - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, AsyncIterator[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> AsyncIterator[Response]: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. 
-        headers = include_request_headers(headers, url=url, content=content)
-
-        request = Request(
-            method=method,
-            url=url,
-            headers=headers,
-            content=content,
-            extensions=extensions,
-        )
-        response = await self.handle_async_request(request)
-        try:
-            yield response
-        finally:
-            await response.aclose()
-
-    async def handle_async_request(self, request: Request) -> Response:
-        raise NotImplementedError()  # pragma: nocover
-
-
-class AsyncConnectionInterface(AsyncRequestInterface):
-    async def aclose(self) -> None:
-        raise NotImplementedError()  # pragma: nocover
-
-    def info(self) -> str:
-        raise NotImplementedError()  # pragma: nocover
-
-    def can_handle_request(self, origin: Origin) -> bool:
-        raise NotImplementedError()  # pragma: nocover
-
-    def is_available(self) -> bool:
-        """
-        Return `True` if the connection is currently able to accept an
-        outgoing request.
-
-        An HTTP/1.1 connection will only be available if it is currently idle.
-
-        An HTTP/2 connection will be available so long as the stream ID space is
-        not yet exhausted, and the connection is not in an error state.
-
-        While the connection is being established we may not yet know if it is going
-        to result in an HTTP/1.1 or HTTP/2 connection. The connection should be
-        treated as being available, but might ultimately raise `NewConnectionRequired`
-        exceptions if multiple requests are attempted over a connection that ends up
-        being established as HTTP/1.1.
-        """
-        raise NotImplementedError()  # pragma: nocover
-
-    def has_expired(self) -> bool:
-        """
-        Return `True` if the connection is in a state where it should be closed.
-
-        This either means that the connection is idle and it has passed the
-        expiry time on its keep-alive, or that the server has sent an EOF.
-        """
-        raise NotImplementedError()  # pragma: nocover
-
-    def is_idle(self) -> bool:
-        """
-        Return `True` if the connection is currently idle.
-        """
-        raise NotImplementedError()  # pragma: nocover
-
-    def is_closed(self) -> bool:
-        """
-        Return `True` if the connection has been closed.
-
-        Used when a response is closed to determine if the connection may be
-        returned to the connection pool or not.
- """ - raise NotImplementedError() # pragma: nocover diff --git a/server/venv/Lib/site-packages/httpcore/_async/socks_proxy.py b/server/venv/Lib/site-packages/httpcore/_async/socks_proxy.py deleted file mode 100644 index 08a065d..0000000 --- a/server/venv/Lib/site-packages/httpcore/_async/socks_proxy.py +++ /dev/null @@ -1,342 +0,0 @@ -import logging -import ssl -import typing - -from socksio import socks5 - -from .._backends.auto import AutoBackend -from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream -from .._exceptions import ConnectionNotAvailable, ProxyError -from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url -from .._ssl import default_ssl_context -from .._synchronization import AsyncLock -from .._trace import Trace -from .connection_pool import AsyncConnectionPool -from .http11 import AsyncHTTP11Connection -from .interfaces import AsyncConnectionInterface - -logger = logging.getLogger("httpcore.socks") - - -AUTH_METHODS = { - b"\x00": "NO AUTHENTICATION REQUIRED", - b"\x01": "GSSAPI", - b"\x02": "USERNAME/PASSWORD", - b"\xff": "NO ACCEPTABLE METHODS", -} - -REPLY_CODES = { - b"\x00": "Succeeded", - b"\x01": "General SOCKS server failure", - b"\x02": "Connection not allowed by ruleset", - b"\x03": "Network unreachable", - b"\x04": "Host unreachable", - b"\x05": "Connection refused", - b"\x06": "TTL expired", - b"\x07": "Command not supported", - b"\x08": "Address type not supported", -} - - -async def _init_socks5_connection( - stream: AsyncNetworkStream, - *, - host: bytes, - port: int, - auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, -) -> None: - conn = socks5.SOCKS5Connection() - - # Auth method request - auth_method = ( - socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED - if auth is None - else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD - ) - conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) - outgoing_bytes = conn.data_to_send() - await stream.write(outgoing_bytes) - - # Auth method response - incoming_bytes = await stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socks5.SOCKS5AuthReply) - if response.method != auth_method: - requested = AUTH_METHODS.get(auth_method, "UNKNOWN") - responded = AUTH_METHODS.get(response.method, "UNKNOWN") - raise ProxyError( - f"Requested {requested} from proxy server, but got {responded}." 
-        )
-
-    if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
-        # Username/password request
-        assert auth is not None
-        username, password = auth
-        conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password))
-        outgoing_bytes = conn.data_to_send()
-        await stream.write(outgoing_bytes)
-
-        # Username/password response
-        incoming_bytes = await stream.read(max_bytes=4096)
-        response = conn.receive_data(incoming_bytes)
-        assert isinstance(response, socks5.SOCKS5UsernamePasswordReply)
-        if not response.success:
-            raise ProxyError("Invalid username/password")
-
-    # Connect request
-    conn.send(
-        socks5.SOCKS5CommandRequest.from_address(
-            socks5.SOCKS5Command.CONNECT, (host, port)
-        )
-    )
-    outgoing_bytes = conn.data_to_send()
-    await stream.write(outgoing_bytes)
-
-    # Connect response
-    incoming_bytes = await stream.read(max_bytes=4096)
-    response = conn.receive_data(incoming_bytes)
-    assert isinstance(response, socks5.SOCKS5Reply)
-    if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED:
-        reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
-        raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
-
-
-class AsyncSOCKSProxy(AsyncConnectionPool):
-    """
-    A connection pool that sends requests via a SOCKS5 proxy.
-    """
-
-    def __init__(
-        self,
-        proxy_url: typing.Union[URL, bytes, str],
-        proxy_auth: typing.Optional[
-            typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]]
-        ] = None,
-        ssl_context: typing.Optional[ssl.SSLContext] = None,
-        max_connections: typing.Optional[int] = 10,
-        max_keepalive_connections: typing.Optional[int] = None,
-        keepalive_expiry: typing.Optional[float] = None,
-        http1: bool = True,
-        http2: bool = False,
-        retries: int = 0,
-        network_backend: typing.Optional[AsyncNetworkBackend] = None,
-    ) -> None:
-        """
-        A connection pool for making HTTP requests.
-
-        Parameters:
-            proxy_url: The URL to use when connecting to the proxy server.
-                For example `"socks5://127.0.0.1:1080/"`.
-            proxy_auth: Any proxy authentication as a two-tuple of
-                (username, password). May be either bytes or ascii-only str.
-            ssl_context: An SSL context to use for verifying connections.
-                If not specified, the default `httpcore.default_ssl_context()`
-                will be used.
-            max_connections: The maximum number of concurrent HTTP connections that
-                the pool should allow. Any attempt to send a request on a pool that
-                would exceed this amount will block until a connection is available.
-            max_keepalive_connections: The maximum number of idle HTTP connections
-                that will be maintained in the pool.
-            keepalive_expiry: The duration in seconds that an idle HTTP connection
-                may be maintained for before being expired from the pool.
-            http1: A boolean indicating if HTTP/1.1 requests should be supported
-                by the connection pool. Defaults to True.
-            http2: A boolean indicating if HTTP/2 requests should be supported by
-                the connection pool. Defaults to False.
-            retries: The maximum number of retries when trying to establish
-                a connection.
-            network_backend: A backend instance to use for handling network I/O.
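
        A minimal usage sketch (editorial addition, not upstream text; the URLs
        are placeholders):

            proxy = AsyncSOCKSProxy(proxy_url="socks5://127.0.0.1:1080/")
            response = await proxy.request("GET", "https://example.com/")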
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - ) - self._ssl_context = ssl_context - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if proxy_auth is not None: - username, password = proxy_auth - username_bytes = enforce_bytes(username, name="proxy_auth") - password_bytes = enforce_bytes(password, name="proxy_auth") - self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( - username_bytes, - password_bytes, - ) - else: - self._proxy_auth = None - - def create_connection(self, origin: Origin) -> AsyncConnectionInterface: - return AsyncSocks5Connection( - proxy_origin=self._proxy_url.origin, - remote_origin=origin, - proxy_auth=self._proxy_auth, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class AsyncSocks5Connection(AsyncConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, - ssl_context: typing.Optional[ssl.SSLContext] = None, - keepalive_expiry: typing.Optional[float] = None, - http1: bool = True, - http2: bool = False, - network_backend: typing.Optional[AsyncNetworkBackend] = None, - ) -> None: - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._proxy_auth = proxy_auth - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - - self._network_backend: AsyncNetworkBackend = ( - AutoBackend() if network_backend is None else network_backend - ) - self._connect_lock = AsyncLock() - self._connection: typing.Optional[AsyncConnectionInterface] = None - self._connect_failed = False - - async def handle_async_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - async with self._connect_lock: - if self._connection is None: - try: - # Connect to the proxy - kwargs = { - "host": self._proxy_origin.host.decode("ascii"), - "port": self._proxy_origin.port, - "timeout": timeout, - } - with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = await self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - - # Connect to the remote host using socks5 - kwargs = { - "stream": stream, - "host": self._remote_origin.host.decode("ascii"), - "port": self._remote_origin.port, - "auth": self._proxy_auth, - } - with Trace( - "setup_socks5_connection", logger, request, kwargs - ) as trace: - await _init_socks5_connection(**kwargs) - trace.return_value = stream - - # Upgrade the stream to SSL - if self._remote_origin.scheme == b"https": - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ( - ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ) - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": sni_hostname - or self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - async with Trace("start_tls", logger, request, kwargs) as trace: - stream = await stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if we should be 
using HTTP/1.1 or HTTP/2 - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - - # Create the HTTP/1.1 or HTTP/2 connection - if http2_negotiated or ( - self._http2 and not self._http1 - ): # pragma: nocover - from .http2 import AsyncHTTP2Connection - - self._connection = AsyncHTTP2Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = AsyncHTTP11Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - except Exception as exc: - self._connect_failed = True - raise exc - elif not self._connection.is_available(): # pragma: nocover - raise ConnectionNotAvailable() - - return await self._connection.handle_async_request(request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - async def aclose(self) -> None: - if self._connection is not None: - await self._connection.aclose() - - def is_available(self) -> bool: - if self._connection is None: # pragma: nocover - # If HTTP/2 support is enabled, and the resulting connection could - # end up as HTTP/2 then we should indicate the connection as being - # available to service multiple requests. - return ( - self._http2 - and (self._remote_origin.scheme == b"https" or not self._http1) - and not self._connect_failed - ) - return self._connection.is_available() - - def has_expired(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.has_expired() - - def is_idle(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.is_idle() - - def is_closed(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.is_closed() - - def info(self) -> str: - if self._connection is None: # pragma: nocover - return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" - return self._connection.info() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/server/venv/Lib/site-packages/httpcore/_backends/__init__.py b/server/venv/Lib/site-packages/httpcore/_backends/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/httpcore/_backends/anyio.py b/server/venv/Lib/site-packages/httpcore/_backends/anyio.py deleted file mode 100644 index 1ed5228..0000000 --- a/server/venv/Lib/site-packages/httpcore/_backends/anyio.py +++ /dev/null @@ -1,145 +0,0 @@ -import ssl -import typing - -import anyio - -from .._exceptions import ( - ConnectError, - ConnectTimeout, - ReadError, - ReadTimeout, - WriteError, - WriteTimeout, - map_exceptions, -) -from .._utils import is_socket_readable -from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream - - -class AnyIOStream(AsyncNetworkStream): - def __init__(self, stream: anyio.abc.ByteStream) -> None: - self._stream = stream - - async def read( - self, max_bytes: int, timeout: typing.Optional[float] = None - ) -> bytes: - exc_map = { - TimeoutError: ReadTimeout, - anyio.BrokenResourceError: ReadError, - anyio.ClosedResourceError: ReadError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - try: - return await self._stream.receive(max_bytes=max_bytes) - except anyio.EndOfStream: # pragma: nocover - return b"" - - async def write( - self, 
buffer: bytes, timeout: typing.Optional[float] = None - ) -> None: - if not buffer: - return - - exc_map = { - TimeoutError: WriteTimeout, - anyio.BrokenResourceError: WriteError, - anyio.ClosedResourceError: WriteError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - await self._stream.send(item=buffer) - - async def aclose(self) -> None: - await self._stream.aclose() - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> AsyncNetworkStream: - exc_map = { - TimeoutError: ConnectTimeout, - anyio.BrokenResourceError: ConnectError, - } - with map_exceptions(exc_map): - try: - with anyio.fail_after(timeout): - ssl_stream = await anyio.streams.tls.TLSStream.wrap( - self._stream, - ssl_context=ssl_context, - hostname=server_hostname, - standard_compatible=False, - server_side=False, - ) - except Exception as exc: # pragma: nocover - await self.aclose() - raise exc - return AnyIOStream(ssl_stream) - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object": - return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) - if info == "client_addr": - return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) - if info == "server_addr": - return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) - if info == "socket": - return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) - if info == "is_readable": - sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) - return is_socket_readable(sock) - return None - - -class AnyIOBackend(AsyncNetworkBackend): - async def connect_tcp( - self, - host: str, - port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: - if socket_options is None: - socket_options = [] # pragma: no cover - exc_map = { - TimeoutError: ConnectTimeout, - OSError: ConnectError, - anyio.BrokenResourceError: ConnectError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - stream: anyio.abc.ByteStream = await anyio.connect_tcp( - remote_host=host, - remote_port=port, - local_host=local_address, - ) - # By default TCP sockets opened in `asyncio` include TCP_NODELAY. 
- for option in socket_options: - stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return AnyIOStream(stream) - - async def connect_unix_socket( - self, - path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: # pragma: nocover - if socket_options is None: - socket_options = [] - exc_map = { - TimeoutError: ConnectTimeout, - OSError: ConnectError, - anyio.BrokenResourceError: ConnectError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - stream: anyio.abc.ByteStream = await anyio.connect_unix(path) - for option in socket_options: - stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return AnyIOStream(stream) - - async def sleep(self, seconds: float) -> None: - await anyio.sleep(seconds) # pragma: nocover diff --git a/server/venv/Lib/site-packages/httpcore/_backends/auto.py b/server/venv/Lib/site-packages/httpcore/_backends/auto.py deleted file mode 100644 index b612ba0..0000000 --- a/server/venv/Lib/site-packages/httpcore/_backends/auto.py +++ /dev/null @@ -1,52 +0,0 @@ -import typing -from typing import Optional - -import sniffio - -from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream - - -class AutoBackend(AsyncNetworkBackend): - async def _init_backend(self) -> None: - if not (hasattr(self, "_backend")): - backend = sniffio.current_async_library() - if backend == "trio": - from .trio import TrioBackend - - self._backend: AsyncNetworkBackend = TrioBackend() - else: - from .anyio import AnyIOBackend - - self._backend = AnyIOBackend() - - async def connect_tcp( - self, - host: str, - port: int, - timeout: Optional[float] = None, - local_address: Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: - await self._init_backend() - return await self._backend.connect_tcp( - host, - port, - timeout=timeout, - local_address=local_address, - socket_options=socket_options, - ) - - async def connect_unix_socket( - self, - path: str, - timeout: Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: # pragma: nocover - await self._init_backend() - return await self._backend.connect_unix_socket( - path, timeout=timeout, socket_options=socket_options - ) - - async def sleep(self, seconds: float) -> None: # pragma: nocover - await self._init_backend() - return await self._backend.sleep(seconds) diff --git a/server/venv/Lib/site-packages/httpcore/_backends/base.py b/server/venv/Lib/site-packages/httpcore/_backends/base.py deleted file mode 100644 index 6cadedb..0000000 --- a/server/venv/Lib/site-packages/httpcore/_backends/base.py +++ /dev/null @@ -1,103 +0,0 @@ -import ssl -import time -import typing - -SOCKET_OPTION = typing.Union[ - typing.Tuple[int, int, int], - typing.Tuple[int, int, typing.Union[bytes, bytearray]], - typing.Tuple[int, int, None, int], -] - - -class NetworkStream: - def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: - raise NotImplementedError() # pragma: nocover - - def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: - raise NotImplementedError() # pragma: nocover - - def close(self) -> None: - raise NotImplementedError() # pragma: nocover - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: 
typing.Optional[float] = None, - ) -> "NetworkStream": - raise NotImplementedError() # pragma: nocover - - def get_extra_info(self, info: str) -> typing.Any: - return None # pragma: nocover - - -class NetworkBackend: - def connect_tcp( - self, - host: str, - port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> NetworkStream: - raise NotImplementedError() # pragma: nocover - - def connect_unix_socket( - self, - path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> NetworkStream: - raise NotImplementedError() # pragma: nocover - - def sleep(self, seconds: float) -> None: - time.sleep(seconds) # pragma: nocover - - -class AsyncNetworkStream: - async def read( - self, max_bytes: int, timeout: typing.Optional[float] = None - ) -> bytes: - raise NotImplementedError() # pragma: nocover - - async def write( - self, buffer: bytes, timeout: typing.Optional[float] = None - ) -> None: - raise NotImplementedError() # pragma: nocover - - async def aclose(self) -> None: - raise NotImplementedError() # pragma: nocover - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> "AsyncNetworkStream": - raise NotImplementedError() # pragma: nocover - - def get_extra_info(self, info: str) -> typing.Any: - return None # pragma: nocover - - -class AsyncNetworkBackend: - async def connect_tcp( - self, - host: str, - port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: - raise NotImplementedError() # pragma: nocover - - async def connect_unix_socket( - self, - path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: - raise NotImplementedError() # pragma: nocover - - async def sleep(self, seconds: float) -> None: - raise NotImplementedError() # pragma: nocover diff --git a/server/venv/Lib/site-packages/httpcore/_backends/mock.py b/server/venv/Lib/site-packages/httpcore/_backends/mock.py deleted file mode 100644 index f7aefeb..0000000 --- a/server/venv/Lib/site-packages/httpcore/_backends/mock.py +++ /dev/null @@ -1,142 +0,0 @@ -import ssl -import typing -from typing import Optional - -from .._exceptions import ReadError -from .base import ( - SOCKET_OPTION, - AsyncNetworkBackend, - AsyncNetworkStream, - NetworkBackend, - NetworkStream, -) - - -class MockSSLObject: - def __init__(self, http2: bool): - self._http2 = http2 - - def selected_alpn_protocol(self) -> str: - return "h2" if self._http2 else "http/1.1" - - -class MockStream(NetworkStream): - def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: - self._buffer = buffer - self._http2 = http2 - self._closed = False - - def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: - if self._closed: - raise ReadError("Connection closed") - if not self._buffer: - return b"" - return self._buffer.pop(0) - - def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: - pass - - def close(self) -> None: - self._closed = True - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: Optional[str] = None, - timeout: Optional[float] = None, - ) -> 
NetworkStream:
-        return self
-
-    def get_extra_info(self, info: str) -> typing.Any:
-        return MockSSLObject(http2=self._http2) if info == "ssl_object" else None
-
-    def __repr__(self) -> str:
-        return "<httpcore.MockStream>"
-
-
-class MockBackend(NetworkBackend):
-    def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None:
-        self._buffer = buffer
-        self._http2 = http2
-
-    def connect_tcp(
-        self,
-        host: str,
-        port: int,
-        timeout: Optional[float] = None,
-        local_address: Optional[str] = None,
-        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
-    ) -> NetworkStream:
-        return MockStream(list(self._buffer), http2=self._http2)
-
-    def connect_unix_socket(
-        self,
-        path: str,
-        timeout: Optional[float] = None,
-        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
-    ) -> NetworkStream:
-        return MockStream(list(self._buffer), http2=self._http2)
-
-    def sleep(self, seconds: float) -> None:
-        pass
-
-
-class AsyncMockStream(AsyncNetworkStream):
-    def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None:
-        self._buffer = buffer
-        self._http2 = http2
-        self._closed = False
-
-    async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes:
-        if self._closed:
-            raise ReadError("Connection closed")
-        if not self._buffer:
-            return b""
-        return self._buffer.pop(0)
-
-    async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None:
-        pass
-
-    async def aclose(self) -> None:
-        self._closed = True
-
-    async def start_tls(
-        self,
-        ssl_context: ssl.SSLContext,
-        server_hostname: Optional[str] = None,
-        timeout: Optional[float] = None,
-    ) -> AsyncNetworkStream:
-        return self
-
-    def get_extra_info(self, info: str) -> typing.Any:
-        return MockSSLObject(http2=self._http2) if info == "ssl_object" else None
-
-    def __repr__(self) -> str:
-        return "<httpcore.AsyncMockStream>"
-
-
-class AsyncMockBackend(AsyncNetworkBackend):
-    def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None:
-        self._buffer = buffer
-        self._http2 = http2
-
-    async def connect_tcp(
-        self,
-        host: str,
-        port: int,
-        timeout: Optional[float] = None,
-        local_address: Optional[str] = None,
-        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
-    ) -> AsyncNetworkStream:
-        return AsyncMockStream(list(self._buffer), http2=self._http2)
-
-    async def connect_unix_socket(
-        self,
-        path: str,
-        timeout: Optional[float] = None,
-        socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None,
-    ) -> AsyncNetworkStream:
-        return AsyncMockStream(list(self._buffer), http2=self._http2)
-
-    async def sleep(self, seconds: float) -> None:
-        pass
diff --git a/server/venv/Lib/site-packages/httpcore/_backends/sync.py b/server/venv/Lib/site-packages/httpcore/_backends/sync.py
deleted file mode 100644
index f2dbd32..0000000
--- a/server/venv/Lib/site-packages/httpcore/_backends/sync.py
+++ /dev/null
@@ -1,245 +0,0 @@
-import socket
-import ssl
-import sys
-import typing
-from functools import partial
-
-from .._exceptions import (
-    ConnectError,
-    ConnectTimeout,
-    ExceptionMapping,
-    ReadError,
-    ReadTimeout,
-    WriteError,
-    WriteTimeout,
-    map_exceptions,
-)
-from .._utils import is_socket_readable
-from .base import SOCKET_OPTION, NetworkBackend, NetworkStream
-
-
-class TLSinTLSStream(NetworkStream):  # pragma: no cover
-    """
-    Because the standard `SSLContext.wrap_socket` method does
-    not work for `SSLSocket` objects, we need this class
-    to implement a TLS stream using an underlying `SSLObject`
-    instance in order to support TLS on top of TLS.
- """ - - # Defined in RFC 8449 - TLS_RECORD_SIZE = 16384 - - def __init__( - self, - sock: socket.socket, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ): - self._sock = sock - self._incoming = ssl.MemoryBIO() - self._outgoing = ssl.MemoryBIO() - - self.ssl_obj = ssl_context.wrap_bio( - incoming=self._incoming, - outgoing=self._outgoing, - server_hostname=server_hostname, - ) - - self._sock.settimeout(timeout) - self._perform_io(self.ssl_obj.do_handshake) - - def _perform_io( - self, - func: typing.Callable[..., typing.Any], - ) -> typing.Any: - ret = None - - while True: - errno = None - try: - ret = func() - except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e: - errno = e.errno - - self._sock.sendall(self._outgoing.read()) - - if errno == ssl.SSL_ERROR_WANT_READ: - buf = self._sock.recv(self.TLS_RECORD_SIZE) - - if buf: - self._incoming.write(buf) - else: - self._incoming.write_eof() - if errno is None: - return ret - - def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: - exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} - with map_exceptions(exc_map): - self._sock.settimeout(timeout) - return typing.cast( - bytes, self._perform_io(partial(self.ssl_obj.read, max_bytes)) - ) - - def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: - exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} - with map_exceptions(exc_map): - self._sock.settimeout(timeout) - while buffer: - nsent = self._perform_io(partial(self.ssl_obj.write, buffer)) - buffer = buffer[nsent:] - - def close(self) -> None: - self._sock.close() - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> "NetworkStream": - raise NotImplementedError() - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object": - return self.ssl_obj - if info == "client_addr": - return self._sock.getsockname() - if info == "server_addr": - return self._sock.getpeername() - if info == "socket": - return self._sock - if info == "is_readable": - return is_socket_readable(self._sock) - return None - - -class SyncStream(NetworkStream): - def __init__(self, sock: socket.socket) -> None: - self._sock = sock - - def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: - exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} - with map_exceptions(exc_map): - self._sock.settimeout(timeout) - return self._sock.recv(max_bytes) - - def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: - if not buffer: - return - - exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} - with map_exceptions(exc_map): - while buffer: - self._sock.settimeout(timeout) - n = self._sock.send(buffer) - buffer = buffer[n:] - - def close(self) -> None: - self._sock.close() - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> NetworkStream: - if isinstance(self._sock, ssl.SSLSocket): # pragma: no cover - raise RuntimeError( - "Attempted to add a TLS layer on top of the existing " - "TLS stream, which is not supported by httpcore package" - ) - - exc_map: ExceptionMapping = { - socket.timeout: ConnectTimeout, - OSError: ConnectError, - } - with map_exceptions(exc_map): - try: - if 
isinstance(self._sock, ssl.SSLSocket): # pragma: no cover - # If the underlying socket has already been upgraded - # to the TLS layer (i.e. is an instance of SSLSocket), - # we need some additional smarts to support TLS-in-TLS. - return TLSinTLSStream( - self._sock, ssl_context, server_hostname, timeout - ) - else: - self._sock.settimeout(timeout) - sock = ssl_context.wrap_socket( - self._sock, server_hostname=server_hostname - ) - except Exception as exc: # pragma: nocover - self.close() - raise exc - return SyncStream(sock) - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): - return self._sock._sslobj # type: ignore - if info == "client_addr": - return self._sock.getsockname() - if info == "server_addr": - return self._sock.getpeername() - if info == "socket": - return self._sock - if info == "is_readable": - return is_socket_readable(self._sock) - return None - - -class SyncBackend(NetworkBackend): - def connect_tcp( - self, - host: str, - port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> NetworkStream: - # Note that we automatically include `TCP_NODELAY` - # in addition to any other custom socket options. - if socket_options is None: - socket_options = [] # pragma: no cover - address = (host, port) - source_address = None if local_address is None else (local_address, 0) - exc_map: ExceptionMapping = { - socket.timeout: ConnectTimeout, - OSError: ConnectError, - } - - with map_exceptions(exc_map): - sock = socket.create_connection( - address, - timeout, - source_address=source_address, - ) - for option in socket_options: - sock.setsockopt(*option) # pragma: no cover - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - return SyncStream(sock) - - def connect_unix_socket( - self, - path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> NetworkStream: # pragma: nocover - if sys.platform == "win32": - raise RuntimeError( - "Attempted to connect to a UNIX socket on a Windows system." 
- ) - if socket_options is None: - socket_options = [] - - exc_map: ExceptionMapping = { - socket.timeout: ConnectTimeout, - OSError: ConnectError, - } - with map_exceptions(exc_map): - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - for option in socket_options: - sock.setsockopt(*option) - sock.settimeout(timeout) - sock.connect(path) - return SyncStream(sock) diff --git a/server/venv/Lib/site-packages/httpcore/_backends/trio.py b/server/venv/Lib/site-packages/httpcore/_backends/trio.py deleted file mode 100644 index b1626d2..0000000 --- a/server/venv/Lib/site-packages/httpcore/_backends/trio.py +++ /dev/null @@ -1,161 +0,0 @@ -import ssl -import typing - -import trio - -from .._exceptions import ( - ConnectError, - ConnectTimeout, - ExceptionMapping, - ReadError, - ReadTimeout, - WriteError, - WriteTimeout, - map_exceptions, -) -from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream - - -class TrioStream(AsyncNetworkStream): - def __init__(self, stream: trio.abc.Stream) -> None: - self._stream = stream - - async def read( - self, max_bytes: int, timeout: typing.Optional[float] = None - ) -> bytes: - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ReadTimeout, - trio.BrokenResourceError: ReadError, - trio.ClosedResourceError: ReadError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - data: bytes = await self._stream.receive_some(max_bytes=max_bytes) - return data - - async def write( - self, buffer: bytes, timeout: typing.Optional[float] = None - ) -> None: - if not buffer: - return - - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: WriteTimeout, - trio.BrokenResourceError: WriteError, - trio.ClosedResourceError: WriteError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - await self._stream.send_all(data=buffer) - - async def aclose(self) -> None: - await self._stream.aclose() - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> AsyncNetworkStream: - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ConnectTimeout, - trio.BrokenResourceError: ConnectError, - } - ssl_stream = trio.SSLStream( - self._stream, - ssl_context=ssl_context, - server_hostname=server_hostname, - https_compatible=True, - server_side=False, - ) - with map_exceptions(exc_map): - try: - with trio.fail_after(timeout_or_inf): - await ssl_stream.do_handshake() - except Exception as exc: # pragma: nocover - await self.aclose() - raise exc - return TrioStream(ssl_stream) - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): - # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. 
- # Tracked at https://github.com/python-trio/trio/issues/542 - return self._stream._ssl_object # type: ignore[attr-defined] - if info == "client_addr": - return self._get_socket_stream().socket.getsockname() - if info == "server_addr": - return self._get_socket_stream().socket.getpeername() - if info == "socket": - stream = self._stream - while isinstance(stream, trio.SSLStream): - stream = stream.transport_stream - assert isinstance(stream, trio.SocketStream) - return stream.socket - if info == "is_readable": - socket = self.get_extra_info("socket") - return socket.is_readable() - return None - - def _get_socket_stream(self) -> trio.SocketStream: - stream = self._stream - while isinstance(stream, trio.SSLStream): - stream = stream.transport_stream - assert isinstance(stream, trio.SocketStream) - return stream - - -class TrioBackend(AsyncNetworkBackend): - async def connect_tcp( - self, - host: str, - port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: - # By default for TCP sockets, trio enables TCP_NODELAY. - # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream - if socket_options is None: - socket_options = [] # pragma: no cover - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ConnectTimeout, - trio.BrokenResourceError: ConnectError, - OSError: ConnectError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - stream: trio.abc.Stream = await trio.open_tcp_stream( - host=host, port=port, local_address=local_address - ) - for option in socket_options: - stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return TrioStream(stream) - - async def connect_unix_socket( - self, - path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: # pragma: nocover - if socket_options is None: - socket_options = [] - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ConnectTimeout, - trio.BrokenResourceError: ConnectError, - OSError: ConnectError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - stream: trio.abc.Stream = await trio.open_unix_socket(path) - for option in socket_options: - stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return TrioStream(stream) - - async def sleep(self, seconds: float) -> None: - await trio.sleep(seconds) # pragma: nocover diff --git a/server/venv/Lib/site-packages/httpcore/_exceptions.py b/server/venv/Lib/site-packages/httpcore/_exceptions.py deleted file mode 100644 index 81e7fc6..0000000 --- a/server/venv/Lib/site-packages/httpcore/_exceptions.py +++ /dev/null @@ -1,81 +0,0 @@ -import contextlib -from typing import Iterator, Mapping, Type - -ExceptionMapping = Mapping[Type[Exception], Type[Exception]] - - -@contextlib.contextmanager -def map_exceptions(map: ExceptionMapping) -> Iterator[None]: - try: - yield - except Exception as exc: # noqa: PIE786 - for from_exc, to_exc in map.items(): - if isinstance(exc, from_exc): - raise to_exc(exc) from exc - raise # pragma: nocover - - -class ConnectionNotAvailable(Exception): - pass - - -class ProxyError(Exception): - pass - - -class UnsupportedProtocol(Exception): - pass - - -class ProtocolError(Exception): - pass - - -class 
RemoteProtocolError(ProtocolError): - pass - - class LocalProtocolError(ProtocolError): - pass - - # Timeout errors - - class TimeoutException(Exception): - pass - - class PoolTimeout(TimeoutException): - pass - - class ConnectTimeout(TimeoutException): - pass - - class ReadTimeout(TimeoutException): - pass - - class WriteTimeout(TimeoutException): - pass - - # Network errors - - class NetworkError(Exception): - pass - - class ConnectError(NetworkError): - pass - - class ReadError(NetworkError): - pass - - class WriteError(NetworkError): - pass diff --git a/server/venv/Lib/site-packages/httpcore/_models.py b/server/venv/Lib/site-packages/httpcore/_models.py deleted file mode 100644 index 11bfcd8..0000000 --- a/server/venv/Lib/site-packages/httpcore/_models.py +++ /dev/null @@ -1,484 +0,0 @@ -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - Iterable, - Iterator, - List, - Mapping, - MutableMapping, - Optional, - Sequence, - Tuple, - Union, -) -from urllib.parse import urlparse - -# Functions for typechecking... - - -HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] -HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] -HeaderTypes = Union[HeadersAsSequence, HeadersAsMapping, None] - -Extensions = MutableMapping[str, Any] - - -def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes: - """ - Any arguments that are ultimately represented as bytes can be specified - either as bytes or as strings. - - However, we enforce that any string arguments must only contain characters in - the plain ASCII range: chr(0)...chr(127). If you need to use characters - outside that range then be precise, and use a byte-wise argument. - """ - if isinstance(value, str): - try: - return value.encode("ascii") - except UnicodeEncodeError: - raise TypeError(f"{name} strings may not include unicode characters.") - elif isinstance(value, bytes): - return value - - seen_type = type(value).__name__ - raise TypeError(f"{name} must be bytes or str, but got {seen_type}.") - - -def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL": - """ - Type check for URL parameters. - """ - if isinstance(value, (bytes, str)): - return URL(value) - elif isinstance(value, URL): - return value - - seen_type = type(value).__name__ - raise TypeError(f"{name} must be a URL, bytes, or str, but got {seen_type}.") - - -def enforce_headers( - value: Union[HeadersAsMapping, HeadersAsSequence, None] = None, *, name: str -) -> List[Tuple[bytes, bytes]]: - """ - Convenience function that ensures all items in request or response headers - are either bytes or strings in the plain ASCII range. - """ - if value is None: - return [] - elif isinstance(value, Mapping): - return [ - ( - enforce_bytes(k, name="header name"), - enforce_bytes(v, name="header value"), - ) - for k, v in value.items() - ] - elif isinstance(value, Sequence): - return [ - ( - enforce_bytes(k, name="header name"), - enforce_bytes(v, name="header value"), - ) - for k, v in value - ] - - seen_type = type(value).__name__ - raise TypeError( - f"{name} must be a mapping or sequence of two-tuples, but got {seen_type}."
- ) - - def enforce_stream( - value: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None], *, name: str - ) -> Union[Iterable[bytes], AsyncIterable[bytes]]: - if value is None: - return ByteStream(b"") - elif isinstance(value, bytes): - return ByteStream(value) - return value - - # * https://tools.ietf.org/html/rfc3986#section-3.2.3 - # * https://url.spec.whatwg.org/#url-miscellaneous - # * https://url.spec.whatwg.org/#scheme-state - DEFAULT_PORTS = { - b"ftp": 21, - b"http": 80, - b"https": 443, - b"ws": 80, - b"wss": 443, - } - - def include_request_headers( - headers: List[Tuple[bytes, bytes]], - *, - url: "URL", - content: Union[None, bytes, Iterable[bytes], AsyncIterable[bytes]], - ) -> List[Tuple[bytes, bytes]]: - headers_set = set(k.lower() for k, v in headers) - - if b"host" not in headers_set: - default_port = DEFAULT_PORTS.get(url.scheme) - if url.port is None or url.port == default_port: - header_value = url.host - else: - header_value = b"%b:%d" % (url.host, url.port) - headers = [(b"Host", header_value)] + headers - - if ( - content is not None - and b"content-length" not in headers_set - and b"transfer-encoding" not in headers_set - ): - if isinstance(content, bytes): - content_length = str(len(content)).encode("ascii") - headers += [(b"Content-Length", content_length)] - else: - headers += [(b"Transfer-Encoding", b"chunked")] # pragma: nocover - - return headers - - # Interfaces for byte streams... - - class ByteStream: - """ - A container for non-streaming content that supports both sync and async - stream iteration. - """ - - def __init__(self, content: bytes) -> None: - self._content = content - - def __iter__(self) -> Iterator[bytes]: - yield self._content - - async def __aiter__(self) -> AsyncIterator[bytes]: - yield self._content - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{len(self._content)} bytes]>" - - class Origin: - def __init__(self, scheme: bytes, host: bytes, port: int) -> None: - self.scheme = scheme - self.host = host - self.port = port - - def __eq__(self, other: Any) -> bool: - return ( - isinstance(other, Origin) - and self.scheme == other.scheme - and self.host == other.host - and self.port == other.port - ) - - def __str__(self) -> str: - scheme = self.scheme.decode("ascii") - host = self.host.decode("ascii") - port = str(self.port) - return f"{scheme}://{host}:{port}" - - class URL: - """ - Represents the URL against which an HTTP request may be made. - - The URL may either be specified as a plain string, for convenience: - - ```python - url = httpcore.URL("https://www.example.com/") - ``` - - Or be constructed with explicitly pre-parsed components: - - ```python - url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/') - ``` - - Using this second more explicit style allows integrations that are using - `httpcore` to pass through URLs that have already been parsed in order to use - libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures - that URL parsing is treated identically at both the networking level and at any - higher layers of abstraction. - - The four components are important here, as they allow the URL to be precisely - specified in a pre-parsed format. They also allow certain types of request to - be created that could not otherwise be expressed. - - For example, an HTTP request to `http://www.example.com/` forwarded via a proxy - at `http://localhost:8080`...
- - ```python - # Constructs an HTTP request with a complete URL as the target: - # GET http://www.example.com/ HTTP/1.1 - url = httpcore.URL( - scheme=b'http', - host=b'localhost', - port=8080, - target=b'http://www.example.com/' - ) - request = httpcore.Request( - method="GET", - url=url - ) - ``` - - Another example is constructing an `OPTIONS *` request... - - ```python - # Constructs an 'OPTIONS *' HTTP request: - # OPTIONS * HTTP/1.1 - url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*') - request = httpcore.Request(method="OPTIONS", url=url) - ``` - - This kind of request is not possible to formulate with a URL string, - because the `/` delimiter is always used to demark the target from the - host/port portion of the URL. - - For convenience, string-like arguments may be specified either as strings or - as bytes. However, once a request is being issued over the wire, the URL - components are always ultimately required to be a bytewise representation. - - In order to avoid any ambiguity over character encodings, when strings are used - as arguments, they must be strictly limited to the ASCII range `chr(0)`-`chr(127)`. - If you require a bytewise representation that is outside this range you must - handle the character encoding directly, and pass a bytes instance. - """ - - def __init__( - self, - url: Union[bytes, str] = "", - *, - scheme: Union[bytes, str] = b"", - host: Union[bytes, str] = b"", - port: Optional[int] = None, - target: Union[bytes, str] = b"", - ) -> None: - """ - Parameters: - url: The complete URL as a string or bytes. - scheme: The URL scheme as a string or bytes. - Typically either `"http"` or `"https"`. - host: The URL host as a string or bytes. Such as `"www.example.com"`. - port: The port to connect to. Either an integer or `None`. - target: The target of the HTTP request. Such as `"/items?search=red"`. - """ - if url: - parsed = urlparse(enforce_bytes(url, name="url")) - self.scheme = parsed.scheme - self.host = parsed.hostname or b"" - self.port = parsed.port - self.target = (parsed.path or b"/") + ( - b"?" + parsed.query if parsed.query else b"" - ) - else: - self.scheme = enforce_bytes(scheme, name="scheme") - self.host = enforce_bytes(host, name="host") - self.port = port - self.target = enforce_bytes(target, name="target") - - @property - def origin(self) -> Origin: - default_port = { - b"http": 80, - b"https": 443, - b"ws": 80, - b"wss": 443, - b"socks5": 1080, - }[self.scheme] - return Origin( - scheme=self.scheme, host=self.host, port=self.port or default_port - ) - - def __eq__(self, other: Any) -> bool: - return ( - isinstance(other, URL) - and other.scheme == self.scheme - and other.host == self.host - and other.port == self.port - and other.target == self.target - ) - - def __bytes__(self) -> bytes: - if self.port is None: - return b"%b://%b%b" % (self.scheme, self.host, self.target) - return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target) - - def __repr__(self) -> str: - return ( - f"{self.__class__.__name__}(scheme={self.scheme!r}, " - f"host={self.host!r}, port={self.port!r}, target={self.target!r})" - ) - - class Request: - """ - An HTTP request. - """ - - def __init__( - self, - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> None: - """ - Parameters: - method: The HTTP request method, either as a string or bytes.
- For example: `GET`. - url: The request URL, either as a `URL` instance, or as a string or bytes. - For example: `"https://www.example.com"`. - headers: The HTTP request headers. - content: The content of the request body. - extensions: A dictionary of optional extra information included on - the request. Possible keys include `"timeout"`, and `"trace"`. - """ - self.method: bytes = enforce_bytes(method, name="method") - self.url: URL = enforce_url(url, name="url") - self.headers: List[Tuple[bytes, bytes]] = enforce_headers( - headers, name="headers" - ) - self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( - content, name="content" - ) - self.extensions = {} if extensions is None else extensions - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.method!r}]>" - - class Response: - """ - An HTTP response. - """ - - def __init__( - self, - status: int, - *, - headers: HeaderTypes = None, - content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> None: - """ - Parameters: - status: The HTTP status code of the response. For example `200`. - headers: The HTTP response headers. - content: The content of the response body. - extensions: A dictionary of optional extra information included on - the response. Possible keys include `"http_version"`, - `"reason_phrase"`, and `"network_stream"`. - """ - self.status: int = status - self.headers: List[Tuple[bytes, bytes]] = enforce_headers( - headers, name="headers" - ) - self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( - content, name="content" - ) - self.extensions = {} if extensions is None else extensions - - self._stream_consumed = False - - @property - def content(self) -> bytes: - if not hasattr(self, "_content"): - if isinstance(self.stream, Iterable): - raise RuntimeError( - "Attempted to access 'response.content' on a streaming response. " - "Call 'response.read()' first." - ) - else: - raise RuntimeError( - "Attempted to access 'response.content' on a streaming response. " - "Call 'await response.aread()' first." - ) - return self._content - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.status}]>" - - # Sync interface... - - def read(self) -> bytes: - if not isinstance(self.stream, Iterable): # pragma: nocover - raise RuntimeError( - "Attempted to read an asynchronous response using 'response.read()'. " - "You should use 'await response.aread()' instead." - ) - if not hasattr(self, "_content"): - self._content = b"".join([part for part in self.iter_stream()]) - return self._content - - def iter_stream(self) -> Iterator[bytes]: - if not isinstance(self.stream, Iterable): # pragma: nocover - raise RuntimeError( - "Attempted to stream an asynchronous response using 'for ... in " - "response.iter_stream()'. " - "You should use 'async for ... in response.aiter_stream()' instead." - ) - if self._stream_consumed: - raise RuntimeError( - "Attempted to call 'for ... in response.iter_stream()' more than once." - ) - self._stream_consumed = True - for chunk in self.stream: - yield chunk - - def close(self) -> None: - if not isinstance(self.stream, Iterable): # pragma: nocover - raise RuntimeError( - "Attempted to close an asynchronous response using 'response.close()'. " - "You should use 'await response.aclose()' instead." - ) - if hasattr(self.stream, "close"): - self.stream.close() - - # Async interface...
- - async def aread(self) -> bytes: - if not isinstance(self.stream, AsyncIterable): # pragma: nocover - raise RuntimeError( - "Attempted to read a synchronous response using " - "'await response.aread()'. " - "You should use 'response.read()' instead." - ) - if not hasattr(self, "_content"): - self._content = b"".join([part async for part in self.aiter_stream()]) - return self._content - - async def aiter_stream(self) -> AsyncIterator[bytes]: - if not isinstance(self.stream, AsyncIterable): # pragma: nocover - raise RuntimeError( - "Attempted to stream a synchronous response using 'async for ... in " - "response.aiter_stream()'. " - "You should use 'for ... in response.iter_stream()' instead." - ) - if self._stream_consumed: - raise RuntimeError( - "Attempted to call 'async for ... in response.aiter_stream()' " - "more than once." - ) - self._stream_consumed = True - async for chunk in self.stream: - yield chunk - - async def aclose(self) -> None: - if not isinstance(self.stream, AsyncIterable): # pragma: nocover - raise RuntimeError( - "Attempted to close a synchronous response using " - "'await response.aclose()'. " - "You should use 'response.close()' instead." - ) - if hasattr(self.stream, "aclose"): - await self.stream.aclose() diff --git a/server/venv/Lib/site-packages/httpcore/_ssl.py b/server/venv/Lib/site-packages/httpcore/_ssl.py deleted file mode 100644 index c99c5a6..0000000 --- a/server/venv/Lib/site-packages/httpcore/_ssl.py +++ /dev/null @@ -1,9 +0,0 @@ -import ssl - -import certifi - - -def default_ssl_context() -> ssl.SSLContext: - context = ssl.create_default_context() - context.load_verify_locations(certifi.where()) - return context diff --git a/server/venv/Lib/site-packages/httpcore/_sync/__init__.py b/server/venv/Lib/site-packages/httpcore/_sync/__init__.py deleted file mode 100644 index b476d76..0000000 --- a/server/venv/Lib/site-packages/httpcore/_sync/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -from .connection import HTTPConnection -from .connection_pool import ConnectionPool -from .http11 import HTTP11Connection -from .http_proxy import HTTPProxy -from .interfaces import ConnectionInterface - -try: - from .http2 import HTTP2Connection -except ImportError: # pragma: nocover - - class HTTP2Connection: # type: ignore - def __init__(self, *args, **kwargs) -> None: # type: ignore - raise RuntimeError( - "Attempted to use http2 support, but the `h2` package is not " - "installed. Use 'pip install httpcore[http2]'." - ) - - try: - from .socks_proxy import SOCKSProxy - except ImportError: # pragma: nocover - - class SOCKSProxy: # type: ignore - def __init__(self, *args, **kwargs) -> None: # type: ignore - raise RuntimeError( - "Attempted to use SOCKS support, but the `socksio` package is not " - "installed. Use 'pip install httpcore[socks]'."
- ) - - -__all__ = [ - "HTTPConnection", - "ConnectionPool", - "HTTPProxy", - "HTTP11Connection", - "HTTP2Connection", - "ConnectionInterface", - "SOCKSProxy", -] diff --git a/server/venv/Lib/site-packages/httpcore/_sync/connection.py b/server/venv/Lib/site-packages/httpcore/_sync/connection.py deleted file mode 100644 index 81e4172..0000000 --- a/server/venv/Lib/site-packages/httpcore/_sync/connection.py +++ /dev/null @@ -1,222 +0,0 @@ -import itertools -import logging -import ssl -from types import TracebackType -from typing import Iterable, Iterator, Optional, Type - -from .._backends.sync import SyncBackend -from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream -from .._exceptions import ConnectError, ConnectionNotAvailable, ConnectTimeout -from .._models import Origin, Request, Response -from .._ssl import default_ssl_context -from .._synchronization import Lock -from .._trace import Trace -from .http11 import HTTP11Connection -from .interfaces import ConnectionInterface - -RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. - - -logger = logging.getLogger("httpcore.connection") - - -def exponential_backoff(factor: float) -> Iterator[float]: - """ - Generate a geometric sequence that has a ratio of 2 and starts with 0. - - For example: - - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` - - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` - """ - yield 0 - for n in itertools.count(): - yield factor * 2**n - - -class HTTPConnection(ConnectionInterface): - def __init__( - self, - origin: Origin, - ssl_context: Optional[ssl.SSLContext] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - self._origin = origin - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._retries = retries - self._local_address = local_address - self._uds = uds - - self._network_backend: NetworkBackend = ( - SyncBackend() if network_backend is None else network_backend - ) - self._connection: Optional[ConnectionInterface] = None - self._connect_failed: bool = False - self._request_lock = Lock() - self._socket_options = socket_options - - def handle_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection to {self._origin}" - ) - - with self._request_lock: - if self._connection is None: - try: - stream = self._connect(request) - - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import HTTP2Connection - - self._connection = HTTP2Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = HTTP11Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - except Exception as exc: - self._connect_failed = True - raise exc - elif not self._connection.is_available(): - raise ConnectionNotAvailable() - - return self._connection.handle_request(request) - - def _connect(self, request: Request) -> NetworkStream: - timeouts = request.extensions.get("timeout", {}) - 
sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - retries_left = self._retries - delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) - - while True: - try: - if self._uds is None: - kwargs = { - "host": self._origin.host.decode("ascii"), - "port": self._origin.port, - "local_address": self._local_address, - "timeout": timeout, - "socket_options": self._socket_options, - } - with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - else: - kwargs = { - "path": self._uds, - "timeout": timeout, - "socket_options": self._socket_options, - } - with Trace( - "connect_unix_socket", logger, request, kwargs - ) as trace: - stream = self._network_backend.connect_unix_socket( - **kwargs - ) - trace.return_value = stream - - if self._origin.scheme == b"https": - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": sni_hostname - or self._origin.host.decode("ascii"), - "timeout": timeout, - } - with Trace("start_tls", logger, request, kwargs) as trace: - stream = stream.start_tls(**kwargs) - trace.return_value = stream - return stream - except (ConnectError, ConnectTimeout): - if retries_left <= 0: - raise - retries_left -= 1 - delay = next(delays) - with Trace("retry", logger, request, kwargs) as trace: - self._network_backend.sleep(delay) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def close(self) -> None: - if self._connection is not None: - with Trace("close", logger, None, {}): - self._connection.close() - - def is_available(self) -> bool: - if self._connection is None: - # If HTTP/2 support is enabled, and the resulting connection could - # end up as HTTP/2 then we should indicate the connection as being - # available to service multiple requests. - return ( - self._http2 - and (self._origin.scheme == b"https" or not self._http1) - and not self._connect_failed - ) - return self._connection.is_available() - - def has_expired(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.has_expired() - - def is_idle(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_idle() - - def is_closed(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_closed() - - def info(self) -> str: - if self._connection is None: - return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" - return self._connection.info() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
- - def __enter__(self) -> "HTTPConnection": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - self.close() diff --git a/server/venv/Lib/site-packages/httpcore/_sync/connection_pool.py b/server/venv/Lib/site-packages/httpcore/_sync/connection_pool.py deleted file mode 100644 index dbcaff1..0000000 --- a/server/venv/Lib/site-packages/httpcore/_sync/connection_pool.py +++ /dev/null @@ -1,356 +0,0 @@ -import ssl -import sys -from types import TracebackType -from typing import Iterable, Iterator, List, Optional, Type - -from .._backends.sync import SyncBackend -from .._backends.base import SOCKET_OPTION, NetworkBackend -from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol -from .._models import Origin, Request, Response -from .._synchronization import Event, Lock, ShieldCancellation -from .connection import HTTPConnection -from .interfaces import ConnectionInterface, RequestInterface - - class RequestStatus: - def __init__(self, request: Request): - self.request = request - self.connection: Optional[ConnectionInterface] = None - self._connection_acquired = Event() - - def set_connection(self, connection: ConnectionInterface) -> None: - assert self.connection is None - self.connection = connection - self._connection_acquired.set() - - def unset_connection(self) -> None: - assert self.connection is not None - self.connection = None - self._connection_acquired = Event() - - def wait_for_connection( - self, timeout: Optional[float] = None - ) -> ConnectionInterface: - if self.connection is None: - self._connection_acquired.wait(timeout=timeout) - assert self.connection is not None - return self.connection - - class ConnectionPool(RequestInterface): - """ - A connection pool for making HTTP requests. - """ - - def __init__( - self, - ssl_context: Optional[ssl.SSLContext] = None, - max_connections: Optional[int] = 10, - max_keepalive_connections: Optional[int] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish a - connection. - local_address: Local address to connect from. Can also be used to connect - using a particular address family.
Using `local_address="0.0.0.0"` - will connect using an `AF_INET` address (IPv4), while using - `local_address="::"` will connect using an `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. - socket_options: Socket options to include on the TCP socket - when the connection is established. - """ - self._ssl_context = ssl_context - - self._max_connections = ( - sys.maxsize if max_connections is None else max_connections - ) - self._max_keepalive_connections = ( - sys.maxsize - if max_keepalive_connections is None - else max_keepalive_connections - ) - self._max_keepalive_connections = min( - self._max_connections, self._max_keepalive_connections - ) - - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._retries = retries - self._local_address = local_address - self._uds = uds - - self._pool: List[ConnectionInterface] = [] - self._requests: List[RequestStatus] = [] - self._pool_lock = Lock() - self._network_backend = ( - SyncBackend() if network_backend is None else network_backend - ) - self._socket_options = socket_options - - def create_connection(self, origin: Origin) -> ConnectionInterface: - return HTTPConnection( - origin=origin, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - retries=self._retries, - local_address=self._local_address, - uds=self._uds, - network_backend=self._network_backend, - socket_options=self._socket_options, - ) - - @property - def connections(self) -> List[ConnectionInterface]: - """ - Return a list of the connections currently in the pool. - - For example: - - ```python - >>> pool.connections - [ - <HTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>, - <HTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>, - <HTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>, - ] - ``` - """ - return list(self._pool) - - def _attempt_to_acquire_connection(self, status: RequestStatus) -> bool: - """ - Attempt to provide a connection that can handle the given origin. - """ - origin = status.request.url.origin - - # If there are queued requests in front of us, then don't acquire a - # connection. We handle requests strictly in order. - waiting = [s for s in self._requests if s.connection is None] - if waiting and waiting[0] is not status: - return False - - # Reuse an existing connection if one is currently available. - for idx, connection in enumerate(self._pool): - if connection.can_handle_request(origin) and connection.is_available(): - self._pool.pop(idx) - self._pool.insert(0, connection) - status.set_connection(connection) - return True - - # If the pool is currently full, attempt to close one idle connection. - if len(self._pool) >= self._max_connections: - for idx, connection in reversed(list(enumerate(self._pool))): - if connection.is_idle(): - connection.close() - self._pool.pop(idx) - break - - # If the pool is still full, then we cannot acquire a connection. - if len(self._pool) >= self._max_connections: - return False - - # Otherwise create a new connection. - connection = self.create_connection(origin) - self._pool.insert(0, connection) - status.set_connection(connection) - return True - - def _close_expired_connections(self) -> None: - """ - Clean up the connection pool by closing off any connections that have expired. - """ - # Close any connections that have expired their keep-alive time.
- for idx, connection in reversed(list(enumerate(self._pool))): - if connection.has_expired(): - connection.close() - self._pool.pop(idx) - - # If the pool size exceeds the maximum number of allowed keep-alive connections, - # then close off idle connections as required. - pool_size = len(self._pool) - for idx, connection in reversed(list(enumerate(self._pool))): - if connection.is_idle() and pool_size > self._max_keepalive_connections: - connection.close() - self._pool.pop(idx) - pool_size -= 1 - - def handle_request(self, request: Request) -> Response: - """ - Send an HTTP request, and return an HTTP response. - - This is the core implementation that is called into by `.request()` or `.stream()`. - """ - scheme = request.url.scheme.decode() - if scheme == "": - raise UnsupportedProtocol( - "Request URL is missing an 'http://' or 'https://' protocol." - ) - if scheme not in ("http", "https", "ws", "wss"): - raise UnsupportedProtocol( - f"Request URL has an unsupported protocol '{scheme}://'." - ) - - status = RequestStatus(request) - - with self._pool_lock: - self._requests.append(status) - self._close_expired_connections() - self._attempt_to_acquire_connection(status) - - while True: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("pool", None) - try: - connection = status.wait_for_connection(timeout=timeout) - except BaseException as exc: - # If we timeout here, or if the task is cancelled, then make - # sure to remove the request from the queue before bubbling - # up the exception. - with self._pool_lock: - # Ensure only remove when task exists. - if status in self._requests: - self._requests.remove(status) - raise exc - - try: - response = connection.handle_request(request) - except ConnectionNotAvailable: - # The ConnectionNotAvailable exception is a special case, that - # indicates we need to retry the request on a new connection. - # - # The most common case where this can occur is when multiple - # requests are queued waiting for a single connection, which - # might end up as an HTTP/2 connection, but which actually ends - # up as HTTP/1.1. - with self._pool_lock: - # Maintain our position in the request queue, but reset the - # status so that the request becomes queued again. - status.unset_connection() - self._attempt_to_acquire_connection(status) - except BaseException as exc: - with ShieldCancellation(): - self.response_closed(status) - raise exc - else: - break - - # When we return the response, we wrap the stream in a special class - # that handles notifying the connection pool once the response - # has been released. - assert isinstance(response.stream, Iterable) - return Response( - status=response.status, - headers=response.headers, - content=ConnectionPoolByteStream(response.stream, self, status), - extensions=response.extensions, - ) - - def response_closed(self, status: RequestStatus) -> None: - """ - This method acts as a callback once the request/response cycle is complete. - - It is called into from the `ConnectionPoolByteStream.close()` method. - """ - assert status.connection is not None - connection = status.connection - - with self._pool_lock: - # Update the state of the connection pool. - if status in self._requests: - self._requests.remove(status) - - if connection.is_closed() and connection in self._pool: - self._pool.remove(connection) - - # Since we've had a response closed, it's possible we'll now be able - # to service one or more requests that are currently pending. 
- for status in self._requests: - if status.connection is None: - acquired = self._attempt_to_acquire_connection(status) - # If we could not acquire a connection for a queued request - # then we don't need to check any more requests that are - # queued later behind it. - if not acquired: - break - - # Housekeeping. - self._close_expired_connections() - - def close(self) -> None: - """ - Close any connections in the pool. - """ - with self._pool_lock: - for connection in self._pool: - connection.close() - self._pool = [] - self._requests = [] - - def __enter__(self) -> "ConnectionPool": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - self.close() - - class ConnectionPoolByteStream: - """ - A wrapper around the response byte stream, that additionally handles - notifying the connection pool when the response has been closed. - """ - - def __init__( - self, - stream: Iterable[bytes], - pool: ConnectionPool, - status: RequestStatus, - ) -> None: - self._stream = stream - self._pool = pool - self._status = status - - def __iter__(self) -> Iterator[bytes]: - for part in self._stream: - yield part - - def close(self) -> None: - try: - if hasattr(self._stream, "close"): - self._stream.close() - finally: - with ShieldCancellation(): - self._pool.response_closed(self._status) diff --git a/server/venv/Lib/site-packages/httpcore/_sync/http11.py b/server/venv/Lib/site-packages/httpcore/_sync/http11.py deleted file mode 100644 index 0cc100e..0000000 --- a/server/venv/Lib/site-packages/httpcore/_sync/http11.py +++ /dev/null @@ -1,343 +0,0 @@ -import enum -import logging -import time -from types import TracebackType -from typing import ( - Iterable, - Iterator, - List, - Optional, - Tuple, - Type, - Union, - cast, -) - -import h11 - -from .._backends.base import NetworkStream -from .._exceptions import ( - ConnectionNotAvailable, - LocalProtocolError, - RemoteProtocolError, - WriteError, - map_exceptions, -) -from .._models import Origin, Request, Response -from .._synchronization import Lock, ShieldCancellation -from .._trace import Trace -from .interfaces import ConnectionInterface - -logger = logging.getLogger("httpcore.http11") - - -# A subset of `h11.Event` types supported by `_send_event` -H11SendEvent = Union[ - h11.Request, - h11.Data, - h11.EndOfMessage, -] - - -class HTTPConnectionState(enum.IntEnum): - NEW = 0 - ACTIVE = 1 - IDLE = 2 - CLOSED = 3 - - -class HTTP11Connection(ConnectionInterface): - READ_NUM_BYTES = 64 * 1024 - MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 - - def __init__( - self, - origin: Origin, - stream: NetworkStream, - keepalive_expiry: Optional[float] = None, - ) -> None: - self._origin = origin - self._network_stream = stream - self._keepalive_expiry: Optional[float] = keepalive_expiry - self._expire_at: Optional[float] = None - self._state = HTTPConnectionState.NEW - self._state_lock = Lock() - self._request_count = 0 - self._h11_state = h11.Connection( - our_role=h11.CLIENT, - max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, - ) - - def handle_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection " - f"to {self._origin}" - ) - - with self._state_lock: - if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): - self._request_count += 1 - self._state = HTTPConnectionState.ACTIVE
- self._expire_at = None - else: - raise ConnectionNotAvailable() - - try: - kwargs = {"request": request} - try: - with Trace( - "send_request_headers", logger, request, kwargs - ) as trace: - self._send_request_headers(**kwargs) - with Trace("send_request_body", logger, request, kwargs) as trace: - self._send_request_body(**kwargs) - except WriteError: - # If we get a write error while we're writing the request, - # then we suppress this error and move on to attempting to - # read the response. Servers can sometimes close the request - # pre-emptively and then respond with a well-formed HTTP - # error response. - pass - - with Trace( - "receive_response_headers", logger, request, kwargs - ) as trace: - ( - http_version, - status, - reason_phrase, - headers, - ) = self._receive_response_headers(**kwargs) - trace.return_value = ( - http_version, - status, - reason_phrase, - headers, - ) - - return Response( - status=status, - headers=headers, - content=HTTP11ConnectionByteStream(self, request), - extensions={ - "http_version": http_version, - "reason_phrase": reason_phrase, - "network_stream": self._network_stream, - }, - ) - except BaseException as exc: - with ShieldCancellation(): - with Trace("response_closed", logger, request) as trace: - self._response_closed() - raise exc - - # Sending the request... - - def _send_request_headers(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): - event = h11.Request( - method=request.method, - target=request.url.target, - headers=request.headers, - ) - self._send_event(event, timeout=timeout) - - def _send_request_body(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - assert isinstance(request.stream, Iterable) - for chunk in request.stream: - event = h11.Data(data=chunk) - self._send_event(event, timeout=timeout) - - self._send_event(h11.EndOfMessage(), timeout=timeout) - - def _send_event( - self, event: h11.Event, timeout: Optional[float] = None - ) -> None: - bytes_to_send = self._h11_state.send(event) - if bytes_to_send is not None: - self._network_stream.write(bytes_to_send, timeout=timeout) - - # Receiving the response... - - def _receive_response_headers( - self, request: Request - ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]]]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - while True: - event = self._receive_event(timeout=timeout) - if isinstance(event, h11.Response): - break - if ( - isinstance(event, h11.InformationalResponse) - and event.status_code == 101 - ): - break - - http_version = b"HTTP/" + event.http_version - - # h11 version 0.11+ supports a `raw_items` interface to get the - # raw header casing, rather than the enforced lowercase headers.
- headers = event.headers.raw_items() - - return http_version, event.status_code, event.reason, headers - - def _receive_response_body(self, request: Request) -> Iterator[bytes]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - while True: - event = self._receive_event(timeout=timeout) - if isinstance(event, h11.Data): - yield bytes(event.data) - elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): - break - - def _receive_event( - self, timeout: Optional[float] = None - ) -> Union[h11.Event, Type[h11.PAUSED]]: - while True: - with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): - event = self._h11_state.next_event() - - if event is h11.NEED_DATA: - data = self._network_stream.read( - self.READ_NUM_BYTES, timeout=timeout - ) - - # If we feed this case through h11 we'll raise an exception like: - # - # httpcore.RemoteProtocolError: can't handle event type - # ConnectionClosed when role=SERVER and state=SEND_RESPONSE - # - # Which is accurate, but not very informative from an end-user - # perspective. Instead we handle this case distinctly and treat - # it as a ConnectError. - if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: - msg = "Server disconnected without sending a response." - raise RemoteProtocolError(msg) - - self._h11_state.receive_data(data) - else: - # mypy fails to narrow the type in the if statement above - return cast(Union[h11.Event, Type[h11.PAUSED]], event) - - def _response_closed(self) -> None: - with self._state_lock: - if ( - self._h11_state.our_state is h11.DONE - and self._h11_state.their_state is h11.DONE - ): - self._state = HTTPConnectionState.IDLE - self._h11_state.start_next_cycle() - if self._keepalive_expiry is not None: - now = time.monotonic() - self._expire_at = now + self._keepalive_expiry - else: - self.close() - - # Once the connection is no longer required... - - def close(self) -> None: - # Note that this method unilaterally closes the connection, and does - # not have any kind of locking in place around it. - self._state = HTTPConnectionState.CLOSED - self._network_stream.close() - - # The ConnectionInterface methods provide information about the state of - # the connection, allowing for a connection pooling implementation to - # determine when to reuse and when to close the connection... - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def is_available(self) -> bool: - # Note that HTTP/1.1 connections in the "NEW" state are not treated as - # being "available". The control flow which created the connection will - # be able to send an outgoing request, but the connection will not be - # acquired from the connection pool for any other request. - return self._state == HTTPConnectionState.IDLE - - def has_expired(self) -> bool: - now = time.monotonic() - keepalive_expired = self._expire_at is not None and now > self._expire_at - - # If the HTTP connection is idle but the socket is readable, then the - # only valid state is that the socket is about to return b"", indicating - # a server-initiated disconnect.
- server_disconnected = ( - self._state == HTTPConnectionState.IDLE - and self._network_stream.get_extra_info("is_readable") - ) - - return keepalive_expired or server_disconnected - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/1.1, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. - - def __enter__(self) -> "HTTP11Connection": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - self.close() - - -class HTTP11ConnectionByteStream: - def __init__(self, connection: HTTP11Connection, request: Request) -> None: - self._connection = connection - self._request = request - self._closed = False - - def __iter__(self) -> Iterator[bytes]: - kwargs = {"request": self._request} - try: - with Trace("receive_response_body", logger, self._request, kwargs): - for chunk in self._connection._receive_response_body(**kwargs): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. - with ShieldCancellation(): - self.close() - raise exc - - def close(self) -> None: - if not self._closed: - self._closed = True - with Trace("response_closed", logger, self._request): - self._connection._response_closed() diff --git a/server/venv/Lib/site-packages/httpcore/_sync/http2.py b/server/venv/Lib/site-packages/httpcore/_sync/http2.py deleted file mode 100644 index d141d45..0000000 --- a/server/venv/Lib/site-packages/httpcore/_sync/http2.py +++ /dev/null @@ -1,589 +0,0 @@ -import enum -import logging -import time -import types -import typing - -import h2.config -import h2.connection -import h2.events -import h2.exceptions -import h2.settings - -from .._backends.base import NetworkStream -from .._exceptions import ( - ConnectionNotAvailable, - LocalProtocolError, - RemoteProtocolError, -) -from .._models import Origin, Request, Response -from .._synchronization import Lock, Semaphore, ShieldCancellation -from .._trace import Trace -from .interfaces import ConnectionInterface - -logger = logging.getLogger("httpcore.http2") - - -def has_body_headers(request: Request) -> bool: - return any( - k.lower() == b"content-length" or k.lower() == b"transfer-encoding" - for k, v in request.headers - ) - - -class HTTPConnectionState(enum.IntEnum): - ACTIVE = 1 - IDLE = 2 - CLOSED = 3 - - -class HTTP2Connection(ConnectionInterface): - READ_NUM_BYTES = 64 * 1024 - CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) - - def __init__( - self, - origin: Origin, - stream: NetworkStream, - keepalive_expiry: typing.Optional[float] = None, - ): - self._origin = origin - self._network_stream = stream - self._keepalive_expiry: typing.Optional[float] = keepalive_expiry - self._h2_state = h2.connection.H2Connection(config=self.CONFIG) - self._state = HTTPConnectionState.IDLE - 
self._expire_at: typing.Optional[float] = None - self._request_count = 0 - self._init_lock = Lock() - self._state_lock = Lock() - self._read_lock = Lock() - self._write_lock = Lock() - self._sent_connection_init = False - self._used_all_stream_ids = False - self._connection_error = False - - # Mapping from stream ID to response stream events. - self._events: typing.Dict[ - int, - typing.List[ - typing.Union[ - h2.events.ResponseReceived, - h2.events.DataReceived, - h2.events.StreamEnded, - h2.events.StreamReset, - ] - ], - ] = {} - - # Connection terminated events are stored as state since - # we need to handle them for all streams. - self._connection_terminated: typing.Optional[ - h2.events.ConnectionTerminated - ] = None - - self._read_exception: typing.Optional[Exception] = None - self._write_exception: typing.Optional[Exception] = None - - def handle_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - # This cannot occur in normal operation, since the connection pool - # will only send requests on connections that handle them. - # It's in place simply for resilience as a guard against incorrect - # usage, for anyone working directly with httpcore connections. - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection " - f"to {self._origin}" - ) - - with self._state_lock: - if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): - self._request_count += 1 - self._expire_at = None - self._state = HTTPConnectionState.ACTIVE - else: - raise ConnectionNotAvailable() - - with self._init_lock: - if not self._sent_connection_init: - try: - kwargs = {"request": request} - with Trace("send_connection_init", logger, request, kwargs): - self._send_connection_init(**kwargs) - except BaseException as exc: - with ShieldCancellation(): - self.close() - raise exc - - self._sent_connection_init = True - - # Initially start with just 1 until the remote server provides - # its max_concurrent_streams value - self._max_streams = 1 - - local_settings_max_streams = ( - self._h2_state.local_settings.max_concurrent_streams - ) - self._max_streams_semaphore = Semaphore(local_settings_max_streams) - - for _ in range(local_settings_max_streams - self._max_streams): - self._max_streams_semaphore.acquire() - - self._max_streams_semaphore.acquire() - - try: - stream_id = self._h2_state.get_next_available_stream_id() - self._events[stream_id] = [] - except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover - self._used_all_stream_ids = True - self._request_count -= 1 - raise ConnectionNotAvailable() - - try: - kwargs = {"request": request, "stream_id": stream_id} - with Trace("send_request_headers", logger, request, kwargs): - self._send_request_headers(request=request, stream_id=stream_id) - with Trace("send_request_body", logger, request, kwargs): - self._send_request_body(request=request, stream_id=stream_id) - with Trace( - "receive_response_headers", logger, request, kwargs - ) as trace: - status, headers = self._receive_response( - request=request, stream_id=stream_id - ) - trace.return_value = (status, headers) - - return Response( - status=status, - headers=headers, - content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), - extensions={ - "http_version": b"HTTP/2", - "network_stream": self._network_stream, - "stream_id": stream_id, - }, - ) - except BaseException as exc: # noqa: PIE786 - with ShieldCancellation(): - kwargs = {"stream_id": stream_id} - with Trace("response_closed", logger, request, kwargs): -
self._response_closed(stream_id=stream_id) - - if isinstance(exc, h2.exceptions.ProtocolError): - # One case where h2 can raise a protocol error is when a - # closed frame has been seen by the state machine. - # - # This happens when one stream is reading, and encounters - # a GOAWAY event. Other flows of control may then raise - # a protocol error at any point they interact with the 'h2_state'. - # - # In this case we'll have stored the event, and should raise - # it as a RemoteProtocolError. - if self._connection_terminated: # pragma: nocover - raise RemoteProtocolError(self._connection_terminated) - # If h2 raises a protocol error in some other state then we - # must somehow have made a protocol violation. - raise LocalProtocolError(exc) # pragma: nocover - - raise exc - - def _send_connection_init(self, request: Request) -> None: - """ - The HTTP/2 connection requires some initial setup before we can start - using individual request/response streams on it. - """ - # Need to set these manually here instead of manipulating via - # __setitem__() otherwise the H2Connection will emit SettingsUpdate - # frames in addition to sending the undesired defaults. - self._h2_state.local_settings = h2.settings.Settings( - client=True, - initial_values={ - # Disable PUSH_PROMISE frames from the server since we don't do anything - # with them for now. Maybe when we support caching? - h2.settings.SettingCodes.ENABLE_PUSH: 0, - # These two are taken from h2 for safe defaults - h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, - h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, - }, - ) - - # Some websites (*cough* Yahoo *cough*) balk at this setting being - # present in the initial handshake since it's not defined in the original - # RFC despite the RFC mandating ignoring settings you don't know about. - del self._h2_state.local_settings[ - h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL - ] - - self._h2_state.initiate_connection() - self._h2_state.increment_flow_control_window(2**24) - self._write_outgoing_data(request) - - # Sending the request... - - def _send_request_headers(self, request: Request, stream_id: int) -> None: - """ - Send the request headers to a given stream ID. - """ - end_stream = not has_body_headers(request) - - # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. - # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require - # HTTP/1.1 style headers, and map them appropriately if we end up on - # an HTTP/2 connection. - authority = [v for k, v in request.headers if k.lower() == b"host"][0] - - headers = [ - (b":method", request.method), - (b":authority", authority), - (b":scheme", request.url.scheme), - (b":path", request.url.target), - ] + [ - (k.lower(), v) - for k, v in request.headers - if k.lower() - not in ( - b"host", - b"transfer-encoding", - ) - ] - - self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) - self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) - self._write_outgoing_data(request) - - def _send_request_body(self, request: Request, stream_id: int) -> None: - """ - Iterate over the request body sending it to a given stream ID. 
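As a side note on `_send_request_headers` above: the translation from HTTP/1.1-style headers to HTTP/2 pseudo-headers can be sketched in isolation. A minimal, hypothetical example (the request values are invented, not taken from the code above):

```python
# Sketch of the HTTP/1.1 -> HTTP/2 header mapping; values are placeholders.
h11_headers = [(b"host", b"example.org"), (b"accept", b"*/*")]
method, scheme, target = b"GET", b"https", b"/index.html"

# The ':authority' pseudo-header replaces the HTTP/1.1 'Host' header.
authority = [v for k, v in h11_headers if k == b"host"][0]
h2_headers = [
    (b":method", method),
    (b":authority", authority),
    (b":scheme", scheme),
    (b":path", target),
] + [(k, v) for k, v in h11_headers if k not in (b"host", b"transfer-encoding")]

print(h2_headers)
```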
- """ - if not has_body_headers(request): - return - - assert isinstance(request.stream, typing.Iterable) - for data in request.stream: - self._send_stream_data(request, stream_id, data) - self._send_end_stream(request, stream_id) - - def _send_stream_data( - self, request: Request, stream_id: int, data: bytes - ) -> None: - """ - Send a single chunk of data in one or more data frames. - """ - while data: - max_flow = self._wait_for_outgoing_flow(request, stream_id) - chunk_size = min(len(data), max_flow) - chunk, data = data[:chunk_size], data[chunk_size:] - self._h2_state.send_data(stream_id, chunk) - self._write_outgoing_data(request) - - def _send_end_stream(self, request: Request, stream_id: int) -> None: - """ - Send an empty data frame on on a given stream ID with the END_STREAM flag set. - """ - self._h2_state.end_stream(stream_id) - self._write_outgoing_data(request) - - # Receiving the response... - - def _receive_response( - self, request: Request, stream_id: int - ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: - """ - Return the response status code and headers for a given stream ID. - """ - while True: - event = self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.ResponseReceived): - break - - status_code = 200 - headers = [] - for k, v in event.headers: - if k == b":status": - status_code = int(v.decode("ascii", errors="ignore")) - elif not k.startswith(b":"): - headers.append((k, v)) - - return (status_code, headers) - - def _receive_response_body( - self, request: Request, stream_id: int - ) -> typing.Iterator[bytes]: - """ - Iterator that returns the bytes of the response body for a given stream ID. - """ - while True: - event = self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.DataReceived): - amount = event.flow_controlled_length - self._h2_state.acknowledge_received_data(amount, stream_id) - self._write_outgoing_data(request) - yield event.data - elif isinstance(event, h2.events.StreamEnded): - break - - def _receive_stream_event( - self, request: Request, stream_id: int - ) -> typing.Union[ - h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded - ]: - """ - Return the next available event for a given stream ID. - - Will read more data from the network if required. - """ - while not self._events.get(stream_id): - self._receive_events(request, stream_id) - event = self._events[stream_id].pop(0) - if isinstance(event, h2.events.StreamReset): - raise RemoteProtocolError(event) - return event - - def _receive_events( - self, request: Request, stream_id: typing.Optional[int] = None - ) -> None: - """ - Read some data from the network until we see one or more events - for a given stream ID. - """ - with self._read_lock: - if self._connection_terminated is not None: - last_stream_id = self._connection_terminated.last_stream_id - if stream_id and last_stream_id and stream_id > last_stream_id: - self._request_count -= 1 - raise ConnectionNotAvailable() - raise RemoteProtocolError(self._connection_terminated) - - # This conditional is a bit icky. We don't want to block reading if we've - # actually got an event to return for a given stream. We need to do that - # check *within* the atomic read lock. Though it also need to be optional, - # because when we call it from `_wait_for_outgoing_flow` we *do* want to - # block until we've available flow control, event when we have events - # pending for the stream ID we're attempting to send on. 
- if stream_id is None or not self._events.get(stream_id): - events = self._read_incoming_data(request) - for event in events: - if isinstance(event, h2.events.RemoteSettingsChanged): - with Trace( - "receive_remote_settings", logger, request - ) as trace: - self._receive_remote_settings_change(event) - trace.return_value = event - - elif isinstance( - event, - ( - h2.events.ResponseReceived, - h2.events.DataReceived, - h2.events.StreamEnded, - h2.events.StreamReset, - ), - ): - if event.stream_id in self._events: - self._events[event.stream_id].append(event) - - elif isinstance(event, h2.events.ConnectionTerminated): - self._connection_terminated = event - - self._write_outgoing_data(request) - - def _receive_remote_settings_change(self, event: h2.events.Event) -> None: - max_concurrent_streams = event.changed_settings.get( - h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS - ) - if max_concurrent_streams: - new_max_streams = min( - max_concurrent_streams.new_value, - self._h2_state.local_settings.max_concurrent_streams, - ) - if new_max_streams and new_max_streams != self._max_streams: - while new_max_streams > self._max_streams: - self._max_streams_semaphore.release() - self._max_streams += 1 - while new_max_streams < self._max_streams: - self._max_streams_semaphore.acquire() - self._max_streams -= 1 - - def _response_closed(self, stream_id: int) -> None: - self._max_streams_semaphore.release() - del self._events[stream_id] - with self._state_lock: - if self._connection_terminated and not self._events: - self.close() - - elif self._state == HTTPConnectionState.ACTIVE and not self._events: - self._state = HTTPConnectionState.IDLE - if self._keepalive_expiry is not None: - now = time.monotonic() - self._expire_at = now + self._keepalive_expiry - if self._used_all_stream_ids: # pragma: nocover - self.close() - - def close(self) -> None: - # Note that this method unilaterally closes the connection, and does - # not have any kind of locking in place around it. - self._h2_state.close_connection() - self._state = HTTPConnectionState.CLOSED - self._network_stream.close() - - # Wrappers around network read/write operations... - - def _read_incoming_data( - self, request: Request - ) -> typing.List[h2.events.Event]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - if self._read_exception is not None: - raise self._read_exception # pragma: nocover - - try: - data = self._network_stream.read(self.READ_NUM_BYTES, timeout) - if data == b"": - raise RemoteProtocolError("Server disconnected") - except Exception as exc: - # If we get a network error we should: - # - # 1. Save the exception and just raise it immediately on any future reads. - # (For example, this means that a single read timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. 
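The semaphore resizing in `_receive_remote_settings_change` above follows a small general pattern: release permits to grow the concurrency limit, acquire them to shrink it. A rough illustrative sketch using a plain `threading.Semaphore` (the names here are invented, not the class's own attributes):

```python
import threading

# Illustrative sketch of resizing a concurrency limit via a semaphore.
semaphore = threading.Semaphore(1)  # start by allowing a single stream
max_streams = 1

def resize(new_max: int) -> None:
    global max_streams
    while new_max > max_streams:
        semaphore.release()   # permit one more concurrent stream
        max_streams += 1
    while new_max < max_streams:
        semaphore.acquire()   # permit one fewer concurrent stream
        max_streams -= 1

resize(100)  # e.g. after the server advertises MAX_CONCURRENT_STREAMS=100
```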
- self._read_exception = exc - self._connection_error = True - raise exc - - events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) - - return events - - def _write_outgoing_data(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - with self._write_lock: - data_to_send = self._h2_state.data_to_send() - - if self._write_exception is not None: - raise self._write_exception # pragma: nocover - - try: - self._network_stream.write(data_to_send, timeout) - except Exception as exc: # pragma: nocover - # If we get a network error we should: - # - # 1. Save the exception and just raise it immediately on any future write. - # (For example, this means that a single write timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. - self._write_exception = exc - self._connection_error = True - raise exc - - # Flow control... - - def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: - """ - Returns the maximum allowable outgoing flow for a given stream. - - If the allowable flow is zero, then waits on the network until - WindowUpdated frames have increased the flow rate. - https://tools.ietf.org/html/rfc7540#section-6.9 - """ - local_flow: int = self._h2_state.local_flow_control_window(stream_id) - max_frame_size: int = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - while flow == 0: - self._receive_events(request) - local_flow = self._h2_state.local_flow_control_window(stream_id) - max_frame_size = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - return flow - - # Interface for connection pooling... - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def is_available(self) -> bool: - return ( - self._state != HTTPConnectionState.CLOSED - and not self._connection_error - and not self._used_all_stream_ids - and not ( - self._h2_state.state_machine.state - == h2.connection.ConnectionState.CLOSED - ) - ) - - def has_expired(self) -> bool: - now = time.monotonic() - return self._expire_at is not None and now > self._expire_at - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/2, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
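For reference, the standard flow drives this class through the public pool API rather than instantiating it directly. A minimal sketch, assuming httpcore's public `ConnectionPool` interface (the target URL is a placeholder):

```python
import httpcore

# Disable HTTP/1.1 to force negotiation of an HTTP/2 connection.
with httpcore.ConnectionPool(http1=False, http2=True) as pool:
    response = pool.request("GET", "https://example.org/")
    print(response.status, response.extensions.get("http_version"))
```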
- - def __enter__(self) -> "HTTP2Connection": - return self - - def __exit__( - self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[types.TracebackType] = None, - ) -> None: - self.close() - - -class HTTP2ConnectionByteStream: - def __init__( - self, connection: HTTP2Connection, request: Request, stream_id: int - ) -> None: - self._connection = connection - self._request = request - self._stream_id = stream_id - self._closed = False - - def __iter__(self) -> typing.Iterator[bytes]: - kwargs = {"request": self._request, "stream_id": self._stream_id} - try: - with Trace("receive_response_body", logger, self._request, kwargs): - for chunk in self._connection._receive_response_body( - request=self._request, stream_id=self._stream_id - ): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. - with ShieldCancellation(): - self.close() - raise exc - - def close(self) -> None: - if not self._closed: - self._closed = True - kwargs = {"stream_id": self._stream_id} - with Trace("response_closed", logger, self._request, kwargs): - self._connection._response_closed(stream_id=self._stream_id) diff --git a/server/venv/Lib/site-packages/httpcore/_sync/http_proxy.py b/server/venv/Lib/site-packages/httpcore/_sync/http_proxy.py deleted file mode 100644 index 6acac9a..0000000 --- a/server/venv/Lib/site-packages/httpcore/_sync/http_proxy.py +++ /dev/null @@ -1,368 +0,0 @@ -import logging -import ssl -from base64 import b64encode -from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union - -from .._backends.base import SOCKET_OPTION, NetworkBackend -from .._exceptions import ProxyError -from .._models import ( - URL, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, -) -from .._ssl import default_ssl_context -from .._synchronization import Lock -from .._trace import Trace -from .connection import HTTPConnection -from .connection_pool import ConnectionPool -from .http11 import HTTP11Connection -from .interfaces import ConnectionInterface - -HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] -HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] - - -logger = logging.getLogger("httpcore.proxy") - - -def merge_headers( - default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, - override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, -) -> List[Tuple[bytes, bytes]]: - """ - Append default_headers and override_headers, de-duplicating if a key exists - in both cases. - """ - default_headers = [] if default_headers is None else list(default_headers) - override_headers = [] if override_headers is None else list(override_headers) - has_override = set(key.lower() for key, value in override_headers) - default_headers = [ - (key, value) - for key, value in default_headers - if key.lower() not in has_override - ] - return default_headers + override_headers - - -def build_auth_header(username: bytes, password: bytes) -> bytes: - userpass = username + b":" + password - return b"Basic " + b64encode(userpass) - - -class HTTPProxy(ConnectionPool): - """ - A connection pool that sends requests via an HTTP proxy. 
- """ - - def __init__( - self, - proxy_url: Union[URL, bytes, str], - proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, - proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, - ssl_context: Optional[ssl.SSLContext] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - max_connections: Optional[int] = 10, - max_keepalive_connections: Optional[int] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - proxy_url: The URL to use when connecting to the proxy server. - For example `"http://127.0.0.1:8080/"`. - proxy_auth: Any proxy authentication as a two-tuple of - (username, password). May be either bytes or ascii-only str. - proxy_headers: Any HTTP headers to use for the proxy requests. - For example `{"Proxy-Authorization": "Basic :"}`. - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish - a connection. - local_address: Local address to connect from. Can also be used to - connect using a particular address family. Using - `local_address="0.0.0.0"` will connect using an `AF_INET` address - (IPv4), while using `local_address="::"` will connect using an - `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. 
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - local_address=local_address, - uds=uds, - socket_options=socket_options, - ) - - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if ( - self._proxy_url.scheme == b"http" and proxy_ssl_context is not None - ): # pragma: no cover - raise RuntimeError( - "The `proxy_ssl_context` argument is not allowed for the http scheme" - ) - - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - if proxy_auth is not None: - username = enforce_bytes(proxy_auth[0], name="proxy_auth") - password = enforce_bytes(proxy_auth[1], name="proxy_auth") - authorization = build_auth_header(username, password) - self._proxy_headers = [ - (b"Proxy-Authorization", authorization) - ] + self._proxy_headers - - def create_connection(self, origin: Origin) -> ConnectionInterface: - if origin.scheme == b"http": - return ForwardHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - keepalive_expiry=self._keepalive_expiry, - network_backend=self._network_backend, - proxy_ssl_context=self._proxy_ssl_context, - ) - return TunnelHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - ssl_context=self._ssl_context, - proxy_ssl_context=self._proxy_ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class ForwardHTTPConnection(ConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, - keepalive_expiry: Optional[float] = None, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - ) -> None: - self._connection = HTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._remote_origin = remote_origin - - def handle_request(self, request: Request) -> Response: - headers = merge_headers(self._proxy_headers, request.headers) - url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=bytes(request.url), - ) - proxy_request = Request( - method=request.method, - url=url, - headers=headers, - content=request.stream, - extensions=request.extensions, - ) - return self._connection.handle_request(proxy_request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - def close(self) -> None: - self._connection.close() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return self._connection.is_idle() - - def is_closed(self) -> bool: - return self._connection.is_closed() - - def __repr__(self) -> str: 
- return f"<{self.__class__.__name__} [{self.info()}]>" - - -class TunnelHTTPConnection(ConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - ssl_context: Optional[ssl.SSLContext] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - self._connection: ConnectionInterface = HTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._connect_lock = Lock() - self._connected = False - - def handle_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("connect", None) - - with self._connect_lock: - if not self._connected: - target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) - - connect_url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=target, - ) - connect_headers = merge_headers( - [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers - ) - connect_request = Request( - method=b"CONNECT", - url=connect_url, - headers=connect_headers, - extensions=request.extensions, - ) - connect_response = self._connection.handle_request( - connect_request - ) - - if connect_response.status < 200 or connect_response.status > 299: - reason_bytes = connect_response.extensions.get("reason_phrase", b"") - reason_str = reason_bytes.decode("ascii", errors="ignore") - msg = "%d %s" % (connect_response.status, reason_str) - self._connection.close() - raise ProxyError(msg) - - stream = connect_response.extensions["network_stream"] - - # Upgrade the stream to SSL - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - with Trace("start_tls", logger, request, kwargs) as trace: - stream = stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if we should be using HTTP/1.1 or HTTP/2 - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - - # Create the HTTP/1.1 or HTTP/2 connection - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import HTTP2Connection - - self._connection = HTTP2Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = HTTP11Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - - self._connected = True - return self._connection.handle_request(request) - - def can_handle_request(self, origin: Origin) -> bool: - 
return origin == self._remote_origin - - def close(self) -> None: - self._connection.close() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return self._connection.is_idle() - - def is_closed(self) -> bool: - return self._connection.is_closed() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/server/venv/Lib/site-packages/httpcore/_sync/interfaces.py b/server/venv/Lib/site-packages/httpcore/_sync/interfaces.py deleted file mode 100644 index 5e95be1..0000000 --- a/server/venv/Lib/site-packages/httpcore/_sync/interfaces.py +++ /dev/null @@ -1,135 +0,0 @@ -from contextlib import contextmanager -from typing import Iterator, Optional, Union - -from .._models import ( - URL, - Extensions, - HeaderTypes, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, - include_request_headers, -) - - -class RequestInterface: - def request( - self, - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, Iterator[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> Response: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. - headers = include_request_headers(headers, url=url, content=content) - - request = Request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - response = self.handle_request(request) - try: - response.read() - finally: - response.close() - return response - - @contextmanager - def stream( - self, - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, Iterator[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> Iterator[Response]: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. - headers = include_request_headers(headers, url=url, content=content) - - request = Request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - response = self.handle_request(request) - try: - yield response - finally: - response.close() - - def handle_request(self, request: Request) -> Response: - raise NotImplementedError() # pragma: nocover - - -class ConnectionInterface(RequestInterface): - def close(self) -> None: - raise NotImplementedError() # pragma: nocover - - def info(self) -> str: - raise NotImplementedError() # pragma: nocover - - def can_handle_request(self, origin: Origin) -> bool: - raise NotImplementedError() # pragma: nocover - - def is_available(self) -> bool: - """ - Return `True` if the connection is currently able to accept an - outgoing request. - - An HTTP/1.1 connection will only be available if it is currently idle. - - An HTTP/2 connection will be available so long as the stream ID space is - not yet exhausted, and the connection is not in an error state. 
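The `stream()` interface defined above is the incremental counterpart to `request()`. A minimal sketch, assuming the public `ConnectionPool` and `Response.iter_stream()` API (the URL is a placeholder):

```python
import httpcore

with httpcore.ConnectionPool() as pool:
    with pool.stream("GET", "https://example.org/") as response:
        for chunk in response.iter_stream():
            print(len(chunk))  # consume the body incrementally
```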
- - While the connection is being established we may not yet know if it is going - to result in an HTTP/1.1 or HTTP/2 connection. The connection should be - treated as being available, but might ultimately raise `NewConnectionRequired` - exceptions if multiple requests are attempted over a connection - that ends up being established as HTTP/1.1. - """ - raise NotImplementedError() # pragma: nocover - - def has_expired(self) -> bool: - """ - Return `True` if the connection is in a state where it should be closed. - - This either means that the connection is idle and it has passed the - expiry time on its keep-alive, or that the server has sent an EOF. - """ - raise NotImplementedError() # pragma: nocover - - def is_idle(self) -> bool: - """ - Return `True` if the connection is currently idle. - """ - raise NotImplementedError() # pragma: nocover - - def is_closed(self) -> bool: - """ - Return `True` if the connection has been closed. - - Used when a response is closed to determine if the connection may be - returned to the connection pool or not. - """ - raise NotImplementedError() # pragma: nocover diff --git a/server/venv/Lib/site-packages/httpcore/_sync/socks_proxy.py b/server/venv/Lib/site-packages/httpcore/_sync/socks_proxy.py deleted file mode 100644 index 502e4d7..0000000 --- a/server/venv/Lib/site-packages/httpcore/_sync/socks_proxy.py +++ /dev/null @@ -1,342 +0,0 @@ -import logging -import ssl -import typing - -from socksio import socks5 - -from .._backends.sync import SyncBackend -from .._backends.base import NetworkBackend, NetworkStream -from .._exceptions import ConnectionNotAvailable, ProxyError -from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url -from .._ssl import default_ssl_context -from .._synchronization import Lock -from .._trace import Trace -from .connection_pool import ConnectionPool -from .http11 import HTTP11Connection -from .interfaces import ConnectionInterface - -logger = logging.getLogger("httpcore.socks") - - -AUTH_METHODS = { - b"\x00": "NO AUTHENTICATION REQUIRED", - b"\x01": "GSSAPI", - b"\x02": "USERNAME/PASSWORD", - b"\xff": "NO ACCEPTABLE METHODS", -} - -REPLY_CODES = { - b"\x00": "Succeeded", - b"\x01": "General SOCKS server failure", - b"\x02": "Connection not allowed by ruleset", - b"\x03": "Network unreachable", - b"\x04": "Host unreachable", - b"\x05": "Connection refused", - b"\x06": "TTL expired", - b"\x07": "Command not supported", - b"\x08": "Address type not supported", -} - - -def _init_socks5_connection( - stream: NetworkStream, - *, - host: bytes, - port: int, - auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, -) -> None: - conn = socks5.SOCKS5Connection() - - # Auth method request - auth_method = ( - socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED - if auth is None - else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD - ) - conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) - outgoing_bytes = conn.data_to_send() - stream.write(outgoing_bytes) - - # Auth method response - incoming_bytes = stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socks5.SOCKS5AuthReply) - if response.method != auth_method: - requested = AUTH_METHODS.get(auth_method, "UNKNOWN") - responded = AUTH_METHODS.get(response.method, "UNKNOWN") - raise ProxyError( - f"Requested {requested} from proxy server, but got {responded}."
- ) - - if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: - # Username/password request - assert auth is not None - username, password = auth - conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password)) - outgoing_bytes = conn.data_to_send() - stream.write(outgoing_bytes) - - # Username/password response - incoming_bytes = stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socks5.SOCKS5UsernamePasswordReply) - if not response.success: - raise ProxyError("Invalid username/password") - - # Connect request - conn.send( - socks5.SOCKS5CommandRequest.from_address( - socks5.SOCKS5Command.CONNECT, (host, port) - ) - ) - outgoing_bytes = conn.data_to_send() - stream.write(outgoing_bytes) - - # Connect response - incoming_bytes = stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socks5.SOCKS5Reply) - if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED: - reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN") - raise ProxyError(f"Proxy Server could not connect: {reply_code}.") - - -class SOCKSProxy(ConnectionPool): - """ - A connection pool that sends requests via a SOCKS5 proxy. - """ - - def __init__( - self, - proxy_url: typing.Union[URL, bytes, str], - proxy_auth: typing.Optional[ - typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] - ] = None, - ssl_context: typing.Optional[ssl.SSLContext] = None, - max_connections: typing.Optional[int] = 10, - max_keepalive_connections: typing.Optional[int] = None, - keepalive_expiry: typing.Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - network_backend: typing.Optional[NetworkBackend] = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - proxy_url: The URL to use when connecting to the proxy server. - For example `"socks5://127.0.0.1:1080/"`. - proxy_auth: Any proxy authentication as a two-tuple of - (username, password). May be either bytes or ascii-only str. - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish - a connection. - network_backend: A backend instance to use for handling network I/O.
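A minimal usage sketch for the SOCKS pool documented above (the proxy address is a placeholder, and the `socksio` package must be installed):

```python
import httpcore

proxy = httpcore.SOCKSProxy(proxy_url="socks5://127.0.0.1:1080/")
response = proxy.request("GET", "https://example.org/")
print(response.status)
```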
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - ) - self._ssl_context = ssl_context - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if proxy_auth is not None: - username, password = proxy_auth - username_bytes = enforce_bytes(username, name="proxy_auth") - password_bytes = enforce_bytes(password, name="proxy_auth") - self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( - username_bytes, - password_bytes, - ) - else: - self._proxy_auth = None - - def create_connection(self, origin: Origin) -> ConnectionInterface: - return Socks5Connection( - proxy_origin=self._proxy_url.origin, - remote_origin=origin, - proxy_auth=self._proxy_auth, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class Socks5Connection(ConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, - ssl_context: typing.Optional[ssl.SSLContext] = None, - keepalive_expiry: typing.Optional[float] = None, - http1: bool = True, - http2: bool = False, - network_backend: typing.Optional[NetworkBackend] = None, - ) -> None: - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._proxy_auth = proxy_auth - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - - self._network_backend: NetworkBackend = ( - SyncBackend() if network_backend is None else network_backend - ) - self._connect_lock = Lock() - self._connection: typing.Optional[ConnectionInterface] = None - self._connect_failed = False - - def handle_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - with self._connect_lock: - if self._connection is None: - try: - # Connect to the proxy - kwargs = { - "host": self._proxy_origin.host.decode("ascii"), - "port": self._proxy_origin.port, - "timeout": timeout, - } - with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - - # Connect to the remote host using socks5 - kwargs = { - "stream": stream, - "host": self._remote_origin.host.decode("ascii"), - "port": self._remote_origin.port, - "auth": self._proxy_auth, - } - with Trace( - "setup_socks5_connection", logger, request, kwargs - ) as trace: - _init_socks5_connection(**kwargs) - trace.return_value = stream - - # Upgrade the stream to SSL - if self._remote_origin.scheme == b"https": - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ( - ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ) - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": sni_hostname - or self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - with Trace("start_tls", logger, request, kwargs) as trace: - stream = stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if we should be using HTTP/1.1 or HTTP/2 - ssl_object = stream.get_extra_info("ssl_object") - 
http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - - # Create the HTTP/1.1 or HTTP/2 connection - if http2_negotiated or ( - self._http2 and not self._http1 - ): # pragma: nocover - from .http2 import HTTP2Connection - - self._connection = HTTP2Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = HTTP11Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - except Exception as exc: - self._connect_failed = True - raise exc - elif not self._connection.is_available(): # pragma: nocover - raise ConnectionNotAvailable() - - return self._connection.handle_request(request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - def close(self) -> None: - if self._connection is not None: - self._connection.close() - - def is_available(self) -> bool: - if self._connection is None: # pragma: nocover - # If HTTP/2 support is enabled, and the resulting connection could - # end up as HTTP/2 then we should indicate the connection as being - # available to service multiple requests. - return ( - self._http2 - and (self._remote_origin.scheme == b"https" or not self._http1) - and not self._connect_failed - ) - return self._connection.is_available() - - def has_expired(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.has_expired() - - def is_idle(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.is_idle() - - def is_closed(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.is_closed() - - def info(self) -> str: - if self._connection is None: # pragma: nocover - return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" - return self._connection.info() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/server/venv/Lib/site-packages/httpcore/_synchronization.py b/server/venv/Lib/site-packages/httpcore/_synchronization.py deleted file mode 100644 index bae27c1..0000000 --- a/server/venv/Lib/site-packages/httpcore/_synchronization.py +++ /dev/null @@ -1,279 +0,0 @@ -import threading -from types import TracebackType -from typing import Optional, Type - -import sniffio - -from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions - -# Our async synchronization primitives use either 'anyio' or 'trio' depending -# on whether they're running under asyncio or trio. - -try: - import trio -except ImportError: # pragma: nocover - trio = None # type: ignore - -try: - import anyio -except ImportError: # pragma: nocover - anyio = None # type: ignore - - -class AsyncLock: - def __init__(self) -> None: - self._backend = "" - - def setup(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a lock with the correct implementation. - """ - self._backend = sniffio.current_async_library() - if self._backend == "trio": - if trio is None: # pragma: nocover - raise RuntimeError( - "Running under trio requires the 'trio' package to be installed." - ) - self._trio_lock = trio.Lock() - else: - if anyio is None: # pragma: nocover - raise RuntimeError( - "Running under asyncio requires the 'anyio' package to be installed."
- ) - self._anyio_lock = anyio.Lock() - - async def __aenter__(self) -> "AsyncLock": - if not self._backend: - self.setup() - - if self._backend == "trio": - await self._trio_lock.acquire() - else: - await self._anyio_lock.acquire() - - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - if self._backend == "trio": - self._trio_lock.release() - else: - self._anyio_lock.release() - - -class AsyncEvent: - def __init__(self) -> None: - self._backend = "" - - def setup(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a lock with the correct implementation. - """ - self._backend = sniffio.current_async_library() - if self._backend == "trio": - if trio is None: # pragma: nocover - raise RuntimeError( - "Running under trio requires the 'trio' package to be installed." - ) - self._trio_event = trio.Event() - else: - if anyio is None: # pragma: nocover - raise RuntimeError( - "Running under asyncio requires the 'anyio' package to be installed." - ) - self._anyio_event = anyio.Event() - - def set(self) -> None: - if not self._backend: - self.setup() - - if self._backend == "trio": - self._trio_event.set() - else: - self._anyio_event.set() - - async def wait(self, timeout: Optional[float] = None) -> None: - if not self._backend: - self.setup() - - if self._backend == "trio": - if trio is None: # pragma: nocover - raise RuntimeError( - "Running under trio requires the 'trio' package to be installed." - ) - - trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout} - timeout_or_inf = float("inf") if timeout is None else timeout - with map_exceptions(trio_exc_map): - with trio.fail_after(timeout_or_inf): - await self._trio_event.wait() - else: - if anyio is None: # pragma: nocover - raise RuntimeError( - "Running under asyncio requires the 'anyio' package to be installed." - ) - - anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout} - with map_exceptions(anyio_exc_map): - with anyio.fail_after(timeout): - await self._anyio_event.wait() - - -class AsyncSemaphore: - def __init__(self, bound: int) -> None: - self._bound = bound - self._backend = "" - - def setup(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a semaphore with the correct implementation. - """ - self._backend = sniffio.current_async_library() - if self._backend == "trio": - if trio is None: # pragma: nocover - raise RuntimeError( - "Running under trio requires the 'trio' package to be installed." - ) - - self._trio_semaphore = trio.Semaphore( - initial_value=self._bound, max_value=self._bound - ) - else: - if anyio is None: # pragma: nocover - raise RuntimeError( - "Running under asyncio requires the 'anyio' package to be installed." - ) - - self._anyio_semaphore = anyio.Semaphore( - initial_value=self._bound, max_value=self._bound - ) - - async def acquire(self) -> None: - if not self._backend: - self.setup() - - if self._backend == "trio": - await self._trio_semaphore.acquire() - else: - await self._anyio_semaphore.acquire() - - async def release(self) -> None: - if self._backend == "trio": - self._trio_semaphore.release() - else: - self._anyio_semaphore.release() - - -class AsyncShieldCancellation: - # For certain portions of our codebase where we're dealing with - # closing connections during exception handling we want to shield - # the operation from being cancelled. 
- # - # with AsyncShieldCancellation(): - # ... # clean-up operations, shielded from cancellation. - - def __init__(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a shielded scope with the correct implementation. - """ - self._backend = sniffio.current_async_library() - - if self._backend == "trio": - if trio is None: # pragma: nocover - raise RuntimeError( - "Running under trio requires the 'trio' package to be installed." - ) - - self._trio_shield = trio.CancelScope(shield=True) - else: - if anyio is None: # pragma: nocover - raise RuntimeError( - "Running under asyncio requires the 'anyio' package to be installed." - ) - - self._anyio_shield = anyio.CancelScope(shield=True) - - def __enter__(self) -> "AsyncShieldCancellation": - if self._backend == "trio": - self._trio_shield.__enter__() - else: - self._anyio_shield.__enter__() - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - if self._backend == "trio": - self._trio_shield.__exit__(exc_type, exc_value, traceback) - else: - self._anyio_shield.__exit__(exc_type, exc_value, traceback) - - -# Our thread-based synchronization primitives... - - -class Lock: - def __init__(self) -> None: - self._lock = threading.Lock() - - def __enter__(self) -> "Lock": - self._lock.acquire() - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - self._lock.release() - - -class Event: - def __init__(self) -> None: - self._event = threading.Event() - - def set(self) -> None: - self._event.set() - - def wait(self, timeout: Optional[float] = None) -> None: - if not self._event.wait(timeout=timeout): - raise PoolTimeout() # pragma: nocover - - -class Semaphore: - def __init__(self, bound: int) -> None: - self._semaphore = threading.Semaphore(value=bound) - - def acquire(self) -> None: - self._semaphore.acquire() - - def release(self) -> None: - self._semaphore.release() - - -class ShieldCancellation: - # Thread-synchronous codebases don't support cancellation semantics. - # We have this class because we need to mirror the async and sync - # cases within our package, but it's just a no-op. 
- def __enter__(self) -> "ShieldCancellation": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - pass diff --git a/server/venv/Lib/site-packages/httpcore/_trace.py b/server/venv/Lib/site-packages/httpcore/_trace.py deleted file mode 100644 index b122a53..0000000 --- a/server/venv/Lib/site-packages/httpcore/_trace.py +++ /dev/null @@ -1,105 +0,0 @@ -import inspect -import logging -from types import TracebackType -from typing import Any, Dict, Optional, Type - -from ._models import Request - - -class Trace: - def __init__( - self, - name: str, - logger: logging.Logger, - request: Optional[Request] = None, - kwargs: Optional[Dict[str, Any]] = None, - ) -> None: - self.name = name - self.logger = logger - self.trace_extension = ( - None if request is None else request.extensions.get("trace") - ) - self.debug = self.logger.isEnabledFor(logging.DEBUG) - self.kwargs = kwargs or {} - self.return_value: Any = None - self.should_trace = self.debug or self.trace_extension is not None - self.prefix = self.logger.name.split(".")[-1] - - def trace(self, name: str, info: Dict[str, Any]) -> None: - if self.trace_extension is not None: - prefix_and_name = f"{self.prefix}.{name}" - ret = self.trace_extension(prefix_and_name, info) - if inspect.iscoroutine(ret): # pragma: no cover - raise TypeError( - "If you are using a synchronous interface, " - "the callback of the `trace` extension should " - "be a normal function instead of an asynchronous function." - ) - - if self.debug: - if not info or "return_value" in info and info["return_value"] is None: - message = name - else: - args = " ".join([f"{key}={value!r}" for key, value in info.items()]) - message = f"{name} {args}" - self.logger.debug(message) - - def __enter__(self) -> "Trace": - if self.should_trace: - info = self.kwargs - self.trace(f"{self.name}.started", info) - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - if self.should_trace: - if exc_value is None: - info = {"return_value": self.return_value} - self.trace(f"{self.name}.complete", info) - else: - info = {"exception": exc_value} - self.trace(f"{self.name}.failed", info) - - async def atrace(self, name: str, info: Dict[str, Any]) -> None: - if self.trace_extension is not None: - prefix_and_name = f"{self.prefix}.{name}" - coro = self.trace_extension(prefix_and_name, info) - if not inspect.iscoroutine(coro): # pragma: no cover - raise TypeError( - "If you're using an asynchronous interface, " - "the callback of the `trace` extension should " - "be an asynchronous function rather than a normal function." 
- ) - await coro - - if self.debug: - if not info or "return_value" in info and info["return_value"] is None: - message = name - else: - args = " ".join([f"{key}={value!r}" for key, value in info.items()]) - message = f"{name} {args}" - self.logger.debug(message) - - async def __aenter__(self) -> "Trace": - if self.should_trace: - info = self.kwargs - await self.atrace(f"{self.name}.started", info) - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - if self.should_trace: - if exc_value is None: - info = {"return_value": self.return_value} - await self.atrace(f"{self.name}.complete", info) - else: - info = {"exception": exc_value} - await self.atrace(f"{self.name}.failed", info) diff --git a/server/venv/Lib/site-packages/httpcore/_utils.py b/server/venv/Lib/site-packages/httpcore/_utils.py deleted file mode 100644 index df5dea8..0000000 --- a/server/venv/Lib/site-packages/httpcore/_utils.py +++ /dev/null @@ -1,36 +0,0 @@ -import select -import socket -import sys -import typing - - -def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool: - """ - Return whether a socket, as identified by its file descriptor, is readable. - "A socket is readable" means that the read buffer isn't empty, i.e. that calling - .recv() on it would immediately return some data. - """ - # NOTE: we want to check for readability without actually attempting to read, because - # we don't want to block forever if it's not readable. - - # In the case that the socket no longer exists, or cannot return a file - # descriptor, we treat it as being readable, as if the next read operation - # on it is ready to return the terminating `b""`. - sock_fd = None if sock is None else sock.fileno() - if sock_fd is None or sock_fd < 0: # pragma: nocover - return True - - # The implementation below was stolen from: - # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478 - # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316 - - # Use select.select on Windows and when poll is unavailable, and select.poll - # everywhere else. (E.g. when eventlet is in use. See #327) - if ( - sys.platform == "win32" or getattr(select, "poll", None) is None - ): # pragma: nocover - rready, _, _ = select.select([sock_fd], [], [], 0) - return bool(rready) - p = select.poll() - p.register(sock_fd, select.POLLIN) - return bool(p.poll(0)) diff --git a/server/venv/Lib/site-packages/httpcore/py.typed b/server/venv/Lib/site-packages/httpcore/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/INSTALLER b/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/LICENSE b/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/LICENSE deleted file mode 100644 index 0fa5cad..0000000 --- a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License - -Copyright (c) 2015 MagicStack Inc.
http://magic.io - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/METADATA b/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/METADATA deleted file mode 100644 index 4516d4d..0000000 --- a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/METADATA +++ /dev/null @@ -1,133 +0,0 @@ -Metadata-Version: 2.1 -Name: httptools -Version: 0.6.1 -Summary: A collection of framework independent HTTP protocol utils. -Home-page: https://github.com/MagicStack/httptools -Author: Yury Selivanov -Author-email: yury@magic.io -License: MIT -Platform: macOS -Platform: POSIX -Platform: Windows -Classifier: License :: OSI Approved :: MIT License -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python :: 3 -Classifier: Operating System :: POSIX -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Environment :: Web Environment -Classifier: Development Status :: 5 - Production/Stable -Requires-Python: >=3.8.0 -Description-Content-Type: text/markdown -License-File: LICENSE -Provides-Extra: test -Requires-Dist: Cython <0.30.0,>=0.29.24 ; extra == 'test' - -![Tests](https://github.com/MagicStack/httptools/workflows/Tests/badge.svg) - -httptools is a Python binding for the nodejs HTTP parser. - -The package is available on PyPI: `pip install httptools`. - - -# APIs - -httptools contains two classes `httptools.HttpRequestParser`, -`httptools.HttpResponseParser` (fulfilled through -[llhttp](https://github.com/nodejs/llhttp)) and a function for -parsing URLs `httptools.parse_url` (through -[http-parse](https://github.com/nodejs/http-parser) for now). -See unittests for examples. - - -```python - -class HttpRequestParser: - - def __init__(self, protocol): - """HttpRequestParser - - protocol -- a Python object with the following methods - (all optional): - - - on_message_begin() - - on_url(url: bytes) - - on_header(name: bytes, value: bytes) - - on_headers_complete() - - on_body(body: bytes) - - on_message_complete() - - on_chunk_header() - - on_chunk_complete() - - on_status(status: bytes) - """ - - def get_http_version(self) -> str: - """Return an HTTP protocol version.""" - - def should_keep_alive(self) -> bool: - """Return ``True`` if keep-alive mode is preferred.""" - - def should_upgrade(self) -> bool: - """Return ``True`` if the parsed request is a valid Upgrade request. - The method exposes a flag set just before on_headers_complete. - Calling this method earlier will only yield `False`. 
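# Editorial sketch (not part of the API reference above): feeding a complete
# request to HttpRequestParser with a minimal callback object. The request
# bytes and the protocol class name are placeholders.

import httptools

class _EchoProtocol:
    def on_url(self, url: bytes):
        print("url:", url)

_parser = httptools.HttpRequestParser(_EchoProtocol())
_parser.feed_data(b"GET /ping HTTP/1.1\r\nHost: example.org\r\n\r\n")
assert _parser.get_method() == b"GET"
assert _parser.get_http_version() == "1.1"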
- """ - - def feed_data(self, data: bytes): - """Feed data to the parser. - - Will eventually trigger callbacks on the ``protocol`` - object. - - On HTTP upgrade, this method will raise an - ``HttpParserUpgrade`` exception, with its sole argument - set to the offset of the non-HTTP data in ``data``. - """ - - def get_method(self) -> bytes: - """Return HTTP request method (GET, HEAD, etc)""" - - -class HttpResponseParser: - - """Has all methods except ``get_method()`` that - HttpRequestParser has.""" - - def get_status_code(self) -> int: - """Return the status code of the HTTP response""" - - -def parse_url(url: bytes): - """Parse URL strings into a structured Python object. - - Returns an instance of ``httptools.URL`` class with the - following attributes: - - - schema: bytes - - host: bytes - - port: int - - path: bytes - - query: bytes - - fragment: bytes - - userinfo: bytes - """ -``` - - -# Development - -1. Clone this repository with - `git clone --recursive git@github.com:MagicStack/httptools.git` - -2. Create a virtual environment with Python 3: - `python3 -m venv envname` - -3. Activate the environment with `source envname/bin/activate` - -4. Install development requirements with `pip install -e .[test]` - -5. Run `make` and `make test`. - - -# License - -MIT. diff --git a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/RECORD b/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/RECORD deleted file mode 100644 index d713c95..0000000 --- a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/RECORD +++ /dev/null @@ -1,17 +0,0 @@ -httptools-0.6.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -httptools-0.6.1.dist-info/LICENSE,sha256=HAKLBZ16WOFMzaOMq87WmMWR_mFP--T3IUMERuhsjWk,1114 -httptools-0.6.1.dist-info/METADATA,sha256=GdqdFlXm7JDAuiqzSz4n3pSHjCxDNU_94fU4IzcbeMk,3726 -httptools-0.6.1.dist-info/RECORD,, -httptools-0.6.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -httptools-0.6.1.dist-info/WHEEL,sha256=badvNS-y9fEq0X-qzdZYvql_JFjI7Xfw-wR8FsjoK0I,102 -httptools-0.6.1.dist-info/top_level.txt,sha256=APjJKTbZcj0OQ4fdgf2eTCk82nK1n2BFXOD7ky41MPY,10 -httptools/__init__.py,sha256=uWLIa0nghlhLGoS8My_6YVhUlFbv3FhYsvxYpZHecS8,153 -httptools/__pycache__/__init__.cpython-311.pyc,, -httptools/__pycache__/_version.cpython-311.pyc,, -httptools/_version.py,sha256=dzM__G-_22s1tpTTuEJDSeVCek6HbXB7fVAS5_pqEwc,588 -httptools/parser/__init__.py,sha256=1XGlLzcfsdWYFehxDHaQJUl7RhWnSwG9hWpVkUKK50g,171 -httptools/parser/__pycache__/__init__.cpython-311.pyc,, -httptools/parser/__pycache__/errors.cpython-311.pyc,, -httptools/parser/errors.py,sha256=fQeEGiZ6AQH7SIa-7uzMs5QggoXr0A8YBPVHmafSw5Q,596 -httptools/parser/parser.cp311-win_amd64.pyd,sha256=LGkopER4_lysqabye4ISXLSwnTbVzJT4mzRxW3MFQ8w,82432 -httptools/parser/url_parser.cp311-win_amd64.pyd,sha256=rp5Qhf2-tLoFqj9CcfemHXA322V5z2hcR95nNz0rOX8,36352 diff --git a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/REQUESTED b/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/WHEEL b/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/WHEEL deleted file mode 100644 index 6d16045..0000000 --- a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.2) -Root-Is-Purelib: false -Tag: cp311-cp311-win_amd64 - diff --git 
a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/top_level.txt b/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/top_level.txt deleted file mode 100644 index bef3b40..0000000 --- a/server/venv/Lib/site-packages/httptools-0.6.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -httptools diff --git a/server/venv/Lib/site-packages/httptools/__init__.py b/server/venv/Lib/site-packages/httptools/__init__.py deleted file mode 100644 index aaa1902..0000000 --- a/server/venv/Lib/site-packages/httptools/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from . import parser -from .parser import * # NOQA - -from ._version import __version__ # NOQA - -__all__ = parser.__all__ + ('__version__',) # NOQA diff --git a/server/venv/Lib/site-packages/httptools/_version.py b/server/venv/Lib/site-packages/httptools/_version.py deleted file mode 100644 index e748229..0000000 --- a/server/venv/Lib/site-packages/httptools/_version.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file MUST NOT contain anything but the __version__ assignment. -# -# When making a release, change the value of __version__ -# to an appropriate value, and open a pull request against -# the correct branch (master if making a new feature release). -# The commit message MUST contain a properly formatted release -# log, and the commit must be signed. -# -# The release automation will: build and test the packages for the -# supported platforms, publish the packages on PyPI, merge the PR -# to the target branch, create a Git tag pointing to the commit. - -__version__ = '0.6.1' diff --git a/server/venv/Lib/site-packages/httptools/parser/__init__.py b/server/venv/Lib/site-packages/httptools/parser/__init__.py deleted file mode 100644 index d03eafc..0000000 --- a/server/venv/Lib/site-packages/httptools/parser/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .parser import * # NoQA -from .errors import * # NoQA -from .url_parser import * # NoQA - -__all__ = parser.__all__ + errors.__all__ + url_parser.__all__ # NoQA diff --git a/server/venv/Lib/site-packages/httptools/parser/errors.py b/server/venv/Lib/site-packages/httptools/parser/errors.py deleted file mode 100644 index 3db30e1..0000000 --- a/server/venv/Lib/site-packages/httptools/parser/errors.py +++ /dev/null @@ -1,30 +0,0 @@ -__all__ = ('HttpParserError', - 'HttpParserCallbackError', - 'HttpParserInvalidStatusError', - 'HttpParserInvalidMethodError', - 'HttpParserInvalidURLError', - 'HttpParserUpgrade') - - -class HttpParserError(Exception): - pass - - -class HttpParserCallbackError(HttpParserError): - pass - - -class HttpParserInvalidStatusError(HttpParserError): - pass - - -class HttpParserInvalidMethodError(HttpParserError): - pass - - -class HttpParserInvalidURLError(HttpParserError): - pass - - -class HttpParserUpgrade(Exception): - pass diff --git a/server/venv/Lib/site-packages/httptools/parser/parser.cp311-win_amd64.pyd b/server/venv/Lib/site-packages/httptools/parser/parser.cp311-win_amd64.pyd deleted file mode 100644 index f3bf1da..0000000 Binary files a/server/venv/Lib/site-packages/httptools/parser/parser.cp311-win_amd64.pyd and /dev/null differ diff --git a/server/venv/Lib/site-packages/httptools/parser/url_parser.cp311-win_amd64.pyd b/server/venv/Lib/site-packages/httptools/parser/url_parser.cp311-win_amd64.pyd deleted file mode 100644 index 7b8dd1f..0000000 Binary files a/server/venv/Lib/site-packages/httptools/parser/url_parser.cp311-win_amd64.pyd and /dev/null differ diff --git a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/INSTALLER 
b/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/METADATA b/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/METADATA deleted file mode 100644 index f3a6d50..0000000 --- a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/METADATA +++ /dev/null @@ -1,216 +0,0 @@ -Metadata-Version: 2.1 -Name: httpx -Version: 0.25.0 -Summary: The next generation HTTP client. -Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md -Project-URL: Documentation, https://www.python-httpx.org -Project-URL: Homepage, https://github.com/encode/httpx -Project-URL: Source, https://github.com/encode/httpx -Author-email: Tom Christie -License-Expression: BSD-3-Clause -License-File: LICENSE.md -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Web Environment -Classifier: Framework :: AsyncIO -Classifier: Framework :: Trio -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Topic :: Internet :: WWW/HTTP -Requires-Python: >=3.8 -Requires-Dist: certifi -Requires-Dist: httpcore<0.19.0,>=0.18.0 -Requires-Dist: idna -Requires-Dist: sniffio -Provides-Extra: brotli -Requires-Dist: brotli; platform_python_implementation == 'CPython' and extra == 'brotli' -Requires-Dist: brotlicffi; platform_python_implementation != 'CPython' and extra == 'brotli' -Provides-Extra: cli -Requires-Dist: click==8.*; extra == 'cli' -Requires-Dist: pygments==2.*; extra == 'cli' -Requires-Dist: rich<14,>=10; extra == 'cli' -Provides-Extra: http2 -Requires-Dist: h2<5,>=3; extra == 'http2' -Provides-Extra: socks -Requires-Dist: socksio==1.*; extra == 'socks' -Description-Content-Type: text/markdown - -

- [logo: HTTPX]
-
-HTTPX - A next-generation HTTP client for Python.
-
- [badges: Test Suite | Package version]
-
- -HTTPX is a fully featured HTTP client library for Python 3. It includes **an integrated -command line client**, has support for both **HTTP/1.1 and HTTP/2**, and provides both **sync -and async APIs**. - ---- - -Install HTTPX using pip: - -```shell -$ pip install httpx -``` - -Now, let's get started: - -```pycon ->>> import httpx ->>> r = httpx.get('https://www.example.org/') ->>> r - ->>> r.status_code -200 ->>> r.headers['content-type'] -'text/html; charset=UTF-8' ->>> r.text -'\n\n\nExample Domain...' -``` - -Or, using the command-line client. - -```shell -$ pip install 'httpx[cli]' # The command line client is an optional dependency. -``` - -Which now allows us to use HTTPX directly from the command-line... - -

- [terminal demo: httpx --help]
-
-Sending a request...
-
- [terminal demo: httpx http://httpbin.org/json]
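
The quickstart above shows the sync API; a minimal sketch of the async counterpart mentioned in the intro (the URL is illustrative, and `asyncio.run` is assumed as the event-loop driver):

```python
import asyncio

import httpx

async def main() -> None:
    # AsyncClient mirrors httpx.Client, but its calls are awaited.
    async with httpx.AsyncClient() as client:
        r = await client.get("https://www.example.org/")
        print(r.status_code)

asyncio.run(main())
```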

- -## Features - -HTTPX builds on the well-established usability of `requests`, and gives you: - -* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/). -* An integrated command-line client. -* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/). -* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/). -* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/#calling-into-python-web-apps) or [ASGI applications](https://www.python-httpx.org/async/#calling-into-python-web-apps). -* Strict timeouts everywhere. -* Fully type annotated. -* 100% test coverage. - -Plus all the standard features of `requests`... - -* International Domains and URLs -* Keep-Alive & Connection Pooling -* Sessions with Cookie Persistence -* Browser-style SSL Verification -* Basic/Digest Authentication -* Elegant Key/Value Cookies -* Automatic Decompression -* Automatic Content Decoding -* Unicode Response Bodies -* Multipart File Uploads -* HTTP(S) Proxy Support -* Connection Timeouts -* Streaming Downloads -* .netrc Support -* Chunked Requests - -## Installation - -Install with pip: - -```shell -$ pip install httpx -``` - -Or, to include the optional HTTP/2 support, use: - -```shell -$ pip install httpx[http2] -``` - -HTTPX requires Python 3.8+. - -## Documentation - -Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/). - -For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/). - -For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section. - -The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference. - -To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third_party_packages/). - -## Contribute - -If you want to contribute with HTTPX check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start. - -## Dependencies - -The HTTPX project relies on these excellent libraries: - -* `httpcore` - The underlying transport implementation for `httpx`. - * `h11` - HTTP/1.1 support. -* `certifi` - SSL certificates. -* `idna` - Internationalized domain name support. -* `sniffio` - Async library autodetection. - -As well as these optional installs: - -* `h2` - HTTP/2 support. *(Optional, with `httpx[http2]`)* -* `socksio` - SOCKS proxy support. *(Optional, with `httpx[socks]`)* -* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)* -* `click` - Command line client support. *(Optional, with `httpx[cli]`)* -* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)* - -A huge amount of credit is due to `requests` for the API layout that -much of this work follows, as well as to `urllib3` for plenty of design -inspiration around the lower-level networking details. - ---- - -
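Since `h2` is an optional dependency, HTTP/2 has to be switched on explicitly; a minimal sketch, assuming `httpx[http2]` is installed and the server can negotiate it:

```python
import httpx

# http2=True is only honoured when the optional 'h2' package is present.
with httpx.Client(http2=True) as client:
    response = client.get("https://www.example.org/")
    print(response.http_version)  # "HTTP/2" if negotiated, else "HTTP/1.1"
```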

-HTTPX is BSD licensed code.
-Designed & crafted with care.
-
-— 🦋 —

- -## Release Information - -### Removed - -* Drop support for Python 3.7. (#2813) - -### Added - -* Support HTTPS proxies. (#2845) -* Change the type of `Extensions` from `Mapping[Str, Any]` to `MutableMapping[Str, Any]`. (#2803) -* Add `socket_options` argument to `httpx.HTTPTransport` and `httpx.AsyncHTTPTransport` classes. (#2716) -* The `Response.raise_for_status()` method now returns the response instance. For example: `data = httpx.get('...').raise_for_status().json()`. (#2776) - -### Fixed - -* Return `500` error response instead of exceptions when `raise_app_exceptions=False` is set on `ASGITransport`. (#2669) -* Ensure all `WSGITransport` environs have a `SERVER_PROTOCOL`. (#2708) -* Always encode forward slashes as `%2F` in query parameters (#2723) -* Use Mozilla documentation instead of `httpstatuses.com` for HTTP error reference (#2768) - - ---- - -[Full changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) diff --git a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/RECORD b/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/RECORD deleted file mode 100644 index a11e27e..0000000 --- a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/RECORD +++ /dev/null @@ -1,57 +0,0 @@ -../../Scripts/httpx.exe,sha256=uQAHOJHMzuRC4pFwUWeG9jcv9t9Tz7Vd_C3fIU_Beo0,108445 -httpx-0.25.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -httpx-0.25.0.dist-info/METADATA,sha256=8ZnffZ9BAo4gzHpfUFlvx2tQYM3-Z3FRwOh4BsgKgLk,7630 -httpx-0.25.0.dist-info/RECORD,, -httpx-0.25.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -httpx-0.25.0.dist-info/WHEEL,sha256=9QBuHhg6FNW7lppboF2vKVbCGTVzsFykgRQjjlajrhA,87 -httpx-0.25.0.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37 -httpx-0.25.0.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508 -httpx/__init__.py,sha256=oCxVAsePEy5DE9eLhGAAq9H3RBGZUDaUROtGEyzbBRo,3210 -httpx/__pycache__/__init__.cpython-311.pyc,, -httpx/__pycache__/__version__.cpython-311.pyc,, -httpx/__pycache__/_api.cpython-311.pyc,, -httpx/__pycache__/_auth.cpython-311.pyc,, -httpx/__pycache__/_client.cpython-311.pyc,, -httpx/__pycache__/_compat.cpython-311.pyc,, -httpx/__pycache__/_config.cpython-311.pyc,, -httpx/__pycache__/_content.cpython-311.pyc,, -httpx/__pycache__/_decoders.cpython-311.pyc,, -httpx/__pycache__/_exceptions.cpython-311.pyc,, -httpx/__pycache__/_main.cpython-311.pyc,, -httpx/__pycache__/_models.cpython-311.pyc,, -httpx/__pycache__/_multipart.cpython-311.pyc,, -httpx/__pycache__/_status_codes.cpython-311.pyc,, -httpx/__pycache__/_types.cpython-311.pyc,, -httpx/__pycache__/_urlparse.cpython-311.pyc,, -httpx/__pycache__/_urls.cpython-311.pyc,, -httpx/__pycache__/_utils.cpython-311.pyc,, -httpx/__version__.py,sha256=hVLEi3Fe6ziNKrXxfBIkYicKi_9NvZZvMQJMGUkSjIU,108 -httpx/_api.py,sha256=cVU9ErzaXve5rqoPoSHr9yJbovHtICrcxR7yBoNSeOw,13011 -httpx/_auth.py,sha256=58FA-xqqp-XgLZ7Emd4-et-XXuTRaa5buiBYB2MzyvE,11773 -httpx/_client.py,sha256=A9MPP_d1ZlqcO5CeGLgyzVwdHgCpROYSdjoAUA6rpYE,68131 -httpx/_compat.py,sha256=S4sL2QocE1gj0qn6F7egl7dYhY809rXE5P9YD3e_zDg,1602 -httpx/_config.py,sha256=3AKbxOVuh2UeZi95UqALtoO1thrmHUhTxvcFAyspKPw,12491 -httpx/_content.py,sha256=olbWqawdWWweXeW6gDYHPiEGjip5lqFZKv9OmVd-zIg,8092 -httpx/_decoders.py,sha256=dd8GSkEAe45BzRUF47zH_lg3-BcwXtxzPBSGP5Y4F90,9739 -httpx/_exceptions.py,sha256=xKw-U6vW7zmdReUAGYHMegYWZuDAuE5039L087SHe4Q,7880 -httpx/_main.py,sha256=m9C4RuqjOB6UqL3FFHMjmC45f4SDSO-iOREFLdw4IdM,15784 
-httpx/_models.py,sha256=gfrZvx3B0R2U7bLT7JfNKt4DMnUoT8leHOiij6Te13A,42791 -httpx/_multipart.py,sha256=qzt35jAgapaRPwdq-lTKSA5YY6ayrfDIsZLdr3t4NWc,8972 -httpx/_status_codes.py,sha256=XKArMrSoo8oKBQCHdFGA-wsM2PcSTaHE8svDYOUcwWk,5584 -httpx/_transports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -httpx/_transports/__pycache__/__init__.cpython-311.pyc,, -httpx/_transports/__pycache__/asgi.cpython-311.pyc,, -httpx/_transports/__pycache__/base.cpython-311.pyc,, -httpx/_transports/__pycache__/default.cpython-311.pyc,, -httpx/_transports/__pycache__/mock.cpython-311.pyc,, -httpx/_transports/__pycache__/wsgi.cpython-311.pyc,, -httpx/_transports/asgi.py,sha256=ZoIHy1-Wu09vZRkjzVzXJFgw9sh0Dq5cG8zfgtqK-SA,5469 -httpx/_transports/base.py,sha256=0BM8yZZEkdFT4tXXSm0h0dK0cSYA4hLgInj_BljGEGw,2510 -httpx/_transports/default.py,sha256=u99Nctd3mDNHfOc-S_ldaEwSvlp8NGLwIW2rjY0VgP4,13192 -httpx/_transports/mock.py,sha256=sDt3BDXbz8-W94kC8OXtGzF1PWH0y73h1De7Q-XkVtg,1179 -httpx/_transports/wsgi.py,sha256=Zt3EhTagyF3o-HC2oPMp-hTy3M3kQThL1ECJRc8eXEM,4797 -httpx/_types.py,sha256=W_lOq_3FnHmZGQuXaGm5JDykFoC0WoqhnfH92nRDNGQ,3367 -httpx/_urlparse.py,sha256=UQbI0l39smQh5UplxFAtLYfuxSx1cC_JPivhBPBSWgk,16774 -httpx/_urls.py,sha256=JAONd-2reXpB_WuQ7WuvhUcLuebiQeYJQPyszADmCow,21840 -httpx/_utils.py,sha256=I_m2rFyEpoU2j8lS0GwdWcUTBTQ8cjvFnZ8ROmnCpR8,15403 -httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/REQUESTED b/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/WHEEL b/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/WHEEL deleted file mode 100644 index ba1a8af..0000000 --- a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.18.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/entry_points.txt b/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/entry_points.txt deleted file mode 100644 index 8ae9600..0000000 --- a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -httpx = httpx:main diff --git a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/licenses/LICENSE.md b/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/licenses/LICENSE.md deleted file mode 100644 index ab79d16..0000000 --- a/server/venv/Lib/site-packages/httpx-0.25.0.dist-info/licenses/LICENSE.md +++ /dev/null @@ -1,12 +0,0 @@ -Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/server/venv/Lib/site-packages/httpx/__init__.py b/server/venv/Lib/site-packages/httpx/__init__.py deleted file mode 100644 index f61112f..0000000 --- a/server/venv/Lib/site-packages/httpx/__init__.py +++ /dev/null @@ -1,138 +0,0 @@ -from .__version__ import __description__, __title__, __version__ -from ._api import delete, get, head, options, patch, post, put, request, stream -from ._auth import Auth, BasicAuth, DigestAuth, NetRCAuth -from ._client import USE_CLIENT_DEFAULT, AsyncClient, Client -from ._config import Limits, Proxy, Timeout, create_ssl_context -from ._content import ByteStream -from ._exceptions import ( - CloseError, - ConnectError, - ConnectTimeout, - CookieConflict, - DecodingError, - HTTPError, - HTTPStatusError, - InvalidURL, - LocalProtocolError, - NetworkError, - PoolTimeout, - ProtocolError, - ProxyError, - ReadError, - ReadTimeout, - RemoteProtocolError, - RequestError, - RequestNotRead, - ResponseNotRead, - StreamClosed, - StreamConsumed, - StreamError, - TimeoutException, - TooManyRedirects, - TransportError, - UnsupportedProtocol, - WriteError, - WriteTimeout, -) -from ._models import Cookies, Headers, Request, Response -from ._status_codes import codes -from ._transports.asgi import ASGITransport -from ._transports.base import AsyncBaseTransport, BaseTransport -from ._transports.default import AsyncHTTPTransport, HTTPTransport -from ._transports.mock import MockTransport -from ._transports.wsgi import WSGITransport -from ._types import AsyncByteStream, SyncByteStream -from ._urls import URL, QueryParams - -try: - from ._main import main -except ImportError: # pragma: no cover - - def main() -> None: # type: ignore - import sys - - print( - "The httpx command line client could not run because the required " - "dependencies were not installed.\nMake sure you've installed " - "everything with: pip install 'httpx[cli]'" - ) - sys.exit(1) - - -__all__ = [ - "__description__", - "__title__", - "__version__", - "ASGITransport", - "AsyncBaseTransport", - "AsyncByteStream", - "AsyncClient", - "AsyncHTTPTransport", - "Auth", - "BaseTransport", - "BasicAuth", - "ByteStream", - "Client", - "CloseError", - "codes", - "ConnectError", - "ConnectTimeout", - "CookieConflict", - "Cookies", - "create_ssl_context", - "DecodingError", - "delete", - "DigestAuth", - "get", - "head", - "Headers", - "HTTPError", - "HTTPStatusError", - "HTTPTransport", - "InvalidURL", - "Limits", - "LocalProtocolError", - "main", - "MockTransport", - "NetRCAuth", - "NetworkError", - "options", - "patch", - "PoolTimeout", - "post", - "ProtocolError", - "Proxy", - "ProxyError", - "put", - "QueryParams", - "ReadError", - "ReadTimeout", - "RemoteProtocolError", - "request", - "Request", - "RequestError", - "RequestNotRead", - "Response", - 
"ResponseNotRead", - "stream", - "StreamClosed", - "StreamConsumed", - "StreamError", - "SyncByteStream", - "Timeout", - "TimeoutException", - "TooManyRedirects", - "TransportError", - "UnsupportedProtocol", - "URL", - "USE_CLIENT_DEFAULT", - "WriteError", - "WriteTimeout", - "WSGITransport", -] - - -__locals = locals() -for __name in __all__: - if not __name.startswith("__"): - setattr(__locals[__name], "__module__", "httpx") # noqa diff --git a/server/venv/Lib/site-packages/httpx/__version__.py b/server/venv/Lib/site-packages/httpx/__version__.py deleted file mode 100644 index bfa421a..0000000 --- a/server/venv/Lib/site-packages/httpx/__version__.py +++ /dev/null @@ -1,3 +0,0 @@ -__title__ = "httpx" -__description__ = "A next generation HTTP client, for Python 3." -__version__ = "0.25.0" diff --git a/server/venv/Lib/site-packages/httpx/_api.py b/server/venv/Lib/site-packages/httpx/_api.py deleted file mode 100644 index 571289c..0000000 --- a/server/venv/Lib/site-packages/httpx/_api.py +++ /dev/null @@ -1,445 +0,0 @@ -import typing -from contextlib import contextmanager - -from ._client import Client -from ._config import DEFAULT_TIMEOUT_CONFIG -from ._models import Response -from ._types import ( - AuthTypes, - CertTypes, - CookieTypes, - HeaderTypes, - ProxiesTypes, - QueryParamTypes, - RequestContent, - RequestData, - RequestFiles, - TimeoutTypes, - URLTypes, - VerifyTypes, -) - - -def request( - method: str, - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, - trust_env: bool = True, -) -> Response: - """ - Sends an HTTP request. - - **Parameters:** - - * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`, - `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`. - * **url** - URL for the new `Request` object. - * **params** - *(optional)* Query parameters to include in the URL, as a - string, dictionary, or sequence of two-tuples. - * **content** - *(optional)* Binary content to include in the body of the - request, as bytes or a byte iterator. - * **data** - *(optional)* Form data to include in the body of the request, - as a dictionary. - * **files** - *(optional)* A dictionary of upload files to include in the - body of the request. - * **json** - *(optional)* A JSON serializable object to include in the body - of the request. - * **headers** - *(optional)* Dictionary of HTTP headers to include in the - request. - * **cookies** - *(optional)* Dictionary of Cookie items to include in the - request. - * **auth** - *(optional)* An authentication class to use when sending the - request. - * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy URLs. - * **timeout** - *(optional)* The timeout configuration to use when sending - the request. - * **follow_redirects** - *(optional)* Enables or disables HTTP redirects. - * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to - verify the identity of requested hosts. 
Either `True` (default CA bundle), - a path to an SSL certificate file, an `ssl.SSLContext`, or `False` - (which will disable verification). - * **cert** - *(optional)* An SSL certificate used by the requested host - to authenticate the client. Either a path to an SSL certificate file, or - two-tuple of (certificate file, key file), or a three-tuple of (certificate - file, key file, password). - * **trust_env** - *(optional)* Enables or disables usage of environment - variables for configuration. - - **Returns:** `Response` - - Usage: - - ``` - >>> import httpx - >>> response = httpx.request('GET', 'https://httpbin.org/get') - >>> response - - ``` - """ - with Client( - cookies=cookies, - proxies=proxies, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) as client: - return client.request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - auth=auth, - follow_redirects=follow_redirects, - ) - - -@contextmanager -def stream( - method: str, - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, - trust_env: bool = True, -) -> typing.Iterator[Response]: - """ - Alternative to `httpx.request()` that streams the response body - instead of loading it into memory at once. - - **Parameters**: See `httpx.request`. - - See also: [Streaming Responses][0] - - [0]: /quickstart#streaming-responses - """ - with Client( - cookies=cookies, - proxies=proxies, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) as client: - with client.stream( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - auth=auth, - follow_redirects=follow_redirects, - ) as response: - yield response - - -def get( - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, - follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `GET` request. - - **Parameters**: See `httpx.request`. - - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `GET` requests should not include a request body. 
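# --- Editor's sketch (illustrative; not part of the original module) ---
# stream() above is a context manager: the body is consumed in chunks via
# iter_bytes() instead of being loaded into memory at once. URL illustrative.
import httpx

with httpx.stream("GET", "https://www.example.org/") as response:
    for chunk in response.iter_bytes():
        print(len(chunk))  # handle each chunk as it arrives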
- """ - return request( - "GET", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def options( - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, - follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends an `OPTIONS` request. - - **Parameters**: See `httpx.request`. - - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `OPTIONS` requests should not include a request body. - """ - return request( - "OPTIONS", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def head( - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, - follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `HEAD` request. - - **Parameters**: See `httpx.request`. - - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `HEAD` requests should not include a request body. - """ - return request( - "HEAD", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def post( - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, - follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `POST` request. - - **Parameters**: See `httpx.request`. 
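# --- Editor's sketch (illustrative; not part of the original module) ---
# The body-carrying helpers accept one of content=, data=, files= or json=;
# the httpbin endpoint below simply echoes the JSON body back.
import httpx

response = httpx.post("https://httpbin.org/post", json={"key": "value"})
response.raise_for_status()
print(response.json()["json"])  # {'key': 'value'}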
- """ - return request( - "POST", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def put( - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, - follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `PUT` request. - - **Parameters**: See `httpx.request`. - """ - return request( - "PUT", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def patch( - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, - follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `PATCH` request. - - **Parameters**: See `httpx.request`. - """ - return request( - "PATCH", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def delete( - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Optional[AuthTypes] = None, - proxies: typing.Optional[ProxiesTypes] = None, - follow_redirects: bool = False, - cert: typing.Optional[CertTypes] = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `DELETE` request. - - **Parameters**: See `httpx.request`. - - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `DELETE` requests should not include a request body. 
- """ - return request( - "DELETE", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) diff --git a/server/venv/Lib/site-packages/httpx/_auth.py b/server/venv/Lib/site-packages/httpx/_auth.py deleted file mode 100644 index 1d7385d..0000000 --- a/server/venv/Lib/site-packages/httpx/_auth.py +++ /dev/null @@ -1,347 +0,0 @@ -import hashlib -import netrc -import os -import re -import time -import typing -from base64 import b64encode -from urllib.request import parse_http_list - -from ._exceptions import ProtocolError -from ._models import Request, Response -from ._utils import to_bytes, to_str, unquote - -if typing.TYPE_CHECKING: # pragma: no cover - from hashlib import _Hash - - -class Auth: - """ - Base class for all authentication schemes. - - To implement a custom authentication scheme, subclass `Auth` and override - the `.auth_flow()` method. - - If the authentication scheme does I/O such as disk access or network calls, or uses - synchronization primitives such as locks, you should override `.sync_auth_flow()` - and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized - implementations that will be used by `Client` and `AsyncClient` respectively. - """ - - requires_request_body = False - requires_response_body = False - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - """ - Execute the authentication flow. - - To dispatch a request, `yield` it: - - ``` - yield request - ``` - - The client will `.send()` the response back into the flow generator. You can - access it like so: - - ``` - response = yield request - ``` - - A `return` (or reaching the end of the generator) will result in the - client returning the last response obtained from the server. - - You can dispatch as many requests as is necessary. - """ - yield request - - def sync_auth_flow( - self, request: Request - ) -> typing.Generator[Request, Response, None]: - """ - Execute the authentication flow synchronously. - - By default, this defers to `.auth_flow()`. You should override this method - when the authentication scheme does I/O and/or uses concurrency primitives. - """ - if self.requires_request_body: - request.read() - - flow = self.auth_flow(request) - request = next(flow) - - while True: - response = yield request - if self.requires_response_body: - response.read() - - try: - request = flow.send(response) - except StopIteration: - break - - async def async_auth_flow( - self, request: Request - ) -> typing.AsyncGenerator[Request, Response]: - """ - Execute the authentication flow asynchronously. - - By default, this defers to `.auth_flow()`. You should override this method - when the authentication scheme does I/O and/or uses concurrency primitives. - """ - if self.requires_request_body: - await request.aread() - - flow = self.auth_flow(request) - request = next(flow) - - while True: - response = yield request - if self.requires_response_body: - await response.aread() - - try: - request = flow.send(response) - except StopIteration: - break - - -class FunctionAuth(Auth): - """ - Allows the 'auth' argument to be passed as a simple callable function, - that takes the request, and returns a new, modified request. 
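# --- Editor's sketch (illustrative; not part of the original module) ---
# A custom scheme per the Auth docstring above: auth_flow() yields requests
# and receives responses, so multi-step flows are plain generator code.
# TokenAuth and the token value are illustrative names.
import typing

import httpx

class TokenAuth(httpx.Auth):
    def __init__(self, token: str) -> None:
        self._token = token

    def auth_flow(
        self, request: httpx.Request
    ) -> typing.Generator[httpx.Request, httpx.Response, None]:
        # Single-step flow: attach the header, dispatch once.
        request.headers["Authorization"] = f"Bearer {self._token}"
        yield request

response = httpx.get("https://www.example.org/", auth=TokenAuth("secret"))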
- """ - - def __init__(self, func: typing.Callable[[Request], Request]) -> None: - self._func = func - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - yield self._func(request) - - -class BasicAuth(Auth): - """ - Allows the 'auth' argument to be passed as a (username, password) pair, - and uses HTTP Basic authentication. - """ - - def __init__( - self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] - ): - self._auth_header = self._build_auth_header(username, password) - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - request.headers["Authorization"] = self._auth_header - yield request - - def _build_auth_header( - self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] - ) -> str: - userpass = b":".join((to_bytes(username), to_bytes(password))) - token = b64encode(userpass).decode() - return f"Basic {token}" - - -class NetRCAuth(Auth): - """ - Use a 'netrc' file to lookup basic auth credentials based on the url host. - """ - - def __init__(self, file: typing.Optional[str] = None): - self._netrc_info = netrc.netrc(file) - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - auth_info = self._netrc_info.authenticators(request.url.host) - if auth_info is None or not auth_info[2]: - # The netrc file did not have authentication credentials for this host. - yield request - else: - # Build a basic auth header with credentials from the netrc file. - request.headers["Authorization"] = self._build_auth_header( - username=auth_info[0], password=auth_info[2] - ) - yield request - - def _build_auth_header( - self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] - ) -> str: - userpass = b":".join((to_bytes(username), to_bytes(password))) - token = b64encode(userpass).decode() - return f"Basic {token}" - - -class DigestAuth(Auth): - _ALGORITHM_TO_HASH_FUNCTION: typing.Dict[str, typing.Callable[[bytes], "_Hash"]] = { - "MD5": hashlib.md5, - "MD5-SESS": hashlib.md5, - "SHA": hashlib.sha1, - "SHA-SESS": hashlib.sha1, - "SHA-256": hashlib.sha256, - "SHA-256-SESS": hashlib.sha256, - "SHA-512": hashlib.sha512, - "SHA-512-SESS": hashlib.sha512, - } - - def __init__( - self, username: typing.Union[str, bytes], password: typing.Union[str, bytes] - ) -> None: - self._username = to_bytes(username) - self._password = to_bytes(password) - self._last_challenge: typing.Optional[_DigestAuthChallenge] = None - self._nonce_count = 1 - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - if self._last_challenge: - request.headers["Authorization"] = self._build_auth_header( - request, self._last_challenge - ) - - response = yield request - - if response.status_code != 401 or "www-authenticate" not in response.headers: - # If the response is not a 401 then we don't - # need to build an authenticated request. - return - - for auth_header in response.headers.get_list("www-authenticate"): - if auth_header.lower().startswith("digest "): - break - else: - # If the response does not include a 'WWW-Authenticate: Digest ...' - # header, then we don't need to build an authenticated request. 
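# --- Editor's sketch (illustrative; not part of the original module) ---
# Using the DigestAuth flow implemented below: the first request may return
# 401 with a WWW-Authenticate challenge, and the flow replays it with the
# computed Digest header. Credentials and URL are illustrative.
import httpx

auth = httpx.DigestAuth(username="user", password="pass")
response = httpx.get("https://httpbin.org/digest-auth/auth/user/pass", auth=auth)
print(response.status_code)  # 200 once the challenge is answered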
- return - - self._last_challenge = self._parse_challenge(request, response, auth_header) - self._nonce_count = 1 - - request.headers["Authorization"] = self._build_auth_header( - request, self._last_challenge - ) - yield request - - def _parse_challenge( - self, request: Request, response: Response, auth_header: str - ) -> "_DigestAuthChallenge": - """ - Returns a challenge from a Digest WWW-Authenticate header. - These take the form of: - `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"` - """ - scheme, _, fields = auth_header.partition(" ") - - # This method should only ever have been called with a Digest auth header. - assert scheme.lower() == "digest" - - header_dict: typing.Dict[str, str] = {} - for field in parse_http_list(fields): - key, value = field.strip().split("=", 1) - header_dict[key] = unquote(value) - - try: - realm = header_dict["realm"].encode() - nonce = header_dict["nonce"].encode() - algorithm = header_dict.get("algorithm", "MD5") - opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None - qop = header_dict["qop"].encode() if "qop" in header_dict else None - return _DigestAuthChallenge( - realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop - ) - except KeyError as exc: - message = "Malformed Digest WWW-Authenticate header" - raise ProtocolError(message, request=request) from exc - - def _build_auth_header( - self, request: Request, challenge: "_DigestAuthChallenge" - ) -> str: - hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()] - - def digest(data: bytes) -> bytes: - return hash_func(data).hexdigest().encode() - - A1 = b":".join((self._username, challenge.realm, self._password)) - - path = request.url.raw_path - A2 = b":".join((request.method.encode(), path)) - # TODO: implement auth-int - HA2 = digest(A2) - - nc_value = b"%08x" % self._nonce_count - cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce) - self._nonce_count += 1 - - HA1 = digest(A1) - if challenge.algorithm.lower().endswith("-sess"): - HA1 = digest(b":".join((HA1, challenge.nonce, cnonce))) - - qop = self._resolve_qop(challenge.qop, request=request) - if qop is None: - digest_data = [HA1, challenge.nonce, HA2] - else: - digest_data = [challenge.nonce, nc_value, cnonce, qop, HA2] - key_digest = b":".join(digest_data) - - format_args = { - "username": self._username, - "realm": challenge.realm, - "nonce": challenge.nonce, - "uri": path, - "response": digest(b":".join((HA1, key_digest))), - "algorithm": challenge.algorithm.encode(), - } - if challenge.opaque: - format_args["opaque"] = challenge.opaque - if qop: - format_args["qop"] = b"auth" - format_args["nc"] = nc_value - format_args["cnonce"] = cnonce - - return "Digest " + self._get_header_value(format_args) - - def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: - s = str(nonce_count).encode() - s += nonce - s += time.ctime().encode() - s += os.urandom(8) - - return hashlib.sha1(s).hexdigest()[:16].encode() - - def _get_header_value(self, header_fields: typing.Dict[str, bytes]) -> str: - NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") - QUOTED_TEMPLATE = '{}="{}"' - NON_QUOTED_TEMPLATE = "{}={}" - - header_value = "" - for i, (field, value) in enumerate(header_fields.items()): - if i > 0: - header_value += ", " - template = ( - QUOTED_TEMPLATE - if field not in NON_QUOTED_FIELDS - else NON_QUOTED_TEMPLATE - ) - header_value += template.format(field, to_str(value)) - - return header_value - - def _resolve_qop( - self, qop: 
typing.Optional[bytes], request: Request - ) -> typing.Optional[bytes]: - if qop is None: - return None - qops = re.split(b", ?", qop) - if b"auth" in qops: - return b"auth" - - if qops == [b"auth-int"]: - raise NotImplementedError("Digest auth-int support is not yet implemented") - - message = f'Unexpected qop value "{qop!r}" in digest auth' - raise ProtocolError(message, request=request) - - -class _DigestAuthChallenge(typing.NamedTuple): - realm: bytes - nonce: bytes - algorithm: str - opaque: typing.Optional[bytes] - qop: typing.Optional[bytes] diff --git a/server/venv/Lib/site-packages/httpx/_client.py b/server/venv/Lib/site-packages/httpx/_client.py deleted file mode 100644 index cb475e0..0000000 --- a/server/venv/Lib/site-packages/httpx/_client.py +++ /dev/null @@ -1,2006 +0,0 @@ -import datetime -import enum -import logging -import typing -import warnings -from contextlib import asynccontextmanager, contextmanager -from types import TracebackType - -from .__version__ import __version__ -from ._auth import Auth, BasicAuth, FunctionAuth -from ._config import ( - DEFAULT_LIMITS, - DEFAULT_MAX_REDIRECTS, - DEFAULT_TIMEOUT_CONFIG, - Limits, - Proxy, - Timeout, -) -from ._decoders import SUPPORTED_DECODERS -from ._exceptions import ( - InvalidURL, - RemoteProtocolError, - TooManyRedirects, - request_context, -) -from ._models import Cookies, Headers, Request, Response -from ._status_codes import codes -from ._transports.asgi import ASGITransport -from ._transports.base import AsyncBaseTransport, BaseTransport -from ._transports.default import AsyncHTTPTransport, HTTPTransport -from ._transports.wsgi import WSGITransport -from ._types import ( - AsyncByteStream, - AuthTypes, - CertTypes, - CookieTypes, - HeaderTypes, - ProxiesTypes, - QueryParamTypes, - RequestContent, - RequestData, - RequestExtensions, - RequestFiles, - SyncByteStream, - TimeoutTypes, - URLTypes, - VerifyTypes, -) -from ._urls import URL, QueryParams -from ._utils import ( - Timer, - URLPattern, - get_environment_proxies, - is_https_redirect, - same_origin, -) - -# The type annotation for @classmethod and context managers here follows PEP 484 -# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods -T = typing.TypeVar("T", bound="Client") -U = typing.TypeVar("U", bound="AsyncClient") - - -class UseClientDefault: - """ - For some parameters such as `auth=...` and `timeout=...` we need to be able - to indicate the default "unset" state, in a way that is distinctly different - to using `None`. - - The default "unset" state indicates that whatever default is set on the - client should be used. This is different to setting `None`, which - explicitly disables the parameter, possibly overriding a client default. - - For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature. - Omitting the `timeout` parameter will send a request using whatever default - timeout has been configured on the client. Including `timeout=None` will - ensure no timeout is used. - - Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant, - but it is used internally when a parameter is not included. 
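# --- Editor's sketch (illustrative; not part of the original module) ---
# The distinction documented above: omitting timeout= falls back to the
# client default, while timeout=None explicitly disables the timeout.
import httpx

client = httpx.Client(timeout=5.0)
client.get("https://www.example.org/")                # 5s client default
client.get("https://www.example.org/", timeout=None)  # no timeout at all
client.close()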
- """ - - -USE_CLIENT_DEFAULT = UseClientDefault() - - -logger = logging.getLogger("httpx") - -USER_AGENT = f"python-httpx/{__version__}" -ACCEPT_ENCODING = ", ".join( - [key for key in SUPPORTED_DECODERS.keys() if key != "identity"] -) - - -class ClientState(enum.Enum): - # UNOPENED: - # The client has been instantiated, but has not been used to send a request, - # or been opened by entering the context of a `with` block. - UNOPENED = 1 - # OPENED: - # The client has either sent a request, or is within a `with` block. - OPENED = 2 - # CLOSED: - # The client has either exited the `with` block, or `close()` has - # been called explicitly. - CLOSED = 3 - - -class BoundSyncStream(SyncByteStream): - """ - A byte stream that is bound to a given response instance, and that - ensures the `response.elapsed` is set once the response is closed. - """ - - def __init__( - self, stream: SyncByteStream, response: Response, timer: Timer - ) -> None: - self._stream = stream - self._response = response - self._timer = timer - - def __iter__(self) -> typing.Iterator[bytes]: - for chunk in self._stream: - yield chunk - - def close(self) -> None: - seconds = self._timer.sync_elapsed() - self._response.elapsed = datetime.timedelta(seconds=seconds) - self._stream.close() - - -class BoundAsyncStream(AsyncByteStream): - """ - An async byte stream that is bound to a given response instance, and that - ensures the `response.elapsed` is set once the response is closed. - """ - - def __init__( - self, stream: AsyncByteStream, response: Response, timer: Timer - ) -> None: - self._stream = stream - self._response = response - self._timer = timer - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - async for chunk in self._stream: - yield chunk - - async def aclose(self) -> None: - seconds = await self._timer.async_elapsed() - self._response.elapsed = datetime.timedelta(seconds=seconds) - await self._stream.aclose() - - -EventHook = typing.Callable[..., typing.Any] - - -class BaseClient: - def __init__( - self, - *, - auth: typing.Optional[AuthTypes] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: typing.Optional[ - typing.Mapping[str, typing.List[EventHook]] - ] = None, - base_url: URLTypes = "", - trust_env: bool = True, - default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", - ): - event_hooks = {} if event_hooks is None else event_hooks - - self._base_url = self._enforce_trailing_slash(URL(base_url)) - - self._auth = self._build_auth(auth) - self._params = QueryParams(params) - self.headers = Headers(headers) - self._cookies = Cookies(cookies) - self._timeout = Timeout(timeout) - self.follow_redirects = follow_redirects - self.max_redirects = max_redirects - self._event_hooks = { - "request": list(event_hooks.get("request", [])), - "response": list(event_hooks.get("response", [])), - } - self._trust_env = trust_env - self._default_encoding = default_encoding - self._state = ClientState.UNOPENED - - @property - def is_closed(self) -> bool: - """ - Check if the client being closed - """ - return self._state == ClientState.CLOSED - - @property - def trust_env(self) -> bool: - return self._trust_env - - def _enforce_trailing_slash(self, url: URL) -> URL: - if url.raw_path.endswith(b"/"): - return url - return 
url.copy_with(raw_path=url.raw_path + b"/") - - def _get_proxy_map( - self, proxies: typing.Optional[ProxiesTypes], allow_env_proxies: bool - ) -> typing.Dict[str, typing.Optional[Proxy]]: - if proxies is None: - if allow_env_proxies: - return { - key: None if url is None else Proxy(url=url) - for key, url in get_environment_proxies().items() - } - return {} - if isinstance(proxies, dict): - new_proxies = {} - for key, value in proxies.items(): - proxy = Proxy(url=value) if isinstance(value, (str, URL)) else value - new_proxies[str(key)] = proxy - return new_proxies - else: - proxy = Proxy(url=proxies) if isinstance(proxies, (str, URL)) else proxies - return {"all://": proxy} - - @property - def timeout(self) -> Timeout: - return self._timeout - - @timeout.setter - def timeout(self, timeout: TimeoutTypes) -> None: - self._timeout = Timeout(timeout) - - @property - def event_hooks(self) -> typing.Dict[str, typing.List[EventHook]]: - return self._event_hooks - - @event_hooks.setter - def event_hooks( - self, event_hooks: typing.Dict[str, typing.List[EventHook]] - ) -> None: - self._event_hooks = { - "request": list(event_hooks.get("request", [])), - "response": list(event_hooks.get("response", [])), - } - - @property - def auth(self) -> typing.Optional[Auth]: - """ - Authentication class used when none is passed at the request-level. - - See also [Authentication][0]. - - [0]: /quickstart/#authentication - """ - return self._auth - - @auth.setter - def auth(self, auth: AuthTypes) -> None: - self._auth = self._build_auth(auth) - - @property - def base_url(self) -> URL: - """ - Base URL to use when sending requests with relative URLs. - """ - return self._base_url - - @base_url.setter - def base_url(self, url: URLTypes) -> None: - self._base_url = self._enforce_trailing_slash(URL(url)) - - @property - def headers(self) -> Headers: - """ - HTTP headers to include when sending requests. - """ - return self._headers - - @headers.setter - def headers(self, headers: HeaderTypes) -> None: - client_headers = Headers( - { - b"Accept": b"*/*", - b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"), - b"Connection": b"keep-alive", - b"User-Agent": USER_AGENT.encode("ascii"), - } - ) - client_headers.update(headers) - self._headers = client_headers - - @property - def cookies(self) -> Cookies: - """ - Cookie values to include when sending requests. - """ - return self._cookies - - @cookies.setter - def cookies(self, cookies: CookieTypes) -> None: - self._cookies = Cookies(cookies) - - @property - def params(self) -> QueryParams: - """ - Query parameters to include in the URL when sending requests. - """ - return self._params - - @params.setter - def params(self, params: QueryParamTypes) -> None: - self._params = QueryParams(params) - - def build_request( - self, - method: str, - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Request: - """ - Build and return a request instance. - - * The `params`, `headers` and `cookies` arguments - are merged with any values set on the client. - * The `url` argument is merged with any `base_url` set on the client. 
- - See also: [Request instances][0] - - [0]: /advanced/#request-instances - """ - url = self._merge_url(url) - headers = self._merge_headers(headers) - cookies = self._merge_cookies(cookies) - params = self._merge_queryparams(params) - extensions = {} if extensions is None else extensions - if "timeout" not in extensions: - timeout = ( - self.timeout - if isinstance(timeout, UseClientDefault) - else Timeout(timeout) - ) - extensions = dict(**extensions, timeout=timeout.as_dict()) - return Request( - method, - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - extensions=extensions, - ) - - def _merge_url(self, url: URLTypes) -> URL: - """ - Merge a URL argument together with any 'base_url' on the client, - to create the URL used for the outgoing request. - """ - merge_url = URL(url) - if merge_url.is_relative_url: - # To merge URLs we always append to the base URL. To get this - # behaviour correct we always ensure the base URL ends in a '/' - # separator, and strip any leading '/' from the merge URL. - # - # So, eg... - # - # >>> client = Client(base_url="https://www.example.com/subpath") - # >>> client.base_url - # URL('https://www.example.com/subpath/') - # >>> client.build_request("GET", "/path").url - # URL('https://www.example.com/subpath/path') - merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") - return self.base_url.copy_with(raw_path=merge_raw_path) - return merge_url - - def _merge_cookies( - self, cookies: typing.Optional[CookieTypes] = None - ) -> typing.Optional[CookieTypes]: - """ - Merge a cookies argument together with any cookies on the client, - to create the cookies used for the outgoing request. - """ - if cookies or self.cookies: - merged_cookies = Cookies(self.cookies) - merged_cookies.update(cookies) - return merged_cookies - return cookies - - def _merge_headers( - self, headers: typing.Optional[HeaderTypes] = None - ) -> typing.Optional[HeaderTypes]: - """ - Merge a headers argument together with any headers on the client, - to create the headers used for the outgoing request. - """ - merged_headers = Headers(self.headers) - merged_headers.update(headers) - return merged_headers - - def _merge_queryparams( - self, params: typing.Optional[QueryParamTypes] = None - ) -> typing.Optional[QueryParamTypes]: - """ - Merge a queryparams argument together with any queryparams on the client, - to create the queryparams used for the outgoing request. 
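# --- Editor's sketch (illustrative; not part of the original module) ---
# Client-level and request-level values are merged as described above, with
# request-level entries winning on conflict. Values are illustrative.
import httpx

client = httpx.Client(base_url="https://www.example.com/", params={"a": "1"})
request = client.build_request("GET", "path", params={"b": "2"})
print(request.url)  # https://www.example.com/path?a=1&b=2
client.close()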
- """ - if params or self.params: - merged_queryparams = QueryParams(self.params) - return merged_queryparams.merge(params) - return params - - def _build_auth(self, auth: typing.Optional[AuthTypes]) -> typing.Optional[Auth]: - if auth is None: - return None - elif isinstance(auth, tuple): - return BasicAuth(username=auth[0], password=auth[1]) - elif isinstance(auth, Auth): - return auth - elif callable(auth): - return FunctionAuth(func=auth) - else: - raise TypeError(f'Invalid "auth" argument: {auth!r}') - - def _build_request_auth( - self, - request: Request, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - ) -> Auth: - auth = ( - self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) - ) - - if auth is not None: - return auth - - username, password = request.url.username, request.url.password - if username or password: - return BasicAuth(username=username, password=password) - - return Auth() - - def _build_redirect_request(self, request: Request, response: Response) -> Request: - """ - Given a request and a redirect response, return a new request that - should be used to effect the redirect. - """ - method = self._redirect_method(request, response) - url = self._redirect_url(request, response) - headers = self._redirect_headers(request, url, method) - stream = self._redirect_stream(request, method) - cookies = Cookies(self.cookies) - return Request( - method=method, - url=url, - headers=headers, - cookies=cookies, - stream=stream, - extensions=request.extensions, - ) - - def _redirect_method(self, request: Request, response: Response) -> str: - """ - When being redirected we may want to change the method of the request - based on certain specs or browser behavior. - """ - method = request.method - - # https://tools.ietf.org/html/rfc7231#section-6.4.4 - if response.status_code == codes.SEE_OTHER and method != "HEAD": - method = "GET" - - # Do what the browsers do, despite standards... - # Turn 302s into GETs. - if response.status_code == codes.FOUND and method != "HEAD": - method = "GET" - - # If a POST is responded to with a 301, turn it into a GET. - # This bizarre behaviour is explained in 'requests' issue 1704. - if response.status_code == codes.MOVED_PERMANENTLY and method == "POST": - method = "GET" - - return method - - def _redirect_url(self, request: Request, response: Response) -> URL: - """ - Return the URL for the redirect to follow. - """ - location = response.headers["Location"] - - try: - url = URL(location) - except InvalidURL as exc: - raise RemoteProtocolError( - f"Invalid URL in location header: {exc}.", request=request - ) from None - - # Handle malformed 'Location' headers that are "absolute" form, have no host. - # See: https://github.com/encode/httpx/issues/771 - if url.scheme and not url.host: - url = url.copy_with(host=request.url.host) - - # Facilitate relative 'Location' headers, as allowed by RFC 7231. - # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') - if url.is_relative_url: - url = request.url.join(url) - - # Attach previous fragment if needed (RFC 7231 7.1.2) - if request.url.fragment and not url.fragment: - url = url.copy_with(fragment=request.url.fragment) - - return url - - def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers: - """ - Return the headers that should be used for the redirect request. 
- """ - headers = Headers(request.headers) - - if not same_origin(url, request.url): - if not is_https_redirect(request.url, url): - # Strip Authorization headers when responses are redirected - # away from the origin. (Except for direct HTTP to HTTPS redirects.) - headers.pop("Authorization", None) - - # Update the Host header. - headers["Host"] = url.netloc.decode("ascii") - - if method != request.method and method == "GET": - # If we've switch to a 'GET' request, then strip any headers which - # are only relevant to the request body. - headers.pop("Content-Length", None) - headers.pop("Transfer-Encoding", None) - - # We should use the client cookie store to determine any cookie header, - # rather than whatever was on the original outgoing request. - headers.pop("Cookie", None) - - return headers - - def _redirect_stream( - self, request: Request, method: str - ) -> typing.Optional[typing.Union[SyncByteStream, AsyncByteStream]]: - """ - Return the body that should be used for the redirect request. - """ - if method != request.method and method == "GET": - return None - - return request.stream - - -class Client(BaseClient): - """ - An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc. - - It can be shared between threads. - - Usage: - - ```python - >>> client = httpx.Client() - >>> response = client.get('https://example.org') - ``` - - **Parameters:** - - * **auth** - *(optional)* An authentication class to use when sending - requests. - * **params** - *(optional)* Query parameters to include in request URLs, as - a string, dictionary, or sequence of two-tuples. - * **headers** - *(optional)* Dictionary of HTTP headers to include when - sending requests. - * **cookies** - *(optional)* Dictionary of Cookie items to include when - sending requests. - * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to - verify the identity of requested hosts. Either `True` (default CA bundle), - a path to an SSL certificate file, an `ssl.SSLContext`, or `False` - (which will disable verification). - * **cert** - *(optional)* An SSL certificate used by the requested host - to authenticate the client. Either a path to an SSL certificate file, or - two-tuple of (certificate file, key file), or a three-tuple of (certificate - file, key file, password). - * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy - URLs. - * **timeout** - *(optional)* The timeout configuration to use when sending - requests. - * **limits** - *(optional)* The limits configuration to use. - * **max_redirects** - *(optional)* The maximum number of redirect responses - that should be followed. - * **base_url** - *(optional)* A URL to use as the base when building - request URLs. - * **transport** - *(optional)* A transport class to use for sending requests - over the network. - * **app** - *(optional)* An WSGI application to send requests to, - rather than sending actual network requests. - * **trust_env** - *(optional)* Enables or disables usage of environment - variables for configuration. - * **default_encoding** - *(optional)* The default encoding to use for decoding - response text, if no charset information is included in a response Content-Type - header. Set to a callable for automatic character set detection. Default: "utf-8". 
- """ - - def __init__( - self, - *, - auth: typing.Optional[AuthTypes] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, - http1: bool = True, - http2: bool = False, - proxies: typing.Optional[ProxiesTypes] = None, - mounts: typing.Optional[typing.Mapping[str, BaseTransport]] = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - limits: Limits = DEFAULT_LIMITS, - max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: typing.Optional[ - typing.Mapping[str, typing.List[EventHook]] - ] = None, - base_url: URLTypes = "", - transport: typing.Optional[BaseTransport] = None, - app: typing.Optional[typing.Callable[..., typing.Any]] = None, - trust_env: bool = True, - default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", - ): - super().__init__( - auth=auth, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - follow_redirects=follow_redirects, - max_redirects=max_redirects, - event_hooks=event_hooks, - base_url=base_url, - trust_env=trust_env, - default_encoding=default_encoding, - ) - - if http2: - try: - import h2 # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using http2=True, but the 'h2' package is not installed. " - "Make sure to install httpx using `pip install httpx[http2]`." - ) from None - - allow_env_proxies = trust_env and app is None and transport is None - proxy_map = self._get_proxy_map(proxies, allow_env_proxies) - - self._transport = self._init_transport( - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - transport=transport, - app=app, - trust_env=trust_env, - ) - self._mounts: typing.Dict[URLPattern, typing.Optional[BaseTransport]] = { - URLPattern(key): None - if proxy is None - else self._init_proxy_transport( - proxy, - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - trust_env=trust_env, - ) - for key, proxy in proxy_map.items() - } - if mounts is not None: - self._mounts.update( - {URLPattern(key): transport for key, transport in mounts.items()} - ) - - self._mounts = dict(sorted(self._mounts.items())) - - def _init_transport( - self, - verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - transport: typing.Optional[BaseTransport] = None, - app: typing.Optional[typing.Callable[..., typing.Any]] = None, - trust_env: bool = True, - ) -> BaseTransport: - if transport is not None: - return transport - - if app is not None: - return WSGITransport(app=app) - - return HTTPTransport( - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - trust_env=trust_env, - ) - - def _init_proxy_transport( - self, - proxy: Proxy, - verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - trust_env: bool = True, - ) -> BaseTransport: - return HTTPTransport( - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - trust_env=trust_env, - proxy=proxy, - ) - - def _transport_for_url(self, url: URL) -> BaseTransport: - """ - Returns the transport instance that should be used for a given URL. - This will either be the standard connection pool, or a proxy. 
- """ - for pattern, transport in self._mounts.items(): - if pattern.matches(url): - return self._transport if transport is None else transport - - return self._transport - - def request( - self, - method: str, - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Build and send a request. - - Equivalent to: - - ```python - request = client.build_request(...) - response = client.send(request, ...) - ``` - - See `Client.build_request()`, `Client.send()` and - [Merging of configuration][0] for how the various parameters - are merged with client-level configuration. - - [0]: /advanced/#merging-of-configuration - """ - if cookies is not None: - message = ( - "Setting per-request cookies=<...> is being deprecated, because " - "the expected behaviour on cookie persistence is ambiguous. Set " - "cookies directly on the client instance instead." - ) - warnings.warn(message, DeprecationWarning) - - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - return self.send(request, auth=auth, follow_redirects=follow_redirects) - - @contextmanager - def stream( - self, - method: str, - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> typing.Iterator[Response]: - """ - Alternative to `httpx.request()` that streams the response body - instead of loading it into memory at once. - - **Parameters**: See `httpx.request`. - - See also: [Streaming Responses][0] - - [0]: /quickstart#streaming-responses - """ - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - response = self.send( - request=request, - auth=auth, - follow_redirects=follow_redirects, - stream=True, - ) - try: - yield response - finally: - response.close() - - def send( - self, - request: Request, - *, - stream: bool = False, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - ) -> Response: - """ - Send a request. - - The request is sent as-is, unmodified. 
- - Typically you'll want to build one with `Client.build_request()` - so that any client-level configuration is merged into the request, - but passing an explicit `httpx.Request()` is supported as well. - - See also: [Request instances][0] - - [0]: /advanced/#request-instances - """ - if self._state == ClientState.CLOSED: - raise RuntimeError("Cannot send a request, as the client has been closed.") - - self._state = ClientState.OPENED - follow_redirects = ( - self.follow_redirects - if isinstance(follow_redirects, UseClientDefault) - else follow_redirects - ) - - auth = self._build_request_auth(request, auth) - - response = self._send_handling_auth( - request, - auth=auth, - follow_redirects=follow_redirects, - history=[], - ) - try: - if not stream: - response.read() - - return response - - except BaseException as exc: - response.close() - raise exc - - def _send_handling_auth( - self, - request: Request, - auth: Auth, - follow_redirects: bool, - history: typing.List[Response], - ) -> Response: - auth_flow = auth.sync_auth_flow(request) - try: - request = next(auth_flow) - - while True: - response = self._send_handling_redirects( - request, - follow_redirects=follow_redirects, - history=history, - ) - try: - try: - next_request = auth_flow.send(response) - except StopIteration: - return response - - response.history = list(history) - response.read() - request = next_request - history.append(response) - - except BaseException as exc: - response.close() - raise exc - finally: - auth_flow.close() - - def _send_handling_redirects( - self, - request: Request, - follow_redirects: bool, - history: typing.List[Response], - ) -> Response: - while True: - if len(history) > self.max_redirects: - raise TooManyRedirects( - "Exceeded maximum allowed redirects.", request=request - ) - - for hook in self._event_hooks["request"]: - hook(request) - - response = self._send_single_request(request) - try: - for hook in self._event_hooks["response"]: - hook(response) - response.history = list(history) - - if not response.has_redirect_location: - return response - - request = self._build_redirect_request(request, response) - history = history + [response] - - if follow_redirects: - response.read() - else: - response.next_request = request - return response - - except BaseException as exc: - response.close() - raise exc - - def _send_single_request(self, request: Request) -> Response: - """ - Sends a single request, without handling any redirections. - """ - transport = self._transport_for_url(request.url) - timer = Timer() - timer.sync_start() - - if not isinstance(request.stream, SyncByteStream): - raise RuntimeError( - "Attempted to send an async request with a sync Client instance." 
- ) - - with request_context(request=request): - response = transport.handle_request(request) - - assert isinstance(response.stream, SyncByteStream) - - response.request = request - response.stream = BoundSyncStream( - response.stream, response=response, timer=timer - ) - self.cookies.extract_cookies(response) - response.default_encoding = self._default_encoding - - logger.info( - 'HTTP Request: %s %s "%s %d %s"', - request.method, - request.url, - response.http_version, - response.status_code, - response.reason_phrase, - ) - - return response - - def get( - self, - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `GET` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "GET", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def options( - self, - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send an `OPTIONS` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "OPTIONS", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def head( - self, - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `HEAD` request. - - **Parameters**: See `httpx.request`. 
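Since `_send_single_request()` above reports each request on the `httpx` logger, enabling INFO-level logging surfaces one line per request. A sketch (the URL is illustrative, and running this performs a real request):

```python
import logging

import httpx

logging.basicConfig(level=logging.INFO)

client = httpx.Client()
client.get("https://www.example.org/")
# INFO:httpx:HTTP Request: GET https://www.example.org/ "HTTP/1.1 200 OK"
```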
- """ - return self.request( - "HEAD", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def post( - self, - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `POST` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "POST", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def put( - self, - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `PUT` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "PUT", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def patch( - self, - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `PATCH` request. - - **Parameters**: See `httpx.request`. 
- """ - return self.request( - "PATCH", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def delete( - self, - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `DELETE` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "DELETE", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def close(self) -> None: - """ - Close transport and proxies. - """ - if self._state != ClientState.CLOSED: - self._state = ClientState.CLOSED - - self._transport.close() - for transport in self._mounts.values(): - if transport is not None: - transport.close() - - def __enter__(self: T) -> T: - if self._state != ClientState.UNOPENED: - msg = { - ClientState.OPENED: "Cannot open a client instance more than once.", - ClientState.CLOSED: "Cannot reopen a client instance, once it has been closed.", - }[self._state] - raise RuntimeError(msg) - - self._state = ClientState.OPENED - - self._transport.__enter__() - for transport in self._mounts.values(): - if transport is not None: - transport.__enter__() - return self - - def __exit__( - self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[TracebackType] = None, - ) -> None: - self._state = ClientState.CLOSED - - self._transport.__exit__(exc_type, exc_value, traceback) - for transport in self._mounts.values(): - if transport is not None: - transport.__exit__(exc_type, exc_value, traceback) - - -class AsyncClient(BaseClient): - """ - An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, - cookie persistence, etc. - - Usage: - - ```python - >>> async with httpx.AsyncClient() as client: - >>> response = await client.get('https://example.org') - ``` - - **Parameters:** - - * **auth** - *(optional)* An authentication class to use when sending - requests. - * **params** - *(optional)* Query parameters to include in request URLs, as - a string, dictionary, or sequence of two-tuples. - * **headers** - *(optional)* Dictionary of HTTP headers to include when - sending requests. - * **cookies** - *(optional)* Dictionary of Cookie items to include when - sending requests. - * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to - verify the identity of requested hosts. Either `True` (default CA bundle), - a path to an SSL certificate file, an `ssl.SSLContext`, or `False` - (which will disable verification). - * **cert** - *(optional)* An SSL certificate used by the requested host - to authenticate the client. Either a path to an SSL certificate file, or - two-tuple of (certificate file, key file), or a three-tuple of (certificate - file, key file, password). - * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be - enabled. Defaults to `False`. 
- * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy - URLs. - * **timeout** - *(optional)* The timeout configuration to use when sending - requests. - * **limits** - *(optional)* The limits configuration to use. - * **max_redirects** - *(optional)* The maximum number of redirect responses - that should be followed. - * **base_url** - *(optional)* A URL to use as the base when building - request URLs. - * **transport** - *(optional)* A transport class to use for sending requests - over the network. - * **app** - *(optional)* An ASGI application to send requests to, - rather than sending actual network requests. - * **trust_env** - *(optional)* Enables or disables usage of environment - variables for configuration. - * **default_encoding** - *(optional)* The default encoding to use for decoding - response text, if no charset information is included in a response Content-Type - header. Set to a callable for automatic character set detection. Default: "utf-8". - """ - - def __init__( - self, - *, - auth: typing.Optional[AuthTypes] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, - http1: bool = True, - http2: bool = False, - proxies: typing.Optional[ProxiesTypes] = None, - mounts: typing.Optional[typing.Mapping[str, AsyncBaseTransport]] = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - limits: Limits = DEFAULT_LIMITS, - max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: typing.Optional[ - typing.Mapping[str, typing.List[typing.Callable[..., typing.Any]]] - ] = None, - base_url: URLTypes = "", - transport: typing.Optional[AsyncBaseTransport] = None, - app: typing.Optional[typing.Callable[..., typing.Any]] = None, - trust_env: bool = True, - default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", - ): - super().__init__( - auth=auth, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - follow_redirects=follow_redirects, - max_redirects=max_redirects, - event_hooks=event_hooks, - base_url=base_url, - trust_env=trust_env, - default_encoding=default_encoding, - ) - - if http2: - try: - import h2 # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using http2=True, but the 'h2' package is not installed. " - "Make sure to install httpx using `pip install httpx[http2]`." 
-            ) from None
-
-        allow_env_proxies = trust_env and app is None and transport is None
-        proxy_map = self._get_proxy_map(proxies, allow_env_proxies)
-
-        self._transport = self._init_transport(
-            verify=verify,
-            cert=cert,
-            http1=http1,
-            http2=http2,
-            limits=limits,
-            transport=transport,
-            app=app,
-            trust_env=trust_env,
-        )
-
-        self._mounts: typing.Dict[URLPattern, typing.Optional[AsyncBaseTransport]] = {
-            URLPattern(key): None
-            if proxy is None
-            else self._init_proxy_transport(
-                proxy,
-                verify=verify,
-                cert=cert,
-                http1=http1,
-                http2=http2,
-                limits=limits,
-                trust_env=trust_env,
-            )
-            for key, proxy in proxy_map.items()
-        }
-        if mounts is not None:
-            self._mounts.update(
-                {URLPattern(key): transport for key, transport in mounts.items()}
-            )
-        self._mounts = dict(sorted(self._mounts.items()))
-
-    def _init_transport(
-        self,
-        verify: VerifyTypes = True,
-        cert: typing.Optional[CertTypes] = None,
-        http1: bool = True,
-        http2: bool = False,
-        limits: Limits = DEFAULT_LIMITS,
-        transport: typing.Optional[AsyncBaseTransport] = None,
-        app: typing.Optional[typing.Callable[..., typing.Any]] = None,
-        trust_env: bool = True,
-    ) -> AsyncBaseTransport:
-        if transport is not None:
-            return transport
-
-        if app is not None:
-            return ASGITransport(app=app)
-
-        return AsyncHTTPTransport(
-            verify=verify,
-            cert=cert,
-            http1=http1,
-            http2=http2,
-            limits=limits,
-            trust_env=trust_env,
-        )
-
-    def _init_proxy_transport(
-        self,
-        proxy: Proxy,
-        verify: VerifyTypes = True,
-        cert: typing.Optional[CertTypes] = None,
-        http1: bool = True,
-        http2: bool = False,
-        limits: Limits = DEFAULT_LIMITS,
-        trust_env: bool = True,
-    ) -> AsyncBaseTransport:
-        return AsyncHTTPTransport(
-            verify=verify,
-            cert=cert,
-            http1=http1,
-            http2=http2,
-            limits=limits,
-            trust_env=trust_env,
-            proxy=proxy,
-        )
-
-    def _transport_for_url(self, url: URL) -> AsyncBaseTransport:
-        """
-        Returns the transport instance that should be used for a given URL.
-        This will either be the standard connection pool, or a proxy.
-        """
-        for pattern, transport in self._mounts.items():
-            if pattern.matches(url):
-                return self._transport if transport is None else transport
-
-        return self._transport
-
-    async def request(
-        self,
-        method: str,
-        url: URLTypes,
-        *,
-        content: typing.Optional[RequestContent] = None,
-        data: typing.Optional[RequestData] = None,
-        files: typing.Optional[RequestFiles] = None,
-        json: typing.Optional[typing.Any] = None,
-        params: typing.Optional[QueryParamTypes] = None,
-        headers: typing.Optional[HeaderTypes] = None,
-        cookies: typing.Optional[CookieTypes] = None,
-        auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT,
-        follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT,
-        timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT,
-        extensions: typing.Optional[RequestExtensions] = None,
-    ) -> Response:
-        """
-        Build and send a request.
-
-        Equivalent to:
-
-        ```python
-        request = client.build_request(...)
-        response = await client.send(request, ...)
-        ```
-
-        See `AsyncClient.build_request()`, `AsyncClient.send()`
-        and [Merging of configuration][0] for how the various parameters
-        are merged with client-level configuration.
- - [0]: /advanced/#merging-of-configuration - """ - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - return await self.send(request, auth=auth, follow_redirects=follow_redirects) - - @asynccontextmanager - async def stream( - self, - method: str, - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> typing.AsyncIterator[Response]: - """ - Alternative to `httpx.request()` that streams the response body - instead of loading it into memory at once. - - **Parameters**: See `httpx.request`. - - See also: [Streaming Responses][0] - - [0]: /quickstart#streaming-responses - """ - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - response = await self.send( - request=request, - auth=auth, - follow_redirects=follow_redirects, - stream=True, - ) - try: - yield response - finally: - await response.aclose() - - async def send( - self, - request: Request, - *, - stream: bool = False, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - ) -> Response: - """ - Send a request. - - The request is sent as-is, unmodified. - - Typically you'll want to build one with `AsyncClient.build_request()` - so that any client-level configuration is merged into the request, - but passing an explicit `httpx.Request()` is supported as well. 
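A sketch of the streaming variant defined above, which guarantees the response is closed on exit (the URL is illustrative, and running this performs a real request):

```python
import asyncio

import httpx

async def main() -> None:
    async with httpx.AsyncClient() as client:
        async with client.stream("GET", "https://www.example.org/") as response:
            async for chunk in response.aiter_bytes():
                print(len(chunk))

asyncio.run(main())
```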
-
-        See also: [Request instances][0]
-
-        [0]: /advanced/#request-instances
-        """
-        if self._state == ClientState.CLOSED:
-            raise RuntimeError("Cannot send a request, as the client has been closed.")
-
-        self._state = ClientState.OPENED
-        follow_redirects = (
-            self.follow_redirects
-            if isinstance(follow_redirects, UseClientDefault)
-            else follow_redirects
-        )
-
-        auth = self._build_request_auth(request, auth)
-
-        response = await self._send_handling_auth(
-            request,
-            auth=auth,
-            follow_redirects=follow_redirects,
-            history=[],
-        )
-        try:
-            if not stream:
-                await response.aread()
-
-            return response
-
-        except BaseException as exc:  # pragma: no cover
-            await response.aclose()
-            raise exc
-
-    async def _send_handling_auth(
-        self,
-        request: Request,
-        auth: Auth,
-        follow_redirects: bool,
-        history: typing.List[Response],
-    ) -> Response:
-        auth_flow = auth.async_auth_flow(request)
-        try:
-            request = await auth_flow.__anext__()
-
-            while True:
-                response = await self._send_handling_redirects(
-                    request,
-                    follow_redirects=follow_redirects,
-                    history=history,
-                )
-                try:
-                    try:
-                        next_request = await auth_flow.asend(response)
-                    except StopAsyncIteration:
-                        return response
-
-                    response.history = list(history)
-                    await response.aread()
-                    request = next_request
-                    history.append(response)
-
-                except BaseException as exc:
-                    await response.aclose()
-                    raise exc
-        finally:
-            await auth_flow.aclose()
-
-    async def _send_handling_redirects(
-        self,
-        request: Request,
-        follow_redirects: bool,
-        history: typing.List[Response],
-    ) -> Response:
-        while True:
-            if len(history) > self.max_redirects:
-                raise TooManyRedirects(
-                    "Exceeded maximum allowed redirects.", request=request
-                )
-
-            for hook in self._event_hooks["request"]:
-                await hook(request)
-
-            response = await self._send_single_request(request)
-            try:
-                for hook in self._event_hooks["response"]:
-                    await hook(response)
-
-                response.history = list(history)
-
-                if not response.has_redirect_location:
-                    return response
-
-                request = self._build_redirect_request(request, response)
-                history = history + [response]
-
-                if follow_redirects:
-                    await response.aread()
-                else:
-                    response.next_request = request
-                    return response
-
-            except BaseException as exc:
-                await response.aclose()
-                raise exc
-
-    async def _send_single_request(self, request: Request) -> Response:
-        """
-        Sends a single request, without handling any redirections.
-        """
-        transport = self._transport_for_url(request.url)
-        timer = Timer()
-        await timer.async_start()
-
-        if not isinstance(request.stream, AsyncByteStream):
-            raise RuntimeError(
-                "Attempted to send a sync request with an AsyncClient instance."
- ) - - with request_context(request=request): - response = await transport.handle_async_request(request) - - assert isinstance(response.stream, AsyncByteStream) - response.request = request - response.stream = BoundAsyncStream( - response.stream, response=response, timer=timer - ) - self.cookies.extract_cookies(response) - response.default_encoding = self._default_encoding - - logger.info( - 'HTTP Request: %s %s "%s %d %s"', - request.method, - request.url, - response.http_version, - response.status_code, - response.reason_phrase, - ) - - return response - - async def get( - self, - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault, None] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `GET` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "GET", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def options( - self, - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send an `OPTIONS` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "OPTIONS", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def head( - self, - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `HEAD` request. - - **Parameters**: See `httpx.request`. 
- """ - return await self.request( - "HEAD", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def post( - self, - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `POST` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "POST", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def put( - self, - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `PUT` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "PUT", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def patch( - self, - url: URLTypes, - *, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `PATCH` request. - - **Parameters**: See `httpx.request`. 
- """ - return await self.request( - "PATCH", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def delete( - self, - url: URLTypes, - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - auth: typing.Union[AuthTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - follow_redirects: typing.Union[bool, UseClientDefault] = USE_CLIENT_DEFAULT, - timeout: typing.Union[TimeoutTypes, UseClientDefault] = USE_CLIENT_DEFAULT, - extensions: typing.Optional[RequestExtensions] = None, - ) -> Response: - """ - Send a `DELETE` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "DELETE", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def aclose(self) -> None: - """ - Close transport and proxies. - """ - if self._state != ClientState.CLOSED: - self._state = ClientState.CLOSED - - await self._transport.aclose() - for proxy in self._mounts.values(): - if proxy is not None: - await proxy.aclose() - - async def __aenter__(self: U) -> U: - if self._state != ClientState.UNOPENED: - msg = { - ClientState.OPENED: "Cannot open a client instance more than once.", - ClientState.CLOSED: "Cannot reopen a client instance, once it has been closed.", - }[self._state] - raise RuntimeError(msg) - - self._state = ClientState.OPENED - - await self._transport.__aenter__() - for proxy in self._mounts.values(): - if proxy is not None: - await proxy.__aenter__() - return self - - async def __aexit__( - self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[TracebackType] = None, - ) -> None: - self._state = ClientState.CLOSED - - await self._transport.__aexit__(exc_type, exc_value, traceback) - for proxy in self._mounts.values(): - if proxy is not None: - await proxy.__aexit__(exc_type, exc_value, traceback) diff --git a/server/venv/Lib/site-packages/httpx/_compat.py b/server/venv/Lib/site-packages/httpx/_compat.py deleted file mode 100644 index a271c6b..0000000 --- a/server/venv/Lib/site-packages/httpx/_compat.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -The _compat module is used for code which requires branching between different -Python environments. It is excluded from the code coverage checks. -""" -import ssl -import sys - -# Brotli support is optional -# The C bindings in `brotli` are recommended for CPython. -# The CFFI bindings in `brotlicffi` are recommended for PyPy and everything else. -try: - import brotlicffi as brotli -except ImportError: # pragma: no cover - try: - import brotli - except ImportError: - brotli = None - -if sys.version_info >= (3, 10) or ( - sys.version_info >= (3, 8) and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0, 7) -): - - def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: - # The OP_NO_SSL* and OP_NO_TLS* become deprecated in favor of - # 'SSLContext.minimum_version' from Python 3.7 onwards, however - # this attribute is not available unless the ssl module is compiled - # with OpenSSL 1.1.0g or newer. 
- # https://docs.python.org/3.10/library/ssl.html#ssl.SSLContext.minimum_version - # https://docs.python.org/3.7/library/ssl.html#ssl.SSLContext.minimum_version - context.minimum_version = ssl.TLSVersion.TLSv1_2 - -else: - - def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: - # If 'minimum_version' isn't available, we configure these options with - # the older deprecated variants. - context.options |= ssl.OP_NO_SSLv2 - context.options |= ssl.OP_NO_SSLv3 - context.options |= ssl.OP_NO_TLSv1 - context.options |= ssl.OP_NO_TLSv1_1 - - -__all__ = ["brotli", "set_minimum_tls_version_1_2"] diff --git a/server/venv/Lib/site-packages/httpx/_config.py b/server/venv/Lib/site-packages/httpx/_config.py deleted file mode 100644 index 39d81a2..0000000 --- a/server/venv/Lib/site-packages/httpx/_config.py +++ /dev/null @@ -1,371 +0,0 @@ -import logging -import os -import ssl -import sys -import typing -from pathlib import Path - -import certifi - -from ._compat import set_minimum_tls_version_1_2 -from ._models import Headers -from ._types import CertTypes, HeaderTypes, TimeoutTypes, URLTypes, VerifyTypes -from ._urls import URL -from ._utils import get_ca_bundle_from_env - -DEFAULT_CIPHERS = ":".join( - [ - "ECDHE+AESGCM", - "ECDHE+CHACHA20", - "DHE+AESGCM", - "DHE+CHACHA20", - "ECDH+AESGCM", - "DH+AESGCM", - "ECDH+AES", - "DH+AES", - "RSA+AESGCM", - "RSA+AES", - "!aNULL", - "!eNULL", - "!MD5", - "!DSS", - ] -) - - -logger = logging.getLogger("httpx") - - -class UnsetType: - pass # pragma: no cover - - -UNSET = UnsetType() - - -def create_ssl_context( - cert: typing.Optional[CertTypes] = None, - verify: VerifyTypes = True, - trust_env: bool = True, - http2: bool = False, -) -> ssl.SSLContext: - return SSLConfig( - cert=cert, verify=verify, trust_env=trust_env, http2=http2 - ).ssl_context - - -class SSLConfig: - """ - SSL Configuration. - """ - - DEFAULT_CA_BUNDLE_PATH = Path(certifi.where()) - - def __init__( - self, - *, - cert: typing.Optional[CertTypes] = None, - verify: VerifyTypes = True, - trust_env: bool = True, - http2: bool = False, - ): - self.cert = cert - self.verify = verify - self.trust_env = trust_env - self.http2 = http2 - self.ssl_context = self.load_ssl_context() - - def load_ssl_context(self) -> ssl.SSLContext: - logger.debug( - "load_ssl_context verify=%r cert=%r trust_env=%r http2=%r", - self.verify, - self.cert, - self.trust_env, - self.http2, - ) - - if self.verify: - return self.load_ssl_context_verify() - return self.load_ssl_context_no_verify() - - def load_ssl_context_no_verify(self) -> ssl.SSLContext: - """ - Return an SSL context for unverified connections. - """ - context = self._create_default_ssl_context() - context.check_hostname = False - context.verify_mode = ssl.CERT_NONE - self._load_client_certs(context) - return context - - def load_ssl_context_verify(self) -> ssl.SSLContext: - """ - Return an SSL context for verified connections. - """ - if self.trust_env and self.verify is True: - ca_bundle = get_ca_bundle_from_env() - if ca_bundle is not None: - self.verify = ca_bundle - - if isinstance(self.verify, ssl.SSLContext): - # Allow passing in our own SSLContext object that's pre-configured. 
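-            # For example (illustrative; the CA path is hypothetical), the
-            # caller may have built the context along these lines:
-            #
-            #     ctx = ssl.create_default_context(cafile="path/to/ca.pem")
-            #     client = httpx.Client(verify=ctx)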
- context = self.verify - self._load_client_certs(context) - return context - elif isinstance(self.verify, bool): - ca_bundle_path = self.DEFAULT_CA_BUNDLE_PATH - elif Path(self.verify).exists(): - ca_bundle_path = Path(self.verify) - else: - raise IOError( - "Could not find a suitable TLS CA certificate bundle, " - "invalid path: {}".format(self.verify) - ) - - context = self._create_default_ssl_context() - context.verify_mode = ssl.CERT_REQUIRED - context.check_hostname = True - - # Signal to server support for PHA in TLS 1.3. Raises an - # AttributeError if only read-only access is implemented. - if sys.version_info >= (3, 8): # pragma: no cover - try: - context.post_handshake_auth = True - except AttributeError: # pragma: no cover - pass - - # Disable using 'commonName' for SSLContext.check_hostname - # when the 'subjectAltName' extension isn't available. - try: - context.hostname_checks_common_name = False - except AttributeError: # pragma: no cover - pass - - if ca_bundle_path.is_file(): - cafile = str(ca_bundle_path) - logger.debug("load_verify_locations cafile=%r", cafile) - context.load_verify_locations(cafile=cafile) - elif ca_bundle_path.is_dir(): - capath = str(ca_bundle_path) - logger.debug("load_verify_locations capath=%r", capath) - context.load_verify_locations(capath=capath) - - self._load_client_certs(context) - - return context - - def _create_default_ssl_context(self) -> ssl.SSLContext: - """ - Creates the default SSLContext object that's used for both verified - and unverified connections. - """ - context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - set_minimum_tls_version_1_2(context) - context.options |= ssl.OP_NO_COMPRESSION - context.set_ciphers(DEFAULT_CIPHERS) - - if ssl.HAS_ALPN: - alpn_idents = ["http/1.1", "h2"] if self.http2 else ["http/1.1"] - context.set_alpn_protocols(alpn_idents) - - if sys.version_info >= (3, 8): # pragma: no cover - keylogfile = os.environ.get("SSLKEYLOGFILE") - if keylogfile and self.trust_env: - context.keylog_filename = keylogfile - - return context - - def _load_client_certs(self, ssl_context: ssl.SSLContext) -> None: - """ - Loads client certificates into our SSLContext object - """ - if self.cert is not None: - if isinstance(self.cert, str): - ssl_context.load_cert_chain(certfile=self.cert) - elif isinstance(self.cert, tuple) and len(self.cert) == 2: - ssl_context.load_cert_chain(certfile=self.cert[0], keyfile=self.cert[1]) - elif isinstance(self.cert, tuple) and len(self.cert) == 3: - ssl_context.load_cert_chain( - certfile=self.cert[0], - keyfile=self.cert[1], - password=self.cert[2], # type: ignore - ) - - -class Timeout: - """ - Timeout configuration. - - **Usage**: - - Timeout(None) # No timeouts. - Timeout(5.0) # 5s timeout on all operations. - Timeout(None, connect=5.0) # 5s timeout on connect, no other timeouts. - Timeout(5.0, connect=10.0) # 10s timeout on connect. 5s timeout elsewhere. - Timeout(5.0, pool=None) # No timeout on acquiring connection from pool. - # 5s timeout elsewhere. - """ - - def __init__( - self, - timeout: typing.Union[TimeoutTypes, UnsetType] = UNSET, - *, - connect: typing.Union[None, float, UnsetType] = UNSET, - read: typing.Union[None, float, UnsetType] = UNSET, - write: typing.Union[None, float, UnsetType] = UNSET, - pool: typing.Union[None, float, UnsetType] = UNSET, - ): - if isinstance(timeout, Timeout): - # Passed as a single explicit Timeout. 
- assert connect is UNSET - assert read is UNSET - assert write is UNSET - assert pool is UNSET - self.connect = timeout.connect # type: typing.Optional[float] - self.read = timeout.read # type: typing.Optional[float] - self.write = timeout.write # type: typing.Optional[float] - self.pool = timeout.pool # type: typing.Optional[float] - elif isinstance(timeout, tuple): - # Passed as a tuple. - self.connect = timeout[0] - self.read = timeout[1] - self.write = None if len(timeout) < 3 else timeout[2] - self.pool = None if len(timeout) < 4 else timeout[3] - elif not ( - isinstance(connect, UnsetType) - or isinstance(read, UnsetType) - or isinstance(write, UnsetType) - or isinstance(pool, UnsetType) - ): - self.connect = connect - self.read = read - self.write = write - self.pool = pool - else: - if isinstance(timeout, UnsetType): - raise ValueError( - "httpx.Timeout must either include a default, or set all " - "four parameters explicitly." - ) - self.connect = timeout if isinstance(connect, UnsetType) else connect - self.read = timeout if isinstance(read, UnsetType) else read - self.write = timeout if isinstance(write, UnsetType) else write - self.pool = timeout if isinstance(pool, UnsetType) else pool - - def as_dict(self) -> typing.Dict[str, typing.Optional[float]]: - return { - "connect": self.connect, - "read": self.read, - "write": self.write, - "pool": self.pool, - } - - def __eq__(self, other: typing.Any) -> bool: - return ( - isinstance(other, self.__class__) - and self.connect == other.connect - and self.read == other.read - and self.write == other.write - and self.pool == other.pool - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - if len({self.connect, self.read, self.write, self.pool}) == 1: - return f"{class_name}(timeout={self.connect})" - return ( - f"{class_name}(connect={self.connect}, " - f"read={self.read}, write={self.write}, pool={self.pool})" - ) - - -class Limits: - """ - Configuration for limits to various client behaviors. - - **Parameters:** - - * **max_connections** - The maximum number of concurrent connections that may be - established. - * **max_keepalive_connections** - Allow the connection pool to maintain - keep-alive connections below this point. Should be less than or equal - to `max_connections`. - * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds. 
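A usage sketch (the values mirror the library defaults defined further below):

```python
import httpx

limits = httpx.Limits(max_connections=100, max_keepalive_connections=20)
client = httpx.Client(limits=limits)
```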
- """ - - def __init__( - self, - *, - max_connections: typing.Optional[int] = None, - max_keepalive_connections: typing.Optional[int] = None, - keepalive_expiry: typing.Optional[float] = 5.0, - ): - self.max_connections = max_connections - self.max_keepalive_connections = max_keepalive_connections - self.keepalive_expiry = keepalive_expiry - - def __eq__(self, other: typing.Any) -> bool: - return ( - isinstance(other, self.__class__) - and self.max_connections == other.max_connections - and self.max_keepalive_connections == other.max_keepalive_connections - and self.keepalive_expiry == other.keepalive_expiry - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - return ( - f"{class_name}(max_connections={self.max_connections}, " - f"max_keepalive_connections={self.max_keepalive_connections}, " - f"keepalive_expiry={self.keepalive_expiry})" - ) - - -class Proxy: - def __init__( - self, - url: URLTypes, - *, - ssl_context: typing.Optional[ssl.SSLContext] = None, - auth: typing.Optional[typing.Tuple[str, str]] = None, - headers: typing.Optional[HeaderTypes] = None, - ): - url = URL(url) - headers = Headers(headers) - - if url.scheme not in ("http", "https", "socks5"): - raise ValueError(f"Unknown scheme for proxy URL {url!r}") - - if url.username or url.password: - # Remove any auth credentials from the URL. - auth = (url.username, url.password) - url = url.copy_with(username=None, password=None) - - self.url = url - self.auth = auth - self.headers = headers - self.ssl_context = ssl_context - - @property - def raw_auth(self) -> typing.Optional[typing.Tuple[bytes, bytes]]: - # The proxy authentication as raw bytes. - return ( - None - if self.auth is None - else (self.auth[0].encode("utf-8"), self.auth[1].encode("utf-8")) - ) - - def __repr__(self) -> str: - # The authentication is represented with the password component masked. - auth = (self.auth[0], "********") if self.auth else None - - # Build a nice concise representation. 
- url_str = f"{str(self.url)!r}" - auth_str = f", auth={auth!r}" if auth else "" - headers_str = f", headers={dict(self.headers)!r}" if self.headers else "" - return f"Proxy({url_str}{auth_str}{headers_str})" - - -DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) -DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) -DEFAULT_MAX_REDIRECTS = 20 diff --git a/server/venv/Lib/site-packages/httpx/_content.py b/server/venv/Lib/site-packages/httpx/_content.py deleted file mode 100644 index b16e12d..0000000 --- a/server/venv/Lib/site-packages/httpx/_content.py +++ /dev/null @@ -1,238 +0,0 @@ -import inspect -import warnings -from json import dumps as json_dumps -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - Dict, - Iterable, - Iterator, - Mapping, - Optional, - Tuple, - Union, -) -from urllib.parse import urlencode - -from ._exceptions import StreamClosed, StreamConsumed -from ._multipart import MultipartStream -from ._types import ( - AsyncByteStream, - RequestContent, - RequestData, - RequestFiles, - ResponseContent, - SyncByteStream, -) -from ._utils import peek_filelike_length, primitive_value_to_str - - -class ByteStream(AsyncByteStream, SyncByteStream): - def __init__(self, stream: bytes) -> None: - self._stream = stream - - def __iter__(self) -> Iterator[bytes]: - yield self._stream - - async def __aiter__(self) -> AsyncIterator[bytes]: - yield self._stream - - -class IteratorByteStream(SyncByteStream): - CHUNK_SIZE = 65_536 - - def __init__(self, stream: Iterable[bytes]): - self._stream = stream - self._is_stream_consumed = False - self._is_generator = inspect.isgenerator(stream) - - def __iter__(self) -> Iterator[bytes]: - if self._is_stream_consumed and self._is_generator: - raise StreamConsumed() - - self._is_stream_consumed = True - if hasattr(self._stream, "read"): - # File-like interfaces should use 'read' directly. - chunk = self._stream.read(self.CHUNK_SIZE) - while chunk: - yield chunk - chunk = self._stream.read(self.CHUNK_SIZE) - else: - # Otherwise iterate. - for part in self._stream: - yield part - - -class AsyncIteratorByteStream(AsyncByteStream): - CHUNK_SIZE = 65_536 - - def __init__(self, stream: AsyncIterable[bytes]): - self._stream = stream - self._is_stream_consumed = False - self._is_generator = inspect.isasyncgen(stream) - - async def __aiter__(self) -> AsyncIterator[bytes]: - if self._is_stream_consumed and self._is_generator: - raise StreamConsumed() - - self._is_stream_consumed = True - if hasattr(self._stream, "aread"): - # File-like interfaces should use 'aread' directly. - chunk = await self._stream.aread(self.CHUNK_SIZE) - while chunk: - yield chunk - chunk = await self._stream.aread(self.CHUNK_SIZE) - else: - # Otherwise iterate. - async for part in self._stream: - yield part - - -class UnattachedStream(AsyncByteStream, SyncByteStream): - """ - If a request or response is serialized using pickle, then it is no longer - attached to a stream for I/O purposes. Any stream operations should result - in `httpx.StreamClosed`. 
- """ - - def __iter__(self) -> Iterator[bytes]: - raise StreamClosed() - - async def __aiter__(self) -> AsyncIterator[bytes]: - raise StreamClosed() - yield b"" # pragma: no cover - - -def encode_content( - content: Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] -) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: - if isinstance(content, (bytes, str)): - body = content.encode("utf-8") if isinstance(content, str) else content - content_length = len(body) - headers = {"Content-Length": str(content_length)} if body else {} - return headers, ByteStream(body) - - elif isinstance(content, Iterable) and not isinstance(content, dict): - # `not isinstance(content, dict)` is a bit oddly specific, but it - # catches a case that's easy for users to make in error, and would - # otherwise pass through here, like any other bytes-iterable, - # because `dict` happens to be iterable. See issue #2491. - content_length_or_none = peek_filelike_length(content) - - if content_length_or_none is None: - headers = {"Transfer-Encoding": "chunked"} - else: - headers = {"Content-Length": str(content_length_or_none)} - return headers, IteratorByteStream(content) # type: ignore - - elif isinstance(content, AsyncIterable): - headers = {"Transfer-Encoding": "chunked"} - return headers, AsyncIteratorByteStream(content) - - raise TypeError(f"Unexpected type for 'content', {type(content)!r}") - - -def encode_urlencoded_data( - data: RequestData, -) -> Tuple[Dict[str, str], ByteStream]: - plain_data = [] - for key, value in data.items(): - if isinstance(value, (list, tuple)): - plain_data.extend([(key, primitive_value_to_str(item)) for item in value]) - else: - plain_data.append((key, primitive_value_to_str(value))) - body = urlencode(plain_data, doseq=True).encode("utf-8") - content_length = str(len(body)) - content_type = "application/x-www-form-urlencoded" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_multipart_data( - data: RequestData, files: RequestFiles, boundary: Optional[bytes] -) -> Tuple[Dict[str, str], MultipartStream]: - multipart = MultipartStream(data=data, files=files, boundary=boundary) - headers = multipart.get_headers() - return headers, multipart - - -def encode_text(text: str) -> Tuple[Dict[str, str], ByteStream]: - body = text.encode("utf-8") - content_length = str(len(body)) - content_type = "text/plain; charset=utf-8" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_html(html: str) -> Tuple[Dict[str, str], ByteStream]: - body = html.encode("utf-8") - content_length = str(len(body)) - content_type = "text/html; charset=utf-8" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_json(json: Any) -> Tuple[Dict[str, str], ByteStream]: - body = json_dumps(json).encode("utf-8") - content_length = str(len(body)) - content_type = "application/json" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_request( - content: Optional[RequestContent] = None, - data: Optional[RequestData] = None, - files: Optional[RequestFiles] = None, - json: Optional[Any] = None, - boundary: Optional[bytes] = None, -) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: - """ - Handles encoding the given `content`, `data`, `files`, and `json`, - returning a two-tuple of (, ). 
- """ - if data is not None and not isinstance(data, Mapping): - # We prefer to separate `content=` - # for raw request content, and `data=
` for url encoded or - # multipart form content. - # - # However for compat with requests, we *do* still support - # `data=<bytes...>` usages. We deal with that case here, treating it - # as if `content=<...>` had been supplied instead. - message = "Use 'content=<...>' to upload raw bytes/text content." - warnings.warn(message, DeprecationWarning) - return encode_content(data) - - if content is not None: - return encode_content(content) - elif files: - return encode_multipart_data(data or {}, files, boundary) - elif data: - return encode_urlencoded_data(data) - elif json is not None: - return encode_json(json) - - return {}, ByteStream(b"") - - -def encode_response( - content: Optional[ResponseContent] = None, - text: Optional[str] = None, - html: Optional[str] = None, - json: Optional[Any] = None, -) -> Tuple[Dict[str, str], Union[SyncByteStream, AsyncByteStream]]: - """ - Handles encoding the given `content`, returning a two-tuple of - (<headers>, <stream>). - """ - if content is not None: - return encode_content(content) - elif text is not None: - return encode_text(text) - elif html is not None: - return encode_html(html) - elif json is not None: - return encode_json(json) - - return {}, ByteStream(b"") diff --git a/server/venv/Lib/site-packages/httpx/_decoders.py b/server/venv/Lib/site-packages/httpx/_decoders.py deleted file mode 100644 index 500ce7f..0000000 --- a/server/venv/Lib/site-packages/httpx/_decoders.py +++ /dev/null @@ -1,324 +0,0 @@ -""" -Handlers for Content-Encoding. - -See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding -""" -import codecs -import io -import typing -import zlib - -from ._compat import brotli -from ._exceptions import DecodingError - - -class ContentDecoder: - def decode(self, data: bytes) -> bytes: - raise NotImplementedError() # pragma: no cover - - def flush(self) -> bytes: - raise NotImplementedError() # pragma: no cover - - -class IdentityDecoder(ContentDecoder): - """ - Handle unencoded data. - """ - - def decode(self, data: bytes) -> bytes: - return data - - def flush(self) -> bytes: - return b"" - - -class DeflateDecoder(ContentDecoder): - """ - Handle 'deflate' decoding. - - See: https://stackoverflow.com/questions/1838699 - """ - - def __init__(self) -> None: - self.first_attempt = True - self.decompressor = zlib.decompressobj() - - def decode(self, data: bytes) -> bytes: - was_first_attempt = self.first_attempt - self.first_attempt = False - try: - return self.decompressor.decompress(data) - except zlib.error as exc: - if was_first_attempt: - self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS) - return self.decode(data) - raise DecodingError(str(exc)) from exc - - def flush(self) -> bytes: - try: - return self.decompressor.flush() - except zlib.error as exc: # pragma: no cover - raise DecodingError(str(exc)) from exc - - -class GZipDecoder(ContentDecoder): - """ - Handle 'gzip' decoding. - - See: https://stackoverflow.com/questions/1838699 - """ - - def __init__(self) -> None: - self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16) - - def decode(self, data: bytes) -> bytes: - try: - return self.decompressor.decompress(data) - except zlib.error as exc: - raise DecodingError(str(exc)) from exc - - def flush(self) -> bytes: - try: - return self.decompressor.flush() - except zlib.error as exc: # pragma: no cover - raise DecodingError(str(exc)) from exc - - -class BrotliDecoder(ContentDecoder): - """ - Handle 'brotli' decoding. - - Requires `pip install brotlipy`. See: https://brotlipy.readthedocs.io/ - or `pip install brotli`.
See https://github.com/google/brotli - Supports both 'brotlipy' and 'Brotli' packages since they share an import - name. The top branches are for 'brotlipy' and bottom branches for 'Brotli' - """ - - def __init__(self) -> None: - if brotli is None: # pragma: no cover - raise ImportError( - "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' " - "packages have been installed. " - "Make sure to install httpx using `pip install httpx[brotli]`." - ) from None - - self.decompressor = brotli.Decompressor() - self.seen_data = False - self._decompress: typing.Callable[[bytes], bytes] - if hasattr(self.decompressor, "decompress"): - # The 'brotlicffi' package. - self._decompress = self.decompressor.decompress # pragma: no cover - else: - # The 'brotli' package. - self._decompress = self.decompressor.process # pragma: no cover - - def decode(self, data: bytes) -> bytes: - if not data: - return b"" - self.seen_data = True - try: - return self._decompress(data) - except brotli.error as exc: - raise DecodingError(str(exc)) from exc - - def flush(self) -> bytes: - if not self.seen_data: - return b"" - try: - if hasattr(self.decompressor, "finish"): - # Only available in the 'brotlicffi' package. - - # As the decompressor decompresses eagerly, this - # will never actually emit any data. However, it will potentially throw - # errors if a truncated or damaged data stream has been used. - self.decompressor.finish() # pragma: no cover - return b"" - except brotli.error as exc: # pragma: no cover - raise DecodingError(str(exc)) from exc - - -class MultiDecoder(ContentDecoder): - """ - Handle the case where multiple encodings have been applied. - """ - - def __init__(self, children: typing.Sequence[ContentDecoder]) -> None: - """ - 'children' should be a sequence of decoders in the order in which - each was applied. - """ - # Note that we reverse the order for decoding. - self.children = list(reversed(children)) - - def decode(self, data: bytes) -> bytes: - for child in self.children: - data = child.decode(data) - return data - - def flush(self) -> bytes: - data = b"" - for child in self.children: - data = child.decode(data) + child.flush() - return data - - -class ByteChunker: - """ - Handles returning byte content in fixed-size chunks. - """ - - def __init__(self, chunk_size: typing.Optional[int] = None) -> None: - self._buffer = io.BytesIO() - self._chunk_size = chunk_size - - def decode(self, content: bytes) -> typing.List[bytes]: - if self._chunk_size is None: - return [content] if content else [] - - self._buffer.write(content) - if self._buffer.tell() >= self._chunk_size: - value = self._buffer.getvalue() - chunks = [ - value[i : i + self._chunk_size] - for i in range(0, len(value), self._chunk_size) - ] - if len(chunks[-1]) == self._chunk_size: - self._buffer.seek(0) - self._buffer.truncate() - return chunks - else: - self._buffer.seek(0) - self._buffer.write(chunks[-1]) - self._buffer.truncate() - return chunks[:-1] - else: - return [] - - def flush(self) -> typing.List[bytes]: - value = self._buffer.getvalue() - self._buffer.seek(0) - self._buffer.truncate() - return [value] if value else [] - - -class TextChunker: - """ - Handles returning text content in fixed-size chunks. 
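The two chunker classes above are internal, but their effect shows through the public `chunk_size` arguments. A minimal sketch (in-memory response, so no network or extra packages are assumed):

```
import httpx

response = httpx.Response(200, content=b"abcdefghij")

# The decoded body is re-cut into fixed-size pieces on iteration.
assert list(response.iter_bytes(chunk_size=4)) == [b"abcd", b"efgh", b"ij"]
```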
- """ - - def __init__(self, chunk_size: typing.Optional[int] = None) -> None: - self._buffer = io.StringIO() - self._chunk_size = chunk_size - - def decode(self, content: str) -> typing.List[str]: - if self._chunk_size is None: - return [content] - - self._buffer.write(content) - if self._buffer.tell() >= self._chunk_size: - value = self._buffer.getvalue() - chunks = [ - value[i : i + self._chunk_size] - for i in range(0, len(value), self._chunk_size) - ] - if len(chunks[-1]) == self._chunk_size: - self._buffer.seek(0) - self._buffer.truncate() - return chunks - else: - self._buffer.seek(0) - self._buffer.write(chunks[-1]) - self._buffer.truncate() - return chunks[:-1] - else: - return [] - - def flush(self) -> typing.List[str]: - value = self._buffer.getvalue() - self._buffer.seek(0) - self._buffer.truncate() - return [value] if value else [] - - -class TextDecoder: - """ - Handles incrementally decoding bytes into text - """ - - def __init__(self, encoding: str = "utf-8"): - self.decoder = codecs.getincrementaldecoder(encoding)(errors="replace") - - def decode(self, data: bytes) -> str: - return self.decoder.decode(data) - - def flush(self) -> str: - return self.decoder.decode(b"", True) - - -class LineDecoder: - """ - Handles incrementally reading lines from text. - - Has the same behaviour as the stdllib splitlines, but handling the input iteratively. - """ - - def __init__(self) -> None: - self.buffer: typing.List[str] = [] - self.trailing_cr: bool = False - - def decode(self, text: str) -> typing.List[str]: - # See https://docs.python.org/3/library/stdtypes.html#str.splitlines - NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029" - - # We always push a trailing `\r` into the next decode iteration. - if self.trailing_cr: - text = "\r" + text - self.trailing_cr = False - if text.endswith("\r"): - self.trailing_cr = True - text = text[:-1] - - if not text: - return [] - - trailing_newline = text[-1] in NEWLINE_CHARS - lines = text.splitlines() - - if len(lines) == 1 and not trailing_newline: - # No new lines, buffer the input and continue. - self.buffer.append(lines[0]) - return [] - - if self.buffer: - # Include any existing buffer in the first portion of the - # splitlines result. - lines = ["".join(self.buffer) + lines[0]] + lines[1:] - self.buffer = [] - - if not trailing_newline: - # If the last segment of splitlines is not newline terminated, - # then drop it from our output and start a new buffer. 
- self.buffer = [lines.pop()] - - return lines - - def flush(self) -> typing.List[str]: - if not self.buffer and not self.trailing_cr: - return [] - - lines = ["".join(self.buffer)] - self.buffer = [] - self.trailing_cr = False - return lines - - -SUPPORTED_DECODERS = { - "identity": IdentityDecoder, - "gzip": GZipDecoder, - "deflate": DeflateDecoder, - "br": BrotliDecoder, -} - - -if brotli is None: - SUPPORTED_DECODERS.pop("br") # pragma: no cover diff --git a/server/venv/Lib/site-packages/httpx/_exceptions.py b/server/venv/Lib/site-packages/httpx/_exceptions.py deleted file mode 100644 index 24a4f8a..0000000 --- a/server/venv/Lib/site-packages/httpx/_exceptions.py +++ /dev/null @@ -1,343 +0,0 @@ -""" -Our exception hierarchy: - -* HTTPError - x RequestError - + TransportError - - TimeoutException - · ConnectTimeout - · ReadTimeout - · WriteTimeout - · PoolTimeout - - NetworkError - · ConnectError - · ReadError - · WriteError - · CloseError - - ProtocolError - · LocalProtocolError - · RemoteProtocolError - - ProxyError - - UnsupportedProtocol - + DecodingError - + TooManyRedirects - x HTTPStatusError -* InvalidURL -* CookieConflict -* StreamError - x StreamConsumed - x StreamClosed - x ResponseNotRead - x RequestNotRead -""" -import contextlib -import typing - -if typing.TYPE_CHECKING: - from ._models import Request, Response # pragma: no cover - - -class HTTPError(Exception): - """ - Base class for `RequestError` and `HTTPStatusError`. - - Useful for `try...except` blocks when issuing a request, - and then calling `.raise_for_status()`. - - For example: - - ``` - try: - response = httpx.get("https://www.example.com") - response.raise_for_status() - except httpx.HTTPError as exc: - print(f"HTTP Exception for {exc.request.url} - {exc}") - ``` - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - self._request: typing.Optional["Request"] = None - - @property - def request(self) -> "Request": - if self._request is None: - raise RuntimeError("The .request property has not been set.") - return self._request - - @request.setter - def request(self, request: "Request") -> None: - self._request = request - - -class RequestError(HTTPError): - """ - Base class for all exceptions that may occur when issuing a `.request()`. - """ - - def __init__( - self, message: str, *, request: typing.Optional["Request"] = None - ) -> None: - super().__init__(message) - # At the point an exception is raised we won't typically have a request - # instance to associate it with. - # - # The 'request_context' context manager is used within the Client and - # Response methods in order to ensure that any raised exceptions - # have a `.request` property set on them. - self._request = request - - -class TransportError(RequestError): - """ - Base class for all exceptions that occur at the level of the Transport API. - """ - - -# Timeout exceptions... - - -class TimeoutException(TransportError): - """ - The base class for timeout errors. - - An operation has timed out. - """ - - -class ConnectTimeout(TimeoutException): - """ - Timed out while connecting to the host. - """ - - -class ReadTimeout(TimeoutException): - """ - Timed out while receiving data from the host. - """ - - -class WriteTimeout(TimeoutException): - """ - Timed out while sending data to the host. - """ - - -class PoolTimeout(TimeoutException): - """ - Timed out waiting to acquire a connection from the pool. - """ - - -# Core networking exceptions... 
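Since the whole hierarchy above descends from `HTTPError`, callers can pick their granularity when catching. A hedged sketch (the URL and the tiny timeout are placeholders chosen to force an error):

```
import httpx

try:
    response = httpx.get("https://example.com/", timeout=0.001)  # placeholder URL
    response.raise_for_status()
except httpx.TimeoutException as exc:   # ConnectTimeout, ReadTimeout, ...
    print(f"Timed out: {exc!r}")
except httpx.NetworkError as exc:       # ConnectError, ReadError, ...
    print(f"Network failure: {exc!r}")
except httpx.HTTPError as exc:          # any remaining request or status error
    print(f"Request failed: {exc!r}")
```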
- - -class NetworkError(TransportError): - """ - The base class for network-related errors. - - An error occurred while interacting with the network. - """ - - -class ReadError(NetworkError): - """ - Failed to receive data from the network. - """ - - -class WriteError(NetworkError): - """ - Failed to send data through the network. - """ - - -class ConnectError(NetworkError): - """ - Failed to establish a connection. - """ - - -class CloseError(NetworkError): - """ - Failed to close a connection. - """ - - -# Other transport exceptions... - - -class ProxyError(TransportError): - """ - An error occurred while establishing a proxy connection. - """ - - -class UnsupportedProtocol(TransportError): - """ - Attempted to make a request to an unsupported protocol. - - For example issuing a request to `ftp://www.example.com`. - """ - - -class ProtocolError(TransportError): - """ - The protocol was violated. - """ - - -class LocalProtocolError(ProtocolError): - """ - A protocol was violated by the client. - - For example if the user instantiated a `Request` instance explicitly, - failed to include the mandatory `Host:` header, and then issued it directly - using `client.send()`. - """ - - -class RemoteProtocolError(ProtocolError): - """ - The protocol was violated by the server. - - For example, returning malformed HTTP. - """ - - -# Other request exceptions... - - -class DecodingError(RequestError): - """ - Decoding of the response failed, due to a malformed encoding. - """ - - -class TooManyRedirects(RequestError): - """ - Too many redirects. - """ - - -# Client errors - - -class HTTPStatusError(HTTPError): - """ - The response had an error HTTP status of 4xx or 5xx. - - May be raised when calling `response.raise_for_status()`. - """ - - def __init__( - self, message: str, *, request: "Request", response: "Response" - ) -> None: - super().__init__(message) - self.request = request - self.response = response - - -class InvalidURL(Exception): - """ - URL is improperly formed or cannot be parsed. - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - - -class CookieConflict(Exception): - """ - Attempted to look up a cookie by name, but multiple cookies existed. - - Can occur when calling `response.cookies.get(...)`. - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - - -# Stream exceptions... - -# These may occur as the result of a programming error, by accessing -# the request/response stream in an invalid manner. - - -class StreamError(RuntimeError): - """ - The base class for stream exceptions. - - The developer made an error in accessing the request stream in - an invalid way. - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - - -class StreamConsumed(StreamError): - """ - Attempted to read or stream content, but the content has already - been streamed. - """ - - def __init__(self) -> None: - message = ( - "Attempted to read or stream some content, but the content has " - "already been streamed. For requests, this could be due to passing " - "a generator as request content, and then receiving a redirect " - "response or a secondary request as part of an authentication flow. " - "For responses, this could be due to attempting to stream the response " - "content more than once." - ) - super().__init__(message) - - -class StreamClosed(StreamError): - """ - Attempted to read or stream response content, but the request has been - closed.
- """ - - def __init__(self) -> None: - message = ( - "Attempted to read or stream content, but the stream has " "been closed." - ) - super().__init__(message) - - -class ResponseNotRead(StreamError): - """ - Attempted to access streaming response content, without having called `read()`. - """ - - def __init__(self) -> None: - message = "Attempted to access streaming response content, without having called `read()`." - super().__init__(message) - - -class RequestNotRead(StreamError): - """ - Attempted to access streaming request content, without having called `read()`. - """ - - def __init__(self) -> None: - message = "Attempted to access streaming request content, without having called `read()`." - super().__init__(message) - - -@contextlib.contextmanager -def request_context( - request: typing.Optional["Request"] = None, -) -> typing.Iterator[None]: - """ - A context manager that can be used to attach the given request context - to any `RequestError` exceptions that are raised within the block. - """ - try: - yield - except RequestError as exc: - if request is not None: - exc.request = request - raise exc diff --git a/server/venv/Lib/site-packages/httpx/_main.py b/server/venv/Lib/site-packages/httpx/_main.py deleted file mode 100644 index 7c12ce8..0000000 --- a/server/venv/Lib/site-packages/httpx/_main.py +++ /dev/null @@ -1,506 +0,0 @@ -import functools -import json -import sys -import typing - -import click -import httpcore -import pygments.lexers -import pygments.util -import rich.console -import rich.markup -import rich.progress -import rich.syntax -import rich.table - -from ._client import Client -from ._exceptions import RequestError -from ._models import Response -from ._status_codes import codes - - -def print_help() -> None: - console = rich.console.Console() - - console.print("[bold]HTTPX :butterfly:", justify="center") - console.print() - console.print("A next generation HTTP client.", justify="center") - console.print() - console.print( - "Usage: [bold]httpx[/bold] [cyan] [OPTIONS][/cyan] ", justify="left" - ) - console.print() - - table = rich.table.Table.grid(padding=1, pad_edge=True) - table.add_column("Parameter", no_wrap=True, justify="left", style="bold") - table.add_column("Description") - table.add_row( - "-m, --method [cyan]METHOD", - "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n" - "[Default: GET, or POST if a request body is included]", - ) - table.add_row( - "-p, --params [cyan] ...", - "Query parameters to include in the request URL.", - ) - table.add_row( - "-c, --content [cyan]TEXT", "Byte content to include in the request body." - ) - table.add_row( - "-d, --data [cyan] ...", "Form data to include in the request body." - ) - table.add_row( - "-f, --files [cyan] ...", - "Form files to include in the request body.", - ) - table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.") - table.add_row( - "-h, --headers [cyan] ...", - "Include additional HTTP headers in the request.", - ) - table.add_row( - "--cookies [cyan] ...", "Cookies to include in the request." - ) - table.add_row( - "--auth [cyan]", - "Username and password to include in the request. Specify '-' for the password to use " - "a password prompt. Note that using --verbose/-v will expose the Authorization " - "header, including the password encoding in a trivially reversible format.", - ) - - table.add_row( - "--proxies [cyan]URL", - "Send the request via a proxy. 
Should be the URL giving the proxy address.", - ) - - table.add_row( - "--timeout [cyan]FLOAT", - "Timeout value to use for network operations, such as establishing the connection, " - "reading some data, etc... [Default: 5.0]", - ) - - table.add_row("--follow-redirects", "Automatically follow redirects.") - table.add_row("--no-verify", "Disable SSL verification.") - table.add_row( - "--http2", "Send the request using HTTP/2, if the remote server supports it." - ) - - table.add_row( - "--download [cyan]FILE", - "Save the response content as a file, rather than displaying it.", - ) - - table.add_row("-v, --verbose", "Verbose output. Show request as well as response.") - table.add_row("--help", "Show this message and exit.") - console.print(table) - - -def get_lexer_for_response(response: Response) -> str: - content_type = response.headers.get("Content-Type") - if content_type is not None: - mime_type, _, _ = content_type.partition(";") - try: - return typing.cast( - str, pygments.lexers.get_lexer_for_mimetype(mime_type.strip()).name - ) - except pygments.util.ClassNotFound: # pragma: no cover - pass - return "" # pragma: no cover - - -def format_request_headers(request: httpcore.Request, http2: bool = False) -> str: - version = "HTTP/2" if http2 else "HTTP/1.1" - headers = [ - (name.lower() if http2 else name, value) for name, value in request.headers - ] - method = request.method.decode("ascii") - target = request.url.target.decode("ascii") - lines = [f"{method} {target} {version}"] + [ - f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers - ] - return "\n".join(lines) - - -def format_response_headers( - http_version: bytes, - status: int, - reason_phrase: typing.Optional[bytes], - headers: typing.List[typing.Tuple[bytes, bytes]], -) -> str: - version = http_version.decode("ascii") - reason = ( - codes.get_reason_phrase(status) - if reason_phrase is None - else reason_phrase.decode("ascii") - ) - lines = [f"{version} {status} {reason}"] + [ - f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers - ] - return "\n".join(lines) - - -def print_request_headers(request: httpcore.Request, http2: bool = False) -> None: - console = rich.console.Console() - http_text = format_request_headers(request, http2=http2) - syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) - console.print(syntax) - syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) - console.print(syntax) - - -def print_response_headers( - http_version: bytes, - status: int, - reason_phrase: typing.Optional[bytes], - headers: typing.List[typing.Tuple[bytes, bytes]], -) -> None: - console = rich.console.Console() - http_text = format_response_headers(http_version, status, reason_phrase, headers) - syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) - console.print(syntax) - syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) - console.print(syntax) - - -def print_response(response: Response) -> None: - console = rich.console.Console() - lexer_name = get_lexer_for_response(response) - if lexer_name: - if lexer_name.lower() == "json": - try: - data = response.json() - text = json.dumps(data, indent=4) - except ValueError: # pragma: no cover - text = response.text - else: - text = response.text - - syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True) - console.print(syntax) - else: - console.print(f"<{len(response.content)} bytes of binary data>") - - -_PCTRTT = 
typing.Tuple[typing.Tuple[str, str], ...] -_PCTRTTT = typing.Tuple[_PCTRTT, ...] -_PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]] - - -def format_certificate(cert: _PeerCertRetDictType) -> str: # pragma: no cover - lines = [] - for key, value in cert.items(): - if isinstance(value, (list, tuple)): - lines.append(f"* {key}:") - for item in value: - if key in ("subject", "issuer"): - for sub_item in item: - lines.append(f"* {sub_item[0]}: {sub_item[1]!r}") - elif isinstance(item, tuple) and len(item) == 2: - lines.append(f"* {item[0]}: {item[1]!r}") - else: - lines.append(f"* {item!r}") - else: - lines.append(f"* {key}: {value!r}") - return "\n".join(lines) - - -def trace( - name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False -) -> None: - console = rich.console.Console() - if name == "connection.connect_tcp.started" and verbose: - host = info["host"] - console.print(f"* Connecting to {host!r}") - elif name == "connection.connect_tcp.complete" and verbose: - stream = info["return_value"] - server_addr = stream.get_extra_info("server_addr") - console.print(f"* Connected to {server_addr[0]!r} on port {server_addr[1]}") - elif name == "connection.start_tls.complete" and verbose: # pragma: no cover - stream = info["return_value"] - ssl_object = stream.get_extra_info("ssl_object") - version = ssl_object.version() - cipher = ssl_object.cipher() - server_cert = ssl_object.getpeercert() - alpn = ssl_object.selected_alpn_protocol() - console.print(f"* SSL established using {version!r} / {cipher[0]!r}") - console.print(f"* Selected ALPN protocol: {alpn!r}") - if server_cert: - console.print("* Server certificate:") - console.print(format_certificate(server_cert)) - elif name == "http11.send_request_headers.started" and verbose: - request = info["request"] - print_request_headers(request, http2=False) - elif name == "http2.send_request_headers.started" and verbose: # pragma: no cover - request = info["request"] - print_request_headers(request, http2=True) - elif name == "http11.receive_response_headers.complete": - http_version, status, reason_phrase, headers = info["return_value"] - print_response_headers(http_version, status, reason_phrase, headers) - elif name == "http2.receive_response_headers.complete": # pragma: no cover - status, headers = info["return_value"] - http_version = b"HTTP/2" - reason_phrase = None - print_response_headers(http_version, status, reason_phrase, headers) - - -def download_response(response: Response, download: typing.BinaryIO) -> None: - console = rich.console.Console() - console.print() - content_length = response.headers.get("Content-Length") - with rich.progress.Progress( - "[progress.description]{task.description}", - "[progress.percentage]{task.percentage:>3.0f}%", - rich.progress.BarColumn(bar_width=None), - rich.progress.DownloadColumn(), - rich.progress.TransferSpeedColumn(), - ) as progress: - description = f"Downloading [bold]{rich.markup.escape(download.name)}" - download_task = progress.add_task( - description, - total=int(content_length or 0), - start=content_length is not None, - ) - for chunk in response.iter_bytes(): - download.write(chunk) - progress.update(download_task, completed=response.num_bytes_downloaded) - - -def validate_json( - ctx: click.Context, - param: typing.Union[click.Option, click.Parameter], - value: typing.Any, -) -> typing.Any: - if value is None: - return None - - try: - return json.loads(value) - except json.JSONDecodeError: # pragma: no cover - raise click.BadParameter("Not valid 
JSON") - - -def validate_auth( - ctx: click.Context, - param: typing.Union[click.Option, click.Parameter], - value: typing.Any, -) -> typing.Any: - if value == (None, None): - return None - - username, password = value - if password == "-": # pragma: no cover - password = click.prompt("Password", hide_input=True) - return (username, password) - - -def handle_help( - ctx: click.Context, - param: typing.Union[click.Option, click.Parameter], - value: typing.Any, -) -> None: - if not value or ctx.resilient_parsing: - return - - print_help() - ctx.exit() - - -@click.command(add_help_option=False) -@click.argument("url", type=str) -@click.option( - "--method", - "-m", - "method", - type=str, - help=( - "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. " - "[Default: GET, or POST if a request body is included]" - ), -) -@click.option( - "--params", - "-p", - "params", - type=(str, str), - multiple=True, - help="Query parameters to include in the request URL.", -) -@click.option( - "--content", - "-c", - "content", - type=str, - help="Byte content to include in the request body.", -) -@click.option( - "--data", - "-d", - "data", - type=(str, str), - multiple=True, - help="Form data to include in the request body.", -) -@click.option( - "--files", - "-f", - "files", - type=(str, click.File(mode="rb")), - multiple=True, - help="Form files to include in the request body.", -) -@click.option( - "--json", - "-j", - "json", - type=str, - callback=validate_json, - help="JSON data to include in the request body.", -) -@click.option( - "--headers", - "-h", - "headers", - type=(str, str), - multiple=True, - help="Include additional HTTP headers in the request.", -) -@click.option( - "--cookies", - "cookies", - type=(str, str), - multiple=True, - help="Cookies to include in the request.", -) -@click.option( - "--auth", - "auth", - type=(str, str), - default=(None, None), - callback=validate_auth, - help=( - "Username and password to include in the request. " - "Specify '-' for the password to use a password prompt. " - "Note that using --verbose/-v will expose the Authorization header, " - "including the password encoding in a trivially reversible format." - ), -) -@click.option( - "--proxies", - "proxies", - type=str, - default=None, - help="Send the request via a proxy. Should be the URL giving the proxy address.", -) -@click.option( - "--timeout", - "timeout", - type=float, - default=5.0, - help=( - "Timeout value to use for network operations, such as establishing the " - "connection, reading some data, etc... [Default: 5.0]" - ), -) -@click.option( - "--follow-redirects", - "follow_redirects", - is_flag=True, - default=False, - help="Automatically follow redirects.", -) -@click.option( - "--no-verify", - "verify", - is_flag=True, - default=True, - help="Disable SSL verification.", -) -@click.option( - "--http2", - "http2", - type=bool, - is_flag=True, - default=False, - help="Send the request using HTTP/2, if the remote server supports it.", -) -@click.option( - "--download", - type=click.File("wb"), - help="Save the response content as a file, rather than displaying it.", -) -@click.option( - "--verbose", - "-v", - type=bool, - is_flag=True, - default=False, - help="Verbose. 
Show request as well as response.", -) -@click.option( - "--help", - is_flag=True, - is_eager=True, - expose_value=False, - callback=handle_help, - help="Show this message and exit.", -) -def main( - url: str, - method: str, - params: typing.List[typing.Tuple[str, str]], - content: str, - data: typing.List[typing.Tuple[str, str]], - files: typing.List[typing.Tuple[str, click.File]], - json: str, - headers: typing.List[typing.Tuple[str, str]], - cookies: typing.List[typing.Tuple[str, str]], - auth: typing.Optional[typing.Tuple[str, str]], - proxies: str, - timeout: float, - follow_redirects: bool, - verify: bool, - http2: bool, - download: typing.Optional[typing.BinaryIO], - verbose: bool, -) -> None: - """ - An HTTP command line client. - Sends a request and displays the response. - """ - if not method: - method = "POST" if content or data or files or json else "GET" - - try: - with Client( - proxies=proxies, - timeout=timeout, - verify=verify, - http2=http2, - ) as client: - with client.stream( - method, - url, - params=list(params), - content=content, - data=dict(data), - files=files, # type: ignore - json=json, - headers=headers, - cookies=dict(cookies), - auth=auth, - follow_redirects=follow_redirects, - extensions={"trace": functools.partial(trace, verbose=verbose)}, - ) as response: - if download is not None: - download_response(response, download) - else: - response.read() - if response.content: - print_response(response) - - except RequestError as exc: - console = rich.console.Console() - console.print(f"[red]{type(exc).__name__}[/red]: {exc}") - sys.exit(1) - - sys.exit(0 if response.is_success else 1) diff --git a/server/venv/Lib/site-packages/httpx/_models.py b/server/venv/Lib/site-packages/httpx/_models.py deleted file mode 100644 index e1e45cf..0000000 --- a/server/venv/Lib/site-packages/httpx/_models.py +++ /dev/null @@ -1,1209 +0,0 @@ -import datetime -import email.message -import json as jsonlib -import typing -import urllib.request -from collections.abc import Mapping -from http.cookiejar import Cookie, CookieJar - -from ._content import ByteStream, UnattachedStream, encode_request, encode_response -from ._decoders import ( - SUPPORTED_DECODERS, - ByteChunker, - ContentDecoder, - IdentityDecoder, - LineDecoder, - MultiDecoder, - TextChunker, - TextDecoder, -) -from ._exceptions import ( - CookieConflict, - HTTPStatusError, - RequestNotRead, - ResponseNotRead, - StreamClosed, - StreamConsumed, - request_context, -) -from ._multipart import get_multipart_boundary_from_content_type -from ._status_codes import codes -from ._types import ( - AsyncByteStream, - CookieTypes, - HeaderTypes, - QueryParamTypes, - RequestContent, - RequestData, - RequestExtensions, - RequestFiles, - ResponseContent, - ResponseExtensions, - SyncByteStream, -) -from ._urls import URL -from ._utils import ( - guess_json_utf, - is_known_encoding, - normalize_header_key, - normalize_header_value, - obfuscate_sensitive_headers, - parse_content_type_charset, - parse_header_links, -) - - -class Headers(typing.MutableMapping[str, str]): - """ - HTTP headers, as a case-insensitive multi-dict. 
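Before the implementation details, a quick sketch of the multi-dict behaviour the docstring above describes (nothing assumed beyond an `httpx` install):

```
import httpx

headers = httpx.Headers([("Set-Cookie", "a=1"), ("Set-Cookie", "b=2")])

# Lookups are case-insensitive; duplicate keys are joined with commas...
assert headers["SET-COOKIE"] == "a=1, b=2"
# ...while multi_items() preserves the individual occurrences.
assert headers.multi_items() == [("set-cookie", "a=1"), ("set-cookie", "b=2")]
```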
- """ - - def __init__( - self, - headers: typing.Optional[HeaderTypes] = None, - encoding: typing.Optional[str] = None, - ) -> None: - if headers is None: - self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] - elif isinstance(headers, Headers): - self._list = list(headers._list) - elif isinstance(headers, Mapping): - self._list = [ - ( - normalize_header_key(k, lower=False, encoding=encoding), - normalize_header_key(k, lower=True, encoding=encoding), - normalize_header_value(v, encoding), - ) - for k, v in headers.items() - ] - else: - self._list = [ - ( - normalize_header_key(k, lower=False, encoding=encoding), - normalize_header_key(k, lower=True, encoding=encoding), - normalize_header_value(v, encoding), - ) - for k, v in headers - ] - - self._encoding = encoding - - @property - def encoding(self) -> str: - """ - Header encoding is mandated as ascii, but we allow fallbacks to utf-8 - or iso-8859-1. - """ - if self._encoding is None: - for encoding in ["ascii", "utf-8"]: - for key, value in self.raw: - try: - key.decode(encoding) - value.decode(encoding) - except UnicodeDecodeError: - break - else: - # The else block runs if 'break' did not occur, meaning - # all values fitted the encoding. - self._encoding = encoding - break - else: - # The ISO-8859-1 encoding covers all 256 code points in a byte, - # so will never raise decode errors. - self._encoding = "iso-8859-1" - return self._encoding - - @encoding.setter - def encoding(self, value: str) -> None: - self._encoding = value - - @property - def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]: - """ - Returns a list of the raw header items, as byte pairs. - """ - return [(raw_key, value) for raw_key, _, value in self._list] - - def keys(self) -> typing.KeysView[str]: - return {key.decode(self.encoding): None for _, key, value in self._list}.keys() - - def values(self) -> typing.ValuesView[str]: - values_dict: typing.Dict[str, str] = {} - for _, key, value in self._list: - str_key = key.decode(self.encoding) - str_value = value.decode(self.encoding) - if str_key in values_dict: - values_dict[str_key] += f", {str_value}" - else: - values_dict[str_key] = str_value - return values_dict.values() - - def items(self) -> typing.ItemsView[str, str]: - """ - Return `(key, value)` items of headers. Concatenate headers - into a single comma separated value when a key occurs multiple times. - """ - values_dict: typing.Dict[str, str] = {} - for _, key, value in self._list: - str_key = key.decode(self.encoding) - str_value = value.decode(self.encoding) - if str_key in values_dict: - values_dict[str_key] += f", {str_value}" - else: - values_dict[str_key] = str_value - return values_dict.items() - - def multi_items(self) -> typing.List[typing.Tuple[str, str]]: - """ - Return a list of `(key, value)` pairs of headers. Allow multiple - occurrences of the same key without concatenating into a single - comma separated value. - """ - return [ - (key.decode(self.encoding), value.decode(self.encoding)) - for _, key, value in self._list - ] - - def get(self, key: str, default: typing.Any = None) -> typing.Any: - """ - Return a header value. If multiple occurrences of the header occur - then concatenate them together with commas. - """ - try: - return self[key] - except KeyError: - return default - - def get_list(self, key: str, split_commas: bool = False) -> typing.List[str]: - """ - Return a list of all header values for a given key. 
- If `split_commas=True` is passed, then any comma separated header - values are split into multiple return strings. - """ - get_header_key = key.lower().encode(self.encoding) - - values = [ - item_value.decode(self.encoding) - for _, item_key, item_value in self._list - if item_key.lower() == get_header_key - ] - - if not split_commas: - return values - - split_values = [] - for value in values: - split_values.extend([item.strip() for item in value.split(",")]) - return split_values - - def update(self, headers: typing.Optional[HeaderTypes] = None) -> None: # type: ignore - headers = Headers(headers) - for key in headers.keys(): - if key in self: - self.pop(key) - self._list.extend(headers._list) - - def copy(self) -> "Headers": - return Headers(self, encoding=self.encoding) - - def __getitem__(self, key: str) -> str: - """ - Return a single header value. - - If there are multiple headers with the same key, then we concatenate - them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2 - """ - normalized_key = key.lower().encode(self.encoding) - - items = [ - header_value.decode(self.encoding) - for _, header_key, header_value in self._list - if header_key == normalized_key - ] - - if items: - return ", ".join(items) - - raise KeyError(key) - - def __setitem__(self, key: str, value: str) -> None: - """ - Set the header `key` to `value`, removing any duplicate entries. - Retains insertion order. - """ - set_key = key.encode(self._encoding or "utf-8") - set_value = value.encode(self._encoding or "utf-8") - lookup_key = set_key.lower() - - found_indexes = [ - idx - for idx, (_, item_key, _) in enumerate(self._list) - if item_key == lookup_key - ] - - for idx in reversed(found_indexes[1:]): - del self._list[idx] - - if found_indexes: - idx = found_indexes[0] - self._list[idx] = (set_key, lookup_key, set_value) - else: - self._list.append((set_key, lookup_key, set_value)) - - def __delitem__(self, key: str) -> None: - """ - Remove the header `key`. 
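For illustration, the case-insensitive accessors in action; a minimal sketch:

```
import httpx

headers = httpx.Headers({"Vary": "Accept, Accept-Encoding"})

# get_list() can split comma-separated values into separate strings.
assert headers.get_list("vary", split_commas=True) == ["Accept", "Accept-Encoding"]

headers["VARY"] = "Origin"   # case-insensitive replace, insertion order kept
del headers["vary"]          # case-insensitive removal
assert "vary" not in headers
```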
- """ - del_key = key.lower().encode(self.encoding) - - pop_indexes = [ - idx - for idx, (_, item_key, _) in enumerate(self._list) - if item_key.lower() == del_key - ] - - if not pop_indexes: - raise KeyError(key) - - for idx in reversed(pop_indexes): - del self._list[idx] - - def __contains__(self, key: typing.Any) -> bool: - header_key = key.lower().encode(self.encoding) - return header_key in [key for _, key, _ in self._list] - - def __iter__(self) -> typing.Iterator[typing.Any]: - return iter(self.keys()) - - def __len__(self) -> int: - return len(self._list) - - def __eq__(self, other: typing.Any) -> bool: - try: - other_headers = Headers(other) - except ValueError: - return False - - self_list = [(key, value) for _, key, value in self._list] - other_list = [(key, value) for _, key, value in other_headers._list] - return sorted(self_list) == sorted(other_list) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - - encoding_str = "" - if self.encoding != "ascii": - encoding_str = f", encoding={self.encoding!r}" - - as_list = list(obfuscate_sensitive_headers(self.multi_items())) - as_dict = dict(as_list) - - no_duplicate_keys = len(as_dict) == len(as_list) - if no_duplicate_keys: - return f"{class_name}({as_dict!r}{encoding_str})" - return f"{class_name}({as_list!r}{encoding_str})" - - -class Request: - def __init__( - self, - method: typing.Union[str, bytes], - url: typing.Union["URL", str], - *, - params: typing.Optional[QueryParamTypes] = None, - headers: typing.Optional[HeaderTypes] = None, - cookies: typing.Optional[CookieTypes] = None, - content: typing.Optional[RequestContent] = None, - data: typing.Optional[RequestData] = None, - files: typing.Optional[RequestFiles] = None, - json: typing.Optional[typing.Any] = None, - stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None, - extensions: typing.Optional[RequestExtensions] = None, - ): - self.method = ( - method.decode("ascii").upper() - if isinstance(method, bytes) - else method.upper() - ) - self.url = URL(url) - if params is not None: - self.url = self.url.copy_merge_params(params=params) - self.headers = Headers(headers) - self.extensions = {} if extensions is None else extensions - - if cookies: - Cookies(cookies).set_cookie_header(self) - - if stream is None: - content_type: typing.Optional[str] = self.headers.get("content-type") - headers, stream = encode_request( - content=content, - data=data, - files=files, - json=json, - boundary=get_multipart_boundary_from_content_type( - content_type=content_type.encode(self.headers.encoding) - if content_type - else None - ), - ) - self._prepare(headers) - self.stream = stream - # Load the request body, except for streaming content. - if isinstance(stream, ByteStream): - self.read() - else: - # There's an important distinction between `Request(content=...)`, - # and `Request(stream=...)`. - # - # Using `content=...` implies automatically populated `Host` and content - # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`. - # - # Using `stream=...` will not automatically include *any* auto-populated headers. - # - # As an end-user you don't really need `stream=...`. It's only - # useful when: - # - # * Preserving the request stream when copying requests, eg for redirects. - # * Creating request instances on the *server-side* of the transport API. 
- self.stream = stream - - def _prepare(self, default_headers: typing.Dict[str, str]) -> None: - for key, value in default_headers.items(): - # Ignore Transfer-Encoding if the Content-Length has been set explicitly. - if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: - continue - self.headers.setdefault(key, value) - - auto_headers: typing.List[typing.Tuple[bytes, bytes]] = [] - - has_host = "Host" in self.headers - has_content_length = ( - "Content-Length" in self.headers or "Transfer-Encoding" in self.headers - ) - - if not has_host and self.url.host: - auto_headers.append((b"Host", self.url.netloc)) - if not has_content_length and self.method in ("POST", "PUT", "PATCH"): - auto_headers.append((b"Content-Length", b"0")) - - self.headers = Headers(auto_headers + self.headers.raw) - - @property - def content(self) -> bytes: - if not hasattr(self, "_content"): - raise RequestNotRead() - return self._content - - def read(self) -> bytes: - """ - Read and return the request content. - """ - if not hasattr(self, "_content"): - assert isinstance(self.stream, typing.Iterable) - self._content = b"".join(self.stream) - if not isinstance(self.stream, ByteStream): - # If a streaming request has been read entirely into memory, then - # we can replace the stream with a raw bytes implementation, - # to ensure that any non-replayable streams can still be used. - self.stream = ByteStream(self._content) - return self._content - - async def aread(self) -> bytes: - """ - Read and return the request content. - """ - if not hasattr(self, "_content"): - assert isinstance(self.stream, typing.AsyncIterable) - self._content = b"".join([part async for part in self.stream]) - if not isinstance(self.stream, ByteStream): - # If a streaming request has been read entirely into memory, then - # we can replace the stream with a raw bytes implementation, - # to ensure that any non-replayable streams can still be used. - self.stream = ByteStream(self._content) - return self._content - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - url = str(self.url) - return f"<{class_name}({self.method!r}, {url!r})>" - - def __getstate__(self) -> typing.Dict[str, typing.Any]: - return { - name: value - for name, value in self.__dict__.items() - if name not in ["extensions", "stream"] - } - - def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: - for name, value in state.items(): - setattr(self, name, value) - self.extensions = {} - self.stream = UnattachedStream() - - -class Response: - def __init__( - self, - status_code: int, - *, - headers: typing.Optional[HeaderTypes] = None, - content: typing.Optional[ResponseContent] = None, - text: typing.Optional[str] = None, - html: typing.Optional[str] = None, - json: typing.Any = None, - stream: typing.Union[SyncByteStream, AsyncByteStream, None] = None, - request: typing.Optional[Request] = None, - extensions: typing.Optional[ResponseExtensions] = None, - history: typing.Optional[typing.List["Response"]] = None, - default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8", - ): - self.status_code = status_code - self.headers = Headers(headers) - - self._request: typing.Optional[Request] = request - - # When follow_redirects=False and a redirect is received, - # the client will set `response.next_request`. 
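For illustration, a hedged sketch of driving redirects by hand via `next_request` (the URL is a placeholder, and a redirect response is assumed):

```
import httpx

with httpx.Client(follow_redirects=False) as client:
    response = client.get("https://example.com/old-path")  # placeholder URL
    while response.has_redirect_location:
        # The client prepared, but did not send, the follow-up request.
        assert response.next_request is not None
        response = client.send(response.next_request)
```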
- self.next_request: typing.Optional[Request] = None - - self.extensions: ResponseExtensions = {} if extensions is None else extensions - self.history = [] if history is None else list(history) - - self.is_closed = False - self.is_stream_consumed = False - - self.default_encoding = default_encoding - - if stream is None: - headers, stream = encode_response(content, text, html, json) - self._prepare(headers) - self.stream = stream - if isinstance(stream, ByteStream): - # Load the response body, except for streaming content. - self.read() - else: - # There's an important distinction between `Response(content=...)`, - # and `Response(stream=...)`. - # - # Using `content=...` implies automatically populated content headers, - # of either `Content-Length: ...` or `Transfer-Encoding: chunked`. - # - # Using `stream=...` will not automatically include any content headers. - # - # As an end-user you don't really need `stream=...`. It's only - # useful when creating response instances having received a stream - # from the transport API. - self.stream = stream - - self._num_bytes_downloaded = 0 - - def _prepare(self, default_headers: typing.Dict[str, str]) -> None: - for key, value in default_headers.items(): - # Ignore Transfer-Encoding if the Content-Length has been set explicitly. - if key.lower() == "transfer-encoding" and "content-length" in self.headers: - continue - self.headers.setdefault(key, value) - - @property - def elapsed(self) -> datetime.timedelta: - """ - Returns the time taken for the complete request/response - cycle to complete. - """ - if not hasattr(self, "_elapsed"): - raise RuntimeError( - "'.elapsed' may only be accessed after the response " - "has been read or closed." - ) - return self._elapsed - - @elapsed.setter - def elapsed(self, elapsed: datetime.timedelta) -> None: - self._elapsed = elapsed - - @property - def request(self) -> Request: - """ - Returns the request instance associated to the current response. - """ - if self._request is None: - raise RuntimeError( - "The request instance has not been set on this response." - ) - return self._request - - @request.setter - def request(self, value: Request) -> None: - self._request = value - - @property - def http_version(self) -> str: - try: - http_version: bytes = self.extensions["http_version"] - except KeyError: - return "HTTP/1.1" - else: - return http_version.decode("ascii", errors="ignore") - - @property - def reason_phrase(self) -> str: - try: - reason_phrase: bytes = self.extensions["reason_phrase"] - except KeyError: - return codes.get_reason_phrase(self.status_code) - else: - return reason_phrase.decode("ascii", errors="ignore") - - @property - def url(self) -> URL: - """ - Returns the URL for which the request was made. - """ - return self.request.url - - @property - def content(self) -> bytes: - if not hasattr(self, "_content"): - raise ResponseNotRead() - return self._content - - @property - def text(self) -> str: - if not hasattr(self, "_text"): - content = self.content - if not content: - self._text = "" - else: - decoder = TextDecoder(encoding=self.encoding or "utf-8") - self._text = "".join([decoder.decode(self.content), decoder.flush()]) - return self._text - - @property - def encoding(self) -> typing.Optional[str]: - """ - Return an encoding to use for decoding the byte content into text. - The priority for determining this is given by... - - * `.encoding = <>` has been set explicitly. - * The encoding as specified by the charset parameter in the Content-Type header. 
- * The encoding as determined by `default_encoding`, which may either be - a string like "utf-8" indicating the encoding to use, or may be a callable - which enables charset autodetection. - """ - if not hasattr(self, "_encoding"): - encoding = self.charset_encoding - if encoding is None or not is_known_encoding(encoding): - if isinstance(self.default_encoding, str): - encoding = self.default_encoding - elif hasattr(self, "_content"): - encoding = self.default_encoding(self._content) - self._encoding = encoding or "utf-8" - return self._encoding - - @encoding.setter - def encoding(self, value: str) -> None: - self._encoding = value - - @property - def charset_encoding(self) -> typing.Optional[str]: - """ - Return the encoding, as specified by the Content-Type header. - """ - content_type = self.headers.get("Content-Type") - if content_type is None: - return None - - return parse_content_type_charset(content_type) - - def _get_content_decoder(self) -> ContentDecoder: - """ - Returns a decoder instance which can be used to decode the raw byte - content, depending on the Content-Encoding used in the response. - """ - if not hasattr(self, "_decoder"): - decoders: typing.List[ContentDecoder] = [] - values = self.headers.get_list("content-encoding", split_commas=True) - for value in values: - value = value.strip().lower() - try: - decoder_cls = SUPPORTED_DECODERS[value] - decoders.append(decoder_cls()) - except KeyError: - continue - - if len(decoders) == 1: - self._decoder = decoders[0] - elif len(decoders) > 1: - self._decoder = MultiDecoder(children=decoders) - else: - self._decoder = IdentityDecoder() - - return self._decoder - - @property - def is_informational(self) -> bool: - """ - A property which is `True` for 1xx status codes, `False` otherwise. - """ - return codes.is_informational(self.status_code) - - @property - def is_success(self) -> bool: - """ - A property which is `True` for 2xx status codes, `False` otherwise. - """ - return codes.is_success(self.status_code) - - @property - def is_redirect(self) -> bool: - """ - A property which is `True` for 3xx status codes, `False` otherwise. - - Note that not all responses with a 3xx status code indicate a URL redirect. - - Use `response.has_redirect_location` to determine responses with a properly - formed URL redirection. - """ - return codes.is_redirect(self.status_code) - - @property - def is_client_error(self) -> bool: - """ - A property which is `True` for 4xx status codes, `False` otherwise. - """ - return codes.is_client_error(self.status_code) - - @property - def is_server_error(self) -> bool: - """ - A property which is `True` for 5xx status codes, `False` otherwise. - """ - return codes.is_server_error(self.status_code) - - @property - def is_error(self) -> bool: - """ - A property which is `True` for 4xx and 5xx status codes, `False` otherwise. - """ - return codes.is_error(self.status_code) - - @property - def has_redirect_location(self) -> bool: - """ - Returns True for 3xx responses with a properly formed URL redirection, - `False` otherwise. - """ - return ( - self.status_code - in ( - # 301 (Cacheable redirect. Method may change to GET.) - codes.MOVED_PERMANENTLY, - # 302 (Uncacheable redirect. Method may change to GET.) - codes.FOUND, - # 303 (Client should make a GET or HEAD request.) - codes.SEE_OTHER, - # 307 (Equiv. 302, but retain method) - codes.TEMPORARY_REDIRECT, - # 308 (Equiv. 
301, but retain method) - codes.PERMANENT_REDIRECT, - ) - and "Location" in self.headers - ) - - def raise_for_status(self) -> "Response": - """ - Raise the `HTTPStatusError` if one occurred. - """ - request = self._request - if request is None: - raise RuntimeError( - "Cannot call `raise_for_status` as the request " - "instance has not been set on this response." - ) - - if self.is_success: - return self - - if self.has_redirect_location: - message = ( - "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" - "Redirect location: '{0.headers[location]}'\n" - "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}" - ) - else: - message = ( - "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" - "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}" - ) - - status_class = self.status_code // 100 - error_types = { - 1: "Informational response", - 3: "Redirect response", - 4: "Client error", - 5: "Server error", - } - error_type = error_types.get(status_class, "Invalid status code") - message = message.format(self, error_type=error_type) - raise HTTPStatusError(message, request=request, response=self) - - def json(self, **kwargs: typing.Any) -> typing.Any: - if self.charset_encoding is None and self.content and len(self.content) > 3: - encoding = guess_json_utf(self.content) - if encoding is not None: - return jsonlib.loads(self.content.decode(encoding), **kwargs) - return jsonlib.loads(self.text, **kwargs) - - @property - def cookies(self) -> "Cookies": - if not hasattr(self, "_cookies"): - self._cookies = Cookies() - self._cookies.extract_cookies(self) - return self._cookies - - @property - def links(self) -> typing.Dict[typing.Optional[str], typing.Dict[str, str]]: - """ - Returns the parsed header links of the response, if any. - """ - header = self.headers.get("link") - ldict = {} - if header: - links = parse_header_links(header) - for link in links: - key = link.get("rel") or link.get("url") - ldict[key] = link - return ldict - - @property - def num_bytes_downloaded(self) -> int: - return self._num_bytes_downloaded - - def __repr__(self) -> str: - return f"<Response [{self.status_code} {self.reason_phrase}]>" - - def __getstate__(self) -> typing.Dict[str, typing.Any]: - return { - name: value - for name, value in self.__dict__.items() - if name not in ["extensions", "stream", "is_closed", "_decoder"] - } - - def __setstate__(self, state: typing.Dict[str, typing.Any]) -> None: - for name, value in state.items(): - setattr(self, name, value) - self.is_closed = True - self.extensions = {} - self.stream = UnattachedStream() - - def read(self) -> bytes: - """ - Read and return the response content. - """ - if not hasattr(self, "_content"): - self._content = b"".join(self.iter_bytes()) - return self._content - - def iter_bytes( - self, chunk_size: typing.Optional[int] = None - ) -> typing.Iterator[bytes]: - """ - A byte-iterator over the decoded response content. - This allows us to handle gzip, deflate, and brotli encoded responses.
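A short usage sketch for the streaming path (URL and filename are placeholders); because `iter_bytes()` yields decoded content, a gzipped body is written out decompressed:

```
import httpx

with httpx.stream("GET", "https://example.com/large-file") as response:  # placeholder URL
    with open("large-file.bin", "wb") as fileobj:
        for chunk in response.iter_bytes(chunk_size=65_536):
            fileobj.write(chunk)
```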
- """ - if hasattr(self, "_content"): - chunk_size = len(self._content) if chunk_size is None else chunk_size - for i in range(0, len(self._content), max(chunk_size, 1)): - yield self._content[i : i + chunk_size] - else: - decoder = self._get_content_decoder() - chunker = ByteChunker(chunk_size=chunk_size) - with request_context(request=self._request): - for raw_bytes in self.iter_raw(): - decoded = decoder.decode(raw_bytes) - for chunk in chunker.decode(decoded): - yield chunk - decoded = decoder.flush() - for chunk in chunker.decode(decoded): - yield chunk # pragma: no cover - for chunk in chunker.flush(): - yield chunk - - def iter_text( - self, chunk_size: typing.Optional[int] = None - ) -> typing.Iterator[str]: - """ - A str-iterator over the decoded response content - that handles both gzip, deflate, etc but also detects the content's - string encoding. - """ - decoder = TextDecoder(encoding=self.encoding or "utf-8") - chunker = TextChunker(chunk_size=chunk_size) - with request_context(request=self._request): - for byte_content in self.iter_bytes(): - text_content = decoder.decode(byte_content) - for chunk in chunker.decode(text_content): - yield chunk - text_content = decoder.flush() - for chunk in chunker.decode(text_content): - yield chunk - for chunk in chunker.flush(): - yield chunk - - def iter_lines(self) -> typing.Iterator[str]: - decoder = LineDecoder() - with request_context(request=self._request): - for text in self.iter_text(): - for line in decoder.decode(text): - yield line - for line in decoder.flush(): - yield line - - def iter_raw( - self, chunk_size: typing.Optional[int] = None - ) -> typing.Iterator[bytes]: - """ - A byte-iterator over the raw response content. - """ - if self.is_stream_consumed: - raise StreamConsumed() - if self.is_closed: - raise StreamClosed() - if not isinstance(self.stream, SyncByteStream): - raise RuntimeError("Attempted to call a sync iterator on an async stream.") - - self.is_stream_consumed = True - self._num_bytes_downloaded = 0 - chunker = ByteChunker(chunk_size=chunk_size) - - with request_context(request=self._request): - for raw_stream_bytes in self.stream: - self._num_bytes_downloaded += len(raw_stream_bytes) - for chunk in chunker.decode(raw_stream_bytes): - yield chunk - - for chunk in chunker.flush(): - yield chunk - - self.close() - - def close(self) -> None: - """ - Close the response and release the connection. - Automatically called if the response body is read to completion. - """ - if not isinstance(self.stream, SyncByteStream): - raise RuntimeError("Attempted to call an sync close on an async stream.") - - if not self.is_closed: - self.is_closed = True - with request_context(request=self._request): - self.stream.close() - - async def aread(self) -> bytes: - """ - Read and return the response content. - """ - if not hasattr(self, "_content"): - self._content = b"".join([part async for part in self.aiter_bytes()]) - return self._content - - async def aiter_bytes( - self, chunk_size: typing.Optional[int] = None - ) -> typing.AsyncIterator[bytes]: - """ - A byte-iterator over the decoded response content. - This allows us to handle gzip, deflate, and brotli encoded responses. 
- """ - if hasattr(self, "_content"): - chunk_size = len(self._content) if chunk_size is None else chunk_size - for i in range(0, len(self._content), max(chunk_size, 1)): - yield self._content[i : i + chunk_size] - else: - decoder = self._get_content_decoder() - chunker = ByteChunker(chunk_size=chunk_size) - with request_context(request=self._request): - async for raw_bytes in self.aiter_raw(): - decoded = decoder.decode(raw_bytes) - for chunk in chunker.decode(decoded): - yield chunk - decoded = decoder.flush() - for chunk in chunker.decode(decoded): - yield chunk # pragma: no cover - for chunk in chunker.flush(): - yield chunk - - async def aiter_text( - self, chunk_size: typing.Optional[int] = None - ) -> typing.AsyncIterator[str]: - """ - A str-iterator over the decoded response content - that handles both gzip, deflate, etc but also detects the content's - string encoding. - """ - decoder = TextDecoder(encoding=self.encoding or "utf-8") - chunker = TextChunker(chunk_size=chunk_size) - with request_context(request=self._request): - async for byte_content in self.aiter_bytes(): - text_content = decoder.decode(byte_content) - for chunk in chunker.decode(text_content): - yield chunk - text_content = decoder.flush() - for chunk in chunker.decode(text_content): - yield chunk - for chunk in chunker.flush(): - yield chunk - - async def aiter_lines(self) -> typing.AsyncIterator[str]: - decoder = LineDecoder() - with request_context(request=self._request): - async for text in self.aiter_text(): - for line in decoder.decode(text): - yield line - for line in decoder.flush(): - yield line - - async def aiter_raw( - self, chunk_size: typing.Optional[int] = None - ) -> typing.AsyncIterator[bytes]: - """ - A byte-iterator over the raw response content. - """ - if self.is_stream_consumed: - raise StreamConsumed() - if self.is_closed: - raise StreamClosed() - if not isinstance(self.stream, AsyncByteStream): - raise RuntimeError("Attempted to call an async iterator on an sync stream.") - - self.is_stream_consumed = True - self._num_bytes_downloaded = 0 - chunker = ByteChunker(chunk_size=chunk_size) - - with request_context(request=self._request): - async for raw_stream_bytes in self.stream: - self._num_bytes_downloaded += len(raw_stream_bytes) - for chunk in chunker.decode(raw_stream_bytes): - yield chunk - - for chunk in chunker.flush(): - yield chunk - - await self.aclose() - - async def aclose(self) -> None: - """ - Close the response and release the connection. - Automatically called if the response body is read to completion. - """ - if not isinstance(self.stream, AsyncByteStream): - raise RuntimeError("Attempted to call an async close on an sync stream.") - - if not self.is_closed: - self.is_closed = True - with request_context(request=self._request): - await self.stream.aclose() - - -class Cookies(typing.MutableMapping[str, str]): - """ - HTTP Cookies, as a mutable mapping. 
- """ - - def __init__(self, cookies: typing.Optional[CookieTypes] = None) -> None: - if cookies is None or isinstance(cookies, dict): - self.jar = CookieJar() - if isinstance(cookies, dict): - for key, value in cookies.items(): - self.set(key, value) - elif isinstance(cookies, list): - self.jar = CookieJar() - for key, value in cookies: - self.set(key, value) - elif isinstance(cookies, Cookies): - self.jar = CookieJar() - for cookie in cookies.jar: - self.jar.set_cookie(cookie) - else: - self.jar = cookies - - def extract_cookies(self, response: Response) -> None: - """ - Loads any cookies based on the response `Set-Cookie` headers. - """ - urllib_response = self._CookieCompatResponse(response) - urllib_request = self._CookieCompatRequest(response.request) - - self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore - - def set_cookie_header(self, request: Request) -> None: - """ - Sets an appropriate 'Cookie:' HTTP header on the `Request`. - """ - urllib_request = self._CookieCompatRequest(request) - self.jar.add_cookie_header(urllib_request) - - def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: - """ - Set a cookie value by name. May optionally include domain and path. - """ - kwargs = { - "version": 0, - "name": name, - "value": value, - "port": None, - "port_specified": False, - "domain": domain, - "domain_specified": bool(domain), - "domain_initial_dot": domain.startswith("."), - "path": path, - "path_specified": bool(path), - "secure": False, - "expires": None, - "discard": True, - "comment": None, - "comment_url": None, - "rest": {"HttpOnly": None}, - "rfc2109": False, - } - cookie = Cookie(**kwargs) # type: ignore - self.jar.set_cookie(cookie) - - def get( # type: ignore - self, - name: str, - default: typing.Optional[str] = None, - domain: typing.Optional[str] = None, - path: typing.Optional[str] = None, - ) -> typing.Optional[str]: - """ - Get a cookie by name. May optionally include domain and path - in order to specify exactly which cookie to retrieve. - """ - value = None - for cookie in self.jar: - if cookie.name == name: - if domain is None or cookie.domain == domain: - if path is None or cookie.path == path: - if value is not None: - message = f"Multiple cookies exist with name={name}" - raise CookieConflict(message) - value = cookie.value - - if value is None: - return default - return value - - def delete( - self, - name: str, - domain: typing.Optional[str] = None, - path: typing.Optional[str] = None, - ) -> None: - """ - Delete a cookie by name. May optionally include domain and path - in order to specify exactly which cookie to delete. - """ - if domain is not None and path is not None: - return self.jar.clear(domain, path, name) - - remove = [ - cookie - for cookie in self.jar - if cookie.name == name - and (domain is None or cookie.domain == domain) - and (path is None or cookie.path == path) - ] - - for cookie in remove: - self.jar.clear(cookie.domain, cookie.path, cookie.name) - - def clear( - self, domain: typing.Optional[str] = None, path: typing.Optional[str] = None - ) -> None: - """ - Delete all cookies. Optionally include a domain and path in - order to only delete a subset of all the cookies. 
- """ - args = [] - if domain is not None: - args.append(domain) - if path is not None: - assert domain is not None - args.append(path) - self.jar.clear(*args) - - def update(self, cookies: typing.Optional[CookieTypes] = None) -> None: # type: ignore - cookies = Cookies(cookies) - for cookie in cookies.jar: - self.jar.set_cookie(cookie) - - def __setitem__(self, name: str, value: str) -> None: - return self.set(name, value) - - def __getitem__(self, name: str) -> str: - value = self.get(name) - if value is None: - raise KeyError(name) - return value - - def __delitem__(self, name: str) -> None: - return self.delete(name) - - def __len__(self) -> int: - return len(self.jar) - - def __iter__(self) -> typing.Iterator[str]: - return (cookie.name for cookie in self.jar) - - def __bool__(self) -> bool: - for _ in self.jar: - return True - return False - - def __repr__(self) -> str: - cookies_repr = ", ".join( - [ - f"" - for cookie in self.jar - ] - ) - - return f"" - - class _CookieCompatRequest(urllib.request.Request): - """ - Wraps a `Request` instance up in a compatibility interface suitable - for use with `CookieJar` operations. - """ - - def __init__(self, request: Request) -> None: - super().__init__( - url=str(request.url), - headers=dict(request.headers), - method=request.method, - ) - self.request = request - - def add_unredirected_header(self, key: str, value: str) -> None: - super().add_unredirected_header(key, value) - self.request.headers[key] = value - - class _CookieCompatResponse: - """ - Wraps a `Request` instance up in a compatibility interface suitable - for use with `CookieJar` operations. - """ - - def __init__(self, response: Response): - self.response = response - - def info(self) -> email.message.Message: - info = email.message.Message() - for key, value in self.response.headers.multi_items(): - # Note that setting `info[key]` here is an "append" operation, - # not a "replace" operation. - # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__ - info[key] = value - return info diff --git a/server/venv/Lib/site-packages/httpx/_multipart.py b/server/venv/Lib/site-packages/httpx/_multipart.py deleted file mode 100644 index 446f4ad..0000000 --- a/server/venv/Lib/site-packages/httpx/_multipart.py +++ /dev/null @@ -1,267 +0,0 @@ -import binascii -import io -import os -import typing -from pathlib import Path - -from ._types import ( - AsyncByteStream, - FileContent, - FileTypes, - RequestData, - RequestFiles, - SyncByteStream, -) -from ._utils import ( - format_form_param, - guess_content_type, - peek_filelike_length, - primitive_value_to_str, - to_bytes, -) - - -def get_multipart_boundary_from_content_type( - content_type: typing.Optional[bytes], -) -> typing.Optional[bytes]: - if not content_type or not content_type.startswith(b"multipart/form-data"): - return None - # parse boundary according to - # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1 - if b";" in content_type: - for section in content_type.split(b";"): - if section.strip().lower().startswith(b"boundary="): - return section.strip()[len(b"boundary=") :].strip(b'"') - return None - - -class DataField: - """ - A single form field item, within a multipart form field. - """ - - def __init__( - self, name: str, value: typing.Union[str, bytes, int, float, None] - ) -> None: - if not isinstance(name, str): - raise TypeError( - f"Invalid type for name. 
-                f"Invalid type for name. Expected str, got {type(name)}: {name!r}"
-            )
-        if value is not None and not isinstance(value, (str, bytes, int, float)):
-            raise TypeError(
-                f"Invalid type for value. Expected primitive type, got {type(value)}: {value!r}"
-            )
-        self.name = name
-        self.value: typing.Union[str, bytes] = (
-            value if isinstance(value, bytes) else primitive_value_to_str(value)
-        )
-
-    def render_headers(self) -> bytes:
-        if not hasattr(self, "_headers"):
-            name = format_form_param("name", self.name)
-            self._headers = b"".join(
-                [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"]
-            )
-
-        return self._headers
-
-    def render_data(self) -> bytes:
-        if not hasattr(self, "_data"):
-            self._data = to_bytes(self.value)
-
-        return self._data
-
-    def get_length(self) -> int:
-        headers = self.render_headers()
-        data = self.render_data()
-        return len(headers) + len(data)
-
-    def render(self) -> typing.Iterator[bytes]:
-        yield self.render_headers()
-        yield self.render_data()
-
-
-class FileField:
-    """
-    A single file field item, within a multipart form field.
-    """
-
-    CHUNK_SIZE = 64 * 1024
-
-    def __init__(self, name: str, value: FileTypes) -> None:
-        self.name = name
-
-        fileobj: FileContent
-
-        headers: typing.Dict[str, str] = {}
-        content_type: typing.Optional[str] = None
-
-        # This large tuple-based API largely mirrors requests' API.
-        # It would be good to think of better APIs for this that we could include in httpx 2.0,
-        # since variable-length tuples (especially of 4 elements) are quite unwieldy.
-        if isinstance(value, tuple):
-            if len(value) == 2:
-                # neither the 3rd parameter (content_type) nor the 4th (headers) was included
-                filename, fileobj = value  # type: ignore
-            elif len(value) == 3:
-                filename, fileobj, content_type = value  # type: ignore
-            else:
-                # all 4 parameters included
-                filename, fileobj, content_type, headers = value  # type: ignore
-        else:
-            filename = Path(str(getattr(value, "name", "upload"))).name
-            fileobj = value
-
-        if content_type is None:
-            content_type = guess_content_type(filename)
-
-        has_content_type_header = any("content-type" in key.lower() for key in headers)
-        if content_type is not None and not has_content_type_header:
-            # Note that unlike requests, we ignore the content_type provided
-            # in the 3rd tuple element if it is also included in the headers.
-            # requests does the opposite (it overwrites the header with the 3rd tuple element).
-            headers["Content-Type"] = content_type
-
-        if isinstance(fileobj, io.StringIO):
-            raise TypeError(
-                "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'."
-            )
-        if isinstance(fileobj, io.TextIOBase):
-            raise TypeError(
-                "Multipart file uploads must be opened in binary mode, not text mode."
-            )
-
-        self.filename = filename
-        self.file = fileobj
-        self.headers = headers
-
-    def get_length(self) -> typing.Optional[int]:
-        headers = self.render_headers()
-
-        if isinstance(self.file, (str, bytes)):
-            return len(headers) + len(to_bytes(self.file))
-
-        file_length = peek_filelike_length(self.file)
-
-        # If we can't determine the filesize without reading it into memory,
-        # then return `None` here, to indicate an unknown file length.
- if file_length is None: - return None - - return len(headers) + file_length - - def render_headers(self) -> bytes: - if not hasattr(self, "_headers"): - parts = [ - b"Content-Disposition: form-data; ", - format_form_param("name", self.name), - ] - if self.filename: - filename = format_form_param("filename", self.filename) - parts.extend([b"; ", filename]) - for header_name, header_value in self.headers.items(): - key, val = f"\r\n{header_name}: ".encode(), header_value.encode() - parts.extend([key, val]) - parts.append(b"\r\n\r\n") - self._headers = b"".join(parts) - - return self._headers - - def render_data(self) -> typing.Iterator[bytes]: - if isinstance(self.file, (str, bytes)): - yield to_bytes(self.file) - return - - if hasattr(self.file, "seek"): - try: - self.file.seek(0) - except io.UnsupportedOperation: - pass - - chunk = self.file.read(self.CHUNK_SIZE) - while chunk: - yield to_bytes(chunk) - chunk = self.file.read(self.CHUNK_SIZE) - - def render(self) -> typing.Iterator[bytes]: - yield self.render_headers() - yield from self.render_data() - - -class MultipartStream(SyncByteStream, AsyncByteStream): - """ - Request content as streaming multipart encoded form data. - """ - - def __init__( - self, - data: RequestData, - files: RequestFiles, - boundary: typing.Optional[bytes] = None, - ) -> None: - if boundary is None: - boundary = binascii.hexlify(os.urandom(16)) - - self.boundary = boundary - self.content_type = "multipart/form-data; boundary=%s" % boundary.decode( - "ascii" - ) - self.fields = list(self._iter_fields(data, files)) - - def _iter_fields( - self, data: RequestData, files: RequestFiles - ) -> typing.Iterator[typing.Union[FileField, DataField]]: - for name, value in data.items(): - if isinstance(value, (tuple, list)): - for item in value: - yield DataField(name=name, value=item) - else: - yield DataField(name=name, value=value) - - file_items = files.items() if isinstance(files, typing.Mapping) else files - for name, value in file_items: - yield FileField(name=name, value=value) - - def iter_chunks(self) -> typing.Iterator[bytes]: - for field in self.fields: - yield b"--%s\r\n" % self.boundary - yield from field.render() - yield b"\r\n" - yield b"--%s--\r\n" % self.boundary - - def get_content_length(self) -> typing.Optional[int]: - """ - Return the length of the multipart encoded content, or `None` if - any of the files have a length that cannot be determined upfront. - """ - boundary_length = len(self.boundary) - length = 0 - - for field in self.fields: - field_length = field.get_length() - if field_length is None: - return None - - length += 2 + boundary_length + 2 # b"--{boundary}\r\n" - length += field_length - length += 2 # b"\r\n" - - length += 2 + boundary_length + 4 # b"--{boundary}--\r\n" - return length - - # Content stream interface. 
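As a brief usage aside (a hedged sketch, not part of the deleted source): the `DataField`/`FileField` shapes handled above are what `httpx` builds when a request passes `data=` and `files=`; the upload URL below is a placeholder.

```
import io

import httpx

# Plain fields become DataField items; each file value may be a file object
# or a (filename, fileobj[, content_type[, headers]]) tuple, as parsed above.
files = {"report": ("report.txt", io.BytesIO(b"hello"), "text/plain")}
data = {"author": "me"}

# httpx assembles a MultipartStream from `data` and `files` internally.
response = httpx.post("https://example.org/upload", data=data, files=files)
print(response.status_code)
```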
- - def get_headers(self) -> typing.Dict[str, str]: - content_length = self.get_content_length() - content_type = self.content_type - if content_length is None: - return {"Transfer-Encoding": "chunked", "Content-Type": content_type} - return {"Content-Length": str(content_length), "Content-Type": content_type} - - def __iter__(self) -> typing.Iterator[bytes]: - for chunk in self.iter_chunks(): - yield chunk - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - for chunk in self.iter_chunks(): - yield chunk diff --git a/server/venv/Lib/site-packages/httpx/_status_codes.py b/server/venv/Lib/site-packages/httpx/_status_codes.py deleted file mode 100644 index 671c30e..0000000 --- a/server/venv/Lib/site-packages/httpx/_status_codes.py +++ /dev/null @@ -1,158 +0,0 @@ -from enum import IntEnum - - -class codes(IntEnum): - """HTTP status codes and reason phrases - - Status codes from the following RFCs are all observed: - - * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 - * RFC 6585: Additional HTTP Status Codes - * RFC 3229: Delta encoding in HTTP - * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 - * RFC 5842: Binding Extensions to WebDAV - * RFC 7238: Permanent Redirect - * RFC 2295: Transparent Content Negotiation in HTTP - * RFC 2774: An HTTP Extension Framework - * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) - * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) - * RFC 7725: An HTTP Status Code to Report Legal Obstacles - * RFC 8297: An HTTP Status Code for Indicating Hints - * RFC 8470: Using Early Data in HTTP - """ - - def __new__(cls, value: int, phrase: str = "") -> "codes": - obj = int.__new__(cls, value) - obj._value_ = value - - obj.phrase = phrase # type: ignore[attr-defined] - return obj - - def __str__(self) -> str: - return str(self.value) - - @classmethod - def get_reason_phrase(cls, value: int) -> str: - try: - return codes(value).phrase # type: ignore - except ValueError: - return "" - - @classmethod - def is_informational(cls, value: int) -> bool: - """ - Returns `True` for 1xx status codes, `False` otherwise. - """ - return 100 <= value <= 199 - - @classmethod - def is_success(cls, value: int) -> bool: - """ - Returns `True` for 2xx status codes, `False` otherwise. - """ - return 200 <= value <= 299 - - @classmethod - def is_redirect(cls, value: int) -> bool: - """ - Returns `True` for 3xx status codes, `False` otherwise. - """ - return 300 <= value <= 399 - - @classmethod - def is_client_error(cls, value: int) -> bool: - """ - Returns `True` for 4xx status codes, `False` otherwise. - """ - return 400 <= value <= 499 - - @classmethod - def is_server_error(cls, value: int) -> bool: - """ - Returns `True` for 5xx status codes, `False` otherwise. - """ - return 500 <= value <= 599 - - @classmethod - def is_error(cls, value: int) -> bool: - """ - Returns `True` for 4xx or 5xx status codes, `False` otherwise. 
- """ - return 400 <= value <= 599 - - # informational - CONTINUE = 100, "Continue" - SWITCHING_PROTOCOLS = 101, "Switching Protocols" - PROCESSING = 102, "Processing" - EARLY_HINTS = 103, "Early Hints" - - # success - OK = 200, "OK" - CREATED = 201, "Created" - ACCEPTED = 202, "Accepted" - NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information" - NO_CONTENT = 204, "No Content" - RESET_CONTENT = 205, "Reset Content" - PARTIAL_CONTENT = 206, "Partial Content" - MULTI_STATUS = 207, "Multi-Status" - ALREADY_REPORTED = 208, "Already Reported" - IM_USED = 226, "IM Used" - - # redirection - MULTIPLE_CHOICES = 300, "Multiple Choices" - MOVED_PERMANENTLY = 301, "Moved Permanently" - FOUND = 302, "Found" - SEE_OTHER = 303, "See Other" - NOT_MODIFIED = 304, "Not Modified" - USE_PROXY = 305, "Use Proxy" - TEMPORARY_REDIRECT = 307, "Temporary Redirect" - PERMANENT_REDIRECT = 308, "Permanent Redirect" - - # client error - BAD_REQUEST = 400, "Bad Request" - UNAUTHORIZED = 401, "Unauthorized" - PAYMENT_REQUIRED = 402, "Payment Required" - FORBIDDEN = 403, "Forbidden" - NOT_FOUND = 404, "Not Found" - METHOD_NOT_ALLOWED = 405, "Method Not Allowed" - NOT_ACCEPTABLE = 406, "Not Acceptable" - PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required" - REQUEST_TIMEOUT = 408, "Request Timeout" - CONFLICT = 409, "Conflict" - GONE = 410, "Gone" - LENGTH_REQUIRED = 411, "Length Required" - PRECONDITION_FAILED = 412, "Precondition Failed" - REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large" - REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long" - UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type" - REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable" - EXPECTATION_FAILED = 417, "Expectation Failed" - IM_A_TEAPOT = 418, "I'm a teapot" - MISDIRECTED_REQUEST = 421, "Misdirected Request" - UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity" - LOCKED = 423, "Locked" - FAILED_DEPENDENCY = 424, "Failed Dependency" - TOO_EARLY = 425, "Too Early" - UPGRADE_REQUIRED = 426, "Upgrade Required" - PRECONDITION_REQUIRED = 428, "Precondition Required" - TOO_MANY_REQUESTS = 429, "Too Many Requests" - REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large" - UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons" - - # server errors - INTERNAL_SERVER_ERROR = 500, "Internal Server Error" - NOT_IMPLEMENTED = 501, "Not Implemented" - BAD_GATEWAY = 502, "Bad Gateway" - SERVICE_UNAVAILABLE = 503, "Service Unavailable" - GATEWAY_TIMEOUT = 504, "Gateway Timeout" - HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported" - VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates" - INSUFFICIENT_STORAGE = 507, "Insufficient Storage" - LOOP_DETECTED = 508, "Loop Detected" - NOT_EXTENDED = 510, "Not Extended" - NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required" - - -# Include lower-case styles for `requests` compatibility. 
-for code in codes: - setattr(codes, code._name_.lower(), int(code)) diff --git a/server/venv/Lib/site-packages/httpx/_transports/__init__.py b/server/venv/Lib/site-packages/httpx/_transports/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/httpx/_transports/asgi.py b/server/venv/Lib/site-packages/httpx/_transports/asgi.py deleted file mode 100644 index f67f0fb..0000000 --- a/server/venv/Lib/site-packages/httpx/_transports/asgi.py +++ /dev/null @@ -1,179 +0,0 @@ -import typing - -import sniffio - -from .._models import Request, Response -from .._types import AsyncByteStream -from .base import AsyncBaseTransport - -if typing.TYPE_CHECKING: # pragma: no cover - import asyncio - - import trio - - Event = typing.Union[asyncio.Event, trio.Event] - - -_Message = typing.Dict[str, typing.Any] -_Receive = typing.Callable[[], typing.Awaitable[_Message]] -_Send = typing.Callable[ - [typing.Dict[str, typing.Any]], typing.Coroutine[None, None, None] -] -_ASGIApp = typing.Callable[ - [typing.Dict[str, typing.Any], _Receive, _Send], typing.Coroutine[None, None, None] -] - - -def create_event() -> "Event": - if sniffio.current_async_library() == "trio": - import trio - - return trio.Event() - else: - import asyncio - - return asyncio.Event() - - -class ASGIResponseStream(AsyncByteStream): - def __init__(self, body: typing.List[bytes]) -> None: - self._body = body - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - yield b"".join(self._body) - - -class ASGITransport(AsyncBaseTransport): - """ - A custom AsyncTransport that handles sending requests directly to an ASGI app. - The simplest way to use this functionality is to use the `app` argument. - - ``` - client = httpx.AsyncClient(app=app) - ``` - - Alternatively, you can setup the transport instance explicitly. - This allows you to include any additional configuration arguments specific - to the ASGITransport class: - - ``` - transport = httpx.ASGITransport( - app=app, - root_path="/submount", - client=("1.2.3.4", 123) - ) - client = httpx.AsyncClient(transport=transport) - ``` - - Arguments: - - * `app` - The ASGI application. - * `raise_app_exceptions` - Boolean indicating if exceptions in the application - should be raised. Default to `True`. Can be set to `False` for use cases - such as testing the content of a client 500 response. - * `root_path` - The root path on which the ASGI application should be mounted. - * `client` - A two-tuple indicating the client IP and port of incoming requests. - ``` - """ - - def __init__( - self, - app: _ASGIApp, - raise_app_exceptions: bool = True, - root_path: str = "", - client: typing.Tuple[str, int] = ("127.0.0.1", 123), - ) -> None: - self.app = app - self.raise_app_exceptions = raise_app_exceptions - self.root_path = root_path - self.client = client - - async def handle_async_request( - self, - request: Request, - ) -> Response: - assert isinstance(request.stream, AsyncByteStream) - - # ASGI scope. - scope = { - "type": "http", - "asgi": {"version": "3.0"}, - "http_version": "1.1", - "method": request.method, - "headers": [(k.lower(), v) for (k, v) in request.headers.raw], - "scheme": request.url.scheme, - "path": request.url.path, - "raw_path": request.url.raw_path, - "query_string": request.url.query, - "server": (request.url.host, request.url.port), - "client": self.client, - "root_path": self.root_path, - } - - # Request. - request_body_chunks = request.stream.__aiter__() - request_complete = False - - # Response. 
- status_code = None - response_headers = None - body_parts = [] - response_started = False - response_complete = create_event() - - # ASGI callables. - - async def receive() -> typing.Dict[str, typing.Any]: - nonlocal request_complete - - if request_complete: - await response_complete.wait() - return {"type": "http.disconnect"} - - try: - body = await request_body_chunks.__anext__() - except StopAsyncIteration: - request_complete = True - return {"type": "http.request", "body": b"", "more_body": False} - return {"type": "http.request", "body": body, "more_body": True} - - async def send(message: typing.Dict[str, typing.Any]) -> None: - nonlocal status_code, response_headers, response_started - - if message["type"] == "http.response.start": - assert not response_started - - status_code = message["status"] - response_headers = message.get("headers", []) - response_started = True - - elif message["type"] == "http.response.body": - assert not response_complete.is_set() - body = message.get("body", b"") - more_body = message.get("more_body", False) - - if body and request.method != "HEAD": - body_parts.append(body) - - if not more_body: - response_complete.set() - - try: - await self.app(scope, receive, send) - except Exception: # noqa: PIE-786 - if self.raise_app_exceptions: - raise - - response_complete.set() - if status_code is None: - status_code = 500 - if response_headers is None: - response_headers = {} - - assert response_complete.is_set() - assert status_code is not None - assert response_headers is not None - - stream = ASGIResponseStream(body_parts) - - return Response(status_code, headers=response_headers, stream=stream) diff --git a/server/venv/Lib/site-packages/httpx/_transports/base.py b/server/venv/Lib/site-packages/httpx/_transports/base.py deleted file mode 100644 index f6fdfe6..0000000 --- a/server/venv/Lib/site-packages/httpx/_transports/base.py +++ /dev/null @@ -1,82 +0,0 @@ -import typing -from types import TracebackType - -from .._models import Request, Response - -T = typing.TypeVar("T", bound="BaseTransport") -A = typing.TypeVar("A", bound="AsyncBaseTransport") - - -class BaseTransport: - def __enter__(self: T) -> T: - return self - - def __exit__( - self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[TracebackType] = None, - ) -> None: - self.close() - - def handle_request(self, request: Request) -> Response: - """ - Send a single HTTP request and return a response. - - Developers shouldn't typically ever need to call into this API directly, - since the Client class provides all the higher level user-facing API - niceties. - - In order to properly release any network resources, the response - stream should *either* be consumed immediately, with a call to - `response.stream.read()`, or else the `handle_request` call should - be followed with a try/finally block to ensuring the stream is - always closed. - - Example usage: - - with httpx.HTTPTransport() as transport: - req = httpx.Request( - method=b"GET", - url=(b"https", b"www.example.com", 443, b"/"), - headers=[(b"Host", b"www.example.com")], - ) - resp = transport.handle_request(req) - body = resp.stream.read() - print(resp.status_code, resp.headers, body) - - - Takes a `Request` instance as the only argument. - - Returns a `Response` instance. - """ - raise NotImplementedError( - "The 'handle_request' method must be implemented." 
- ) # pragma: no cover - - def close(self) -> None: - pass - - -class AsyncBaseTransport: - async def __aenter__(self: A) -> A: - return self - - async def __aexit__( - self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[TracebackType] = None, - ) -> None: - await self.aclose() - - async def handle_async_request( - self, - request: Request, - ) -> Response: - raise NotImplementedError( - "The 'handle_async_request' method must be implemented." - ) # pragma: no cover - - async def aclose(self) -> None: - pass diff --git a/server/venv/Lib/site-packages/httpx/_transports/default.py b/server/venv/Lib/site-packages/httpx/_transports/default.py deleted file mode 100644 index 7dba5b8..0000000 --- a/server/venv/Lib/site-packages/httpx/_transports/default.py +++ /dev/null @@ -1,378 +0,0 @@ -""" -Custom transports, with nicely configured defaults. - -The following additional keyword arguments are currently supported by httpcore... - -* uds: str -* local_address: str -* retries: int - -Example usages... - -# Disable HTTP/2 on a single specific domain. -mounts = { - "all://": httpx.HTTPTransport(http2=True), - "all://*example.org": httpx.HTTPTransport() -} - -# Using advanced httpcore configuration, with connection retries. -transport = httpx.HTTPTransport(retries=1) -client = httpx.Client(transport=transport) - -# Using advanced httpcore configuration, with unix domain sockets. -transport = httpx.HTTPTransport(uds="socket.uds") -client = httpx.Client(transport=transport) -""" -import contextlib -import typing -from types import TracebackType - -import httpcore - -from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context -from .._exceptions import ( - ConnectError, - ConnectTimeout, - LocalProtocolError, - NetworkError, - PoolTimeout, - ProtocolError, - ProxyError, - ReadError, - ReadTimeout, - RemoteProtocolError, - TimeoutException, - UnsupportedProtocol, - WriteError, - WriteTimeout, -) -from .._models import Request, Response -from .._types import AsyncByteStream, CertTypes, SyncByteStream, VerifyTypes -from .base import AsyncBaseTransport, BaseTransport - -T = typing.TypeVar("T", bound="HTTPTransport") -A = typing.TypeVar("A", bound="AsyncHTTPTransport") - -SOCKET_OPTION = typing.Union[ - typing.Tuple[int, int, int], - typing.Tuple[int, int, typing.Union[bytes, bytearray]], - typing.Tuple[int, int, None, int], -] - - -@contextlib.contextmanager -def map_httpcore_exceptions() -> typing.Iterator[None]: - try: - yield - except Exception as exc: # noqa: PIE-786 - mapped_exc = None - - for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): - if not isinstance(exc, from_exc): - continue - # We want to map to the most specific exception we can find. - # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to - # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. 
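An aside on what this mapping buys callers (hedged sketch; the URL and timeout are arbitrary): the most specific `httpx` exception is raised, so user code never handles `httpcore` types directly.

```
import httpx

try:
    httpx.get("https://example.org", timeout=httpx.Timeout(0.001))
except httpx.TimeoutException as exc:
    # A specific subclass such as httpx.ConnectTimeout is raised, because
    # the loop here prefers the most derived exception in the mapping.
    print(type(exc).__name__)
```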
- if mapped_exc is None or issubclass(to_exc, mapped_exc): - mapped_exc = to_exc - - if mapped_exc is None: # pragma: no cover - raise - - message = str(exc) - raise mapped_exc(message) from exc - - -HTTPCORE_EXC_MAP = { - httpcore.TimeoutException: TimeoutException, - httpcore.ConnectTimeout: ConnectTimeout, - httpcore.ReadTimeout: ReadTimeout, - httpcore.WriteTimeout: WriteTimeout, - httpcore.PoolTimeout: PoolTimeout, - httpcore.NetworkError: NetworkError, - httpcore.ConnectError: ConnectError, - httpcore.ReadError: ReadError, - httpcore.WriteError: WriteError, - httpcore.ProxyError: ProxyError, - httpcore.UnsupportedProtocol: UnsupportedProtocol, - httpcore.ProtocolError: ProtocolError, - httpcore.LocalProtocolError: LocalProtocolError, - httpcore.RemoteProtocolError: RemoteProtocolError, -} - - -class ResponseStream(SyncByteStream): - def __init__(self, httpcore_stream: typing.Iterable[bytes]): - self._httpcore_stream = httpcore_stream - - def __iter__(self) -> typing.Iterator[bytes]: - with map_httpcore_exceptions(): - for part in self._httpcore_stream: - yield part - - def close(self) -> None: - if hasattr(self._httpcore_stream, "close"): - self._httpcore_stream.close() - - -class HTTPTransport(BaseTransport): - def __init__( - self, - verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - trust_env: bool = True, - proxy: typing.Optional[Proxy] = None, - uds: typing.Optional[str] = None, - local_address: typing.Optional[str] = None, - retries: int = 0, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> None: - ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) - - if proxy is None: - self._pool = httpcore.ConnectionPool( - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - uds=uds, - local_address=local_address, - retries=retries, - socket_options=socket_options, - ) - elif proxy.url.scheme in ("http", "https"): - self._pool = httpcore.HTTPProxy( - proxy_url=httpcore.URL( - scheme=proxy.url.raw_scheme, - host=proxy.url.raw_host, - port=proxy.url.port, - target=proxy.url.raw_path, - ), - proxy_auth=proxy.raw_auth, - proxy_headers=proxy.headers.raw, - ssl_context=ssl_context, - proxy_ssl_context=proxy.ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - socket_options=socket_options, - ) - elif proxy.url.scheme == "socks5": - try: - import socksio # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using SOCKS proxy, but the 'socksio' package is not installed. " - "Make sure to install httpx using `pip install httpx[socks]`." - ) from None - - self._pool = httpcore.SOCKSProxy( - proxy_url=httpcore.URL( - scheme=proxy.url.raw_scheme, - host=proxy.url.raw_host, - port=proxy.url.port, - target=proxy.url.raw_path, - ), - proxy_auth=proxy.raw_auth, - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - ) - else: # pragma: no cover - raise ValueError( - f"Proxy protocol must be either 'http', 'https', or 'socks5', but got {proxy.url.scheme!r}." 
- ) - - def __enter__(self: T) -> T: # Use generics for subclass support. - self._pool.__enter__() - return self - - def __exit__( - self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[TracebackType] = None, - ) -> None: - with map_httpcore_exceptions(): - self._pool.__exit__(exc_type, exc_value, traceback) - - def handle_request( - self, - request: Request, - ) -> Response: - assert isinstance(request.stream, SyncByteStream) - - req = httpcore.Request( - method=request.method, - url=httpcore.URL( - scheme=request.url.raw_scheme, - host=request.url.raw_host, - port=request.url.port, - target=request.url.raw_path, - ), - headers=request.headers.raw, - content=request.stream, - extensions=request.extensions, - ) - with map_httpcore_exceptions(): - resp = self._pool.handle_request(req) - - assert isinstance(resp.stream, typing.Iterable) - - return Response( - status_code=resp.status, - headers=resp.headers, - stream=ResponseStream(resp.stream), - extensions=resp.extensions, - ) - - def close(self) -> None: - self._pool.close() - - -class AsyncResponseStream(AsyncByteStream): - def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]): - self._httpcore_stream = httpcore_stream - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - with map_httpcore_exceptions(): - async for part in self._httpcore_stream: - yield part - - async def aclose(self) -> None: - if hasattr(self._httpcore_stream, "aclose"): - await self._httpcore_stream.aclose() - - -class AsyncHTTPTransport(AsyncBaseTransport): - def __init__( - self, - verify: VerifyTypes = True, - cert: typing.Optional[CertTypes] = None, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - trust_env: bool = True, - proxy: typing.Optional[Proxy] = None, - uds: typing.Optional[str] = None, - local_address: typing.Optional[str] = None, - retries: int = 0, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> None: - ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) - - if proxy is None: - self._pool = httpcore.AsyncConnectionPool( - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - uds=uds, - local_address=local_address, - retries=retries, - socket_options=socket_options, - ) - elif proxy.url.scheme in ("http", "https"): - self._pool = httpcore.AsyncHTTPProxy( - proxy_url=httpcore.URL( - scheme=proxy.url.raw_scheme, - host=proxy.url.raw_host, - port=proxy.url.port, - target=proxy.url.raw_path, - ), - proxy_auth=proxy.raw_auth, - proxy_headers=proxy.headers.raw, - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - socket_options=socket_options, - ) - elif proxy.url.scheme == "socks5": - try: - import socksio # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using SOCKS proxy, but the 'socksio' package is not installed. " - "Make sure to install httpx using `pip install httpx[socks]`." 
-                ) from None
-
-            self._pool = httpcore.AsyncSOCKSProxy(
-                proxy_url=httpcore.URL(
-                    scheme=proxy.url.raw_scheme,
-                    host=proxy.url.raw_host,
-                    port=proxy.url.port,
-                    target=proxy.url.raw_path,
-                ),
-                proxy_auth=proxy.raw_auth,
-                ssl_context=ssl_context,
-                max_connections=limits.max_connections,
-                max_keepalive_connections=limits.max_keepalive_connections,
-                keepalive_expiry=limits.keepalive_expiry,
-                http1=http1,
-                http2=http2,
-            )
-        else:  # pragma: no cover
-            raise ValueError(
-                f"Proxy protocol must be either 'http', 'https', or 'socks5', but got {proxy.url.scheme!r}."
-            )
-
-    async def __aenter__(self: A) -> A:  # Use generics for subclass support.
-        await self._pool.__aenter__()
-        return self
-
-    async def __aexit__(
-        self,
-        exc_type: typing.Optional[typing.Type[BaseException]] = None,
-        exc_value: typing.Optional[BaseException] = None,
-        traceback: typing.Optional[TracebackType] = None,
-    ) -> None:
-        with map_httpcore_exceptions():
-            await self._pool.__aexit__(exc_type, exc_value, traceback)
-
-    async def handle_async_request(
-        self,
-        request: Request,
-    ) -> Response:
-        assert isinstance(request.stream, AsyncByteStream)
-
-        req = httpcore.Request(
-            method=request.method,
-            url=httpcore.URL(
-                scheme=request.url.raw_scheme,
-                host=request.url.raw_host,
-                port=request.url.port,
-                target=request.url.raw_path,
-            ),
-            headers=request.headers.raw,
-            content=request.stream,
-            extensions=request.extensions,
-        )
-        with map_httpcore_exceptions():
-            resp = await self._pool.handle_async_request(req)
-
-        assert isinstance(resp.stream, typing.AsyncIterable)
-
-        return Response(
-            status_code=resp.status,
-            headers=resp.headers,
-            stream=AsyncResponseStream(resp.stream),
-            extensions=resp.extensions,
-        )
-
-    async def aclose(self) -> None:
-        await self._pool.aclose()
diff --git a/server/venv/Lib/site-packages/httpx/_transports/mock.py b/server/venv/Lib/site-packages/httpx/_transports/mock.py
deleted file mode 100644
index 82043da..0000000
--- a/server/venv/Lib/site-packages/httpx/_transports/mock.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import typing
-
-from .._models import Request, Response
-from .base import AsyncBaseTransport, BaseTransport
-
-SyncHandler = typing.Callable[[Request], Response]
-AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]]
-
-
-class MockTransport(AsyncBaseTransport, BaseTransport):
-    def __init__(self, handler: typing.Union[SyncHandler, AsyncHandler]) -> None:
-        self.handler = handler
-
-    def handle_request(
-        self,
-        request: Request,
-    ) -> Response:
-        request.read()
-        response = self.handler(request)
-        if not isinstance(response, Response):  # pragma: no cover
-            raise TypeError("Cannot use an async handler in a sync Client")
-        return response
-
-    async def handle_async_request(
-        self,
-        request: Request,
-    ) -> Response:
-        await request.aread()
-        response = self.handler(request)
-
-        # Allow the handler to *optionally* be an `async` function.
-        # If it is, then the `response` variable needs to be awaited
-        # to actually return the result.
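A hedged usage aside for the handler API above (`testserver` is a placeholder host):

```
import httpx

def handler(request: httpx.Request) -> httpx.Response:
    return httpx.Response(200, json={"path": request.url.path})

client = httpx.Client(transport=httpx.MockTransport(handler))
assert client.get("https://testserver/ping").json() == {"path": "/ping"}
```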
-
-        if not isinstance(response, Response):
-            response = await response
-
-        return response
diff --git a/server/venv/Lib/site-packages/httpx/_transports/wsgi.py b/server/venv/Lib/site-packages/httpx/_transports/wsgi.py
deleted file mode 100644
index a23d42c..0000000
--- a/server/venv/Lib/site-packages/httpx/_transports/wsgi.py
+++ /dev/null
@@ -1,144 +0,0 @@
-import io
-import itertools
-import sys
-import typing
-
-from .._models import Request, Response
-from .._types import SyncByteStream
-from .base import BaseTransport
-
-if typing.TYPE_CHECKING:
-    from _typeshed import OptExcInfo  # pragma: no cover
-    from _typeshed.wsgi import WSGIApplication  # pragma: no cover
-
-_T = typing.TypeVar("_T")
-
-
-def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]:
-    body = iter(body)
-    for chunk in body:
-        if chunk:
-            return itertools.chain([chunk], body)
-    return []
-
-
-class WSGIByteStream(SyncByteStream):
-    def __init__(self, result: typing.Iterable[bytes]) -> None:
-        self._close = getattr(result, "close", None)
-        self._result = _skip_leading_empty_chunks(result)
-
-    def __iter__(self) -> typing.Iterator[bytes]:
-        for part in self._result:
-            yield part
-
-    def close(self) -> None:
-        if self._close is not None:
-            self._close()
-
-
-class WSGITransport(BaseTransport):
-    """
-    A custom transport that handles sending requests directly to a WSGI app.
-    The simplest way to use this functionality is to use the `app` argument.
-
-    ```
-    client = httpx.Client(app=app)
-    ```
-
-    Alternatively, you can set up the transport instance explicitly.
-    This allows you to include any additional configuration arguments specific
-    to the WSGITransport class:
-
-    ```
-    transport = httpx.WSGITransport(
-        app=app,
-        script_name="/submount",
-        remote_addr="1.2.3.4"
-    )
-    client = httpx.Client(transport=transport)
-    ```
-
-    Arguments:
-
-    * `app` - The WSGI application.
-    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
-      should be raised. Defaults to `True`. Can be set to `False` for use cases
-      such as testing the content of a client 500 response.
-    * `script_name` - The root path on which the WSGI application should be mounted.
-    * `remote_addr` - A string indicating the client IP of incoming requests.
- ``` - """ - - def __init__( - self, - app: "WSGIApplication", - raise_app_exceptions: bool = True, - script_name: str = "", - remote_addr: str = "127.0.0.1", - wsgi_errors: typing.Optional[typing.TextIO] = None, - ) -> None: - self.app = app - self.raise_app_exceptions = raise_app_exceptions - self.script_name = script_name - self.remote_addr = remote_addr - self.wsgi_errors = wsgi_errors - - def handle_request(self, request: Request) -> Response: - request.read() - wsgi_input = io.BytesIO(request.content) - - port = request.url.port or {"http": 80, "https": 443}[request.url.scheme] - environ = { - "wsgi.version": (1, 0), - "wsgi.url_scheme": request.url.scheme, - "wsgi.input": wsgi_input, - "wsgi.errors": self.wsgi_errors or sys.stderr, - "wsgi.multithread": True, - "wsgi.multiprocess": False, - "wsgi.run_once": False, - "REQUEST_METHOD": request.method, - "SCRIPT_NAME": self.script_name, - "PATH_INFO": request.url.path, - "QUERY_STRING": request.url.query.decode("ascii"), - "SERVER_NAME": request.url.host, - "SERVER_PORT": str(port), - "SERVER_PROTOCOL": "HTTP/1.1", - "REMOTE_ADDR": self.remote_addr, - } - for header_key, header_value in request.headers.raw: - key = header_key.decode("ascii").upper().replace("-", "_") - if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): - key = "HTTP_" + key - environ[key] = header_value.decode("ascii") - - seen_status = None - seen_response_headers = None - seen_exc_info = None - - def start_response( - status: str, - response_headers: typing.List[typing.Tuple[str, str]], - exc_info: typing.Optional["OptExcInfo"] = None, - ) -> typing.Callable[[bytes], typing.Any]: - nonlocal seen_status, seen_response_headers, seen_exc_info - seen_status = status - seen_response_headers = response_headers - seen_exc_info = exc_info - return lambda _: None - - result = self.app(environ, start_response) - - stream = WSGIByteStream(result) - - assert seen_status is not None - assert seen_response_headers is not None - if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions: - raise seen_exc_info[1] - - status_code = int(seen_status.split()[0]) - headers = [ - (key.encode("ascii"), value.encode("ascii")) - for key, value in seen_response_headers - ] - - return Response(status_code, headers=headers, stream=stream) diff --git a/server/venv/Lib/site-packages/httpx/_types.py b/server/venv/Lib/site-packages/httpx/_types.py deleted file mode 100644 index 83cf35a..0000000 --- a/server/venv/Lib/site-packages/httpx/_types.py +++ /dev/null @@ -1,133 +0,0 @@ -""" -Type definitions for type checking purposes. 
-""" - -import ssl -from http.cookiejar import CookieJar -from typing import ( - IO, - TYPE_CHECKING, - Any, - AsyncIterable, - AsyncIterator, - Callable, - Dict, - Iterable, - Iterator, - List, - Mapping, - MutableMapping, - NamedTuple, - Optional, - Sequence, - Tuple, - Union, -) - -if TYPE_CHECKING: # pragma: no cover - from ._auth import Auth # noqa: F401 - from ._config import Proxy, Timeout # noqa: F401 - from ._models import Cookies, Headers, Request # noqa: F401 - from ._urls import URL, QueryParams # noqa: F401 - - -PrimitiveData = Optional[Union[str, int, float, bool]] - -RawURL = NamedTuple( - "RawURL", - [ - ("raw_scheme", bytes), - ("raw_host", bytes), - ("port", Optional[int]), - ("raw_path", bytes), - ], -) - -URLTypes = Union["URL", str] - -QueryParamTypes = Union[ - "QueryParams", - Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], - List[Tuple[str, PrimitiveData]], - Tuple[Tuple[str, PrimitiveData], ...], - str, - bytes, -] - -HeaderTypes = Union[ - "Headers", - Mapping[str, str], - Mapping[bytes, bytes], - Sequence[Tuple[str, str]], - Sequence[Tuple[bytes, bytes]], -] - -CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]] - -CertTypes = Union[ - # certfile - str, - # (certfile, keyfile) - Tuple[str, Optional[str]], - # (certfile, keyfile, password) - Tuple[str, Optional[str], Optional[str]], -] -VerifyTypes = Union[str, bool, ssl.SSLContext] -TimeoutTypes = Union[ - Optional[float], - Tuple[Optional[float], Optional[float], Optional[float], Optional[float]], - "Timeout", -] -ProxiesTypes = Union[URLTypes, "Proxy", Dict[URLTypes, Union[None, URLTypes, "Proxy"]]] - -AuthTypes = Union[ - Tuple[Union[str, bytes], Union[str, bytes]], - Callable[["Request"], "Request"], - "Auth", -] - -RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] -ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] -ResponseExtensions = MutableMapping[str, Any] - -RequestData = Mapping[str, Any] - -FileContent = Union[IO[bytes], bytes, str] -FileTypes = Union[ - # file (or bytes) - FileContent, - # (filename, file (or bytes)) - Tuple[Optional[str], FileContent], - # (filename, file (or bytes), content_type) - Tuple[Optional[str], FileContent, Optional[str]], - # (filename, file (or bytes), content_type, headers) - Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], -] -RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] - -RequestExtensions = MutableMapping[str, Any] - - -class SyncByteStream: - def __iter__(self) -> Iterator[bytes]: - raise NotImplementedError( - "The '__iter__' method must be implemented." - ) # pragma: no cover - yield b"" # pragma: no cover - - def close(self) -> None: - """ - Subclasses can override this method to release any network resources - after a request/response cycle is complete. - """ - - -class AsyncByteStream: - async def __aiter__(self) -> AsyncIterator[bytes]: - raise NotImplementedError( - "The '__aiter__' method must be implemented." - ) # pragma: no cover - yield b"" # pragma: no cover - - async def aclose(self) -> None: - pass diff --git a/server/venv/Lib/site-packages/httpx/_urlparse.py b/server/venv/Lib/site-packages/httpx/_urlparse.py deleted file mode 100644 index e1ba8dc..0000000 --- a/server/venv/Lib/site-packages/httpx/_urlparse.py +++ /dev/null @@ -1,464 +0,0 @@ -""" -An implementation of `urlparse` that provides URL validation and normalization -as described by RFC3986. 
-
-We rely on this implementation rather than the one in Python's stdlib, because:
-
-* It provides more complete URL validation.
-* It properly differentiates between an empty querystring and an absent querystring,
-  to distinguish URLs with a trailing '?'.
-* It handles scheme, hostname, port, and path normalization.
-* It supports IDNA hostnames, normalizing them to their encoded form.
-* The API supports passing individual components, as well as the complete URL string.
-
-Previously we relied on the excellent `rfc3986` package to handle URL parsing and
-validation, but this module provides a simpler alternative, with less indirection
-required.
-"""
-import ipaddress
-import re
-import typing
-
-import idna
-
-from ._exceptions import InvalidURL
-
-MAX_URL_LENGTH = 65536
-
-# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3
-UNRESERVED_CHARACTERS = (
-    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
-)
-SUB_DELIMS = "!$&'()*+,;="

-PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}")
-
-
-# {scheme}:      (optional)
-# //{authority}  (optional)
-# {path}
-# ?{query}       (optional)
-# #{fragment}    (optional)
-URL_REGEX = re.compile(
-    (
-        r"(?:(?P<scheme>{scheme}):)?"
-        r"(?://(?P<authority>{authority}))?"
-        r"(?P<path>{path})"
-        r"(?:\?(?P<query>{query}))?"
-        r"(?:#(?P<fragment>{fragment}))?"
-    ).format(
-        scheme="([a-zA-Z][a-zA-Z0-9+.-]*)?",
-        authority="[^/?#]*",
-        path="[^?#]*",
-        query="[^#]*",
-        fragment=".*",
-    )
-)
-
-# {userinfo}@  (optional)
-# {host}
-# :{port}      (optional)
AUTHORITY_REGEX = re.compile(
-    (
-        r"(?:(?P<userinfo>{userinfo})@)?" r"(?P<host>{host})" r":?(?P<port>{port})?"
-    ).format(
-        userinfo="[^@]*",  # Any character sequence not including '@'.
-        host="(\\[.*\\]|[^:]*)",  # Either any character sequence not including ':',
-        # or an IPv6 address enclosed within square brackets.
-        port=".*",  # Any character sequence.
-    )
-)
-
-
-# If we call urlparse with an individual component, then we need to regex
-# validate that component individually.
-# Note that we're duplicating the same strings as above. Shock! Horror!!
-COMPONENT_REGEX = {
-    "scheme": re.compile("([a-zA-Z][a-zA-Z0-9+.-]*)?"),
-    "authority": re.compile("[^/?#]*"),
-    "path": re.compile("[^?#]*"),
-    "query": re.compile("[^#]*"),
-    "fragment": re.compile(".*"),
-    "userinfo": re.compile("[^@]*"),
-    "host": re.compile("(\\[.*\\]|[^:]*)"),
-    "port": re.compile(".*"),
-}
-
-
-# We use these simple regexes as a first pass before handing off to
-# the stdlib 'ipaddress' module for IP address validation.
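A hedged sketch of what this internal parser produces (private API, illustration only; the URL is arbitrary and the assertions assume the behavior implemented in this module):

```
from httpx._urlparse import urlparse  # private module, not public API

result = urlparse("HTTPS://user@Example.COM:443/a/../b?q=1")
assert result.scheme == "https"      # scheme is lower-cased
assert result.host == "example.com"  # host is normalized to ASCII form
assert result.port is None           # the scheme's default port is dropped
assert result.path == "/b"           # dot segments are removed
assert result.query == "q=1"
```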
-IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$")
-IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$")
-
-
-class ParseResult(typing.NamedTuple):
-    scheme: str
-    userinfo: str
-    host: str
-    port: typing.Optional[int]
-    path: str
-    query: typing.Optional[str]
-    fragment: typing.Optional[str]
-
-    @property
-    def authority(self) -> str:
-        return "".join(
-            [
-                f"{self.userinfo}@" if self.userinfo else "",
-                f"[{self.host}]" if ":" in self.host else self.host,
-                f":{self.port}" if self.port is not None else "",
-            ]
-        )
-
-    @property
-    def netloc(self) -> str:
-        return "".join(
-            [
-                f"[{self.host}]" if ":" in self.host else self.host,
-                f":{self.port}" if self.port is not None else "",
-            ]
-        )
-
-    def copy_with(self, **kwargs: typing.Optional[str]) -> "ParseResult":
-        if not kwargs:
-            return self
-
-        defaults = {
-            "scheme": self.scheme,
-            "authority": self.authority,
-            "path": self.path,
-            "query": self.query,
-            "fragment": self.fragment,
-        }
-        defaults.update(kwargs)
-        return urlparse("", **defaults)
-
-    def __str__(self) -> str:
-        authority = self.authority
-        return "".join(
-            [
-                f"{self.scheme}:" if self.scheme else "",
-                f"//{authority}" if authority else "",
-                self.path,
-                f"?{self.query}" if self.query is not None else "",
-                f"#{self.fragment}" if self.fragment is not None else "",
-            ]
-        )
-
-
-def urlparse(url: str = "", **kwargs: typing.Optional[str]) -> ParseResult:
-    # Initial basic checks on allowable URLs.
-    # ---------------------------------------
-
-    # Hard limit the maximum allowable URL length.
-    if len(url) > MAX_URL_LENGTH:
-        raise InvalidURL("URL too long")
-
-    # If a URL includes any ASCII control characters including \t, \r, \n,
-    # then treat it as invalid.
-    if any(char.isascii() and not char.isprintable() for char in url):
-        raise InvalidURL("Invalid non-printable ASCII character in URL")
-
-    # Some keyword arguments require special handling.
-    # ------------------------------------------------
-
-    # Coerce "port" to a string, if it is provided as an integer.
-    if "port" in kwargs:
-        port = kwargs["port"]
-        kwargs["port"] = str(port) if isinstance(port, int) else port
-
-    # Replace "netloc" with "host" and "port".
-    if "netloc" in kwargs:
-        netloc = kwargs.pop("netloc") or ""
-        kwargs["host"], _, kwargs["port"] = netloc.partition(":")
-
-    # Replace "username" and/or "password" with "userinfo".
-    if "username" in kwargs or "password" in kwargs:
-        username = quote(kwargs.pop("username", "") or "")
-        password = quote(kwargs.pop("password", "") or "")
-        kwargs["userinfo"] = f"{username}:{password}" if password else username
-
-    # Replace "raw_path" with "path" and "query".
-    if "raw_path" in kwargs:
-        raw_path = kwargs.pop("raw_path") or ""
-        kwargs["path"], separator, kwargs["query"] = raw_path.partition("?")
-        if not separator:
-            kwargs["query"] = None
-
-    # Ensure that IPv6 "host" addresses are always escaped with "[...]".
-    if "host" in kwargs:
-        host = kwargs.get("host") or ""
-        if ":" in host and not (host.startswith("[") and host.endswith("]")):
-            kwargs["host"] = f"[{host}]"
-
-    # If any keyword arguments are provided, ensure they are valid.
-    # -------------------------------------------------------------
-
-    for key, value in kwargs.items():
-        if value is not None:
-            if len(value) > MAX_URL_LENGTH:
-                raise InvalidURL(f"URL component '{key}' too long")
-
-            # If a component includes any ASCII control characters including \t, \r, \n,
-            # then treat it as invalid.
- if any(char.isascii() and not char.isprintable() for char in value): - raise InvalidURL( - f"Invalid non-printable ASCII character in URL component '{key}'" - ) - - # Ensure that keyword arguments match as a valid regex. - if not COMPONENT_REGEX[key].fullmatch(value): - raise InvalidURL(f"Invalid URL component '{key}'") - - # The URL_REGEX will always match, but may have empty components. - url_match = URL_REGEX.match(url) - assert url_match is not None - url_dict = url_match.groupdict() - - # * 'scheme', 'authority', and 'path' may be empty strings. - # * 'query' may be 'None', indicating no trailing "?" portion. - # Any string including the empty string, indicates a trailing "?". - # * 'fragment' may be 'None', indicating no trailing "#" portion. - # Any string including the empty string, indicates a trailing "#". - scheme = kwargs.get("scheme", url_dict["scheme"]) or "" - authority = kwargs.get("authority", url_dict["authority"]) or "" - path = kwargs.get("path", url_dict["path"]) or "" - query = kwargs.get("query", url_dict["query"]) - fragment = kwargs.get("fragment", url_dict["fragment"]) - - # The AUTHORITY_REGEX will always match, but may have empty components. - authority_match = AUTHORITY_REGEX.match(authority) - assert authority_match is not None - authority_dict = authority_match.groupdict() - - # * 'userinfo' and 'host' may be empty strings. - # * 'port' may be 'None'. - userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or "" - host = kwargs.get("host", authority_dict["host"]) or "" - port = kwargs.get("port", authority_dict["port"]) - - # Normalize and validate each component. - # We end up with a parsed representation of the URL, - # with components that are plain ASCII bytestrings. - parsed_scheme: str = scheme.lower() - parsed_userinfo: str = quote(userinfo, safe=SUB_DELIMS + ":") - parsed_host: str = encode_host(host) - parsed_port: typing.Optional[int] = normalize_port(port, scheme) - - has_scheme = parsed_scheme != "" - has_authority = ( - parsed_userinfo != "" or parsed_host != "" or parsed_port is not None - ) - validate_path(path, has_scheme=has_scheme, has_authority=has_authority) - if has_authority: - path = normalize_path(path) - - # The GEN_DELIMS set is... : / ? # [ ] @ - # These do not need to be percent-quoted unless they serve as delimiters for the - # specific component. - - # For 'path' we need to drop ? and # from the GEN_DELIMS set. - parsed_path: str = quote(path, safe=SUB_DELIMS + ":/[]@") - # For 'query' we need to drop '#' from the GEN_DELIMS set. - # We also exclude '/' because it is more robust to replace it with a percent - # encoding despite it not being a requirement of the spec. - parsed_query: typing.Optional[str] = ( - None if query is None else quote(query, safe=SUB_DELIMS + ":?[]@") - ) - # For 'fragment' we can include all of the GEN_DELIMS set. - parsed_fragment: typing.Optional[str] = ( - None if fragment is None else quote(fragment, safe=SUB_DELIMS + ":/?#[]@") - ) - - # The parsed ASCII bytestrings are our canonical form. - # All properties of the URL are derived from these. - return ParseResult( - parsed_scheme, - parsed_userinfo, - parsed_host, - parsed_port, - parsed_path, - parsed_query, - parsed_fragment, - ) - - -def encode_host(host: str) -> str: - if not host: - return "" - - elif IPv4_STYLE_HOSTNAME.match(host): - # Validate IPv4 hostnames like #.#.#.# - # - # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 - # - # IPv4address = dec-octet "." dec-octet "." dec-octet "." 
dec-octet - try: - ipaddress.IPv4Address(host) - except ipaddress.AddressValueError: - raise InvalidURL(f"Invalid IPv4 address: {host!r}") - return host - - elif IPv6_STYLE_HOSTNAME.match(host): - # Validate IPv6 hostnames like [...] - # - # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 - # - # "A host identified by an Internet Protocol literal address, version 6 - # [RFC3513] or later, is distinguished by enclosing the IP literal - # within square brackets ("[" and "]"). This is the only place where - # square bracket characters are allowed in the URI syntax." - try: - ipaddress.IPv6Address(host[1:-1]) - except ipaddress.AddressValueError: - raise InvalidURL(f"Invalid IPv6 address: {host!r}") - return host[1:-1] - - elif host.isascii(): - # Regular ASCII hostnames - # - # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 - # - # reg-name = *( unreserved / pct-encoded / sub-delims ) - return quote(host.lower(), safe=SUB_DELIMS) - - # IDNA hostnames - try: - return idna.encode(host.lower()).decode("ascii") - except idna.IDNAError: - raise InvalidURL(f"Invalid IDNA hostname: {host!r}") - - -def normalize_port( - port: typing.Optional[typing.Union[str, int]], scheme: str -) -> typing.Optional[int]: - # From https://tools.ietf.org/html/rfc3986#section-3.2.3 - # - # "A scheme may define a default port. For example, the "http" scheme - # defines a default port of "80", corresponding to its reserved TCP - # port number. The type of port designated by the port number (e.g., - # TCP, UDP, SCTP) is defined by the URI scheme. URI producers and - # normalizers should omit the port component and its ":" delimiter if - # port is empty or if its value would be the same as that of the - # scheme's default." - if port is None or port == "": - return None - - try: - port_as_int = int(port) - except ValueError: - raise InvalidURL(f"Invalid port: {port!r}") - - # See https://url.spec.whatwg.org/#url-miscellaneous - default_port = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}.get( - scheme - ) - if port_as_int == default_port: - return None - return port_as_int - - -def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None: - """ - Path validation rules that depend on if the URL contains a scheme or authority component. - - See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3 - """ - if has_authority: - # > If a URI contains an authority component, then the path component - # > must either be empty or begin with a slash ("/") character." - if path and not path.startswith("/"): - raise InvalidURL("For absolute URLs, path must be empty or begin with '/'") - else: - # > If a URI does not contain an authority component, then the path cannot begin - # > with two slash characters ("//"). - if path.startswith("//"): - raise InvalidURL( - "URLs with no authority component cannot have a path starting with '//'" - ) - # > In addition, a URI reference (Section 4.1) may be a relative-path reference, in which - # > case the first path segment cannot contain a colon (":") character. - if path.startswith(":") and not has_scheme: - raise InvalidURL( - "URLs with no scheme component cannot have a path starting with ':'" - ) - - -def normalize_path(path: str) -> str: - """ - Drop "." and ".." segments from a URL path. 
- - For example: - - normalize_path("/path/./to/somewhere/..") == "/path/to" - """ - # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4 - components = path.split("/") - output: typing.List[str] = [] - for component in components: - if component == ".": - pass - elif component == "..": - if output and output != [""]: - output.pop() - else: - output.append(component) - return "/".join(output) - - -def percent_encode(char: str) -> str: - """ - Replace a single character with the percent-encoded representation. - - Characters outside the ASCII range are represented with their a percent-encoded - representation of their UTF-8 byte sequence. - - For example: - - percent_encode(" ") == "%20" - """ - return "".join([f"%{byte:02x}" for byte in char.encode("utf-8")]).upper() - - -def is_safe(string: str, safe: str = "/") -> bool: - """ - Determine if a given string is already quote-safe. - """ - NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe + "%" - - # All characters must already be non-escaping or '%' - for char in string: - if char not in NON_ESCAPED_CHARS: - return False - - # Any '%' characters must be valid '%xx' escape sequences. - return string.count("%") == len(PERCENT_ENCODED_REGEX.findall(string)) - - -def quote(string: str, safe: str = "/") -> str: - """ - Use percent-encoding to quote a string if required. - """ - if is_safe(string, safe=safe): - return string - - NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe - return "".join( - [char if char in NON_ESCAPED_CHARS else percent_encode(char) for char in string] - ) - - -def urlencode(items: typing.List[typing.Tuple[str, str]]) -> str: - # We can use a much simpler version of the stdlib urlencode here because - # we don't need to handle a bunch of different typing cases, such as bytes vs str. - # - # https://github.com/python/cpython/blob/b2f7b2ef0b5421e01efb8c7bee2ef95d3bab77eb/Lib/urllib/parse.py#L926 - # - # Note that we use '%20' encoding for spaces. and '%2F for '/'. - # This is slightly different than `requests`, but is the behaviour that browsers use. 
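Concretely, given the `quote(..., safe="")` calls in this `urlencode`, spaces and slashes in both keys and values come out percent-encoded. A quick illustration, assuming the `urlencode` defined in this module is in scope (the issue links continue below):

.. code-block:: pycon

    >>> urlencode([("q", "a b"), ("path", "a/b")])
    'q=a%20b&path=a%2Fb'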
- # - # See - # - https://github.com/encode/httpx/issues/2536 - # - https://github.com/encode/httpx/issues/2721 - # - https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode - return "&".join([quote(k, safe="") + "=" + quote(v, safe="") for k, v in items]) diff --git a/server/venv/Lib/site-packages/httpx/_urls.py b/server/venv/Lib/site-packages/httpx/_urls.py deleted file mode 100644 index b023941..0000000 --- a/server/venv/Lib/site-packages/httpx/_urls.py +++ /dev/null @@ -1,642 +0,0 @@ -import typing -from urllib.parse import parse_qs, unquote - -import idna - -from ._types import QueryParamTypes, RawURL, URLTypes -from ._urlparse import urlencode, urlparse -from ._utils import primitive_value_to_str - - -class URL: - """ - url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink") - - assert url.scheme == "https" - assert url.username == "jo@email.com" - assert url.password == "a secret" - assert url.userinfo == b"jo%40email.com:a%20secret" - assert url.host == "müller.de" - assert url.raw_host == b"xn--mller-kva.de" - assert url.port == 1234 - assert url.netloc == b"xn--mller-kva.de:1234" - assert url.path == "/pa th" - assert url.query == b"?search=ab" - assert url.raw_path == b"/pa%20th?search=ab" - assert url.fragment == "anchorlink" - - The components of a URL are broken down like this: - - https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink - [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment] - [ userinfo ] [ netloc ][ raw_path ] - - Note that: - - * `url.scheme` is normalized to always be lowercased. - - * `url.host` is normalized to always be lowercased. Internationalized domain - names are represented in unicode, without IDNA encoding applied. For instance: - - url = httpx.URL("http://中国.icom.museum") - assert url.host == "中国.icom.museum" - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.host == "中国.icom.museum" - - * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded. - - url = httpx.URL("http://中国.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - * `url.port` is either None or an integer. URLs that include the default port for - "http", "https", "ws", "wss", and "ftp" schemes have their port normalized to `None`. - - assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80") - assert httpx.URL("http://example.com").port is None - assert httpx.URL("http://example.com:80").port is None - - * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work with - `url.username` and `url.password` instead, which handle the URL escaping. - - * `url.raw_path` is raw bytes of both the path and query, without URL escaping. - This portion is used as the target when constructing HTTP requests. Usually you'll - want to work with `url.path` instead. - - * `url.query` is raw bytes, without URL escaping. A URL query string portion can only - be properly URL escaped when decoding the parameter names and values themselves. 
- """ - - def __init__( - self, url: typing.Union["URL", str] = "", **kwargs: typing.Any - ) -> None: - if kwargs: - allowed = { - "scheme": str, - "username": str, - "password": str, - "userinfo": bytes, - "host": str, - "port": int, - "netloc": bytes, - "path": str, - "query": bytes, - "raw_path": bytes, - "fragment": str, - "params": object, - } - - # Perform type checking for all supported keyword arguments. - for key, value in kwargs.items(): - if key not in allowed: - message = f"{key!r} is an invalid keyword argument for URL()" - raise TypeError(message) - if value is not None and not isinstance(value, allowed[key]): - expected = allowed[key].__name__ - seen = type(value).__name__ - message = f"Argument {key!r} must be {expected} but got {seen}" - raise TypeError(message) - if isinstance(value, bytes): - kwargs[key] = value.decode("ascii") - - if "params" in kwargs: - # Replace any "params" keyword with the raw "query" instead. - # - # Ensure that empty params use `kwargs["query"] = None` rather - # than `kwargs["query"] = ""`, so that generated URLs do not - # include an empty trailing "?". - params = kwargs.pop("params") - kwargs["query"] = None if not params else str(QueryParams(params)) - - if isinstance(url, str): - self._uri_reference = urlparse(url, **kwargs) - elif isinstance(url, URL): - self._uri_reference = url._uri_reference.copy_with(**kwargs) - else: - raise TypeError( - f"Invalid type for url. Expected str or httpx.URL, got {type(url)}: {url!r}" - ) - - @property - def scheme(self) -> str: - """ - The URL scheme, such as "http", "https". - Always normalised to lowercase. - """ - return self._uri_reference.scheme - - @property - def raw_scheme(self) -> bytes: - """ - The raw bytes representation of the URL scheme, such as b"http", b"https". - Always normalised to lowercase. - """ - return self._uri_reference.scheme.encode("ascii") - - @property - def userinfo(self) -> bytes: - """ - The URL userinfo as a raw bytestring. - For example: b"jo%40email.com:a%20secret". - """ - return self._uri_reference.userinfo.encode("ascii") - - @property - def username(self) -> str: - """ - The URL username as a string, with URL decoding applied. - For example: "jo@email.com" - """ - userinfo = self._uri_reference.userinfo - return unquote(userinfo.partition(":")[0]) - - @property - def password(self) -> str: - """ - The URL password as a string, with URL decoding applied. - For example: "a secret" - """ - userinfo = self._uri_reference.userinfo - return unquote(userinfo.partition(":")[2]) - - @property - def host(self) -> str: - """ - The URL host as a string. - Always normalized to lowercase, with IDNA hosts decoded into unicode. - - Examples: - - url = httpx.URL("http://www.EXAMPLE.org") - assert url.host == "www.example.org" - - url = httpx.URL("http://中国.icom.museum") - assert url.host == "中国.icom.museum" - - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.host == "中国.icom.museum" - - url = httpx.URL("https://[::ffff:192.168.0.1]") - assert url.host == "::ffff:192.168.0.1" - """ - host: str = self._uri_reference.host - - if host.startswith("xn--"): - host = idna.decode(host) - - return host - - @property - def raw_host(self) -> bytes: - """ - The raw bytes representation of the URL host. - Always normalized to lowercase, and IDNA encoded. 
- - Examples: - - url = httpx.URL("http://www.EXAMPLE.org") - assert url.raw_host == b"www.example.org" - - url = httpx.URL("http://中国.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - url = httpx.URL("https://[::ffff:192.168.0.1]") - assert url.raw_host == b"::ffff:192.168.0.1" - """ - return self._uri_reference.host.encode("ascii") - - @property - def port(self) -> typing.Optional[int]: - """ - The URL port as an integer. - - Note that the URL class performs port normalization as per the WHATWG spec. - Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always - treated as `None`. - - For example: - - assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80") - assert httpx.URL("http://www.example.com:80").port is None - """ - return self._uri_reference.port - - @property - def netloc(self) -> bytes: - """ - Either `` or `:` as bytes. - Always normalized to lowercase, and IDNA encoded. - - This property may be used for generating the value of a request - "Host" header. - """ - return self._uri_reference.netloc.encode("ascii") - - @property - def path(self) -> str: - """ - The URL path as a string. Excluding the query string, and URL decoded. - - For example: - - url = httpx.URL("https://example.com/pa%20th") - assert url.path == "/pa th" - """ - path = self._uri_reference.path or "/" - return unquote(path) - - @property - def query(self) -> bytes: - """ - The URL query string, as raw bytes, excluding the leading b"?". - - This is necessarily a bytewise interface, because we cannot - perform URL decoding of this representation until we've parsed - the keys and values into a QueryParams instance. - - For example: - - url = httpx.URL("https://example.com/?filter=some%20search%20terms") - assert url.query == b"filter=some%20search%20terms" - """ - query = self._uri_reference.query or "" - return query.encode("ascii") - - @property - def params(self) -> "QueryParams": - """ - The URL query parameters, neatly parsed and packaged into an immutable - multidict representation. - """ - return QueryParams(self._uri_reference.query) - - @property - def raw_path(self) -> bytes: - """ - The complete URL path and query string as raw bytes. - Used as the target when constructing HTTP requests. - - For example: - - GET /users?search=some%20text HTTP/1.1 - Host: www.example.org - Connection: close - """ - path = self._uri_reference.path or "/" - if self._uri_reference.query is not None: - path += "?" + self._uri_reference.query - return path.encode("ascii") - - @property - def fragment(self) -> str: - """ - The URL fragments, as used in HTML anchors. - As a string, without the leading '#'. - """ - return unquote(self._uri_reference.fragment or "") - - @property - def raw(self) -> RawURL: - """ - Provides the (scheme, host, port, target) for the outgoing request. - - In older versions of `httpx` this was used in the low-level transport API. - We no longer use `RawURL`, and this property will be deprecated in a future release. - """ - return RawURL( - self.raw_scheme, - self.raw_host, - self.port, - self.raw_path, - ) - - @property - def is_absolute_url(self) -> bool: - """ - Return `True` for absolute URLs such as 'http://example.com/path', - and `False` for relative URLs such as '/path'. - """ - # We don't use `.is_absolute` from `rfc3986` because it treats - # URLs with a fragment portion as not absolute. 
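The upshot of this definition (continued below) is that a fragment does not stop a URL from being treated as absolute; only a missing scheme or host does:

.. code-block:: pycon

    >>> import httpx
    >>> httpx.URL("http://example.com/path#anchor").is_absolute_url
    True
    >>> httpx.URL("/path").is_absolute_url
    False
    >>> httpx.URL("/path").is_relative_url
    True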
- # What we actually care about is if the URL provides - # a scheme and hostname to which connections should be made. - return bool(self._uri_reference.scheme and self._uri_reference.host) - - @property - def is_relative_url(self) -> bool: - """ - Return `False` for absolute URLs such as 'http://example.com/path', - and `True` for relative URLs such as '/path'. - """ - return not self.is_absolute_url - - def copy_with(self, **kwargs: typing.Any) -> "URL": - """ - Copy this URL, returning a new URL with some components altered. - Accepts the same set of parameters as the components that are made - available via properties on the `URL` class. - - For example: - - url = httpx.URL("https://www.example.com").copy_with(username="jo@gmail.com", password="a secret") - assert url == "https://jo%40email.com:a%20secret@www.example.com" - """ - return URL(self, **kwargs) - - def copy_set_param(self, key: str, value: typing.Any = None) -> "URL": - return self.copy_with(params=self.params.set(key, value)) - - def copy_add_param(self, key: str, value: typing.Any = None) -> "URL": - return self.copy_with(params=self.params.add(key, value)) - - def copy_remove_param(self, key: str) -> "URL": - return self.copy_with(params=self.params.remove(key)) - - def copy_merge_params(self, params: QueryParamTypes) -> "URL": - return self.copy_with(params=self.params.merge(params)) - - def join(self, url: URLTypes) -> "URL": - """ - Return an absolute URL, using this URL as the base. - - Eg. - - url = httpx.URL("https://www.example.com/test") - url = url.join("/new/path") - assert url == "https://www.example.com/new/path" - """ - from urllib.parse import urljoin - - return URL(urljoin(str(self), str(URL(url)))) - - def __hash__(self) -> int: - return hash(str(self)) - - def __eq__(self, other: typing.Any) -> bool: - return isinstance(other, (URL, str)) and str(self) == str(URL(other)) - - def __str__(self) -> str: - return str(self._uri_reference) - - def __repr__(self) -> str: - scheme, userinfo, host, port, path, query, fragment = self._uri_reference - - if ":" in userinfo: - # Mask any password component. - userinfo = f'{userinfo.split(":")[0]}:[secure]' - - authority = "".join( - [ - f"{userinfo}@" if userinfo else "", - f"[{host}]" if ":" in host else host, - f":{port}" if port is not None else "", - ] - ) - url = "".join( - [ - f"{self.scheme}:" if scheme else "", - f"//{authority}" if authority else "", - path, - f"?{query}" if query is not None else "", - f"#{fragment}" if fragment is not None else "", - ] - ) - - return f"{self.__class__.__name__}({url!r})" - - -class QueryParams(typing.Mapping[str, str]): - """ - URL query parameters, as a multi-dict. - """ - - def __init__( - self, *args: typing.Optional[QueryParamTypes], **kwargs: typing.Any - ) -> None: - assert len(args) < 2, "Too many arguments." - assert not (args and kwargs), "Cannot mix named and unnamed arguments." 
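The branches that follow normalise every supported input style into the same internal multi-dict, so these constructions all compare equal:

.. code-block:: pycon

    >>> import httpx
    >>> a = httpx.QueryParams("a=123&a=456&b=789")
    >>> b = httpx.QueryParams([("a", "123"), ("a", "456"), ("b", "789")])
    >>> c = httpx.QueryParams({"a": ["123", "456"], "b": "789"})
    >>> a == b == c
    True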
- - value = args[0] if args else kwargs - - if value is None or isinstance(value, (str, bytes)): - value = value.decode("ascii") if isinstance(value, bytes) else value - self._dict = parse_qs(value, keep_blank_values=True) - elif isinstance(value, QueryParams): - self._dict = {k: list(v) for k, v in value._dict.items()} - else: - dict_value: typing.Dict[typing.Any, typing.List[typing.Any]] = {} - if isinstance(value, (list, tuple)): - # Convert list inputs like: - # [("a", "123"), ("a", "456"), ("b", "789")] - # To a dict representation, like: - # {"a": ["123", "456"], "b": ["789"]} - for item in value: - dict_value.setdefault(item[0], []).append(item[1]) - else: - # Convert dict inputs like: - # {"a": "123", "b": ["456", "789"]} - # To dict inputs where values are always lists, like: - # {"a": ["123"], "b": ["456", "789"]} - dict_value = { - k: list(v) if isinstance(v, (list, tuple)) else [v] - for k, v in value.items() - } - - # Ensure that keys and values are neatly coerced to strings. - # We coerce values `True` and `False` to JSON-like "true" and "false" - # representations, and coerce `None` values to the empty string. - self._dict = { - str(k): [primitive_value_to_str(item) for item in v] - for k, v in dict_value.items() - } - - def keys(self) -> typing.KeysView[str]: - """ - Return all the keys in the query params. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.keys()) == ["a", "b"] - """ - return self._dict.keys() - - def values(self) -> typing.ValuesView[str]: - """ - Return all the values in the query params. If a key occurs more than once - only the first item for that key is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.values()) == ["123", "789"] - """ - return {k: v[0] for k, v in self._dict.items()}.values() - - def items(self) -> typing.ItemsView[str, str]: - """ - Return all items in the query params. If a key occurs more than once - only the first item for that key is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.items()) == [("a", "123"), ("b", "789")] - """ - return {k: v[0] for k, v in self._dict.items()}.items() - - def multi_items(self) -> typing.List[typing.Tuple[str, str]]: - """ - Return all items in the query params. Allow duplicate keys to occur. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] - """ - multi_items: typing.List[typing.Tuple[str, str]] = [] - for k, v in self._dict.items(): - multi_items.extend([(k, i) for i in v]) - return multi_items - - def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: - """ - Get a value from the query param for a given key. If the key occurs - more than once, then only the first value is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert q.get("a") == "123" - """ - if key in self._dict: - return self._dict[str(key)][0] - return default - - def get_list(self, key: str) -> typing.List[str]: - """ - Get all values from the query param for a given key. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert q.get_list("a") == ["123", "456"] - """ - return list(self._dict.get(str(key), [])) - - def set(self, key: str, value: typing.Any = None) -> "QueryParams": - """ - Return a new QueryParams instance, setting the value of a key. 
- - Usage: - - q = httpx.QueryParams("a=123") - q = q.set("a", "456") - assert q == httpx.QueryParams("a=456") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict[str(key)] = [primitive_value_to_str(value)] - return q - - def add(self, key: str, value: typing.Any = None) -> "QueryParams": - """ - Return a new QueryParams instance, setting or appending the value of a key. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.add("a", "456") - assert q == httpx.QueryParams("a=123&a=456") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] - return q - - def remove(self, key: str) -> "QueryParams": - """ - Return a new QueryParams instance, removing the value of a key. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.remove("a") - assert q == httpx.QueryParams("") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict.pop(str(key), None) - return q - - def merge(self, params: typing.Optional[QueryParamTypes] = None) -> "QueryParams": - """ - Return a new QueryParams instance, updated with. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.merge({"b": "456"}) - assert q == httpx.QueryParams("a=123&b=456") - - q = httpx.QueryParams("a=123") - q = q.merge({"a": "456", "b": "789"}) - assert q == httpx.QueryParams("a=456&b=789") - """ - q = QueryParams(params) - q._dict = {**self._dict, **q._dict} - return q - - def __getitem__(self, key: typing.Any) -> str: - return self._dict[key][0] - - def __contains__(self, key: typing.Any) -> bool: - return key in self._dict - - def __iter__(self) -> typing.Iterator[typing.Any]: - return iter(self.keys()) - - def __len__(self) -> int: - return len(self._dict) - - def __bool__(self) -> bool: - return bool(self._dict) - - def __hash__(self) -> int: - return hash(str(self)) - - def __eq__(self, other: typing.Any) -> bool: - if not isinstance(other, self.__class__): - return False - return sorted(self.multi_items()) == sorted(other.multi_items()) - - def __str__(self) -> str: - """ - Note that we use '%20' encoding for spaces, and treat '/' as a safe - character. - - See https://github.com/encode/httpx/issues/2536 and - https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode - """ - return urlencode(self.multi_items()) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - query_string = str(self) - return f"{class_name}({query_string!r})" - - def update(self, params: typing.Optional[QueryParamTypes] = None) -> None: - raise RuntimeError( - "QueryParams are immutable since 0.18.0. " - "Use `q = q.merge(...)` to create an updated copy." - ) - - def __setitem__(self, key: str, value: str) -> None: - raise RuntimeError( - "QueryParams are immutable since 0.18.0. " - "Use `q = q.set(key, value)` to create an updated copy." 
- ) diff --git a/server/venv/Lib/site-packages/httpx/_utils.py b/server/venv/Lib/site-packages/httpx/_utils.py deleted file mode 100644 index 1775b1a..0000000 --- a/server/venv/Lib/site-packages/httpx/_utils.py +++ /dev/null @@ -1,477 +0,0 @@ -import codecs -import email.message -import ipaddress -import mimetypes -import os -import re -import time -import typing -from pathlib import Path -from urllib.request import getproxies - -import sniffio - -from ._types import PrimitiveData - -if typing.TYPE_CHECKING: # pragma: no cover - from ._urls import URL - - -_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} -_HTML5_FORM_ENCODING_REPLACEMENTS.update( - {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B} -) -_HTML5_FORM_ENCODING_RE = re.compile( - r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) -) - - -def normalize_header_key( - value: typing.Union[str, bytes], - lower: bool, - encoding: typing.Optional[str] = None, -) -> bytes: - """ - Coerce str/bytes into a strictly byte-wise HTTP header key. - """ - if isinstance(value, bytes): - bytes_value = value - else: - bytes_value = value.encode(encoding or "ascii") - - return bytes_value.lower() if lower else bytes_value - - -def normalize_header_value( - value: typing.Union[str, bytes], encoding: typing.Optional[str] = None -) -> bytes: - """ - Coerce str/bytes into a strictly byte-wise HTTP header value. - """ - if isinstance(value, bytes): - return value - return value.encode(encoding or "ascii") - - -def primitive_value_to_str(value: "PrimitiveData") -> str: - """ - Coerce a primitive data type into a string value. - - Note that we prefer JSON-style 'true'/'false' for boolean values here. - """ - if value is True: - return "true" - elif value is False: - return "false" - elif value is None: - return "" - return str(value) - - -def is_known_encoding(encoding: str) -> bool: - """ - Return `True` if `encoding` is a known codec. - """ - try: - codecs.lookup(encoding) - except LookupError: - return False - return True - - -def format_form_param(name: str, value: str) -> bytes: - """ - Encode a name/value pair within a multipart form. - """ - - def replacer(match: typing.Match[str]) -> str: - return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] - - value = _HTML5_FORM_ENCODING_RE.sub(replacer, value) - return f'{name}="{value}"'.encode() - - -# Null bytes; no need to recreate these on each call to guess_json_utf -_null = b"\x00" -_null2 = _null * 2 -_null3 = _null * 3 - - -def guess_json_utf(data: bytes) -> typing.Optional[str]: - # JSON always starts with two ASCII characters, so detection is as - # easy as counting the nulls and from their location and count - # determine the encoding. Also detect a BOM, if present. 
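The null-byte patterns counted below come straight from how the first two ASCII characters of a JSON document are laid out in each encoding. For example:

.. code-block:: pycon

    >>> '{"'.encode("utf-8")
    b'{"'
    >>> '{"'.encode("utf-16-le")
    b'{\x00"\x00'
    >>> '{'.encode("utf-32-le")
    b'{\x00\x00\x00'

No nulls in the first four bytes means UTF-8; nulls in the second and fourth positions mean UTF-16-LE; three trailing nulls mean UTF-32-LE, and so on for the big-endian variants.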
- sample = data[:4] - if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): - return "utf-32" # BOM included - if sample[:3] == codecs.BOM_UTF8: - return "utf-8-sig" # BOM included, MS style (discouraged) - if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): - return "utf-16" # BOM included - nullcount = sample.count(_null) - if nullcount == 0: - return "utf-8" - if nullcount == 2: - if sample[::2] == _null2: # 1st and 3rd are null - return "utf-16-be" - if sample[1::2] == _null2: # 2nd and 4th are null - return "utf-16-le" - # Did not detect 2 valid UTF-16 ascii-range characters - if nullcount == 3: - if sample[:3] == _null3: - return "utf-32-be" - if sample[1:] == _null3: - return "utf-32-le" - # Did not detect a valid UTF-32 ascii-range character - return None - - -def get_ca_bundle_from_env() -> typing.Optional[str]: - if "SSL_CERT_FILE" in os.environ: - ssl_file = Path(os.environ["SSL_CERT_FILE"]) - if ssl_file.is_file(): - return str(ssl_file) - if "SSL_CERT_DIR" in os.environ: - ssl_path = Path(os.environ["SSL_CERT_DIR"]) - if ssl_path.is_dir(): - return str(ssl_path) - return None - - -def parse_header_links(value: str) -> typing.List[typing.Dict[str, str]]: - """ - Returns a list of parsed link headers, for more info see: - https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link - The generic syntax of those is: - Link: < uri-reference >; param1=value1; param2="value2" - So for instance: - Link; '; type="image/jpeg",;' - would return - [ - {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, - {"url": "http://.../back.jpeg"}, - ] - :param value: HTTP Link entity-header field - :return: list of parsed link headers - """ - links: typing.List[typing.Dict[str, str]] = [] - replace_chars = " '\"" - value = value.strip(replace_chars) - if not value: - return links - for val in re.split(", *<", value): - try: - url, params = val.split(";", 1) - except ValueError: - url, params = val, "" - link = {"url": url.strip("<> '\"")} - for param in params.split(";"): - try: - key, value = param.split("=") - except ValueError: - break - link[key.strip(replace_chars)] = value.strip(replace_chars) - links.append(link) - return links - - -def parse_content_type_charset(content_type: str) -> typing.Optional[str]: - # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery. - # See: https://peps.python.org/pep-0594/#cgi - msg = email.message.Message() - msg["content-type"] = content_type - return msg.get_content_charset(failobj=None) - - -SENSITIVE_HEADERS = {"authorization", "proxy-authorization"} - - -def obfuscate_sensitive_headers( - items: typing.Iterable[typing.Tuple[typing.AnyStr, typing.AnyStr]] -) -> typing.Iterator[typing.Tuple[typing.AnyStr, typing.AnyStr]]: - for k, v in items: - if to_str(k.lower()) in SENSITIVE_HEADERS: - v = to_bytes_or_str("[secure]", match_type_of=v) - yield k, v - - -def port_or_default(url: "URL") -> typing.Optional[int]: - if url.port is not None: - return url.port - return {"http": 80, "https": 443}.get(url.scheme) - - -def same_origin(url: "URL", other: "URL") -> bool: - """ - Return 'True' if the given URLs share the same origin. 
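    Scheme, host, and effective port must all match. Default ports compare equal
    to explicit ones, so "http://example.com" and "http://example.com:80" count
    as the same origin (see `port_or_default` above).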
- """ - return ( - url.scheme == other.scheme - and url.host == other.host - and port_or_default(url) == port_or_default(other) - ) - - -def is_https_redirect(url: "URL", location: "URL") -> bool: - """ - Return 'True' if 'location' is a HTTPS upgrade of 'url' - """ - if url.host != location.host: - return False - - return ( - url.scheme == "http" - and port_or_default(url) == 80 - and location.scheme == "https" - and port_or_default(location) == 443 - ) - - -def get_environment_proxies() -> typing.Dict[str, typing.Optional[str]]: - """Gets proxy information from the environment""" - - # urllib.request.getproxies() falls back on System - # Registry and Config for proxies on Windows and macOS. - # We don't want to propagate non-HTTP proxies into - # our configuration such as 'TRAVIS_APT_PROXY'. - proxy_info = getproxies() - mounts: typing.Dict[str, typing.Optional[str]] = {} - - for scheme in ("http", "https", "all"): - if proxy_info.get(scheme): - hostname = proxy_info[scheme] - mounts[f"{scheme}://"] = ( - hostname if "://" in hostname else f"http://{hostname}" - ) - - no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")] - for hostname in no_proxy_hosts: - # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details - # on how names in `NO_PROXY` are handled. - if hostname == "*": - # If NO_PROXY=* is used or if "*" occurs as any one of the comma - # separated hostnames, then we should just bypass any information - # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore - # proxies. - return {} - elif hostname: - # NO_PROXY=.google.com is marked as "all://*.google.com, - # which disables "www.google.com" but not "google.com" - # NO_PROXY=google.com is marked as "all://*google.com, - # which disables "www.google.com" and "google.com". - # (But not "wwwgoogle.com") - # NO_PROXY can include domains, IPv6, IPv4 addresses and "localhost" - # NO_PROXY=example.com,::1,localhost,192.168.0.0/16 - if is_ipv4_hostname(hostname): - mounts[f"all://{hostname}"] = None - elif is_ipv6_hostname(hostname): - mounts[f"all://[{hostname}]"] = None - elif hostname.lower() == "localhost": - mounts[f"all://{hostname}"] = None - else: - mounts[f"all://*{hostname}"] = None - - return mounts - - -def to_bytes(value: typing.Union[str, bytes], encoding: str = "utf-8") -> bytes: - return value.encode(encoding) if isinstance(value, str) else value - - -def to_str(value: typing.Union[str, bytes], encoding: str = "utf-8") -> str: - return value if isinstance(value, str) else value.decode(encoding) - - -def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr: - return value if isinstance(match_type_of, str) else value.encode() - - -def unquote(value: str) -> str: - return value[1:-1] if value[0] == value[-1] == '"' else value - - -def guess_content_type(filename: typing.Optional[str]) -> typing.Optional[str]: - if filename: - return mimetypes.guess_type(filename)[0] or "application/octet-stream" - return None - - -def peek_filelike_length(stream: typing.Any) -> typing.Optional[int]: - """ - Given a file-like stream object, return its length in number of bytes - without reading it into memory. - """ - try: - # Is it an actual file? - fd = stream.fileno() - # Yup, seems to be an actual file. - length = os.fstat(fd).st_size - except (AttributeError, OSError): - # No... Maybe it's something that supports random access, like `io.BytesIO`? - try: - # Assuming so, go to end of stream to figure out its length, - # then put it back in place. 
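The tell/seek dance that follows can be sketched in isolation: seeking to the end with `os.SEEK_END` returns the new absolute offset, i.e. the length, after which the original position is restored.

.. code-block:: pycon

    >>> import io, os
    >>> stream = io.BytesIO(b"hello world")
    >>> _ = stream.seek(3)              # pretend a caller already consumed some bytes
    >>> offset = stream.tell()
    >>> length = stream.seek(0, os.SEEK_END)
    >>> _ = stream.seek(offset)
    >>> (length, stream.tell())
    (11, 3)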
- offset = stream.tell() - length = stream.seek(0, os.SEEK_END) - stream.seek(offset) - except (AttributeError, OSError): - # Not even that? Sorry, we're doomed... - return None - - return length - - -class Timer: - async def _get_time(self) -> float: - library = sniffio.current_async_library() - if library == "trio": - import trio - - return trio.current_time() - elif library == "curio": # pragma: no cover - import curio - - return typing.cast(float, await curio.clock()) - - import asyncio - - return asyncio.get_event_loop().time() - - def sync_start(self) -> None: - self.started = time.perf_counter() - - async def async_start(self) -> None: - self.started = await self._get_time() - - def sync_elapsed(self) -> float: - now = time.perf_counter() - return now - self.started - - async def async_elapsed(self) -> float: - now = await self._get_time() - return now - self.started - - -class URLPattern: - """ - A utility class currently used for making lookups against proxy keys... - - # Wildcard matching... - >>> pattern = URLPattern("all://") - >>> pattern.matches(httpx.URL("http://example.com")) - True - - # Witch scheme matching... - >>> pattern = URLPattern("https://") - >>> pattern.matches(httpx.URL("https://example.com")) - True - >>> pattern.matches(httpx.URL("http://example.com")) - False - - # With domain matching... - >>> pattern = URLPattern("https://example.com") - >>> pattern.matches(httpx.URL("https://example.com")) - True - >>> pattern.matches(httpx.URL("http://example.com")) - False - >>> pattern.matches(httpx.URL("https://other.com")) - False - - # Wildcard scheme, with domain matching... - >>> pattern = URLPattern("all://example.com") - >>> pattern.matches(httpx.URL("https://example.com")) - True - >>> pattern.matches(httpx.URL("http://example.com")) - True - >>> pattern.matches(httpx.URL("https://other.com")) - False - - # With port matching... - >>> pattern = URLPattern("https://example.com:1234") - >>> pattern.matches(httpx.URL("https://example.com:1234")) - True - >>> pattern.matches(httpx.URL("https://example.com")) - False - """ - - def __init__(self, pattern: str) -> None: - from ._urls import URL - - if pattern and ":" not in pattern: - raise ValueError( - f"Proxy keys should use proper URL forms rather " - f"than plain scheme strings. 
" - f'Instead of "{pattern}", use "{pattern}://"' - ) - - url = URL(pattern) - self.pattern = pattern - self.scheme = "" if url.scheme == "all" else url.scheme - self.host = "" if url.host == "*" else url.host - self.port = url.port - if not url.host or url.host == "*": - self.host_regex: typing.Optional[typing.Pattern[str]] = None - elif url.host.startswith("*."): - # *.example.com should match "www.example.com", but not "example.com" - domain = re.escape(url.host[2:]) - self.host_regex = re.compile(f"^.+\\.{domain}$") - elif url.host.startswith("*"): - # *example.com should match "www.example.com" and "example.com" - domain = re.escape(url.host[1:]) - self.host_regex = re.compile(f"^(.+\\.)?{domain}$") - else: - # example.com should match "example.com" but not "www.example.com" - domain = re.escape(url.host) - self.host_regex = re.compile(f"^{domain}$") - - def matches(self, other: "URL") -> bool: - if self.scheme and self.scheme != other.scheme: - return False - if ( - self.host - and self.host_regex is not None - and not self.host_regex.match(other.host) - ): - return False - if self.port is not None and self.port != other.port: - return False - return True - - @property - def priority(self) -> typing.Tuple[int, int, int]: - """ - The priority allows URLPattern instances to be sortable, so that - we can match from most specific to least specific. - """ - # URLs with a port should take priority over URLs without a port. - port_priority = 0 if self.port is not None else 1 - # Longer hostnames should match first. - host_priority = -len(self.host) - # Longer schemes should match first. - scheme_priority = -len(self.scheme) - return (port_priority, host_priority, scheme_priority) - - def __hash__(self) -> int: - return hash(self.pattern) - - def __lt__(self, other: "URLPattern") -> bool: - return self.priority < other.priority - - def __eq__(self, other: typing.Any) -> bool: - return isinstance(other, URLPattern) and self.pattern == other.pattern - - -def is_ipv4_hostname(hostname: str) -> bool: - try: - ipaddress.IPv4Address(hostname.split("/")[0]) - except Exception: - return False - return True - - -def is_ipv6_hostname(hostname: str) -> bool: - try: - ipaddress.IPv6Address(hostname.split("/")[0]) - except Exception: - return False - return True diff --git a/server/venv/Lib/site-packages/httpx/py.typed b/server/venv/Lib/site-packages/httpx/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/idna-3.4.dist-info/INSTALLER b/server/venv/Lib/site-packages/idna-3.4.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/idna-3.4.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/idna-3.4.dist-info/LICENSE.md b/server/venv/Lib/site-packages/idna-3.4.dist-info/LICENSE.md deleted file mode 100644 index b6f8732..0000000 --- a/server/venv/Lib/site-packages/idna-3.4.dist-info/LICENSE.md +++ /dev/null @@ -1,29 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2013-2021, Kim Davies -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. 
Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/server/venv/Lib/site-packages/idna-3.4.dist-info/METADATA b/server/venv/Lib/site-packages/idna-3.4.dist-info/METADATA deleted file mode 100644 index 07f6193..0000000 --- a/server/venv/Lib/site-packages/idna-3.4.dist-info/METADATA +++ /dev/null @@ -1,242 +0,0 @@ -Metadata-Version: 2.1 -Name: idna -Version: 3.4 -Summary: Internationalized Domain Names in Applications (IDNA) -Author-email: Kim Davies -Requires-Python: >=3.5 -Description-Content-Type: text/x-rst -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.5 -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Internet :: Name Service (DNS) -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Utilities -Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst -Project-URL: Issue tracker, https://github.com/kjd/idna/issues -Project-URL: Source, https://github.com/kjd/idna - -Internationalized Domain Names in Applications (IDNA) -===================================================== - -Support for the Internationalized Domain Names in -Applications (IDNA) protocol as specified in `RFC 5891 -`_. This is the latest version of -the protocol and is sometimes referred to as “IDNA 2008”. - -This library also provides support for Unicode Technical -Standard 46, `Unicode IDNA Compatibility Processing -`_. - -This acts as a suitable replacement for the “encodings.idna” -module that comes with the Python standard library, but which -only supports the older superseded IDNA specification (`RFC 3490 -`_). 
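The practical difference shows up with, for example, the German sharp s, which the older standard maps away but IDNA 2008 preserves:

.. code-block:: pycon

    >>> 'faß.de'.encode('idna')        # stdlib, IDNA 2003 behaviour
    b'fass.de'
    >>> import idna
    >>> idna.encode('faß.de')          # IDNA 2008 behaviour
    b'xn--fa-hia.de'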
- -Basic functions are simply executed: - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - - -Installation ------------- - -This package is available for installation from PyPI: - -.. code-block:: bash - - $ python3 -m pip install idna - - -Usage ------ - -For typical usage, the ``encode`` and ``decode`` functions will take a -domain name argument and perform a conversion to A-labels or U-labels -respectively. - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - -You may use the codec encoding and decoding methods using the -``idna.codec`` module: - -.. code-block:: pycon - - >>> import idna.codec - >>> print('домен.испытание'.encode('idna')) - b'xn--d1acufc.xn--80akhbyknj4f' - >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna')) - домен.испытание - -Conversions can be applied at a per-label basis using the ``ulabel`` or -``alabel`` functions if necessary: - -.. code-block:: pycon - - >>> idna.alabel('测试') - b'xn--0zwm56d' - -Compatibility Mapping (UTS #46) -+++++++++++++++++++++++++++++++ - -As described in `RFC 5895 `_, the -IDNA specification does not normalize input from different potential -ways a user may input a domain name. This functionality, known as -a “mapping”, is considered by the specification to be a local -user-interface issue distinct from IDNA conversion functionality. - -This library provides one such mapping, that was developed by the -Unicode Consortium. Known as `Unicode IDNA Compatibility Processing -`_, it provides for both a regular -mapping for typical applications, as well as a transitional mapping to -help migrate from older IDNA 2003 applications. - -For example, “Königsgäßchen” is not a permissible label as *LATIN -CAPITAL LETTER K* is not allowed (nor are capital letters in general). -UTS 46 will convert this into lower case prior to applying the IDNA -conversion. - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('Königsgäßchen') - ... - idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed - >>> idna.encode('Königsgäßchen', uts46=True) - b'xn--knigsgchen-b4a3dun' - >>> print(idna.decode('xn--knigsgchen-b4a3dun')) - königsgäßchen - -Transitional processing provides conversions to help transition from -the older 2003 standard to the current standard. For example, in the -original IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was -converted into two *LATIN SMALL LETTER S* (ss), whereas in the current -IDNA specification this conversion is not performed. - -.. code-block:: pycon - - >>> idna.encode('Königsgäßchen', uts46=True, transitional=True) - 'xn--knigsgsschen-lcb0w' - -Implementors should use transitional processing with caution, only in -rare cases where conversion from legacy labels to current labels must be -performed (i.e. IDNA implementations that pre-date 2008). For typical -applications that just need to convert labels, transitional processing -is unlikely to be beneficial and could produce unexpected incompatible -results. - -``encodings.idna`` Compatibility -++++++++++++++++++++++++++++++++ - -Function calls from the Python built-in ``encodings.idna`` module are -mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. -Simply substitute the ``import`` clause in your code to refer to the new -module name. 
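For instance, code that previously called ``encodings.idna.ToASCII`` can use the drop-in equivalents:

.. code-block:: pycon

    >>> from idna import compat
    >>> compat.ToASCII('ドメイン.テスト')
    b'xn--eckwd4c7c.xn--zckzah'
    >>> compat.ToUnicode(b'xn--eckwd4c7c.xn--zckzah')
    'ドメイン.テスト'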
- -Exceptions ----------- - -All errors raised during the conversion following the specification -should raise an exception derived from the ``idna.IDNAError`` base -class. - -More specific exceptions that may be generated as ``idna.IDNABidiError`` -when the error reflects an illegal combination of left-to-right and -right-to-left characters in a label; ``idna.InvalidCodepoint`` when -a specific codepoint is an illegal character in an IDN label (i.e. -INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is -illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ -but the contextual requirements are not satisfied.) - -Building and Diagnostics ------------------------- - -The IDNA and UTS 46 functionality relies upon pre-calculated lookup -tables for performance. These tables are derived from computing against -eligibility criteria in the respective standards. These tables are -computed using the command-line script ``tools/idna-data``. - -This tool will fetch relevant codepoint data from the Unicode repository -and perform the required calculations to identify eligibility. There are -three main modes: - -* ``idna-data make-libdata``. Generates ``idnadata.py`` and - ``uts46data.py``, the pre-calculated lookup tables using for IDNA and - UTS 46 conversions. Implementors who wish to track this library against - a different Unicode version may use this tool to manually generate a - different version of the ``idnadata.py`` and ``uts46data.py`` files. - -* ``idna-data make-table``. Generate a table of the IDNA disposition - (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix - B.1 of RFC 5892 and the pre-computed tables published by `IANA - `_. - -* ``idna-data U+0061``. Prints debugging output on the various - properties associated with an individual Unicode codepoint (in this - case, U+0061), that are used to assess the IDNA and UTS 46 status of a - codepoint. This is helpful in debugging or analysis. - -The tool accepts a number of arguments, described using ``idna-data --h``. Most notably, the ``--version`` argument allows the specification -of the version of Unicode to use in computing the table data. For -example, ``idna-data --version 9.0.0 make-libdata`` will generate -library data against Unicode 9.0.0. - - -Additional Notes ----------------- - -* **Packages**. The latest tagged release version is published in the - `Python Package Index `_. - -* **Version support**. This library supports Python 3.5 and higher. - As this library serves as a low-level toolkit for a variety of - applications, many of which strive for broad compatibility with older - Python versions, there is no rush to remove older intepreter support. - Removing support for older versions should be well justified in that the - maintenance burden has become too high. - -* **Python 2**. Python 2 is supported by version 2.x of this library. - While active development of the version 2.x series has ended, notable - issues being corrected may be backported to 2.x. Use "idna<3" in your - requirements file if you need this library for a Python 2 application. - -* **Testing**. The library has a test suite based on each rule of the - IDNA specification, as well as tests that are provided as part of the - Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing - `_. - -* **Emoji**. It is an occasional request to support emoji domains in - this library. 
Encoding of symbols like emoji is expressly prohibited by - the technical standard IDNA 2008 and emoji domains are broadly phased - out across the domain industry due to associated security risks. For - now, applications that wish need to support these non-compliant labels - may wish to consider trying the encode/decode operation in this library - first, and then falling back to using `encodings.idna`. See `the Github - project `_ for more discussion. - diff --git a/server/venv/Lib/site-packages/idna-3.4.dist-info/RECORD b/server/venv/Lib/site-packages/idna-3.4.dist-info/RECORD deleted file mode 100644 index 599c400..0000000 --- a/server/venv/Lib/site-packages/idna-3.4.dist-info/RECORD +++ /dev/null @@ -1,23 +0,0 @@ -idna-3.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -idna-3.4.dist-info/LICENSE.md,sha256=otbk2UC9JNvnuWRc3hmpeSzFHbeuDVrNMBrIYMqj6DY,1523 -idna-3.4.dist-info/METADATA,sha256=8aLSf9MFS7oB26pZh2hprg7eJp0UJSc-3rpf_evp4DA,9830 -idna-3.4.dist-info/RECORD,, -idna-3.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -idna-3.4.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81 -idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 -idna/__pycache__/__init__.cpython-311.pyc,, -idna/__pycache__/codec.cpython-311.pyc,, -idna/__pycache__/compat.cpython-311.pyc,, -idna/__pycache__/core.cpython-311.pyc,, -idna/__pycache__/idnadata.cpython-311.pyc,, -idna/__pycache__/intranges.cpython-311.pyc,, -idna/__pycache__/package_data.cpython-311.pyc,, -idna/__pycache__/uts46data.cpython-311.pyc,, -idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374 -idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 -idna/core.py,sha256=1JxchwKzkxBSn7R_oCE12oBu3eVux0VzdxolmIad24M,12950 -idna/idnadata.py,sha256=xUjqKqiJV8Ho_XzBpAtv5JFoVPSupK-SUXvtjygUHqw,44375 -idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 -idna/package_data.py,sha256=C_jHJzmX8PI4xq0jpzmcTMxpb5lDsq4o5VyxQzlVrZE,21 -idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -idna/uts46data.py,sha256=zvjZU24s58_uAS850Mcd0NnD0X7_gCMAMjzWNIeUJdc,206539 diff --git a/server/venv/Lib/site-packages/idna-3.4.dist-info/REQUESTED b/server/venv/Lib/site-packages/idna-3.4.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/idna-3.4.dist-info/WHEEL b/server/venv/Lib/site-packages/idna-3.4.dist-info/WHEEL deleted file mode 100644 index 668ba4d..0000000 --- a/server/venv/Lib/site-packages/idna-3.4.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.7.1 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/server/venv/Lib/site-packages/idna/__init__.py b/server/venv/Lib/site-packages/idna/__init__.py deleted file mode 100644 index a40eeaf..0000000 --- a/server/venv/Lib/site-packages/idna/__init__.py +++ /dev/null @@ -1,44 +0,0 @@ -from .package_data import __version__ -from .core import ( - IDNABidiError, - IDNAError, - InvalidCodepoint, - InvalidCodepointContext, - alabel, - check_bidi, - check_hyphen_ok, - check_initial_combiner, - check_label, - check_nfc, - decode, - encode, - ulabel, - uts46_remap, - valid_contextj, - valid_contexto, - valid_label_length, - valid_string_length, -) -from .intranges import intranges_contain - -__all__ = [ - "IDNABidiError", - "IDNAError", - "InvalidCodepoint", - "InvalidCodepointContext", - "alabel", - "check_bidi", - "check_hyphen_ok", - "check_initial_combiner", - 
"check_label", - "check_nfc", - "decode", - "encode", - "intranges_contain", - "ulabel", - "uts46_remap", - "valid_contextj", - "valid_contexto", - "valid_label_length", - "valid_string_length", -] diff --git a/server/venv/Lib/site-packages/idna/codec.py b/server/venv/Lib/site-packages/idna/codec.py deleted file mode 100644 index 1ca9ba6..0000000 --- a/server/venv/Lib/site-packages/idna/codec.py +++ /dev/null @@ -1,112 +0,0 @@ -from .core import encode, decode, alabel, ulabel, IDNAError -import codecs -import re -from typing import Tuple, Optional - -_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') - -class Codec(codecs.Codec): - - def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]: - if errors != 'strict': - raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) - - if not data: - return b"", 0 - - return encode(data), len(data) - - def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]: - if errors != 'strict': - raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) - - if not data: - return '', 0 - - return decode(data), len(data) - -class IncrementalEncoder(codecs.BufferedIncrementalEncoder): - def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore - if errors != 'strict': - raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) - - if not data: - return "", 0 - - labels = _unicode_dots_re.split(data) - trailing_dot = '' - if labels: - if not labels[-1]: - trailing_dot = '.' - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = '.' - - result = [] - size = 0 - for label in labels: - result.append(alabel(label)) - if size: - size += 1 - size += len(label) - - # Join with U+002E - result_str = '.'.join(result) + trailing_dot # type: ignore - size += len(trailing_dot) - return result_str, size - -class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore - if errors != 'strict': - raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) - - if not data: - return ('', 0) - - labels = _unicode_dots_re.split(data) - trailing_dot = '' - if labels: - if not labels[-1]: - trailing_dot = '.' - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = '.' 
- - result = [] - size = 0 - for label in labels: - result.append(ulabel(label)) - if size: - size += 1 - size += len(label) - - result_str = '.'.join(result) + trailing_dot - size += len(trailing_dot) - return (result_str, size) - - -class StreamWriter(Codec, codecs.StreamWriter): - pass - - -class StreamReader(Codec, codecs.StreamReader): - pass - - -def getregentry() -> codecs.CodecInfo: - # Compatibility as a search_function for codecs.register() - return codecs.CodecInfo( - name='idna', - encode=Codec().encode, # type: ignore - decode=Codec().decode, # type: ignore - incrementalencoder=IncrementalEncoder, - incrementaldecoder=IncrementalDecoder, - streamwriter=StreamWriter, - streamreader=StreamReader, - ) diff --git a/server/venv/Lib/site-packages/idna/compat.py b/server/venv/Lib/site-packages/idna/compat.py deleted file mode 100644 index 786e6bd..0000000 --- a/server/venv/Lib/site-packages/idna/compat.py +++ /dev/null @@ -1,13 +0,0 @@ -from .core import * -from .codec import * -from typing import Any, Union - -def ToASCII(label: str) -> bytes: - return encode(label) - -def ToUnicode(label: Union[bytes, bytearray]) -> str: - return decode(label) - -def nameprep(s: Any) -> None: - raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') - diff --git a/server/venv/Lib/site-packages/idna/core.py b/server/venv/Lib/site-packages/idna/core.py deleted file mode 100644 index 4f30037..0000000 --- a/server/venv/Lib/site-packages/idna/core.py +++ /dev/null @@ -1,400 +0,0 @@ -from . import idnadata -import bisect -import unicodedata -import re -from typing import Union, Optional -from .intranges import intranges_contain - -_virama_combining_class = 9 -_alabel_prefix = b'xn--' -_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') - -class IDNAError(UnicodeError): - """ Base exception for all IDNA-encoding related problems """ - pass - - -class IDNABidiError(IDNAError): - """ Exception when bidirectional requirements are not satisfied """ - pass - - -class InvalidCodepoint(IDNAError): - """ Exception when a disallowed or unallocated codepoint is used """ - pass - - -class InvalidCodepointContext(IDNAError): - """ Exception when the codepoint is not valid in the context it is used """ - pass - - -def _combining_class(cp: int) -> int: - v = unicodedata.combining(chr(cp)) - if v == 0: - if not unicodedata.name(chr(cp)): - raise ValueError('Unknown character in unicodedata') - return v - -def _is_script(cp: str, script: str) -> bool: - return intranges_contain(ord(cp), idnadata.scripts[script]) - -def _punycode(s: str) -> bytes: - return s.encode('punycode') - -def _unot(s: int) -> str: - return 'U+{:04X}'.format(s) - - -def valid_label_length(label: Union[bytes, str]) -> bool: - if len(label) > 63: - return False - return True - - -def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: - if len(label) > (254 if trailing_dot else 253): - return False - return True - - -def check_bidi(label: str, check_ltr: bool = False) -> bool: - # Bidi rules should only be applied if string contains RTL characters - bidi_label = False - for (idx, cp) in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - if direction == '': - # String likely comes from a newer version of Unicode - raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx)) - if direction in ['R', 'AL', 'AN']: - bidi_label = True - if not bidi_label and not check_ltr: - return True - - # Bidi rule 1 - direction = unicodedata.bidirectional(label[0]) - 
if direction in ['R', 'AL']: - rtl = True - elif direction == 'L': - rtl = False - else: - raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label))) - - valid_ending = False - number_type = None # type: Optional[str] - for (idx, cp) in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - - if rtl: - # Bidi rule 2 - if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: - raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx)) - # Bidi rule 3 - if direction in ['R', 'AL', 'EN', 'AN']: - valid_ending = True - elif direction != 'NSM': - valid_ending = False - # Bidi rule 4 - if direction in ['AN', 'EN']: - if not number_type: - number_type = direction - else: - if number_type != direction: - raise IDNABidiError('Can not mix numeral types in a right-to-left label') - else: - # Bidi rule 5 - if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: - raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx)) - # Bidi rule 6 - if direction in ['L', 'EN']: - valid_ending = True - elif direction != 'NSM': - valid_ending = False - - if not valid_ending: - raise IDNABidiError('Label ends with illegal codepoint directionality') - - return True - - -def check_initial_combiner(label: str) -> bool: - if unicodedata.category(label[0])[0] == 'M': - raise IDNAError('Label begins with an illegal combining character') - return True - - -def check_hyphen_ok(label: str) -> bool: - if label[2:4] == '--': - raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') - if label[0] == '-' or label[-1] == '-': - raise IDNAError('Label must not start or end with a hyphen') - return True - - -def check_nfc(label: str) -> None: - if unicodedata.normalize('NFC', label) != label: - raise IDNAError('Label must be in Normalization Form C') - - -def valid_contextj(label: str, pos: int) -> bool: - cp_value = ord(label[pos]) - - if cp_value == 0x200c: - - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - - ok = False - for i in range(pos-1, -1, -1): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord('T'): - continue - if joining_type in [ord('L'), ord('D')]: - ok = True - break - - if not ok: - return False - - ok = False - for i in range(pos+1, len(label)): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord('T'): - continue - if joining_type in [ord('R'), ord('D')]: - ok = True - break - return ok - - if cp_value == 0x200d: - - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - return False - - else: - - return False - - -def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: - cp_value = ord(label[pos]) - - if cp_value == 0x00b7: - if 0 < pos < len(label)-1: - if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: - return True - return False - - elif cp_value == 0x0375: - if pos < len(label)-1 and len(label) > 1: - return _is_script(label[pos + 1], 'Greek') - return False - - elif cp_value == 0x05f3 or cp_value == 0x05f4: - if pos > 0: - return _is_script(label[pos - 1], 'Hebrew') - return False - - elif cp_value == 0x30fb: - for cp in label: - if cp == '\u30fb': - continue - if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): - return True - return False - - elif 0x660 <= cp_value <= 
0x669: - for cp in label: - if 0x6f0 <= ord(cp) <= 0x06f9: - return False - return True - - elif 0x6f0 <= cp_value <= 0x6f9: - for cp in label: - if 0x660 <= ord(cp) <= 0x0669: - return False - return True - - return False - - -def check_label(label: Union[str, bytes, bytearray]) -> None: - if isinstance(label, (bytes, bytearray)): - label = label.decode('utf-8') - if len(label) == 0: - raise IDNAError('Empty Label') - - check_nfc(label) - check_hyphen_ok(label) - check_initial_combiner(label) - - for (pos, cp) in enumerate(label): - cp_value = ord(cp) - if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): - continue - elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): - try: - if not valid_contextj(label, pos): - raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format( - _unot(cp_value), pos+1, repr(label))) - except ValueError: - raise IDNAError('Unknown codepoint adjacent to joiner {} at position {} in {}'.format( - _unot(cp_value), pos+1, repr(label))) - elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): - if not valid_contexto(label, pos): - raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label))) - else: - raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label))) - - check_bidi(label) - - -def alabel(label: str) -> bytes: - try: - label_bytes = label.encode('ascii') - ulabel(label_bytes) - if not valid_label_length(label_bytes): - raise IDNAError('Label too long') - return label_bytes - except UnicodeEncodeError: - pass - - if not label: - raise IDNAError('No Input') - - label = str(label) - check_label(label) - label_bytes = _punycode(label) - label_bytes = _alabel_prefix + label_bytes - - if not valid_label_length(label_bytes): - raise IDNAError('Label too long') - - return label_bytes - - -def ulabel(label: Union[str, bytes, bytearray]) -> str: - if not isinstance(label, (bytes, bytearray)): - try: - label_bytes = label.encode('ascii') - except UnicodeEncodeError: - check_label(label) - return label - else: - label_bytes = label - - label_bytes = label_bytes.lower() - if label_bytes.startswith(_alabel_prefix): - label_bytes = label_bytes[len(_alabel_prefix):] - if not label_bytes: - raise IDNAError('Malformed A-label, no Punycode eligible content found') - if label_bytes.decode('ascii')[-1] == '-': - raise IDNAError('A-label must not end with a hyphen') - else: - check_label(label_bytes) - return label_bytes.decode('ascii') - - try: - label = label_bytes.decode('punycode') - except UnicodeError: - raise IDNAError('Invalid A-label') - check_label(label) - return label - - -def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: - """Re-map the characters in the string according to UTS46 processing.""" - from .uts46data import uts46data - output = '' - - for pos, char in enumerate(domain): - code_point = ord(char) - try: - uts46row = uts46data[code_point if code_point < 256 else - bisect.bisect_left(uts46data, (code_point, 'Z')) - 1] - status = uts46row[1] - replacement = None # type: Optional[str] - if len(uts46row) == 3: - replacement = uts46row[2] # type: ignore - if (status == 'V' or - (status == 'D' and not transitional) or - (status == '3' and not std3_rules and replacement is None)): - output += char - elif replacement is not None and (status == 'M' or - (status == '3' and not std3_rules) or - (status == 'D' and transitional)): - 
output += replacement - elif status != 'I': - raise IndexError() - except IndexError: - raise InvalidCodepoint( - 'Codepoint {} not allowed at position {} in {}'.format( - _unot(code_point), pos + 1, repr(domain))) - - return unicodedata.normalize('NFC', output) - - -def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes: - if isinstance(s, (bytes, bytearray)): - try: - s = s.decode('ascii') - except UnicodeDecodeError: - raise IDNAError('should pass a unicode string to the function rather than a byte string.') - if uts46: - s = uts46_remap(s, std3_rules, transitional) - trailing_dot = False - result = [] - if strict: - labels = s.split('.') - else: - labels = _unicode_dots_re.split(s) - if not labels or labels == ['']: - raise IDNAError('Empty domain') - if labels[-1] == '': - del labels[-1] - trailing_dot = True - for label in labels: - s = alabel(label) - if s: - result.append(s) - else: - raise IDNAError('Empty label') - if trailing_dot: - result.append(b'') - s = b'.'.join(result) - if not valid_string_length(s, trailing_dot): - raise IDNAError('Domain too long') - return s - - -def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str: - try: - if isinstance(s, (bytes, bytearray)): - s = s.decode('ascii') - except UnicodeDecodeError: - raise IDNAError('Invalid ASCII in A-label') - if uts46: - s = uts46_remap(s, std3_rules, False) - trailing_dot = False - result = [] - if not strict: - labels = _unicode_dots_re.split(s) - else: - labels = s.split('.') - if not labels or labels == ['']: - raise IDNAError('Empty domain') - if not labels[-1]: - del labels[-1] - trailing_dot = True - for label in labels: - s = ulabel(label) - if s: - result.append(s) - else: - raise IDNAError('Empty label') - if trailing_dot: - result.append('') - return '.'.join(result) diff --git a/server/venv/Lib/site-packages/idna/idnadata.py b/server/venv/Lib/site-packages/idna/idnadata.py deleted file mode 100644 index 67db462..0000000 --- a/server/venv/Lib/site-packages/idna/idnadata.py +++ /dev/null @@ -1,2151 +0,0 @@ -# This file is automatically generated by tools/idna-data - -__version__ = '15.0.0' -scripts = { - 'Greek': ( - 0x37000000374, - 0x37500000378, - 0x37a0000037e, - 0x37f00000380, - 0x38400000385, - 0x38600000387, - 0x3880000038b, - 0x38c0000038d, - 0x38e000003a2, - 0x3a3000003e2, - 0x3f000000400, - 0x1d2600001d2b, - 0x1d5d00001d62, - 0x1d6600001d6b, - 0x1dbf00001dc0, - 0x1f0000001f16, - 0x1f1800001f1e, - 0x1f2000001f46, - 0x1f4800001f4e, - 0x1f5000001f58, - 0x1f5900001f5a, - 0x1f5b00001f5c, - 0x1f5d00001f5e, - 0x1f5f00001f7e, - 0x1f8000001fb5, - 0x1fb600001fc5, - 0x1fc600001fd4, - 0x1fd600001fdc, - 0x1fdd00001ff0, - 0x1ff200001ff5, - 0x1ff600001fff, - 0x212600002127, - 0xab650000ab66, - 0x101400001018f, - 0x101a0000101a1, - 0x1d2000001d246, - ), - 'Han': ( - 0x2e8000002e9a, - 0x2e9b00002ef4, - 0x2f0000002fd6, - 0x300500003006, - 0x300700003008, - 0x30210000302a, - 0x30380000303c, - 0x340000004dc0, - 0x4e000000a000, - 0xf9000000fa6e, - 0xfa700000fada, - 0x16fe200016fe4, - 0x16ff000016ff2, - 0x200000002a6e0, - 0x2a7000002b73a, - 0x2b7400002b81e, - 0x2b8200002cea2, - 0x2ceb00002ebe1, - 0x2f8000002fa1e, - 0x300000003134b, - 0x31350000323b0, - ), - 'Hebrew': ( - 0x591000005c8, - 0x5d0000005eb, - 0x5ef000005f5, - 0xfb1d0000fb37, - 0xfb380000fb3d, - 0xfb3e0000fb3f, - 0xfb400000fb42, - 0xfb430000fb45, - 0xfb460000fb50, - ), - 'Hiragana': ( - 
0x304100003097, - 0x309d000030a0, - 0x1b0010001b120, - 0x1b1320001b133, - 0x1b1500001b153, - 0x1f2000001f201, - ), - 'Katakana': ( - 0x30a1000030fb, - 0x30fd00003100, - 0x31f000003200, - 0x32d0000032ff, - 0x330000003358, - 0xff660000ff70, - 0xff710000ff9e, - 0x1aff00001aff4, - 0x1aff50001affc, - 0x1affd0001afff, - 0x1b0000001b001, - 0x1b1200001b123, - 0x1b1550001b156, - 0x1b1640001b168, - ), -} -joining_types = { - 0x600: 85, - 0x601: 85, - 0x602: 85, - 0x603: 85, - 0x604: 85, - 0x605: 85, - 0x608: 85, - 0x60b: 85, - 0x620: 68, - 0x621: 85, - 0x622: 82, - 0x623: 82, - 0x624: 82, - 0x625: 82, - 0x626: 68, - 0x627: 82, - 0x628: 68, - 0x629: 82, - 0x62a: 68, - 0x62b: 68, - 0x62c: 68, - 0x62d: 68, - 0x62e: 68, - 0x62f: 82, - 0x630: 82, - 0x631: 82, - 0x632: 82, - 0x633: 68, - 0x634: 68, - 0x635: 68, - 0x636: 68, - 0x637: 68, - 0x638: 68, - 0x639: 68, - 0x63a: 68, - 0x63b: 68, - 0x63c: 68, - 0x63d: 68, - 0x63e: 68, - 0x63f: 68, - 0x640: 67, - 0x641: 68, - 0x642: 68, - 0x643: 68, - 0x644: 68, - 0x645: 68, - 0x646: 68, - 0x647: 68, - 0x648: 82, - 0x649: 68, - 0x64a: 68, - 0x66e: 68, - 0x66f: 68, - 0x671: 82, - 0x672: 82, - 0x673: 82, - 0x674: 85, - 0x675: 82, - 0x676: 82, - 0x677: 82, - 0x678: 68, - 0x679: 68, - 0x67a: 68, - 0x67b: 68, - 0x67c: 68, - 0x67d: 68, - 0x67e: 68, - 0x67f: 68, - 0x680: 68, - 0x681: 68, - 0x682: 68, - 0x683: 68, - 0x684: 68, - 0x685: 68, - 0x686: 68, - 0x687: 68, - 0x688: 82, - 0x689: 82, - 0x68a: 82, - 0x68b: 82, - 0x68c: 82, - 0x68d: 82, - 0x68e: 82, - 0x68f: 82, - 0x690: 82, - 0x691: 82, - 0x692: 82, - 0x693: 82, - 0x694: 82, - 0x695: 82, - 0x696: 82, - 0x697: 82, - 0x698: 82, - 0x699: 82, - 0x69a: 68, - 0x69b: 68, - 0x69c: 68, - 0x69d: 68, - 0x69e: 68, - 0x69f: 68, - 0x6a0: 68, - 0x6a1: 68, - 0x6a2: 68, - 0x6a3: 68, - 0x6a4: 68, - 0x6a5: 68, - 0x6a6: 68, - 0x6a7: 68, - 0x6a8: 68, - 0x6a9: 68, - 0x6aa: 68, - 0x6ab: 68, - 0x6ac: 68, - 0x6ad: 68, - 0x6ae: 68, - 0x6af: 68, - 0x6b0: 68, - 0x6b1: 68, - 0x6b2: 68, - 0x6b3: 68, - 0x6b4: 68, - 0x6b5: 68, - 0x6b6: 68, - 0x6b7: 68, - 0x6b8: 68, - 0x6b9: 68, - 0x6ba: 68, - 0x6bb: 68, - 0x6bc: 68, - 0x6bd: 68, - 0x6be: 68, - 0x6bf: 68, - 0x6c0: 82, - 0x6c1: 68, - 0x6c2: 68, - 0x6c3: 82, - 0x6c4: 82, - 0x6c5: 82, - 0x6c6: 82, - 0x6c7: 82, - 0x6c8: 82, - 0x6c9: 82, - 0x6ca: 82, - 0x6cb: 82, - 0x6cc: 68, - 0x6cd: 82, - 0x6ce: 68, - 0x6cf: 82, - 0x6d0: 68, - 0x6d1: 68, - 0x6d2: 82, - 0x6d3: 82, - 0x6d5: 82, - 0x6dd: 85, - 0x6ee: 82, - 0x6ef: 82, - 0x6fa: 68, - 0x6fb: 68, - 0x6fc: 68, - 0x6ff: 68, - 0x70f: 84, - 0x710: 82, - 0x712: 68, - 0x713: 68, - 0x714: 68, - 0x715: 82, - 0x716: 82, - 0x717: 82, - 0x718: 82, - 0x719: 82, - 0x71a: 68, - 0x71b: 68, - 0x71c: 68, - 0x71d: 68, - 0x71e: 82, - 0x71f: 68, - 0x720: 68, - 0x721: 68, - 0x722: 68, - 0x723: 68, - 0x724: 68, - 0x725: 68, - 0x726: 68, - 0x727: 68, - 0x728: 82, - 0x729: 68, - 0x72a: 82, - 0x72b: 68, - 0x72c: 82, - 0x72d: 68, - 0x72e: 68, - 0x72f: 82, - 0x74d: 82, - 0x74e: 68, - 0x74f: 68, - 0x750: 68, - 0x751: 68, - 0x752: 68, - 0x753: 68, - 0x754: 68, - 0x755: 68, - 0x756: 68, - 0x757: 68, - 0x758: 68, - 0x759: 82, - 0x75a: 82, - 0x75b: 82, - 0x75c: 68, - 0x75d: 68, - 0x75e: 68, - 0x75f: 68, - 0x760: 68, - 0x761: 68, - 0x762: 68, - 0x763: 68, - 0x764: 68, - 0x765: 68, - 0x766: 68, - 0x767: 68, - 0x768: 68, - 0x769: 68, - 0x76a: 68, - 0x76b: 82, - 0x76c: 82, - 0x76d: 68, - 0x76e: 68, - 0x76f: 68, - 0x770: 68, - 0x771: 82, - 0x772: 68, - 0x773: 82, - 0x774: 82, - 0x775: 68, - 0x776: 68, - 0x777: 68, - 0x778: 82, - 0x779: 82, - 0x77a: 68, - 0x77b: 68, - 0x77c: 68, - 0x77d: 68, - 
0x77e: 68, - 0x77f: 68, - 0x7ca: 68, - 0x7cb: 68, - 0x7cc: 68, - 0x7cd: 68, - 0x7ce: 68, - 0x7cf: 68, - 0x7d0: 68, - 0x7d1: 68, - 0x7d2: 68, - 0x7d3: 68, - 0x7d4: 68, - 0x7d5: 68, - 0x7d6: 68, - 0x7d7: 68, - 0x7d8: 68, - 0x7d9: 68, - 0x7da: 68, - 0x7db: 68, - 0x7dc: 68, - 0x7dd: 68, - 0x7de: 68, - 0x7df: 68, - 0x7e0: 68, - 0x7e1: 68, - 0x7e2: 68, - 0x7e3: 68, - 0x7e4: 68, - 0x7e5: 68, - 0x7e6: 68, - 0x7e7: 68, - 0x7e8: 68, - 0x7e9: 68, - 0x7ea: 68, - 0x7fa: 67, - 0x840: 82, - 0x841: 68, - 0x842: 68, - 0x843: 68, - 0x844: 68, - 0x845: 68, - 0x846: 82, - 0x847: 82, - 0x848: 68, - 0x849: 82, - 0x84a: 68, - 0x84b: 68, - 0x84c: 68, - 0x84d: 68, - 0x84e: 68, - 0x84f: 68, - 0x850: 68, - 0x851: 68, - 0x852: 68, - 0x853: 68, - 0x854: 82, - 0x855: 68, - 0x856: 82, - 0x857: 82, - 0x858: 82, - 0x860: 68, - 0x861: 85, - 0x862: 68, - 0x863: 68, - 0x864: 68, - 0x865: 68, - 0x866: 85, - 0x867: 82, - 0x868: 68, - 0x869: 82, - 0x86a: 82, - 0x870: 82, - 0x871: 82, - 0x872: 82, - 0x873: 82, - 0x874: 82, - 0x875: 82, - 0x876: 82, - 0x877: 82, - 0x878: 82, - 0x879: 82, - 0x87a: 82, - 0x87b: 82, - 0x87c: 82, - 0x87d: 82, - 0x87e: 82, - 0x87f: 82, - 0x880: 82, - 0x881: 82, - 0x882: 82, - 0x883: 67, - 0x884: 67, - 0x885: 67, - 0x886: 68, - 0x887: 85, - 0x888: 85, - 0x889: 68, - 0x88a: 68, - 0x88b: 68, - 0x88c: 68, - 0x88d: 68, - 0x88e: 82, - 0x890: 85, - 0x891: 85, - 0x8a0: 68, - 0x8a1: 68, - 0x8a2: 68, - 0x8a3: 68, - 0x8a4: 68, - 0x8a5: 68, - 0x8a6: 68, - 0x8a7: 68, - 0x8a8: 68, - 0x8a9: 68, - 0x8aa: 82, - 0x8ab: 82, - 0x8ac: 82, - 0x8ad: 85, - 0x8ae: 82, - 0x8af: 68, - 0x8b0: 68, - 0x8b1: 82, - 0x8b2: 82, - 0x8b3: 68, - 0x8b4: 68, - 0x8b5: 68, - 0x8b6: 68, - 0x8b7: 68, - 0x8b8: 68, - 0x8b9: 82, - 0x8ba: 68, - 0x8bb: 68, - 0x8bc: 68, - 0x8bd: 68, - 0x8be: 68, - 0x8bf: 68, - 0x8c0: 68, - 0x8c1: 68, - 0x8c2: 68, - 0x8c3: 68, - 0x8c4: 68, - 0x8c5: 68, - 0x8c6: 68, - 0x8c7: 68, - 0x8c8: 68, - 0x8e2: 85, - 0x1806: 85, - 0x1807: 68, - 0x180a: 67, - 0x180e: 85, - 0x1820: 68, - 0x1821: 68, - 0x1822: 68, - 0x1823: 68, - 0x1824: 68, - 0x1825: 68, - 0x1826: 68, - 0x1827: 68, - 0x1828: 68, - 0x1829: 68, - 0x182a: 68, - 0x182b: 68, - 0x182c: 68, - 0x182d: 68, - 0x182e: 68, - 0x182f: 68, - 0x1830: 68, - 0x1831: 68, - 0x1832: 68, - 0x1833: 68, - 0x1834: 68, - 0x1835: 68, - 0x1836: 68, - 0x1837: 68, - 0x1838: 68, - 0x1839: 68, - 0x183a: 68, - 0x183b: 68, - 0x183c: 68, - 0x183d: 68, - 0x183e: 68, - 0x183f: 68, - 0x1840: 68, - 0x1841: 68, - 0x1842: 68, - 0x1843: 68, - 0x1844: 68, - 0x1845: 68, - 0x1846: 68, - 0x1847: 68, - 0x1848: 68, - 0x1849: 68, - 0x184a: 68, - 0x184b: 68, - 0x184c: 68, - 0x184d: 68, - 0x184e: 68, - 0x184f: 68, - 0x1850: 68, - 0x1851: 68, - 0x1852: 68, - 0x1853: 68, - 0x1854: 68, - 0x1855: 68, - 0x1856: 68, - 0x1857: 68, - 0x1858: 68, - 0x1859: 68, - 0x185a: 68, - 0x185b: 68, - 0x185c: 68, - 0x185d: 68, - 0x185e: 68, - 0x185f: 68, - 0x1860: 68, - 0x1861: 68, - 0x1862: 68, - 0x1863: 68, - 0x1864: 68, - 0x1865: 68, - 0x1866: 68, - 0x1867: 68, - 0x1868: 68, - 0x1869: 68, - 0x186a: 68, - 0x186b: 68, - 0x186c: 68, - 0x186d: 68, - 0x186e: 68, - 0x186f: 68, - 0x1870: 68, - 0x1871: 68, - 0x1872: 68, - 0x1873: 68, - 0x1874: 68, - 0x1875: 68, - 0x1876: 68, - 0x1877: 68, - 0x1878: 68, - 0x1880: 85, - 0x1881: 85, - 0x1882: 85, - 0x1883: 85, - 0x1884: 85, - 0x1885: 84, - 0x1886: 84, - 0x1887: 68, - 0x1888: 68, - 0x1889: 68, - 0x188a: 68, - 0x188b: 68, - 0x188c: 68, - 0x188d: 68, - 0x188e: 68, - 0x188f: 68, - 0x1890: 68, - 0x1891: 68, - 0x1892: 68, - 0x1893: 68, - 0x1894: 68, - 0x1895: 68, - 0x1896: 68, - 0x1897: 68, - 
0x1898: 68, - 0x1899: 68, - 0x189a: 68, - 0x189b: 68, - 0x189c: 68, - 0x189d: 68, - 0x189e: 68, - 0x189f: 68, - 0x18a0: 68, - 0x18a1: 68, - 0x18a2: 68, - 0x18a3: 68, - 0x18a4: 68, - 0x18a5: 68, - 0x18a6: 68, - 0x18a7: 68, - 0x18a8: 68, - 0x18aa: 68, - 0x200c: 85, - 0x200d: 67, - 0x202f: 85, - 0x2066: 85, - 0x2067: 85, - 0x2068: 85, - 0x2069: 85, - 0xa840: 68, - 0xa841: 68, - 0xa842: 68, - 0xa843: 68, - 0xa844: 68, - 0xa845: 68, - 0xa846: 68, - 0xa847: 68, - 0xa848: 68, - 0xa849: 68, - 0xa84a: 68, - 0xa84b: 68, - 0xa84c: 68, - 0xa84d: 68, - 0xa84e: 68, - 0xa84f: 68, - 0xa850: 68, - 0xa851: 68, - 0xa852: 68, - 0xa853: 68, - 0xa854: 68, - 0xa855: 68, - 0xa856: 68, - 0xa857: 68, - 0xa858: 68, - 0xa859: 68, - 0xa85a: 68, - 0xa85b: 68, - 0xa85c: 68, - 0xa85d: 68, - 0xa85e: 68, - 0xa85f: 68, - 0xa860: 68, - 0xa861: 68, - 0xa862: 68, - 0xa863: 68, - 0xa864: 68, - 0xa865: 68, - 0xa866: 68, - 0xa867: 68, - 0xa868: 68, - 0xa869: 68, - 0xa86a: 68, - 0xa86b: 68, - 0xa86c: 68, - 0xa86d: 68, - 0xa86e: 68, - 0xa86f: 68, - 0xa870: 68, - 0xa871: 68, - 0xa872: 76, - 0xa873: 85, - 0x10ac0: 68, - 0x10ac1: 68, - 0x10ac2: 68, - 0x10ac3: 68, - 0x10ac4: 68, - 0x10ac5: 82, - 0x10ac6: 85, - 0x10ac7: 82, - 0x10ac8: 85, - 0x10ac9: 82, - 0x10aca: 82, - 0x10acb: 85, - 0x10acc: 85, - 0x10acd: 76, - 0x10ace: 82, - 0x10acf: 82, - 0x10ad0: 82, - 0x10ad1: 82, - 0x10ad2: 82, - 0x10ad3: 68, - 0x10ad4: 68, - 0x10ad5: 68, - 0x10ad6: 68, - 0x10ad7: 76, - 0x10ad8: 68, - 0x10ad9: 68, - 0x10ada: 68, - 0x10adb: 68, - 0x10adc: 68, - 0x10add: 82, - 0x10ade: 68, - 0x10adf: 68, - 0x10ae0: 68, - 0x10ae1: 82, - 0x10ae2: 85, - 0x10ae3: 85, - 0x10ae4: 82, - 0x10aeb: 68, - 0x10aec: 68, - 0x10aed: 68, - 0x10aee: 68, - 0x10aef: 82, - 0x10b80: 68, - 0x10b81: 82, - 0x10b82: 68, - 0x10b83: 82, - 0x10b84: 82, - 0x10b85: 82, - 0x10b86: 68, - 0x10b87: 68, - 0x10b88: 68, - 0x10b89: 82, - 0x10b8a: 68, - 0x10b8b: 68, - 0x10b8c: 82, - 0x10b8d: 68, - 0x10b8e: 82, - 0x10b8f: 82, - 0x10b90: 68, - 0x10b91: 82, - 0x10ba9: 82, - 0x10baa: 82, - 0x10bab: 82, - 0x10bac: 82, - 0x10bad: 68, - 0x10bae: 68, - 0x10baf: 85, - 0x10d00: 76, - 0x10d01: 68, - 0x10d02: 68, - 0x10d03: 68, - 0x10d04: 68, - 0x10d05: 68, - 0x10d06: 68, - 0x10d07: 68, - 0x10d08: 68, - 0x10d09: 68, - 0x10d0a: 68, - 0x10d0b: 68, - 0x10d0c: 68, - 0x10d0d: 68, - 0x10d0e: 68, - 0x10d0f: 68, - 0x10d10: 68, - 0x10d11: 68, - 0x10d12: 68, - 0x10d13: 68, - 0x10d14: 68, - 0x10d15: 68, - 0x10d16: 68, - 0x10d17: 68, - 0x10d18: 68, - 0x10d19: 68, - 0x10d1a: 68, - 0x10d1b: 68, - 0x10d1c: 68, - 0x10d1d: 68, - 0x10d1e: 68, - 0x10d1f: 68, - 0x10d20: 68, - 0x10d21: 68, - 0x10d22: 82, - 0x10d23: 68, - 0x10f30: 68, - 0x10f31: 68, - 0x10f32: 68, - 0x10f33: 82, - 0x10f34: 68, - 0x10f35: 68, - 0x10f36: 68, - 0x10f37: 68, - 0x10f38: 68, - 0x10f39: 68, - 0x10f3a: 68, - 0x10f3b: 68, - 0x10f3c: 68, - 0x10f3d: 68, - 0x10f3e: 68, - 0x10f3f: 68, - 0x10f40: 68, - 0x10f41: 68, - 0x10f42: 68, - 0x10f43: 68, - 0x10f44: 68, - 0x10f45: 85, - 0x10f51: 68, - 0x10f52: 68, - 0x10f53: 68, - 0x10f54: 82, - 0x10f70: 68, - 0x10f71: 68, - 0x10f72: 68, - 0x10f73: 68, - 0x10f74: 82, - 0x10f75: 82, - 0x10f76: 68, - 0x10f77: 68, - 0x10f78: 68, - 0x10f79: 68, - 0x10f7a: 68, - 0x10f7b: 68, - 0x10f7c: 68, - 0x10f7d: 68, - 0x10f7e: 68, - 0x10f7f: 68, - 0x10f80: 68, - 0x10f81: 68, - 0x10fb0: 68, - 0x10fb1: 85, - 0x10fb2: 68, - 0x10fb3: 68, - 0x10fb4: 82, - 0x10fb5: 82, - 0x10fb6: 82, - 0x10fb7: 85, - 0x10fb8: 68, - 0x10fb9: 82, - 0x10fba: 82, - 0x10fbb: 68, - 0x10fbc: 68, - 0x10fbd: 82, - 0x10fbe: 68, - 0x10fbf: 68, - 0x10fc0: 85, - 0x10fc1: 68, - 
0x10fc2: 82, - 0x10fc3: 82, - 0x10fc4: 68, - 0x10fc5: 85, - 0x10fc6: 85, - 0x10fc7: 85, - 0x10fc8: 85, - 0x10fc9: 82, - 0x10fca: 68, - 0x10fcb: 76, - 0x110bd: 85, - 0x110cd: 85, - 0x1e900: 68, - 0x1e901: 68, - 0x1e902: 68, - 0x1e903: 68, - 0x1e904: 68, - 0x1e905: 68, - 0x1e906: 68, - 0x1e907: 68, - 0x1e908: 68, - 0x1e909: 68, - 0x1e90a: 68, - 0x1e90b: 68, - 0x1e90c: 68, - 0x1e90d: 68, - 0x1e90e: 68, - 0x1e90f: 68, - 0x1e910: 68, - 0x1e911: 68, - 0x1e912: 68, - 0x1e913: 68, - 0x1e914: 68, - 0x1e915: 68, - 0x1e916: 68, - 0x1e917: 68, - 0x1e918: 68, - 0x1e919: 68, - 0x1e91a: 68, - 0x1e91b: 68, - 0x1e91c: 68, - 0x1e91d: 68, - 0x1e91e: 68, - 0x1e91f: 68, - 0x1e920: 68, - 0x1e921: 68, - 0x1e922: 68, - 0x1e923: 68, - 0x1e924: 68, - 0x1e925: 68, - 0x1e926: 68, - 0x1e927: 68, - 0x1e928: 68, - 0x1e929: 68, - 0x1e92a: 68, - 0x1e92b: 68, - 0x1e92c: 68, - 0x1e92d: 68, - 0x1e92e: 68, - 0x1e92f: 68, - 0x1e930: 68, - 0x1e931: 68, - 0x1e932: 68, - 0x1e933: 68, - 0x1e934: 68, - 0x1e935: 68, - 0x1e936: 68, - 0x1e937: 68, - 0x1e938: 68, - 0x1e939: 68, - 0x1e93a: 68, - 0x1e93b: 68, - 0x1e93c: 68, - 0x1e93d: 68, - 0x1e93e: 68, - 0x1e93f: 68, - 0x1e940: 68, - 0x1e941: 68, - 0x1e942: 68, - 0x1e943: 68, - 0x1e94b: 84, -} -codepoint_classes = { - 'PVALID': ( - 0x2d0000002e, - 0x300000003a, - 0x610000007b, - 0xdf000000f7, - 0xf800000100, - 0x10100000102, - 0x10300000104, - 0x10500000106, - 0x10700000108, - 0x1090000010a, - 0x10b0000010c, - 0x10d0000010e, - 0x10f00000110, - 0x11100000112, - 0x11300000114, - 0x11500000116, - 0x11700000118, - 0x1190000011a, - 0x11b0000011c, - 0x11d0000011e, - 0x11f00000120, - 0x12100000122, - 0x12300000124, - 0x12500000126, - 0x12700000128, - 0x1290000012a, - 0x12b0000012c, - 0x12d0000012e, - 0x12f00000130, - 0x13100000132, - 0x13500000136, - 0x13700000139, - 0x13a0000013b, - 0x13c0000013d, - 0x13e0000013f, - 0x14200000143, - 0x14400000145, - 0x14600000147, - 0x14800000149, - 0x14b0000014c, - 0x14d0000014e, - 0x14f00000150, - 0x15100000152, - 0x15300000154, - 0x15500000156, - 0x15700000158, - 0x1590000015a, - 0x15b0000015c, - 0x15d0000015e, - 0x15f00000160, - 0x16100000162, - 0x16300000164, - 0x16500000166, - 0x16700000168, - 0x1690000016a, - 0x16b0000016c, - 0x16d0000016e, - 0x16f00000170, - 0x17100000172, - 0x17300000174, - 0x17500000176, - 0x17700000178, - 0x17a0000017b, - 0x17c0000017d, - 0x17e0000017f, - 0x18000000181, - 0x18300000184, - 0x18500000186, - 0x18800000189, - 0x18c0000018e, - 0x19200000193, - 0x19500000196, - 0x1990000019c, - 0x19e0000019f, - 0x1a1000001a2, - 0x1a3000001a4, - 0x1a5000001a6, - 0x1a8000001a9, - 0x1aa000001ac, - 0x1ad000001ae, - 0x1b0000001b1, - 0x1b4000001b5, - 0x1b6000001b7, - 0x1b9000001bc, - 0x1bd000001c4, - 0x1ce000001cf, - 0x1d0000001d1, - 0x1d2000001d3, - 0x1d4000001d5, - 0x1d6000001d7, - 0x1d8000001d9, - 0x1da000001db, - 0x1dc000001de, - 0x1df000001e0, - 0x1e1000001e2, - 0x1e3000001e4, - 0x1e5000001e6, - 0x1e7000001e8, - 0x1e9000001ea, - 0x1eb000001ec, - 0x1ed000001ee, - 0x1ef000001f1, - 0x1f5000001f6, - 0x1f9000001fa, - 0x1fb000001fc, - 0x1fd000001fe, - 0x1ff00000200, - 0x20100000202, - 0x20300000204, - 0x20500000206, - 0x20700000208, - 0x2090000020a, - 0x20b0000020c, - 0x20d0000020e, - 0x20f00000210, - 0x21100000212, - 0x21300000214, - 0x21500000216, - 0x21700000218, - 0x2190000021a, - 0x21b0000021c, - 0x21d0000021e, - 0x21f00000220, - 0x22100000222, - 0x22300000224, - 0x22500000226, - 0x22700000228, - 0x2290000022a, - 0x22b0000022c, - 0x22d0000022e, - 0x22f00000230, - 0x23100000232, - 0x2330000023a, - 0x23c0000023d, - 0x23f00000241, - 
0x24200000243, - 0x24700000248, - 0x2490000024a, - 0x24b0000024c, - 0x24d0000024e, - 0x24f000002b0, - 0x2b9000002c2, - 0x2c6000002d2, - 0x2ec000002ed, - 0x2ee000002ef, - 0x30000000340, - 0x34200000343, - 0x3460000034f, - 0x35000000370, - 0x37100000372, - 0x37300000374, - 0x37700000378, - 0x37b0000037e, - 0x39000000391, - 0x3ac000003cf, - 0x3d7000003d8, - 0x3d9000003da, - 0x3db000003dc, - 0x3dd000003de, - 0x3df000003e0, - 0x3e1000003e2, - 0x3e3000003e4, - 0x3e5000003e6, - 0x3e7000003e8, - 0x3e9000003ea, - 0x3eb000003ec, - 0x3ed000003ee, - 0x3ef000003f0, - 0x3f3000003f4, - 0x3f8000003f9, - 0x3fb000003fd, - 0x43000000460, - 0x46100000462, - 0x46300000464, - 0x46500000466, - 0x46700000468, - 0x4690000046a, - 0x46b0000046c, - 0x46d0000046e, - 0x46f00000470, - 0x47100000472, - 0x47300000474, - 0x47500000476, - 0x47700000478, - 0x4790000047a, - 0x47b0000047c, - 0x47d0000047e, - 0x47f00000480, - 0x48100000482, - 0x48300000488, - 0x48b0000048c, - 0x48d0000048e, - 0x48f00000490, - 0x49100000492, - 0x49300000494, - 0x49500000496, - 0x49700000498, - 0x4990000049a, - 0x49b0000049c, - 0x49d0000049e, - 0x49f000004a0, - 0x4a1000004a2, - 0x4a3000004a4, - 0x4a5000004a6, - 0x4a7000004a8, - 0x4a9000004aa, - 0x4ab000004ac, - 0x4ad000004ae, - 0x4af000004b0, - 0x4b1000004b2, - 0x4b3000004b4, - 0x4b5000004b6, - 0x4b7000004b8, - 0x4b9000004ba, - 0x4bb000004bc, - 0x4bd000004be, - 0x4bf000004c0, - 0x4c2000004c3, - 0x4c4000004c5, - 0x4c6000004c7, - 0x4c8000004c9, - 0x4ca000004cb, - 0x4cc000004cd, - 0x4ce000004d0, - 0x4d1000004d2, - 0x4d3000004d4, - 0x4d5000004d6, - 0x4d7000004d8, - 0x4d9000004da, - 0x4db000004dc, - 0x4dd000004de, - 0x4df000004e0, - 0x4e1000004e2, - 0x4e3000004e4, - 0x4e5000004e6, - 0x4e7000004e8, - 0x4e9000004ea, - 0x4eb000004ec, - 0x4ed000004ee, - 0x4ef000004f0, - 0x4f1000004f2, - 0x4f3000004f4, - 0x4f5000004f6, - 0x4f7000004f8, - 0x4f9000004fa, - 0x4fb000004fc, - 0x4fd000004fe, - 0x4ff00000500, - 0x50100000502, - 0x50300000504, - 0x50500000506, - 0x50700000508, - 0x5090000050a, - 0x50b0000050c, - 0x50d0000050e, - 0x50f00000510, - 0x51100000512, - 0x51300000514, - 0x51500000516, - 0x51700000518, - 0x5190000051a, - 0x51b0000051c, - 0x51d0000051e, - 0x51f00000520, - 0x52100000522, - 0x52300000524, - 0x52500000526, - 0x52700000528, - 0x5290000052a, - 0x52b0000052c, - 0x52d0000052e, - 0x52f00000530, - 0x5590000055a, - 0x56000000587, - 0x58800000589, - 0x591000005be, - 0x5bf000005c0, - 0x5c1000005c3, - 0x5c4000005c6, - 0x5c7000005c8, - 0x5d0000005eb, - 0x5ef000005f3, - 0x6100000061b, - 0x62000000640, - 0x64100000660, - 0x66e00000675, - 0x679000006d4, - 0x6d5000006dd, - 0x6df000006e9, - 0x6ea000006f0, - 0x6fa00000700, - 0x7100000074b, - 0x74d000007b2, - 0x7c0000007f6, - 0x7fd000007fe, - 0x8000000082e, - 0x8400000085c, - 0x8600000086b, - 0x87000000888, - 0x8890000088f, - 0x898000008e2, - 0x8e300000958, - 0x96000000964, - 0x96600000970, - 0x97100000984, - 0x9850000098d, - 0x98f00000991, - 0x993000009a9, - 0x9aa000009b1, - 0x9b2000009b3, - 0x9b6000009ba, - 0x9bc000009c5, - 0x9c7000009c9, - 0x9cb000009cf, - 0x9d7000009d8, - 0x9e0000009e4, - 0x9e6000009f2, - 0x9fc000009fd, - 0x9fe000009ff, - 0xa0100000a04, - 0xa0500000a0b, - 0xa0f00000a11, - 0xa1300000a29, - 0xa2a00000a31, - 0xa3200000a33, - 0xa3500000a36, - 0xa3800000a3a, - 0xa3c00000a3d, - 0xa3e00000a43, - 0xa4700000a49, - 0xa4b00000a4e, - 0xa5100000a52, - 0xa5c00000a5d, - 0xa6600000a76, - 0xa8100000a84, - 0xa8500000a8e, - 0xa8f00000a92, - 0xa9300000aa9, - 0xaaa00000ab1, - 0xab200000ab4, - 0xab500000aba, - 0xabc00000ac6, - 0xac700000aca, - 0xacb00000ace, - 
0xad000000ad1, - 0xae000000ae4, - 0xae600000af0, - 0xaf900000b00, - 0xb0100000b04, - 0xb0500000b0d, - 0xb0f00000b11, - 0xb1300000b29, - 0xb2a00000b31, - 0xb3200000b34, - 0xb3500000b3a, - 0xb3c00000b45, - 0xb4700000b49, - 0xb4b00000b4e, - 0xb5500000b58, - 0xb5f00000b64, - 0xb6600000b70, - 0xb7100000b72, - 0xb8200000b84, - 0xb8500000b8b, - 0xb8e00000b91, - 0xb9200000b96, - 0xb9900000b9b, - 0xb9c00000b9d, - 0xb9e00000ba0, - 0xba300000ba5, - 0xba800000bab, - 0xbae00000bba, - 0xbbe00000bc3, - 0xbc600000bc9, - 0xbca00000bce, - 0xbd000000bd1, - 0xbd700000bd8, - 0xbe600000bf0, - 0xc0000000c0d, - 0xc0e00000c11, - 0xc1200000c29, - 0xc2a00000c3a, - 0xc3c00000c45, - 0xc4600000c49, - 0xc4a00000c4e, - 0xc5500000c57, - 0xc5800000c5b, - 0xc5d00000c5e, - 0xc6000000c64, - 0xc6600000c70, - 0xc8000000c84, - 0xc8500000c8d, - 0xc8e00000c91, - 0xc9200000ca9, - 0xcaa00000cb4, - 0xcb500000cba, - 0xcbc00000cc5, - 0xcc600000cc9, - 0xcca00000cce, - 0xcd500000cd7, - 0xcdd00000cdf, - 0xce000000ce4, - 0xce600000cf0, - 0xcf100000cf4, - 0xd0000000d0d, - 0xd0e00000d11, - 0xd1200000d45, - 0xd4600000d49, - 0xd4a00000d4f, - 0xd5400000d58, - 0xd5f00000d64, - 0xd6600000d70, - 0xd7a00000d80, - 0xd8100000d84, - 0xd8500000d97, - 0xd9a00000db2, - 0xdb300000dbc, - 0xdbd00000dbe, - 0xdc000000dc7, - 0xdca00000dcb, - 0xdcf00000dd5, - 0xdd600000dd7, - 0xdd800000de0, - 0xde600000df0, - 0xdf200000df4, - 0xe0100000e33, - 0xe3400000e3b, - 0xe4000000e4f, - 0xe5000000e5a, - 0xe8100000e83, - 0xe8400000e85, - 0xe8600000e8b, - 0xe8c00000ea4, - 0xea500000ea6, - 0xea700000eb3, - 0xeb400000ebe, - 0xec000000ec5, - 0xec600000ec7, - 0xec800000ecf, - 0xed000000eda, - 0xede00000ee0, - 0xf0000000f01, - 0xf0b00000f0c, - 0xf1800000f1a, - 0xf2000000f2a, - 0xf3500000f36, - 0xf3700000f38, - 0xf3900000f3a, - 0xf3e00000f43, - 0xf4400000f48, - 0xf4900000f4d, - 0xf4e00000f52, - 0xf5300000f57, - 0xf5800000f5c, - 0xf5d00000f69, - 0xf6a00000f6d, - 0xf7100000f73, - 0xf7400000f75, - 0xf7a00000f81, - 0xf8200000f85, - 0xf8600000f93, - 0xf9400000f98, - 0xf9900000f9d, - 0xf9e00000fa2, - 0xfa300000fa7, - 0xfa800000fac, - 0xfad00000fb9, - 0xfba00000fbd, - 0xfc600000fc7, - 0x10000000104a, - 0x10500000109e, - 0x10d0000010fb, - 0x10fd00001100, - 0x120000001249, - 0x124a0000124e, - 0x125000001257, - 0x125800001259, - 0x125a0000125e, - 0x126000001289, - 0x128a0000128e, - 0x1290000012b1, - 0x12b2000012b6, - 0x12b8000012bf, - 0x12c0000012c1, - 0x12c2000012c6, - 0x12c8000012d7, - 0x12d800001311, - 0x131200001316, - 0x13180000135b, - 0x135d00001360, - 0x138000001390, - 0x13a0000013f6, - 0x14010000166d, - 0x166f00001680, - 0x16810000169b, - 0x16a0000016eb, - 0x16f1000016f9, - 0x170000001716, - 0x171f00001735, - 0x174000001754, - 0x17600000176d, - 0x176e00001771, - 0x177200001774, - 0x1780000017b4, - 0x17b6000017d4, - 0x17d7000017d8, - 0x17dc000017de, - 0x17e0000017ea, - 0x18100000181a, - 0x182000001879, - 0x1880000018ab, - 0x18b0000018f6, - 0x19000000191f, - 0x19200000192c, - 0x19300000193c, - 0x19460000196e, - 0x197000001975, - 0x1980000019ac, - 0x19b0000019ca, - 0x19d0000019da, - 0x1a0000001a1c, - 0x1a2000001a5f, - 0x1a6000001a7d, - 0x1a7f00001a8a, - 0x1a9000001a9a, - 0x1aa700001aa8, - 0x1ab000001abe, - 0x1abf00001acf, - 0x1b0000001b4d, - 0x1b5000001b5a, - 0x1b6b00001b74, - 0x1b8000001bf4, - 0x1c0000001c38, - 0x1c4000001c4a, - 0x1c4d00001c7e, - 0x1cd000001cd3, - 0x1cd400001cfb, - 0x1d0000001d2c, - 0x1d2f00001d30, - 0x1d3b00001d3c, - 0x1d4e00001d4f, - 0x1d6b00001d78, - 0x1d7900001d9b, - 0x1dc000001e00, - 0x1e0100001e02, - 0x1e0300001e04, - 0x1e0500001e06, - 0x1e0700001e08, - 
0x1e0900001e0a, - 0x1e0b00001e0c, - 0x1e0d00001e0e, - 0x1e0f00001e10, - 0x1e1100001e12, - 0x1e1300001e14, - 0x1e1500001e16, - 0x1e1700001e18, - 0x1e1900001e1a, - 0x1e1b00001e1c, - 0x1e1d00001e1e, - 0x1e1f00001e20, - 0x1e2100001e22, - 0x1e2300001e24, - 0x1e2500001e26, - 0x1e2700001e28, - 0x1e2900001e2a, - 0x1e2b00001e2c, - 0x1e2d00001e2e, - 0x1e2f00001e30, - 0x1e3100001e32, - 0x1e3300001e34, - 0x1e3500001e36, - 0x1e3700001e38, - 0x1e3900001e3a, - 0x1e3b00001e3c, - 0x1e3d00001e3e, - 0x1e3f00001e40, - 0x1e4100001e42, - 0x1e4300001e44, - 0x1e4500001e46, - 0x1e4700001e48, - 0x1e4900001e4a, - 0x1e4b00001e4c, - 0x1e4d00001e4e, - 0x1e4f00001e50, - 0x1e5100001e52, - 0x1e5300001e54, - 0x1e5500001e56, - 0x1e5700001e58, - 0x1e5900001e5a, - 0x1e5b00001e5c, - 0x1e5d00001e5e, - 0x1e5f00001e60, - 0x1e6100001e62, - 0x1e6300001e64, - 0x1e6500001e66, - 0x1e6700001e68, - 0x1e6900001e6a, - 0x1e6b00001e6c, - 0x1e6d00001e6e, - 0x1e6f00001e70, - 0x1e7100001e72, - 0x1e7300001e74, - 0x1e7500001e76, - 0x1e7700001e78, - 0x1e7900001e7a, - 0x1e7b00001e7c, - 0x1e7d00001e7e, - 0x1e7f00001e80, - 0x1e8100001e82, - 0x1e8300001e84, - 0x1e8500001e86, - 0x1e8700001e88, - 0x1e8900001e8a, - 0x1e8b00001e8c, - 0x1e8d00001e8e, - 0x1e8f00001e90, - 0x1e9100001e92, - 0x1e9300001e94, - 0x1e9500001e9a, - 0x1e9c00001e9e, - 0x1e9f00001ea0, - 0x1ea100001ea2, - 0x1ea300001ea4, - 0x1ea500001ea6, - 0x1ea700001ea8, - 0x1ea900001eaa, - 0x1eab00001eac, - 0x1ead00001eae, - 0x1eaf00001eb0, - 0x1eb100001eb2, - 0x1eb300001eb4, - 0x1eb500001eb6, - 0x1eb700001eb8, - 0x1eb900001eba, - 0x1ebb00001ebc, - 0x1ebd00001ebe, - 0x1ebf00001ec0, - 0x1ec100001ec2, - 0x1ec300001ec4, - 0x1ec500001ec6, - 0x1ec700001ec8, - 0x1ec900001eca, - 0x1ecb00001ecc, - 0x1ecd00001ece, - 0x1ecf00001ed0, - 0x1ed100001ed2, - 0x1ed300001ed4, - 0x1ed500001ed6, - 0x1ed700001ed8, - 0x1ed900001eda, - 0x1edb00001edc, - 0x1edd00001ede, - 0x1edf00001ee0, - 0x1ee100001ee2, - 0x1ee300001ee4, - 0x1ee500001ee6, - 0x1ee700001ee8, - 0x1ee900001eea, - 0x1eeb00001eec, - 0x1eed00001eee, - 0x1eef00001ef0, - 0x1ef100001ef2, - 0x1ef300001ef4, - 0x1ef500001ef6, - 0x1ef700001ef8, - 0x1ef900001efa, - 0x1efb00001efc, - 0x1efd00001efe, - 0x1eff00001f08, - 0x1f1000001f16, - 0x1f2000001f28, - 0x1f3000001f38, - 0x1f4000001f46, - 0x1f5000001f58, - 0x1f6000001f68, - 0x1f7000001f71, - 0x1f7200001f73, - 0x1f7400001f75, - 0x1f7600001f77, - 0x1f7800001f79, - 0x1f7a00001f7b, - 0x1f7c00001f7d, - 0x1fb000001fb2, - 0x1fb600001fb7, - 0x1fc600001fc7, - 0x1fd000001fd3, - 0x1fd600001fd8, - 0x1fe000001fe3, - 0x1fe400001fe8, - 0x1ff600001ff7, - 0x214e0000214f, - 0x218400002185, - 0x2c3000002c60, - 0x2c6100002c62, - 0x2c6500002c67, - 0x2c6800002c69, - 0x2c6a00002c6b, - 0x2c6c00002c6d, - 0x2c7100002c72, - 0x2c7300002c75, - 0x2c7600002c7c, - 0x2c8100002c82, - 0x2c8300002c84, - 0x2c8500002c86, - 0x2c8700002c88, - 0x2c8900002c8a, - 0x2c8b00002c8c, - 0x2c8d00002c8e, - 0x2c8f00002c90, - 0x2c9100002c92, - 0x2c9300002c94, - 0x2c9500002c96, - 0x2c9700002c98, - 0x2c9900002c9a, - 0x2c9b00002c9c, - 0x2c9d00002c9e, - 0x2c9f00002ca0, - 0x2ca100002ca2, - 0x2ca300002ca4, - 0x2ca500002ca6, - 0x2ca700002ca8, - 0x2ca900002caa, - 0x2cab00002cac, - 0x2cad00002cae, - 0x2caf00002cb0, - 0x2cb100002cb2, - 0x2cb300002cb4, - 0x2cb500002cb6, - 0x2cb700002cb8, - 0x2cb900002cba, - 0x2cbb00002cbc, - 0x2cbd00002cbe, - 0x2cbf00002cc0, - 0x2cc100002cc2, - 0x2cc300002cc4, - 0x2cc500002cc6, - 0x2cc700002cc8, - 0x2cc900002cca, - 0x2ccb00002ccc, - 0x2ccd00002cce, - 0x2ccf00002cd0, - 0x2cd100002cd2, - 0x2cd300002cd4, - 0x2cd500002cd6, - 0x2cd700002cd8, - 
0x2cd900002cda, - 0x2cdb00002cdc, - 0x2cdd00002cde, - 0x2cdf00002ce0, - 0x2ce100002ce2, - 0x2ce300002ce5, - 0x2cec00002ced, - 0x2cee00002cf2, - 0x2cf300002cf4, - 0x2d0000002d26, - 0x2d2700002d28, - 0x2d2d00002d2e, - 0x2d3000002d68, - 0x2d7f00002d97, - 0x2da000002da7, - 0x2da800002daf, - 0x2db000002db7, - 0x2db800002dbf, - 0x2dc000002dc7, - 0x2dc800002dcf, - 0x2dd000002dd7, - 0x2dd800002ddf, - 0x2de000002e00, - 0x2e2f00002e30, - 0x300500003008, - 0x302a0000302e, - 0x303c0000303d, - 0x304100003097, - 0x30990000309b, - 0x309d0000309f, - 0x30a1000030fb, - 0x30fc000030ff, - 0x310500003130, - 0x31a0000031c0, - 0x31f000003200, - 0x340000004dc0, - 0x4e000000a48d, - 0xa4d00000a4fe, - 0xa5000000a60d, - 0xa6100000a62c, - 0xa6410000a642, - 0xa6430000a644, - 0xa6450000a646, - 0xa6470000a648, - 0xa6490000a64a, - 0xa64b0000a64c, - 0xa64d0000a64e, - 0xa64f0000a650, - 0xa6510000a652, - 0xa6530000a654, - 0xa6550000a656, - 0xa6570000a658, - 0xa6590000a65a, - 0xa65b0000a65c, - 0xa65d0000a65e, - 0xa65f0000a660, - 0xa6610000a662, - 0xa6630000a664, - 0xa6650000a666, - 0xa6670000a668, - 0xa6690000a66a, - 0xa66b0000a66c, - 0xa66d0000a670, - 0xa6740000a67e, - 0xa67f0000a680, - 0xa6810000a682, - 0xa6830000a684, - 0xa6850000a686, - 0xa6870000a688, - 0xa6890000a68a, - 0xa68b0000a68c, - 0xa68d0000a68e, - 0xa68f0000a690, - 0xa6910000a692, - 0xa6930000a694, - 0xa6950000a696, - 0xa6970000a698, - 0xa6990000a69a, - 0xa69b0000a69c, - 0xa69e0000a6e6, - 0xa6f00000a6f2, - 0xa7170000a720, - 0xa7230000a724, - 0xa7250000a726, - 0xa7270000a728, - 0xa7290000a72a, - 0xa72b0000a72c, - 0xa72d0000a72e, - 0xa72f0000a732, - 0xa7330000a734, - 0xa7350000a736, - 0xa7370000a738, - 0xa7390000a73a, - 0xa73b0000a73c, - 0xa73d0000a73e, - 0xa73f0000a740, - 0xa7410000a742, - 0xa7430000a744, - 0xa7450000a746, - 0xa7470000a748, - 0xa7490000a74a, - 0xa74b0000a74c, - 0xa74d0000a74e, - 0xa74f0000a750, - 0xa7510000a752, - 0xa7530000a754, - 0xa7550000a756, - 0xa7570000a758, - 0xa7590000a75a, - 0xa75b0000a75c, - 0xa75d0000a75e, - 0xa75f0000a760, - 0xa7610000a762, - 0xa7630000a764, - 0xa7650000a766, - 0xa7670000a768, - 0xa7690000a76a, - 0xa76b0000a76c, - 0xa76d0000a76e, - 0xa76f0000a770, - 0xa7710000a779, - 0xa77a0000a77b, - 0xa77c0000a77d, - 0xa77f0000a780, - 0xa7810000a782, - 0xa7830000a784, - 0xa7850000a786, - 0xa7870000a789, - 0xa78c0000a78d, - 0xa78e0000a790, - 0xa7910000a792, - 0xa7930000a796, - 0xa7970000a798, - 0xa7990000a79a, - 0xa79b0000a79c, - 0xa79d0000a79e, - 0xa79f0000a7a0, - 0xa7a10000a7a2, - 0xa7a30000a7a4, - 0xa7a50000a7a6, - 0xa7a70000a7a8, - 0xa7a90000a7aa, - 0xa7af0000a7b0, - 0xa7b50000a7b6, - 0xa7b70000a7b8, - 0xa7b90000a7ba, - 0xa7bb0000a7bc, - 0xa7bd0000a7be, - 0xa7bf0000a7c0, - 0xa7c10000a7c2, - 0xa7c30000a7c4, - 0xa7c80000a7c9, - 0xa7ca0000a7cb, - 0xa7d10000a7d2, - 0xa7d30000a7d4, - 0xa7d50000a7d6, - 0xa7d70000a7d8, - 0xa7d90000a7da, - 0xa7f20000a7f5, - 0xa7f60000a7f8, - 0xa7fa0000a828, - 0xa82c0000a82d, - 0xa8400000a874, - 0xa8800000a8c6, - 0xa8d00000a8da, - 0xa8e00000a8f8, - 0xa8fb0000a8fc, - 0xa8fd0000a92e, - 0xa9300000a954, - 0xa9800000a9c1, - 0xa9cf0000a9da, - 0xa9e00000a9ff, - 0xaa000000aa37, - 0xaa400000aa4e, - 0xaa500000aa5a, - 0xaa600000aa77, - 0xaa7a0000aac3, - 0xaadb0000aade, - 0xaae00000aaf0, - 0xaaf20000aaf7, - 0xab010000ab07, - 0xab090000ab0f, - 0xab110000ab17, - 0xab200000ab27, - 0xab280000ab2f, - 0xab300000ab5b, - 0xab600000ab69, - 0xabc00000abeb, - 0xabec0000abee, - 0xabf00000abfa, - 0xac000000d7a4, - 0xfa0e0000fa10, - 0xfa110000fa12, - 0xfa130000fa15, - 0xfa1f0000fa20, - 0xfa210000fa22, - 0xfa230000fa25, - 
0xfa270000fa2a, - 0xfb1e0000fb1f, - 0xfe200000fe30, - 0xfe730000fe74, - 0x100000001000c, - 0x1000d00010027, - 0x100280001003b, - 0x1003c0001003e, - 0x1003f0001004e, - 0x100500001005e, - 0x10080000100fb, - 0x101fd000101fe, - 0x102800001029d, - 0x102a0000102d1, - 0x102e0000102e1, - 0x1030000010320, - 0x1032d00010341, - 0x103420001034a, - 0x103500001037b, - 0x103800001039e, - 0x103a0000103c4, - 0x103c8000103d0, - 0x104280001049e, - 0x104a0000104aa, - 0x104d8000104fc, - 0x1050000010528, - 0x1053000010564, - 0x10597000105a2, - 0x105a3000105b2, - 0x105b3000105ba, - 0x105bb000105bd, - 0x1060000010737, - 0x1074000010756, - 0x1076000010768, - 0x1078000010786, - 0x10787000107b1, - 0x107b2000107bb, - 0x1080000010806, - 0x1080800010809, - 0x1080a00010836, - 0x1083700010839, - 0x1083c0001083d, - 0x1083f00010856, - 0x1086000010877, - 0x108800001089f, - 0x108e0000108f3, - 0x108f4000108f6, - 0x1090000010916, - 0x109200001093a, - 0x10980000109b8, - 0x109be000109c0, - 0x10a0000010a04, - 0x10a0500010a07, - 0x10a0c00010a14, - 0x10a1500010a18, - 0x10a1900010a36, - 0x10a3800010a3b, - 0x10a3f00010a40, - 0x10a6000010a7d, - 0x10a8000010a9d, - 0x10ac000010ac8, - 0x10ac900010ae7, - 0x10b0000010b36, - 0x10b4000010b56, - 0x10b6000010b73, - 0x10b8000010b92, - 0x10c0000010c49, - 0x10cc000010cf3, - 0x10d0000010d28, - 0x10d3000010d3a, - 0x10e8000010eaa, - 0x10eab00010ead, - 0x10eb000010eb2, - 0x10efd00010f1d, - 0x10f2700010f28, - 0x10f3000010f51, - 0x10f7000010f86, - 0x10fb000010fc5, - 0x10fe000010ff7, - 0x1100000011047, - 0x1106600011076, - 0x1107f000110bb, - 0x110c2000110c3, - 0x110d0000110e9, - 0x110f0000110fa, - 0x1110000011135, - 0x1113600011140, - 0x1114400011148, - 0x1115000011174, - 0x1117600011177, - 0x11180000111c5, - 0x111c9000111cd, - 0x111ce000111db, - 0x111dc000111dd, - 0x1120000011212, - 0x1121300011238, - 0x1123e00011242, - 0x1128000011287, - 0x1128800011289, - 0x1128a0001128e, - 0x1128f0001129e, - 0x1129f000112a9, - 0x112b0000112eb, - 0x112f0000112fa, - 0x1130000011304, - 0x113050001130d, - 0x1130f00011311, - 0x1131300011329, - 0x1132a00011331, - 0x1133200011334, - 0x113350001133a, - 0x1133b00011345, - 0x1134700011349, - 0x1134b0001134e, - 0x1135000011351, - 0x1135700011358, - 0x1135d00011364, - 0x113660001136d, - 0x1137000011375, - 0x114000001144b, - 0x114500001145a, - 0x1145e00011462, - 0x11480000114c6, - 0x114c7000114c8, - 0x114d0000114da, - 0x11580000115b6, - 0x115b8000115c1, - 0x115d8000115de, - 0x1160000011641, - 0x1164400011645, - 0x116500001165a, - 0x11680000116b9, - 0x116c0000116ca, - 0x117000001171b, - 0x1171d0001172c, - 0x117300001173a, - 0x1174000011747, - 0x118000001183b, - 0x118c0000118ea, - 0x118ff00011907, - 0x119090001190a, - 0x1190c00011914, - 0x1191500011917, - 0x1191800011936, - 0x1193700011939, - 0x1193b00011944, - 0x119500001195a, - 0x119a0000119a8, - 0x119aa000119d8, - 0x119da000119e2, - 0x119e3000119e5, - 0x11a0000011a3f, - 0x11a4700011a48, - 0x11a5000011a9a, - 0x11a9d00011a9e, - 0x11ab000011af9, - 0x11c0000011c09, - 0x11c0a00011c37, - 0x11c3800011c41, - 0x11c5000011c5a, - 0x11c7200011c90, - 0x11c9200011ca8, - 0x11ca900011cb7, - 0x11d0000011d07, - 0x11d0800011d0a, - 0x11d0b00011d37, - 0x11d3a00011d3b, - 0x11d3c00011d3e, - 0x11d3f00011d48, - 0x11d5000011d5a, - 0x11d6000011d66, - 0x11d6700011d69, - 0x11d6a00011d8f, - 0x11d9000011d92, - 0x11d9300011d99, - 0x11da000011daa, - 0x11ee000011ef7, - 0x11f0000011f11, - 0x11f1200011f3b, - 0x11f3e00011f43, - 0x11f5000011f5a, - 0x11fb000011fb1, - 0x120000001239a, - 0x1248000012544, - 0x12f9000012ff1, - 0x1300000013430, - 0x1344000013456, - 
0x1440000014647, - 0x1680000016a39, - 0x16a4000016a5f, - 0x16a6000016a6a, - 0x16a7000016abf, - 0x16ac000016aca, - 0x16ad000016aee, - 0x16af000016af5, - 0x16b0000016b37, - 0x16b4000016b44, - 0x16b5000016b5a, - 0x16b6300016b78, - 0x16b7d00016b90, - 0x16e6000016e80, - 0x16f0000016f4b, - 0x16f4f00016f88, - 0x16f8f00016fa0, - 0x16fe000016fe2, - 0x16fe300016fe5, - 0x16ff000016ff2, - 0x17000000187f8, - 0x1880000018cd6, - 0x18d0000018d09, - 0x1aff00001aff4, - 0x1aff50001affc, - 0x1affd0001afff, - 0x1b0000001b123, - 0x1b1320001b133, - 0x1b1500001b153, - 0x1b1550001b156, - 0x1b1640001b168, - 0x1b1700001b2fc, - 0x1bc000001bc6b, - 0x1bc700001bc7d, - 0x1bc800001bc89, - 0x1bc900001bc9a, - 0x1bc9d0001bc9f, - 0x1cf000001cf2e, - 0x1cf300001cf47, - 0x1da000001da37, - 0x1da3b0001da6d, - 0x1da750001da76, - 0x1da840001da85, - 0x1da9b0001daa0, - 0x1daa10001dab0, - 0x1df000001df1f, - 0x1df250001df2b, - 0x1e0000001e007, - 0x1e0080001e019, - 0x1e01b0001e022, - 0x1e0230001e025, - 0x1e0260001e02b, - 0x1e0300001e06e, - 0x1e08f0001e090, - 0x1e1000001e12d, - 0x1e1300001e13e, - 0x1e1400001e14a, - 0x1e14e0001e14f, - 0x1e2900001e2af, - 0x1e2c00001e2fa, - 0x1e4d00001e4fa, - 0x1e7e00001e7e7, - 0x1e7e80001e7ec, - 0x1e7ed0001e7ef, - 0x1e7f00001e7ff, - 0x1e8000001e8c5, - 0x1e8d00001e8d7, - 0x1e9220001e94c, - 0x1e9500001e95a, - 0x200000002a6e0, - 0x2a7000002b73a, - 0x2b7400002b81e, - 0x2b8200002cea2, - 0x2ceb00002ebe1, - 0x300000003134b, - 0x31350000323b0, - ), - 'CONTEXTJ': ( - 0x200c0000200e, - ), - 'CONTEXTO': ( - 0xb7000000b8, - 0x37500000376, - 0x5f3000005f5, - 0x6600000066a, - 0x6f0000006fa, - 0x30fb000030fc, - ), -} diff --git a/server/venv/Lib/site-packages/idna/intranges.py b/server/venv/Lib/site-packages/idna/intranges.py deleted file mode 100644 index 6a43b04..0000000 --- a/server/venv/Lib/site-packages/idna/intranges.py +++ /dev/null @@ -1,54 +0,0 @@ -""" -Given a list of integers, made up of (hopefully) a small number of long runs -of consecutive integers, compute a representation of the form -((start1, end1), (start2, end2) ...). Then answer the question "was x present -in the original list?" in time O(log(# runs)). -""" - -import bisect -from typing import List, Tuple - -def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: - """Represent a list of integers as a sequence of ranges: - ((start_0, end_0), (start_1, end_1), ...), such that the original - integers are exactly those x such that start_i <= x < end_i for some i. - - Ranges are encoded as single integers (start << 32 | end), not as tuples. 
- """ - - sorted_list = sorted(list_) - ranges = [] - last_write = -1 - for i in range(len(sorted_list)): - if i+1 < len(sorted_list): - if sorted_list[i] == sorted_list[i+1]-1: - continue - current_range = sorted_list[last_write+1:i+1] - ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) - last_write = i - - return tuple(ranges) - -def _encode_range(start: int, end: int) -> int: - return (start << 32) | end - -def _decode_range(r: int) -> Tuple[int, int]: - return (r >> 32), (r & ((1 << 32) - 1)) - - -def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: - """Determine if `int_` falls into one of the ranges in `ranges`.""" - tuple_ = _encode_range(int_, 0) - pos = bisect.bisect_left(ranges, tuple_) - # we could be immediately ahead of a tuple (start, end) - # with start < int_ <= end - if pos > 0: - left, right = _decode_range(ranges[pos-1]) - if left <= int_ < right: - return True - # or we could be immediately behind a tuple (int_, end) - if pos < len(ranges): - left, _ = _decode_range(ranges[pos]) - if left == int_: - return True - return False diff --git a/server/venv/Lib/site-packages/idna/package_data.py b/server/venv/Lib/site-packages/idna/package_data.py deleted file mode 100644 index 8501893..0000000 --- a/server/venv/Lib/site-packages/idna/package_data.py +++ /dev/null @@ -1,2 +0,0 @@ -__version__ = '3.4' - diff --git a/server/venv/Lib/site-packages/idna/py.typed b/server/venv/Lib/site-packages/idna/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/idna/uts46data.py b/server/venv/Lib/site-packages/idna/uts46data.py deleted file mode 100644 index 186796c..0000000 --- a/server/venv/Lib/site-packages/idna/uts46data.py +++ /dev/null @@ -1,8600 +0,0 @@ -# This file is automatically generated by tools/idna-data -# vim: set fileencoding=utf-8 : - -from typing import List, Tuple, Union - - -"""IDNA Mapping Table from UTS46.""" - - -__version__ = '15.0.0' -def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x0, '3'), - (0x1, '3'), - (0x2, '3'), - (0x3, '3'), - (0x4, '3'), - (0x5, '3'), - (0x6, '3'), - (0x7, '3'), - (0x8, '3'), - (0x9, '3'), - (0xA, '3'), - (0xB, '3'), - (0xC, '3'), - (0xD, '3'), - (0xE, '3'), - (0xF, '3'), - (0x10, '3'), - (0x11, '3'), - (0x12, '3'), - (0x13, '3'), - (0x14, '3'), - (0x15, '3'), - (0x16, '3'), - (0x17, '3'), - (0x18, '3'), - (0x19, '3'), - (0x1A, '3'), - (0x1B, '3'), - (0x1C, '3'), - (0x1D, '3'), - (0x1E, '3'), - (0x1F, '3'), - (0x20, '3'), - (0x21, '3'), - (0x22, '3'), - (0x23, '3'), - (0x24, '3'), - (0x25, '3'), - (0x26, '3'), - (0x27, '3'), - (0x28, '3'), - (0x29, '3'), - (0x2A, '3'), - (0x2B, '3'), - (0x2C, '3'), - (0x2D, 'V'), - (0x2E, 'V'), - (0x2F, '3'), - (0x30, 'V'), - (0x31, 'V'), - (0x32, 'V'), - (0x33, 'V'), - (0x34, 'V'), - (0x35, 'V'), - (0x36, 'V'), - (0x37, 'V'), - (0x38, 'V'), - (0x39, 'V'), - (0x3A, '3'), - (0x3B, '3'), - (0x3C, '3'), - (0x3D, '3'), - (0x3E, '3'), - (0x3F, '3'), - (0x40, '3'), - (0x41, 'M', 'a'), - (0x42, 'M', 'b'), - (0x43, 'M', 'c'), - (0x44, 'M', 'd'), - (0x45, 'M', 'e'), - (0x46, 'M', 'f'), - (0x47, 'M', 'g'), - (0x48, 'M', 'h'), - (0x49, 'M', 'i'), - (0x4A, 'M', 'j'), - (0x4B, 'M', 'k'), - (0x4C, 'M', 'l'), - (0x4D, 'M', 'm'), - (0x4E, 'M', 'n'), - (0x4F, 'M', 'o'), - (0x50, 'M', 'p'), - (0x51, 'M', 'q'), - (0x52, 'M', 'r'), - (0x53, 'M', 's'), - (0x54, 'M', 't'), - (0x55, 'M', 'u'), - (0x56, 'M', 'v'), - (0x57, 'M', 'w'), - (0x58, 'M', 'x'), - (0x59, 'M', 'y'), - (0x5A, 'M', 'z'), - (0x5B, '3'), - (0x5C, 
'3'), - (0x5D, '3'), - (0x5E, '3'), - (0x5F, '3'), - (0x60, '3'), - (0x61, 'V'), - (0x62, 'V'), - (0x63, 'V'), - ] - -def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x64, 'V'), - (0x65, 'V'), - (0x66, 'V'), - (0x67, 'V'), - (0x68, 'V'), - (0x69, 'V'), - (0x6A, 'V'), - (0x6B, 'V'), - (0x6C, 'V'), - (0x6D, 'V'), - (0x6E, 'V'), - (0x6F, 'V'), - (0x70, 'V'), - (0x71, 'V'), - (0x72, 'V'), - (0x73, 'V'), - (0x74, 'V'), - (0x75, 'V'), - (0x76, 'V'), - (0x77, 'V'), - (0x78, 'V'), - (0x79, 'V'), - (0x7A, 'V'), - (0x7B, '3'), - (0x7C, '3'), - (0x7D, '3'), - (0x7E, '3'), - (0x7F, '3'), - (0x80, 'X'), - (0x81, 'X'), - (0x82, 'X'), - (0x83, 'X'), - (0x84, 'X'), - (0x85, 'X'), - (0x86, 'X'), - (0x87, 'X'), - (0x88, 'X'), - (0x89, 'X'), - (0x8A, 'X'), - (0x8B, 'X'), - (0x8C, 'X'), - (0x8D, 'X'), - (0x8E, 'X'), - (0x8F, 'X'), - (0x90, 'X'), - (0x91, 'X'), - (0x92, 'X'), - (0x93, 'X'), - (0x94, 'X'), - (0x95, 'X'), - (0x96, 'X'), - (0x97, 'X'), - (0x98, 'X'), - (0x99, 'X'), - (0x9A, 'X'), - (0x9B, 'X'), - (0x9C, 'X'), - (0x9D, 'X'), - (0x9E, 'X'), - (0x9F, 'X'), - (0xA0, '3', ' '), - (0xA1, 'V'), - (0xA2, 'V'), - (0xA3, 'V'), - (0xA4, 'V'), - (0xA5, 'V'), - (0xA6, 'V'), - (0xA7, 'V'), - (0xA8, '3', ' ̈'), - (0xA9, 'V'), - (0xAA, 'M', 'a'), - (0xAB, 'V'), - (0xAC, 'V'), - (0xAD, 'I'), - (0xAE, 'V'), - (0xAF, '3', ' ̄'), - (0xB0, 'V'), - (0xB1, 'V'), - (0xB2, 'M', '2'), - (0xB3, 'M', '3'), - (0xB4, '3', ' ́'), - (0xB5, 'M', 'μ'), - (0xB6, 'V'), - (0xB7, 'V'), - (0xB8, '3', ' ̧'), - (0xB9, 'M', '1'), - (0xBA, 'M', 'o'), - (0xBB, 'V'), - (0xBC, 'M', '1⁄4'), - (0xBD, 'M', '1⁄2'), - (0xBE, 'M', '3⁄4'), - (0xBF, 'V'), - (0xC0, 'M', 'à'), - (0xC1, 'M', 'á'), - (0xC2, 'M', 'â'), - (0xC3, 'M', 'ã'), - (0xC4, 'M', 'ä'), - (0xC5, 'M', 'å'), - (0xC6, 'M', 'æ'), - (0xC7, 'M', 'ç'), - ] - -def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xC8, 'M', 'è'), - (0xC9, 'M', 'é'), - (0xCA, 'M', 'ê'), - (0xCB, 'M', 'ë'), - (0xCC, 'M', 'ì'), - (0xCD, 'M', 'í'), - (0xCE, 'M', 'î'), - (0xCF, 'M', 'ï'), - (0xD0, 'M', 'ð'), - (0xD1, 'M', 'ñ'), - (0xD2, 'M', 'ò'), - (0xD3, 'M', 'ó'), - (0xD4, 'M', 'ô'), - (0xD5, 'M', 'õ'), - (0xD6, 'M', 'ö'), - (0xD7, 'V'), - (0xD8, 'M', 'ø'), - (0xD9, 'M', 'ù'), - (0xDA, 'M', 'ú'), - (0xDB, 'M', 'û'), - (0xDC, 'M', 'ü'), - (0xDD, 'M', 'ý'), - (0xDE, 'M', 'þ'), - (0xDF, 'D', 'ss'), - (0xE0, 'V'), - (0xE1, 'V'), - (0xE2, 'V'), - (0xE3, 'V'), - (0xE4, 'V'), - (0xE5, 'V'), - (0xE6, 'V'), - (0xE7, 'V'), - (0xE8, 'V'), - (0xE9, 'V'), - (0xEA, 'V'), - (0xEB, 'V'), - (0xEC, 'V'), - (0xED, 'V'), - (0xEE, 'V'), - (0xEF, 'V'), - (0xF0, 'V'), - (0xF1, 'V'), - (0xF2, 'V'), - (0xF3, 'V'), - (0xF4, 'V'), - (0xF5, 'V'), - (0xF6, 'V'), - (0xF7, 'V'), - (0xF8, 'V'), - (0xF9, 'V'), - (0xFA, 'V'), - (0xFB, 'V'), - (0xFC, 'V'), - (0xFD, 'V'), - (0xFE, 'V'), - (0xFF, 'V'), - (0x100, 'M', 'ā'), - (0x101, 'V'), - (0x102, 'M', 'ă'), - (0x103, 'V'), - (0x104, 'M', 'ą'), - (0x105, 'V'), - (0x106, 'M', 'ć'), - (0x107, 'V'), - (0x108, 'M', 'ĉ'), - (0x109, 'V'), - (0x10A, 'M', 'ċ'), - (0x10B, 'V'), - (0x10C, 'M', 'č'), - (0x10D, 'V'), - (0x10E, 'M', 'ď'), - (0x10F, 'V'), - (0x110, 'M', 'đ'), - (0x111, 'V'), - (0x112, 'M', 'ē'), - (0x113, 'V'), - (0x114, 'M', 'ĕ'), - (0x115, 'V'), - (0x116, 'M', 'ė'), - (0x117, 'V'), - (0x118, 'M', 'ę'), - (0x119, 'V'), - (0x11A, 'M', 'ě'), - (0x11B, 'V'), - (0x11C, 'M', 'ĝ'), - (0x11D, 'V'), - (0x11E, 'M', 'ğ'), - (0x11F, 'V'), - (0x120, 'M', 'ġ'), - (0x121, 'V'), - (0x122, 'M', 'ģ'), - (0x123, 'V'), - (0x124, 'M', 'ĥ'), - 
(0x125, 'V'), - (0x126, 'M', 'ħ'), - (0x127, 'V'), - (0x128, 'M', 'ĩ'), - (0x129, 'V'), - (0x12A, 'M', 'ī'), - (0x12B, 'V'), - ] - -def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x12C, 'M', 'ĭ'), - (0x12D, 'V'), - (0x12E, 'M', 'į'), - (0x12F, 'V'), - (0x130, 'M', 'i̇'), - (0x131, 'V'), - (0x132, 'M', 'ij'), - (0x134, 'M', 'ĵ'), - (0x135, 'V'), - (0x136, 'M', 'ķ'), - (0x137, 'V'), - (0x139, 'M', 'ĺ'), - (0x13A, 'V'), - (0x13B, 'M', 'ļ'), - (0x13C, 'V'), - (0x13D, 'M', 'ľ'), - (0x13E, 'V'), - (0x13F, 'M', 'l·'), - (0x141, 'M', 'ł'), - (0x142, 'V'), - (0x143, 'M', 'ń'), - (0x144, 'V'), - (0x145, 'M', 'ņ'), - (0x146, 'V'), - (0x147, 'M', 'ň'), - (0x148, 'V'), - (0x149, 'M', 'ʼn'), - (0x14A, 'M', 'ŋ'), - (0x14B, 'V'), - (0x14C, 'M', 'ō'), - (0x14D, 'V'), - (0x14E, 'M', 'ŏ'), - (0x14F, 'V'), - (0x150, 'M', 'ő'), - (0x151, 'V'), - (0x152, 'M', 'œ'), - (0x153, 'V'), - (0x154, 'M', 'ŕ'), - (0x155, 'V'), - (0x156, 'M', 'ŗ'), - (0x157, 'V'), - (0x158, 'M', 'ř'), - (0x159, 'V'), - (0x15A, 'M', 'ś'), - (0x15B, 'V'), - (0x15C, 'M', 'ŝ'), - (0x15D, 'V'), - (0x15E, 'M', 'ş'), - (0x15F, 'V'), - (0x160, 'M', 'š'), - (0x161, 'V'), - (0x162, 'M', 'ţ'), - (0x163, 'V'), - (0x164, 'M', 'ť'), - (0x165, 'V'), - (0x166, 'M', 'ŧ'), - (0x167, 'V'), - (0x168, 'M', 'ũ'), - (0x169, 'V'), - (0x16A, 'M', 'ū'), - (0x16B, 'V'), - (0x16C, 'M', 'ŭ'), - (0x16D, 'V'), - (0x16E, 'M', 'ů'), - (0x16F, 'V'), - (0x170, 'M', 'ű'), - (0x171, 'V'), - (0x172, 'M', 'ų'), - (0x173, 'V'), - (0x174, 'M', 'ŵ'), - (0x175, 'V'), - (0x176, 'M', 'ŷ'), - (0x177, 'V'), - (0x178, 'M', 'ÿ'), - (0x179, 'M', 'ź'), - (0x17A, 'V'), - (0x17B, 'M', 'ż'), - (0x17C, 'V'), - (0x17D, 'M', 'ž'), - (0x17E, 'V'), - (0x17F, 'M', 's'), - (0x180, 'V'), - (0x181, 'M', 'ɓ'), - (0x182, 'M', 'ƃ'), - (0x183, 'V'), - (0x184, 'M', 'ƅ'), - (0x185, 'V'), - (0x186, 'M', 'ɔ'), - (0x187, 'M', 'ƈ'), - (0x188, 'V'), - (0x189, 'M', 'ɖ'), - (0x18A, 'M', 'ɗ'), - (0x18B, 'M', 'ƌ'), - (0x18C, 'V'), - (0x18E, 'M', 'ǝ'), - (0x18F, 'M', 'ə'), - (0x190, 'M', 'ɛ'), - (0x191, 'M', 'ƒ'), - (0x192, 'V'), - (0x193, 'M', 'ɠ'), - ] - -def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x194, 'M', 'ɣ'), - (0x195, 'V'), - (0x196, 'M', 'ɩ'), - (0x197, 'M', 'ɨ'), - (0x198, 'M', 'ƙ'), - (0x199, 'V'), - (0x19C, 'M', 'ɯ'), - (0x19D, 'M', 'ɲ'), - (0x19E, 'V'), - (0x19F, 'M', 'ɵ'), - (0x1A0, 'M', 'ơ'), - (0x1A1, 'V'), - (0x1A2, 'M', 'ƣ'), - (0x1A3, 'V'), - (0x1A4, 'M', 'ƥ'), - (0x1A5, 'V'), - (0x1A6, 'M', 'ʀ'), - (0x1A7, 'M', 'ƨ'), - (0x1A8, 'V'), - (0x1A9, 'M', 'ʃ'), - (0x1AA, 'V'), - (0x1AC, 'M', 'ƭ'), - (0x1AD, 'V'), - (0x1AE, 'M', 'ʈ'), - (0x1AF, 'M', 'ư'), - (0x1B0, 'V'), - (0x1B1, 'M', 'ʊ'), - (0x1B2, 'M', 'ʋ'), - (0x1B3, 'M', 'ƴ'), - (0x1B4, 'V'), - (0x1B5, 'M', 'ƶ'), - (0x1B6, 'V'), - (0x1B7, 'M', 'ʒ'), - (0x1B8, 'M', 'ƹ'), - (0x1B9, 'V'), - (0x1BC, 'M', 'ƽ'), - (0x1BD, 'V'), - (0x1C4, 'M', 'dž'), - (0x1C7, 'M', 'lj'), - (0x1CA, 'M', 'nj'), - (0x1CD, 'M', 'ǎ'), - (0x1CE, 'V'), - (0x1CF, 'M', 'ǐ'), - (0x1D0, 'V'), - (0x1D1, 'M', 'ǒ'), - (0x1D2, 'V'), - (0x1D3, 'M', 'ǔ'), - (0x1D4, 'V'), - (0x1D5, 'M', 'ǖ'), - (0x1D6, 'V'), - (0x1D7, 'M', 'ǘ'), - (0x1D8, 'V'), - (0x1D9, 'M', 'ǚ'), - (0x1DA, 'V'), - (0x1DB, 'M', 'ǜ'), - (0x1DC, 'V'), - (0x1DE, 'M', 'ǟ'), - (0x1DF, 'V'), - (0x1E0, 'M', 'ǡ'), - (0x1E1, 'V'), - (0x1E2, 'M', 'ǣ'), - (0x1E3, 'V'), - (0x1E4, 'M', 'ǥ'), - (0x1E5, 'V'), - (0x1E6, 'M', 'ǧ'), - (0x1E7, 'V'), - (0x1E8, 'M', 'ǩ'), - (0x1E9, 'V'), - (0x1EA, 'M', 'ǫ'), - (0x1EB, 'V'), - (0x1EC, 'M', 'ǭ'), - (0x1ED, 'V'), - 
(0x1EE, 'M', 'ǯ'), - (0x1EF, 'V'), - (0x1F1, 'M', 'dz'), - (0x1F4, 'M', 'ǵ'), - (0x1F5, 'V'), - (0x1F6, 'M', 'ƕ'), - (0x1F7, 'M', 'ƿ'), - (0x1F8, 'M', 'ǹ'), - (0x1F9, 'V'), - (0x1FA, 'M', 'ǻ'), - (0x1FB, 'V'), - (0x1FC, 'M', 'ǽ'), - (0x1FD, 'V'), - (0x1FE, 'M', 'ǿ'), - (0x1FF, 'V'), - (0x200, 'M', 'ȁ'), - (0x201, 'V'), - (0x202, 'M', 'ȃ'), - (0x203, 'V'), - (0x204, 'M', 'ȅ'), - (0x205, 'V'), - (0x206, 'M', 'ȇ'), - (0x207, 'V'), - (0x208, 'M', 'ȉ'), - (0x209, 'V'), - (0x20A, 'M', 'ȋ'), - (0x20B, 'V'), - (0x20C, 'M', 'ȍ'), - ] - -def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x20D, 'V'), - (0x20E, 'M', 'ȏ'), - (0x20F, 'V'), - (0x210, 'M', 'ȑ'), - (0x211, 'V'), - (0x212, 'M', 'ȓ'), - (0x213, 'V'), - (0x214, 'M', 'ȕ'), - (0x215, 'V'), - (0x216, 'M', 'ȗ'), - (0x217, 'V'), - (0x218, 'M', 'ș'), - (0x219, 'V'), - (0x21A, 'M', 'ț'), - (0x21B, 'V'), - (0x21C, 'M', 'ȝ'), - (0x21D, 'V'), - (0x21E, 'M', 'ȟ'), - (0x21F, 'V'), - (0x220, 'M', 'ƞ'), - (0x221, 'V'), - (0x222, 'M', 'ȣ'), - (0x223, 'V'), - (0x224, 'M', 'ȥ'), - (0x225, 'V'), - (0x226, 'M', 'ȧ'), - (0x227, 'V'), - (0x228, 'M', 'ȩ'), - (0x229, 'V'), - (0x22A, 'M', 'ȫ'), - (0x22B, 'V'), - (0x22C, 'M', 'ȭ'), - (0x22D, 'V'), - (0x22E, 'M', 'ȯ'), - (0x22F, 'V'), - (0x230, 'M', 'ȱ'), - (0x231, 'V'), - (0x232, 'M', 'ȳ'), - (0x233, 'V'), - (0x23A, 'M', 'ⱥ'), - (0x23B, 'M', 'ȼ'), - (0x23C, 'V'), - (0x23D, 'M', 'ƚ'), - (0x23E, 'M', 'ⱦ'), - (0x23F, 'V'), - (0x241, 'M', 'ɂ'), - (0x242, 'V'), - (0x243, 'M', 'ƀ'), - (0x244, 'M', 'ʉ'), - (0x245, 'M', 'ʌ'), - (0x246, 'M', 'ɇ'), - (0x247, 'V'), - (0x248, 'M', 'ɉ'), - (0x249, 'V'), - (0x24A, 'M', 'ɋ'), - (0x24B, 'V'), - (0x24C, 'M', 'ɍ'), - (0x24D, 'V'), - (0x24E, 'M', 'ɏ'), - (0x24F, 'V'), - (0x2B0, 'M', 'h'), - (0x2B1, 'M', 'ɦ'), - (0x2B2, 'M', 'j'), - (0x2B3, 'M', 'r'), - (0x2B4, 'M', 'ɹ'), - (0x2B5, 'M', 'ɻ'), - (0x2B6, 'M', 'ʁ'), - (0x2B7, 'M', 'w'), - (0x2B8, 'M', 'y'), - (0x2B9, 'V'), - (0x2D8, '3', ' ̆'), - (0x2D9, '3', ' ̇'), - (0x2DA, '3', ' ̊'), - (0x2DB, '3', ' ̨'), - (0x2DC, '3', ' ̃'), - (0x2DD, '3', ' ̋'), - (0x2DE, 'V'), - (0x2E0, 'M', 'ɣ'), - (0x2E1, 'M', 'l'), - (0x2E2, 'M', 's'), - (0x2E3, 'M', 'x'), - (0x2E4, 'M', 'ʕ'), - (0x2E5, 'V'), - (0x340, 'M', '̀'), - (0x341, 'M', '́'), - (0x342, 'V'), - (0x343, 'M', '̓'), - (0x344, 'M', '̈́'), - (0x345, 'M', 'ι'), - (0x346, 'V'), - (0x34F, 'I'), - (0x350, 'V'), - (0x370, 'M', 'ͱ'), - (0x371, 'V'), - (0x372, 'M', 'ͳ'), - (0x373, 'V'), - (0x374, 'M', 'ʹ'), - (0x375, 'V'), - (0x376, 'M', 'ͷ'), - (0x377, 'V'), - ] - -def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x378, 'X'), - (0x37A, '3', ' ι'), - (0x37B, 'V'), - (0x37E, '3', ';'), - (0x37F, 'M', 'ϳ'), - (0x380, 'X'), - (0x384, '3', ' ́'), - (0x385, '3', ' ̈́'), - (0x386, 'M', 'ά'), - (0x387, 'M', '·'), - (0x388, 'M', 'έ'), - (0x389, 'M', 'ή'), - (0x38A, 'M', 'ί'), - (0x38B, 'X'), - (0x38C, 'M', 'ό'), - (0x38D, 'X'), - (0x38E, 'M', 'ύ'), - (0x38F, 'M', 'ώ'), - (0x390, 'V'), - (0x391, 'M', 'α'), - (0x392, 'M', 'β'), - (0x393, 'M', 'γ'), - (0x394, 'M', 'δ'), - (0x395, 'M', 'ε'), - (0x396, 'M', 'ζ'), - (0x397, 'M', 'η'), - (0x398, 'M', 'θ'), - (0x399, 'M', 'ι'), - (0x39A, 'M', 'κ'), - (0x39B, 'M', 'λ'), - (0x39C, 'M', 'μ'), - (0x39D, 'M', 'ν'), - (0x39E, 'M', 'ξ'), - (0x39F, 'M', 'ο'), - (0x3A0, 'M', 'π'), - (0x3A1, 'M', 'ρ'), - (0x3A2, 'X'), - (0x3A3, 'M', 'σ'), - (0x3A4, 'M', 'τ'), - (0x3A5, 'M', 'υ'), - (0x3A6, 'M', 'φ'), - (0x3A7, 'M', 'χ'), - (0x3A8, 'M', 'ψ'), - (0x3A9, 'M', 'ω'), - (0x3AA, 'M', 'ϊ'), - (0x3AB, 'M', 'ϋ'), - 
(0x3AC, 'V'), - (0x3C2, 'D', 'σ'), - (0x3C3, 'V'), - (0x3CF, 'M', 'ϗ'), - (0x3D0, 'M', 'β'), - (0x3D1, 'M', 'θ'), - (0x3D2, 'M', 'υ'), - (0x3D3, 'M', 'ύ'), - (0x3D4, 'M', 'ϋ'), - (0x3D5, 'M', 'φ'), - (0x3D6, 'M', 'π'), - (0x3D7, 'V'), - (0x3D8, 'M', 'ϙ'), - (0x3D9, 'V'), - (0x3DA, 'M', 'ϛ'), - (0x3DB, 'V'), - (0x3DC, 'M', 'ϝ'), - (0x3DD, 'V'), - (0x3DE, 'M', 'ϟ'), - (0x3DF, 'V'), - (0x3E0, 'M', 'ϡ'), - (0x3E1, 'V'), - (0x3E2, 'M', 'ϣ'), - (0x3E3, 'V'), - (0x3E4, 'M', 'ϥ'), - (0x3E5, 'V'), - (0x3E6, 'M', 'ϧ'), - (0x3E7, 'V'), - (0x3E8, 'M', 'ϩ'), - (0x3E9, 'V'), - (0x3EA, 'M', 'ϫ'), - (0x3EB, 'V'), - (0x3EC, 'M', 'ϭ'), - (0x3ED, 'V'), - (0x3EE, 'M', 'ϯ'), - (0x3EF, 'V'), - (0x3F0, 'M', 'κ'), - (0x3F1, 'M', 'ρ'), - (0x3F2, 'M', 'σ'), - (0x3F3, 'V'), - (0x3F4, 'M', 'θ'), - (0x3F5, 'M', 'ε'), - (0x3F6, 'V'), - (0x3F7, 'M', 'ϸ'), - (0x3F8, 'V'), - (0x3F9, 'M', 'σ'), - (0x3FA, 'M', 'ϻ'), - (0x3FB, 'V'), - (0x3FD, 'M', 'ͻ'), - (0x3FE, 'M', 'ͼ'), - (0x3FF, 'M', 'ͽ'), - (0x400, 'M', 'ѐ'), - (0x401, 'M', 'ё'), - (0x402, 'M', 'ђ'), - ] - -def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x403, 'M', 'ѓ'), - (0x404, 'M', 'є'), - (0x405, 'M', 'ѕ'), - (0x406, 'M', 'і'), - (0x407, 'M', 'ї'), - (0x408, 'M', 'ј'), - (0x409, 'M', 'љ'), - (0x40A, 'M', 'њ'), - (0x40B, 'M', 'ћ'), - (0x40C, 'M', 'ќ'), - (0x40D, 'M', 'ѝ'), - (0x40E, 'M', 'ў'), - (0x40F, 'M', 'џ'), - (0x410, 'M', 'а'), - (0x411, 'M', 'б'), - (0x412, 'M', 'в'), - (0x413, 'M', 'г'), - (0x414, 'M', 'д'), - (0x415, 'M', 'е'), - (0x416, 'M', 'ж'), - (0x417, 'M', 'з'), - (0x418, 'M', 'и'), - (0x419, 'M', 'й'), - (0x41A, 'M', 'к'), - (0x41B, 'M', 'л'), - (0x41C, 'M', 'м'), - (0x41D, 'M', 'н'), - (0x41E, 'M', 'о'), - (0x41F, 'M', 'п'), - (0x420, 'M', 'р'), - (0x421, 'M', 'с'), - (0x422, 'M', 'т'), - (0x423, 'M', 'у'), - (0x424, 'M', 'ф'), - (0x425, 'M', 'х'), - (0x426, 'M', 'ц'), - (0x427, 'M', 'ч'), - (0x428, 'M', 'ш'), - (0x429, 'M', 'щ'), - (0x42A, 'M', 'ъ'), - (0x42B, 'M', 'ы'), - (0x42C, 'M', 'ь'), - (0x42D, 'M', 'э'), - (0x42E, 'M', 'ю'), - (0x42F, 'M', 'я'), - (0x430, 'V'), - (0x460, 'M', 'ѡ'), - (0x461, 'V'), - (0x462, 'M', 'ѣ'), - (0x463, 'V'), - (0x464, 'M', 'ѥ'), - (0x465, 'V'), - (0x466, 'M', 'ѧ'), - (0x467, 'V'), - (0x468, 'M', 'ѩ'), - (0x469, 'V'), - (0x46A, 'M', 'ѫ'), - (0x46B, 'V'), - (0x46C, 'M', 'ѭ'), - (0x46D, 'V'), - (0x46E, 'M', 'ѯ'), - (0x46F, 'V'), - (0x470, 'M', 'ѱ'), - (0x471, 'V'), - (0x472, 'M', 'ѳ'), - (0x473, 'V'), - (0x474, 'M', 'ѵ'), - (0x475, 'V'), - (0x476, 'M', 'ѷ'), - (0x477, 'V'), - (0x478, 'M', 'ѹ'), - (0x479, 'V'), - (0x47A, 'M', 'ѻ'), - (0x47B, 'V'), - (0x47C, 'M', 'ѽ'), - (0x47D, 'V'), - (0x47E, 'M', 'ѿ'), - (0x47F, 'V'), - (0x480, 'M', 'ҁ'), - (0x481, 'V'), - (0x48A, 'M', 'ҋ'), - (0x48B, 'V'), - (0x48C, 'M', 'ҍ'), - (0x48D, 'V'), - (0x48E, 'M', 'ҏ'), - (0x48F, 'V'), - (0x490, 'M', 'ґ'), - (0x491, 'V'), - (0x492, 'M', 'ғ'), - (0x493, 'V'), - (0x494, 'M', 'ҕ'), - (0x495, 'V'), - (0x496, 'M', 'җ'), - (0x497, 'V'), - (0x498, 'M', 'ҙ'), - (0x499, 'V'), - (0x49A, 'M', 'қ'), - (0x49B, 'V'), - (0x49C, 'M', 'ҝ'), - (0x49D, 'V'), - ] - -def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x49E, 'M', 'ҟ'), - (0x49F, 'V'), - (0x4A0, 'M', 'ҡ'), - (0x4A1, 'V'), - (0x4A2, 'M', 'ң'), - (0x4A3, 'V'), - (0x4A4, 'M', 'ҥ'), - (0x4A5, 'V'), - (0x4A6, 'M', 'ҧ'), - (0x4A7, 'V'), - (0x4A8, 'M', 'ҩ'), - (0x4A9, 'V'), - (0x4AA, 'M', 'ҫ'), - (0x4AB, 'V'), - (0x4AC, 'M', 'ҭ'), - (0x4AD, 'V'), - (0x4AE, 'M', 'ү'), - (0x4AF, 'V'), - (0x4B0, 'M', 'ұ'), - (0x4B1, 'V'), - (0x4B2, 'M', 
'ҳ'), - (0x4B3, 'V'), - (0x4B4, 'M', 'ҵ'), - (0x4B5, 'V'), - (0x4B6, 'M', 'ҷ'), - (0x4B7, 'V'), - (0x4B8, 'M', 'ҹ'), - (0x4B9, 'V'), - (0x4BA, 'M', 'һ'), - (0x4BB, 'V'), - (0x4BC, 'M', 'ҽ'), - (0x4BD, 'V'), - (0x4BE, 'M', 'ҿ'), - (0x4BF, 'V'), - (0x4C0, 'X'), - (0x4C1, 'M', 'ӂ'), - (0x4C2, 'V'), - (0x4C3, 'M', 'ӄ'), - (0x4C4, 'V'), - (0x4C5, 'M', 'ӆ'), - (0x4C6, 'V'), - (0x4C7, 'M', 'ӈ'), - (0x4C8, 'V'), - (0x4C9, 'M', 'ӊ'), - (0x4CA, 'V'), - (0x4CB, 'M', 'ӌ'), - (0x4CC, 'V'), - (0x4CD, 'M', 'ӎ'), - (0x4CE, 'V'), - (0x4D0, 'M', 'ӑ'), - (0x4D1, 'V'), - (0x4D2, 'M', 'ӓ'), - (0x4D3, 'V'), - (0x4D4, 'M', 'ӕ'), - (0x4D5, 'V'), - (0x4D6, 'M', 'ӗ'), - (0x4D7, 'V'), - (0x4D8, 'M', 'ә'), - (0x4D9, 'V'), - (0x4DA, 'M', 'ӛ'), - (0x4DB, 'V'), - (0x4DC, 'M', 'ӝ'), - (0x4DD, 'V'), - (0x4DE, 'M', 'ӟ'), - (0x4DF, 'V'), - (0x4E0, 'M', 'ӡ'), - (0x4E1, 'V'), - (0x4E2, 'M', 'ӣ'), - (0x4E3, 'V'), - (0x4E4, 'M', 'ӥ'), - (0x4E5, 'V'), - (0x4E6, 'M', 'ӧ'), - (0x4E7, 'V'), - (0x4E8, 'M', 'ө'), - (0x4E9, 'V'), - (0x4EA, 'M', 'ӫ'), - (0x4EB, 'V'), - (0x4EC, 'M', 'ӭ'), - (0x4ED, 'V'), - (0x4EE, 'M', 'ӯ'), - (0x4EF, 'V'), - (0x4F0, 'M', 'ӱ'), - (0x4F1, 'V'), - (0x4F2, 'M', 'ӳ'), - (0x4F3, 'V'), - (0x4F4, 'M', 'ӵ'), - (0x4F5, 'V'), - (0x4F6, 'M', 'ӷ'), - (0x4F7, 'V'), - (0x4F8, 'M', 'ӹ'), - (0x4F9, 'V'), - (0x4FA, 'M', 'ӻ'), - (0x4FB, 'V'), - (0x4FC, 'M', 'ӽ'), - (0x4FD, 'V'), - (0x4FE, 'M', 'ӿ'), - (0x4FF, 'V'), - (0x500, 'M', 'ԁ'), - (0x501, 'V'), - (0x502, 'M', 'ԃ'), - ] - -def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x503, 'V'), - (0x504, 'M', 'ԅ'), - (0x505, 'V'), - (0x506, 'M', 'ԇ'), - (0x507, 'V'), - (0x508, 'M', 'ԉ'), - (0x509, 'V'), - (0x50A, 'M', 'ԋ'), - (0x50B, 'V'), - (0x50C, 'M', 'ԍ'), - (0x50D, 'V'), - (0x50E, 'M', 'ԏ'), - (0x50F, 'V'), - (0x510, 'M', 'ԑ'), - (0x511, 'V'), - (0x512, 'M', 'ԓ'), - (0x513, 'V'), - (0x514, 'M', 'ԕ'), - (0x515, 'V'), - (0x516, 'M', 'ԗ'), - (0x517, 'V'), - (0x518, 'M', 'ԙ'), - (0x519, 'V'), - (0x51A, 'M', 'ԛ'), - (0x51B, 'V'), - (0x51C, 'M', 'ԝ'), - (0x51D, 'V'), - (0x51E, 'M', 'ԟ'), - (0x51F, 'V'), - (0x520, 'M', 'ԡ'), - (0x521, 'V'), - (0x522, 'M', 'ԣ'), - (0x523, 'V'), - (0x524, 'M', 'ԥ'), - (0x525, 'V'), - (0x526, 'M', 'ԧ'), - (0x527, 'V'), - (0x528, 'M', 'ԩ'), - (0x529, 'V'), - (0x52A, 'M', 'ԫ'), - (0x52B, 'V'), - (0x52C, 'M', 'ԭ'), - (0x52D, 'V'), - (0x52E, 'M', 'ԯ'), - (0x52F, 'V'), - (0x530, 'X'), - (0x531, 'M', 'ա'), - (0x532, 'M', 'բ'), - (0x533, 'M', 'գ'), - (0x534, 'M', 'դ'), - (0x535, 'M', 'ե'), - (0x536, 'M', 'զ'), - (0x537, 'M', 'է'), - (0x538, 'M', 'ը'), - (0x539, 'M', 'թ'), - (0x53A, 'M', 'ժ'), - (0x53B, 'M', 'ի'), - (0x53C, 'M', 'լ'), - (0x53D, 'M', 'խ'), - (0x53E, 'M', 'ծ'), - (0x53F, 'M', 'կ'), - (0x540, 'M', 'հ'), - (0x541, 'M', 'ձ'), - (0x542, 'M', 'ղ'), - (0x543, 'M', 'ճ'), - (0x544, 'M', 'մ'), - (0x545, 'M', 'յ'), - (0x546, 'M', 'ն'), - (0x547, 'M', 'շ'), - (0x548, 'M', 'ո'), - (0x549, 'M', 'չ'), - (0x54A, 'M', 'պ'), - (0x54B, 'M', 'ջ'), - (0x54C, 'M', 'ռ'), - (0x54D, 'M', 'ս'), - (0x54E, 'M', 'վ'), - (0x54F, 'M', 'տ'), - (0x550, 'M', 'ր'), - (0x551, 'M', 'ց'), - (0x552, 'M', 'ւ'), - (0x553, 'M', 'փ'), - (0x554, 'M', 'ք'), - (0x555, 'M', 'օ'), - (0x556, 'M', 'ֆ'), - (0x557, 'X'), - (0x559, 'V'), - (0x587, 'M', 'եւ'), - (0x588, 'V'), - (0x58B, 'X'), - (0x58D, 'V'), - (0x590, 'X'), - (0x591, 'V'), - (0x5C8, 'X'), - (0x5D0, 'V'), - (0x5EB, 'X'), - (0x5EF, 'V'), - (0x5F5, 'X'), - (0x606, 'V'), - (0x61C, 'X'), - (0x61D, 'V'), - ] - -def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x675, 
'M', 'اٴ'), - (0x676, 'M', 'وٴ'), - (0x677, 'M', 'ۇٴ'), - (0x678, 'M', 'يٴ'), - (0x679, 'V'), - (0x6DD, 'X'), - (0x6DE, 'V'), - (0x70E, 'X'), - (0x710, 'V'), - (0x74B, 'X'), - (0x74D, 'V'), - (0x7B2, 'X'), - (0x7C0, 'V'), - (0x7FB, 'X'), - (0x7FD, 'V'), - (0x82E, 'X'), - (0x830, 'V'), - (0x83F, 'X'), - (0x840, 'V'), - (0x85C, 'X'), - (0x85E, 'V'), - (0x85F, 'X'), - (0x860, 'V'), - (0x86B, 'X'), - (0x870, 'V'), - (0x88F, 'X'), - (0x898, 'V'), - (0x8E2, 'X'), - (0x8E3, 'V'), - (0x958, 'M', 'क़'), - (0x959, 'M', 'ख़'), - (0x95A, 'M', 'ग़'), - (0x95B, 'M', 'ज़'), - (0x95C, 'M', 'ड़'), - (0x95D, 'M', 'ढ़'), - (0x95E, 'M', 'फ़'), - (0x95F, 'M', 'य़'), - (0x960, 'V'), - (0x984, 'X'), - (0x985, 'V'), - (0x98D, 'X'), - (0x98F, 'V'), - (0x991, 'X'), - (0x993, 'V'), - (0x9A9, 'X'), - (0x9AA, 'V'), - (0x9B1, 'X'), - (0x9B2, 'V'), - (0x9B3, 'X'), - (0x9B6, 'V'), - (0x9BA, 'X'), - (0x9BC, 'V'), - (0x9C5, 'X'), - (0x9C7, 'V'), - (0x9C9, 'X'), - (0x9CB, 'V'), - (0x9CF, 'X'), - (0x9D7, 'V'), - (0x9D8, 'X'), - (0x9DC, 'M', 'ড়'), - (0x9DD, 'M', 'ঢ়'), - (0x9DE, 'X'), - (0x9DF, 'M', 'য়'), - (0x9E0, 'V'), - (0x9E4, 'X'), - (0x9E6, 'V'), - (0x9FF, 'X'), - (0xA01, 'V'), - (0xA04, 'X'), - (0xA05, 'V'), - (0xA0B, 'X'), - (0xA0F, 'V'), - (0xA11, 'X'), - (0xA13, 'V'), - (0xA29, 'X'), - (0xA2A, 'V'), - (0xA31, 'X'), - (0xA32, 'V'), - (0xA33, 'M', 'ਲ਼'), - (0xA34, 'X'), - (0xA35, 'V'), - (0xA36, 'M', 'ਸ਼'), - (0xA37, 'X'), - (0xA38, 'V'), - (0xA3A, 'X'), - (0xA3C, 'V'), - (0xA3D, 'X'), - (0xA3E, 'V'), - (0xA43, 'X'), - (0xA47, 'V'), - (0xA49, 'X'), - (0xA4B, 'V'), - (0xA4E, 'X'), - (0xA51, 'V'), - (0xA52, 'X'), - (0xA59, 'M', 'ਖ਼'), - (0xA5A, 'M', 'ਗ਼'), - (0xA5B, 'M', 'ਜ਼'), - (0xA5C, 'V'), - (0xA5D, 'X'), - ] - -def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA5E, 'M', 'ਫ਼'), - (0xA5F, 'X'), - (0xA66, 'V'), - (0xA77, 'X'), - (0xA81, 'V'), - (0xA84, 'X'), - (0xA85, 'V'), - (0xA8E, 'X'), - (0xA8F, 'V'), - (0xA92, 'X'), - (0xA93, 'V'), - (0xAA9, 'X'), - (0xAAA, 'V'), - (0xAB1, 'X'), - (0xAB2, 'V'), - (0xAB4, 'X'), - (0xAB5, 'V'), - (0xABA, 'X'), - (0xABC, 'V'), - (0xAC6, 'X'), - (0xAC7, 'V'), - (0xACA, 'X'), - (0xACB, 'V'), - (0xACE, 'X'), - (0xAD0, 'V'), - (0xAD1, 'X'), - (0xAE0, 'V'), - (0xAE4, 'X'), - (0xAE6, 'V'), - (0xAF2, 'X'), - (0xAF9, 'V'), - (0xB00, 'X'), - (0xB01, 'V'), - (0xB04, 'X'), - (0xB05, 'V'), - (0xB0D, 'X'), - (0xB0F, 'V'), - (0xB11, 'X'), - (0xB13, 'V'), - (0xB29, 'X'), - (0xB2A, 'V'), - (0xB31, 'X'), - (0xB32, 'V'), - (0xB34, 'X'), - (0xB35, 'V'), - (0xB3A, 'X'), - (0xB3C, 'V'), - (0xB45, 'X'), - (0xB47, 'V'), - (0xB49, 'X'), - (0xB4B, 'V'), - (0xB4E, 'X'), - (0xB55, 'V'), - (0xB58, 'X'), - (0xB5C, 'M', 'ଡ଼'), - (0xB5D, 'M', 'ଢ଼'), - (0xB5E, 'X'), - (0xB5F, 'V'), - (0xB64, 'X'), - (0xB66, 'V'), - (0xB78, 'X'), - (0xB82, 'V'), - (0xB84, 'X'), - (0xB85, 'V'), - (0xB8B, 'X'), - (0xB8E, 'V'), - (0xB91, 'X'), - (0xB92, 'V'), - (0xB96, 'X'), - (0xB99, 'V'), - (0xB9B, 'X'), - (0xB9C, 'V'), - (0xB9D, 'X'), - (0xB9E, 'V'), - (0xBA0, 'X'), - (0xBA3, 'V'), - (0xBA5, 'X'), - (0xBA8, 'V'), - (0xBAB, 'X'), - (0xBAE, 'V'), - (0xBBA, 'X'), - (0xBBE, 'V'), - (0xBC3, 'X'), - (0xBC6, 'V'), - (0xBC9, 'X'), - (0xBCA, 'V'), - (0xBCE, 'X'), - (0xBD0, 'V'), - (0xBD1, 'X'), - (0xBD7, 'V'), - (0xBD8, 'X'), - (0xBE6, 'V'), - (0xBFB, 'X'), - (0xC00, 'V'), - (0xC0D, 'X'), - (0xC0E, 'V'), - (0xC11, 'X'), - (0xC12, 'V'), - (0xC29, 'X'), - (0xC2A, 'V'), - ] - -def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xC3A, 'X'), - (0xC3C, 'V'), - (0xC45, 'X'), - 
(0xC46, 'V'), - (0xC49, 'X'), - (0xC4A, 'V'), - (0xC4E, 'X'), - (0xC55, 'V'), - (0xC57, 'X'), - (0xC58, 'V'), - (0xC5B, 'X'), - (0xC5D, 'V'), - (0xC5E, 'X'), - (0xC60, 'V'), - (0xC64, 'X'), - (0xC66, 'V'), - (0xC70, 'X'), - (0xC77, 'V'), - (0xC8D, 'X'), - (0xC8E, 'V'), - (0xC91, 'X'), - (0xC92, 'V'), - (0xCA9, 'X'), - (0xCAA, 'V'), - (0xCB4, 'X'), - (0xCB5, 'V'), - (0xCBA, 'X'), - (0xCBC, 'V'), - (0xCC5, 'X'), - (0xCC6, 'V'), - (0xCC9, 'X'), - (0xCCA, 'V'), - (0xCCE, 'X'), - (0xCD5, 'V'), - (0xCD7, 'X'), - (0xCDD, 'V'), - (0xCDF, 'X'), - (0xCE0, 'V'), - (0xCE4, 'X'), - (0xCE6, 'V'), - (0xCF0, 'X'), - (0xCF1, 'V'), - (0xCF4, 'X'), - (0xD00, 'V'), - (0xD0D, 'X'), - (0xD0E, 'V'), - (0xD11, 'X'), - (0xD12, 'V'), - (0xD45, 'X'), - (0xD46, 'V'), - (0xD49, 'X'), - (0xD4A, 'V'), - (0xD50, 'X'), - (0xD54, 'V'), - (0xD64, 'X'), - (0xD66, 'V'), - (0xD80, 'X'), - (0xD81, 'V'), - (0xD84, 'X'), - (0xD85, 'V'), - (0xD97, 'X'), - (0xD9A, 'V'), - (0xDB2, 'X'), - (0xDB3, 'V'), - (0xDBC, 'X'), - (0xDBD, 'V'), - (0xDBE, 'X'), - (0xDC0, 'V'), - (0xDC7, 'X'), - (0xDCA, 'V'), - (0xDCB, 'X'), - (0xDCF, 'V'), - (0xDD5, 'X'), - (0xDD6, 'V'), - (0xDD7, 'X'), - (0xDD8, 'V'), - (0xDE0, 'X'), - (0xDE6, 'V'), - (0xDF0, 'X'), - (0xDF2, 'V'), - (0xDF5, 'X'), - (0xE01, 'V'), - (0xE33, 'M', 'ํา'), - (0xE34, 'V'), - (0xE3B, 'X'), - (0xE3F, 'V'), - (0xE5C, 'X'), - (0xE81, 'V'), - (0xE83, 'X'), - (0xE84, 'V'), - (0xE85, 'X'), - (0xE86, 'V'), - (0xE8B, 'X'), - (0xE8C, 'V'), - (0xEA4, 'X'), - (0xEA5, 'V'), - (0xEA6, 'X'), - (0xEA7, 'V'), - (0xEB3, 'M', 'ໍາ'), - (0xEB4, 'V'), - ] - -def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xEBE, 'X'), - (0xEC0, 'V'), - (0xEC5, 'X'), - (0xEC6, 'V'), - (0xEC7, 'X'), - (0xEC8, 'V'), - (0xECF, 'X'), - (0xED0, 'V'), - (0xEDA, 'X'), - (0xEDC, 'M', 'ຫນ'), - (0xEDD, 'M', 'ຫມ'), - (0xEDE, 'V'), - (0xEE0, 'X'), - (0xF00, 'V'), - (0xF0C, 'M', '་'), - (0xF0D, 'V'), - (0xF43, 'M', 'གྷ'), - (0xF44, 'V'), - (0xF48, 'X'), - (0xF49, 'V'), - (0xF4D, 'M', 'ཌྷ'), - (0xF4E, 'V'), - (0xF52, 'M', 'དྷ'), - (0xF53, 'V'), - (0xF57, 'M', 'བྷ'), - (0xF58, 'V'), - (0xF5C, 'M', 'ཛྷ'), - (0xF5D, 'V'), - (0xF69, 'M', 'ཀྵ'), - (0xF6A, 'V'), - (0xF6D, 'X'), - (0xF71, 'V'), - (0xF73, 'M', 'ཱི'), - (0xF74, 'V'), - (0xF75, 'M', 'ཱུ'), - (0xF76, 'M', 'ྲྀ'), - (0xF77, 'M', 'ྲཱྀ'), - (0xF78, 'M', 'ླྀ'), - (0xF79, 'M', 'ླཱྀ'), - (0xF7A, 'V'), - (0xF81, 'M', 'ཱྀ'), - (0xF82, 'V'), - (0xF93, 'M', 'ྒྷ'), - (0xF94, 'V'), - (0xF98, 'X'), - (0xF99, 'V'), - (0xF9D, 'M', 'ྜྷ'), - (0xF9E, 'V'), - (0xFA2, 'M', 'ྡྷ'), - (0xFA3, 'V'), - (0xFA7, 'M', 'ྦྷ'), - (0xFA8, 'V'), - (0xFAC, 'M', 'ྫྷ'), - (0xFAD, 'V'), - (0xFB9, 'M', 'ྐྵ'), - (0xFBA, 'V'), - (0xFBD, 'X'), - (0xFBE, 'V'), - (0xFCD, 'X'), - (0xFCE, 'V'), - (0xFDB, 'X'), - (0x1000, 'V'), - (0x10A0, 'X'), - (0x10C7, 'M', 'ⴧ'), - (0x10C8, 'X'), - (0x10CD, 'M', 'ⴭ'), - (0x10CE, 'X'), - (0x10D0, 'V'), - (0x10FC, 'M', 'ნ'), - (0x10FD, 'V'), - (0x115F, 'X'), - (0x1161, 'V'), - (0x1249, 'X'), - (0x124A, 'V'), - (0x124E, 'X'), - (0x1250, 'V'), - (0x1257, 'X'), - (0x1258, 'V'), - (0x1259, 'X'), - (0x125A, 'V'), - (0x125E, 'X'), - (0x1260, 'V'), - (0x1289, 'X'), - (0x128A, 'V'), - (0x128E, 'X'), - (0x1290, 'V'), - (0x12B1, 'X'), - (0x12B2, 'V'), - (0x12B6, 'X'), - (0x12B8, 'V'), - (0x12BF, 'X'), - (0x12C0, 'V'), - (0x12C1, 'X'), - (0x12C2, 'V'), - (0x12C6, 'X'), - (0x12C8, 'V'), - (0x12D7, 'X'), - (0x12D8, 'V'), - (0x1311, 'X'), - (0x1312, 'V'), - ] - -def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1316, 'X'), - (0x1318, 
'V'), - (0x135B, 'X'), - (0x135D, 'V'), - (0x137D, 'X'), - (0x1380, 'V'), - (0x139A, 'X'), - (0x13A0, 'V'), - (0x13F6, 'X'), - (0x13F8, 'M', 'Ᏸ'), - (0x13F9, 'M', 'Ᏹ'), - (0x13FA, 'M', 'Ᏺ'), - (0x13FB, 'M', 'Ᏻ'), - (0x13FC, 'M', 'Ᏼ'), - (0x13FD, 'M', 'Ᏽ'), - (0x13FE, 'X'), - (0x1400, 'V'), - (0x1680, 'X'), - (0x1681, 'V'), - (0x169D, 'X'), - (0x16A0, 'V'), - (0x16F9, 'X'), - (0x1700, 'V'), - (0x1716, 'X'), - (0x171F, 'V'), - (0x1737, 'X'), - (0x1740, 'V'), - (0x1754, 'X'), - (0x1760, 'V'), - (0x176D, 'X'), - (0x176E, 'V'), - (0x1771, 'X'), - (0x1772, 'V'), - (0x1774, 'X'), - (0x1780, 'V'), - (0x17B4, 'X'), - (0x17B6, 'V'), - (0x17DE, 'X'), - (0x17E0, 'V'), - (0x17EA, 'X'), - (0x17F0, 'V'), - (0x17FA, 'X'), - (0x1800, 'V'), - (0x1806, 'X'), - (0x1807, 'V'), - (0x180B, 'I'), - (0x180E, 'X'), - (0x180F, 'I'), - (0x1810, 'V'), - (0x181A, 'X'), - (0x1820, 'V'), - (0x1879, 'X'), - (0x1880, 'V'), - (0x18AB, 'X'), - (0x18B0, 'V'), - (0x18F6, 'X'), - (0x1900, 'V'), - (0x191F, 'X'), - (0x1920, 'V'), - (0x192C, 'X'), - (0x1930, 'V'), - (0x193C, 'X'), - (0x1940, 'V'), - (0x1941, 'X'), - (0x1944, 'V'), - (0x196E, 'X'), - (0x1970, 'V'), - (0x1975, 'X'), - (0x1980, 'V'), - (0x19AC, 'X'), - (0x19B0, 'V'), - (0x19CA, 'X'), - (0x19D0, 'V'), - (0x19DB, 'X'), - (0x19DE, 'V'), - (0x1A1C, 'X'), - (0x1A1E, 'V'), - (0x1A5F, 'X'), - (0x1A60, 'V'), - (0x1A7D, 'X'), - (0x1A7F, 'V'), - (0x1A8A, 'X'), - (0x1A90, 'V'), - (0x1A9A, 'X'), - (0x1AA0, 'V'), - (0x1AAE, 'X'), - (0x1AB0, 'V'), - (0x1ACF, 'X'), - (0x1B00, 'V'), - (0x1B4D, 'X'), - (0x1B50, 'V'), - (0x1B7F, 'X'), - (0x1B80, 'V'), - (0x1BF4, 'X'), - (0x1BFC, 'V'), - (0x1C38, 'X'), - (0x1C3B, 'V'), - (0x1C4A, 'X'), - (0x1C4D, 'V'), - (0x1C80, 'M', 'в'), - ] - -def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1C81, 'M', 'д'), - (0x1C82, 'M', 'о'), - (0x1C83, 'M', 'с'), - (0x1C84, 'M', 'т'), - (0x1C86, 'M', 'ъ'), - (0x1C87, 'M', 'ѣ'), - (0x1C88, 'M', 'ꙋ'), - (0x1C89, 'X'), - (0x1C90, 'M', 'ა'), - (0x1C91, 'M', 'ბ'), - (0x1C92, 'M', 'გ'), - (0x1C93, 'M', 'დ'), - (0x1C94, 'M', 'ე'), - (0x1C95, 'M', 'ვ'), - (0x1C96, 'M', 'ზ'), - (0x1C97, 'M', 'თ'), - (0x1C98, 'M', 'ი'), - (0x1C99, 'M', 'კ'), - (0x1C9A, 'M', 'ლ'), - (0x1C9B, 'M', 'მ'), - (0x1C9C, 'M', 'ნ'), - (0x1C9D, 'M', 'ო'), - (0x1C9E, 'M', 'პ'), - (0x1C9F, 'M', 'ჟ'), - (0x1CA0, 'M', 'რ'), - (0x1CA1, 'M', 'ს'), - (0x1CA2, 'M', 'ტ'), - (0x1CA3, 'M', 'უ'), - (0x1CA4, 'M', 'ფ'), - (0x1CA5, 'M', 'ქ'), - (0x1CA6, 'M', 'ღ'), - (0x1CA7, 'M', 'ყ'), - (0x1CA8, 'M', 'შ'), - (0x1CA9, 'M', 'ჩ'), - (0x1CAA, 'M', 'ც'), - (0x1CAB, 'M', 'ძ'), - (0x1CAC, 'M', 'წ'), - (0x1CAD, 'M', 'ჭ'), - (0x1CAE, 'M', 'ხ'), - (0x1CAF, 'M', 'ჯ'), - (0x1CB0, 'M', 'ჰ'), - (0x1CB1, 'M', 'ჱ'), - (0x1CB2, 'M', 'ჲ'), - (0x1CB3, 'M', 'ჳ'), - (0x1CB4, 'M', 'ჴ'), - (0x1CB5, 'M', 'ჵ'), - (0x1CB6, 'M', 'ჶ'), - (0x1CB7, 'M', 'ჷ'), - (0x1CB8, 'M', 'ჸ'), - (0x1CB9, 'M', 'ჹ'), - (0x1CBA, 'M', 'ჺ'), - (0x1CBB, 'X'), - (0x1CBD, 'M', 'ჽ'), - (0x1CBE, 'M', 'ჾ'), - (0x1CBF, 'M', 'ჿ'), - (0x1CC0, 'V'), - (0x1CC8, 'X'), - (0x1CD0, 'V'), - (0x1CFB, 'X'), - (0x1D00, 'V'), - (0x1D2C, 'M', 'a'), - (0x1D2D, 'M', 'æ'), - (0x1D2E, 'M', 'b'), - (0x1D2F, 'V'), - (0x1D30, 'M', 'd'), - (0x1D31, 'M', 'e'), - (0x1D32, 'M', 'ǝ'), - (0x1D33, 'M', 'g'), - (0x1D34, 'M', 'h'), - (0x1D35, 'M', 'i'), - (0x1D36, 'M', 'j'), - (0x1D37, 'M', 'k'), - (0x1D38, 'M', 'l'), - (0x1D39, 'M', 'm'), - (0x1D3A, 'M', 'n'), - (0x1D3B, 'V'), - (0x1D3C, 'M', 'o'), - (0x1D3D, 'M', 'ȣ'), - (0x1D3E, 'M', 'p'), - (0x1D3F, 'M', 'r'), - (0x1D40, 'M', 't'), - (0x1D41, 'M', 'u'), - 
(0x1D42, 'M', 'w'), - (0x1D43, 'M', 'a'), - (0x1D44, 'M', 'ɐ'), - (0x1D45, 'M', 'ɑ'), - (0x1D46, 'M', 'ᴂ'), - (0x1D47, 'M', 'b'), - (0x1D48, 'M', 'd'), - (0x1D49, 'M', 'e'), - (0x1D4A, 'M', 'ə'), - (0x1D4B, 'M', 'ɛ'), - (0x1D4C, 'M', 'ɜ'), - (0x1D4D, 'M', 'g'), - (0x1D4E, 'V'), - (0x1D4F, 'M', 'k'), - (0x1D50, 'M', 'm'), - (0x1D51, 'M', 'ŋ'), - (0x1D52, 'M', 'o'), - (0x1D53, 'M', 'ɔ'), - ] - -def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D54, 'M', 'ᴖ'), - (0x1D55, 'M', 'ᴗ'), - (0x1D56, 'M', 'p'), - (0x1D57, 'M', 't'), - (0x1D58, 'M', 'u'), - (0x1D59, 'M', 'ᴝ'), - (0x1D5A, 'M', 'ɯ'), - (0x1D5B, 'M', 'v'), - (0x1D5C, 'M', 'ᴥ'), - (0x1D5D, 'M', 'β'), - (0x1D5E, 'M', 'γ'), - (0x1D5F, 'M', 'δ'), - (0x1D60, 'M', 'φ'), - (0x1D61, 'M', 'χ'), - (0x1D62, 'M', 'i'), - (0x1D63, 'M', 'r'), - (0x1D64, 'M', 'u'), - (0x1D65, 'M', 'v'), - (0x1D66, 'M', 'β'), - (0x1D67, 'M', 'γ'), - (0x1D68, 'M', 'ρ'), - (0x1D69, 'M', 'φ'), - (0x1D6A, 'M', 'χ'), - (0x1D6B, 'V'), - (0x1D78, 'M', 'н'), - (0x1D79, 'V'), - (0x1D9B, 'M', 'ɒ'), - (0x1D9C, 'M', 'c'), - (0x1D9D, 'M', 'ɕ'), - (0x1D9E, 'M', 'ð'), - (0x1D9F, 'M', 'ɜ'), - (0x1DA0, 'M', 'f'), - (0x1DA1, 'M', 'ɟ'), - (0x1DA2, 'M', 'ɡ'), - (0x1DA3, 'M', 'ɥ'), - (0x1DA4, 'M', 'ɨ'), - (0x1DA5, 'M', 'ɩ'), - (0x1DA6, 'M', 'ɪ'), - (0x1DA7, 'M', 'ᵻ'), - (0x1DA8, 'M', 'ʝ'), - (0x1DA9, 'M', 'ɭ'), - (0x1DAA, 'M', 'ᶅ'), - (0x1DAB, 'M', 'ʟ'), - (0x1DAC, 'M', 'ɱ'), - (0x1DAD, 'M', 'ɰ'), - (0x1DAE, 'M', 'ɲ'), - (0x1DAF, 'M', 'ɳ'), - (0x1DB0, 'M', 'ɴ'), - (0x1DB1, 'M', 'ɵ'), - (0x1DB2, 'M', 'ɸ'), - (0x1DB3, 'M', 'ʂ'), - (0x1DB4, 'M', 'ʃ'), - (0x1DB5, 'M', 'ƫ'), - (0x1DB6, 'M', 'ʉ'), - (0x1DB7, 'M', 'ʊ'), - (0x1DB8, 'M', 'ᴜ'), - (0x1DB9, 'M', 'ʋ'), - (0x1DBA, 'M', 'ʌ'), - (0x1DBB, 'M', 'z'), - (0x1DBC, 'M', 'ʐ'), - (0x1DBD, 'M', 'ʑ'), - (0x1DBE, 'M', 'ʒ'), - (0x1DBF, 'M', 'θ'), - (0x1DC0, 'V'), - (0x1E00, 'M', 'ḁ'), - (0x1E01, 'V'), - (0x1E02, 'M', 'ḃ'), - (0x1E03, 'V'), - (0x1E04, 'M', 'ḅ'), - (0x1E05, 'V'), - (0x1E06, 'M', 'ḇ'), - (0x1E07, 'V'), - (0x1E08, 'M', 'ḉ'), - (0x1E09, 'V'), - (0x1E0A, 'M', 'ḋ'), - (0x1E0B, 'V'), - (0x1E0C, 'M', 'ḍ'), - (0x1E0D, 'V'), - (0x1E0E, 'M', 'ḏ'), - (0x1E0F, 'V'), - (0x1E10, 'M', 'ḑ'), - (0x1E11, 'V'), - (0x1E12, 'M', 'ḓ'), - (0x1E13, 'V'), - (0x1E14, 'M', 'ḕ'), - (0x1E15, 'V'), - (0x1E16, 'M', 'ḗ'), - (0x1E17, 'V'), - (0x1E18, 'M', 'ḙ'), - (0x1E19, 'V'), - (0x1E1A, 'M', 'ḛ'), - (0x1E1B, 'V'), - (0x1E1C, 'M', 'ḝ'), - (0x1E1D, 'V'), - (0x1E1E, 'M', 'ḟ'), - (0x1E1F, 'V'), - (0x1E20, 'M', 'ḡ'), - (0x1E21, 'V'), - (0x1E22, 'M', 'ḣ'), - (0x1E23, 'V'), - ] - -def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E24, 'M', 'ḥ'), - (0x1E25, 'V'), - (0x1E26, 'M', 'ḧ'), - (0x1E27, 'V'), - (0x1E28, 'M', 'ḩ'), - (0x1E29, 'V'), - (0x1E2A, 'M', 'ḫ'), - (0x1E2B, 'V'), - (0x1E2C, 'M', 'ḭ'), - (0x1E2D, 'V'), - (0x1E2E, 'M', 'ḯ'), - (0x1E2F, 'V'), - (0x1E30, 'M', 'ḱ'), - (0x1E31, 'V'), - (0x1E32, 'M', 'ḳ'), - (0x1E33, 'V'), - (0x1E34, 'M', 'ḵ'), - (0x1E35, 'V'), - (0x1E36, 'M', 'ḷ'), - (0x1E37, 'V'), - (0x1E38, 'M', 'ḹ'), - (0x1E39, 'V'), - (0x1E3A, 'M', 'ḻ'), - (0x1E3B, 'V'), - (0x1E3C, 'M', 'ḽ'), - (0x1E3D, 'V'), - (0x1E3E, 'M', 'ḿ'), - (0x1E3F, 'V'), - (0x1E40, 'M', 'ṁ'), - (0x1E41, 'V'), - (0x1E42, 'M', 'ṃ'), - (0x1E43, 'V'), - (0x1E44, 'M', 'ṅ'), - (0x1E45, 'V'), - (0x1E46, 'M', 'ṇ'), - (0x1E47, 'V'), - (0x1E48, 'M', 'ṉ'), - (0x1E49, 'V'), - (0x1E4A, 'M', 'ṋ'), - (0x1E4B, 'V'), - (0x1E4C, 'M', 'ṍ'), - (0x1E4D, 'V'), - (0x1E4E, 'M', 'ṏ'), - (0x1E4F, 'V'), - (0x1E50, 'M', 'ṑ'), - (0x1E51, 'V'), 
- (0x1E52, 'M', 'ṓ'), - (0x1E53, 'V'), - (0x1E54, 'M', 'ṕ'), - (0x1E55, 'V'), - (0x1E56, 'M', 'ṗ'), - (0x1E57, 'V'), - (0x1E58, 'M', 'ṙ'), - (0x1E59, 'V'), - (0x1E5A, 'M', 'ṛ'), - (0x1E5B, 'V'), - (0x1E5C, 'M', 'ṝ'), - (0x1E5D, 'V'), - (0x1E5E, 'M', 'ṟ'), - (0x1E5F, 'V'), - (0x1E60, 'M', 'ṡ'), - (0x1E61, 'V'), - (0x1E62, 'M', 'ṣ'), - (0x1E63, 'V'), - (0x1E64, 'M', 'ṥ'), - (0x1E65, 'V'), - (0x1E66, 'M', 'ṧ'), - (0x1E67, 'V'), - (0x1E68, 'M', 'ṩ'), - (0x1E69, 'V'), - (0x1E6A, 'M', 'ṫ'), - (0x1E6B, 'V'), - (0x1E6C, 'M', 'ṭ'), - (0x1E6D, 'V'), - (0x1E6E, 'M', 'ṯ'), - (0x1E6F, 'V'), - (0x1E70, 'M', 'ṱ'), - (0x1E71, 'V'), - (0x1E72, 'M', 'ṳ'), - (0x1E73, 'V'), - (0x1E74, 'M', 'ṵ'), - (0x1E75, 'V'), - (0x1E76, 'M', 'ṷ'), - (0x1E77, 'V'), - (0x1E78, 'M', 'ṹ'), - (0x1E79, 'V'), - (0x1E7A, 'M', 'ṻ'), - (0x1E7B, 'V'), - (0x1E7C, 'M', 'ṽ'), - (0x1E7D, 'V'), - (0x1E7E, 'M', 'ṿ'), - (0x1E7F, 'V'), - (0x1E80, 'M', 'ẁ'), - (0x1E81, 'V'), - (0x1E82, 'M', 'ẃ'), - (0x1E83, 'V'), - (0x1E84, 'M', 'ẅ'), - (0x1E85, 'V'), - (0x1E86, 'M', 'ẇ'), - (0x1E87, 'V'), - ] - -def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E88, 'M', 'ẉ'), - (0x1E89, 'V'), - (0x1E8A, 'M', 'ẋ'), - (0x1E8B, 'V'), - (0x1E8C, 'M', 'ẍ'), - (0x1E8D, 'V'), - (0x1E8E, 'M', 'ẏ'), - (0x1E8F, 'V'), - (0x1E90, 'M', 'ẑ'), - (0x1E91, 'V'), - (0x1E92, 'M', 'ẓ'), - (0x1E93, 'V'), - (0x1E94, 'M', 'ẕ'), - (0x1E95, 'V'), - (0x1E9A, 'M', 'aʾ'), - (0x1E9B, 'M', 'ṡ'), - (0x1E9C, 'V'), - (0x1E9E, 'M', 'ss'), - (0x1E9F, 'V'), - (0x1EA0, 'M', 'ạ'), - (0x1EA1, 'V'), - (0x1EA2, 'M', 'ả'), - (0x1EA3, 'V'), - (0x1EA4, 'M', 'ấ'), - (0x1EA5, 'V'), - (0x1EA6, 'M', 'ầ'), - (0x1EA7, 'V'), - (0x1EA8, 'M', 'ẩ'), - (0x1EA9, 'V'), - (0x1EAA, 'M', 'ẫ'), - (0x1EAB, 'V'), - (0x1EAC, 'M', 'ậ'), - (0x1EAD, 'V'), - (0x1EAE, 'M', 'ắ'), - (0x1EAF, 'V'), - (0x1EB0, 'M', 'ằ'), - (0x1EB1, 'V'), - (0x1EB2, 'M', 'ẳ'), - (0x1EB3, 'V'), - (0x1EB4, 'M', 'ẵ'), - (0x1EB5, 'V'), - (0x1EB6, 'M', 'ặ'), - (0x1EB7, 'V'), - (0x1EB8, 'M', 'ẹ'), - (0x1EB9, 'V'), - (0x1EBA, 'M', 'ẻ'), - (0x1EBB, 'V'), - (0x1EBC, 'M', 'ẽ'), - (0x1EBD, 'V'), - (0x1EBE, 'M', 'ế'), - (0x1EBF, 'V'), - (0x1EC0, 'M', 'ề'), - (0x1EC1, 'V'), - (0x1EC2, 'M', 'ể'), - (0x1EC3, 'V'), - (0x1EC4, 'M', 'ễ'), - (0x1EC5, 'V'), - (0x1EC6, 'M', 'ệ'), - (0x1EC7, 'V'), - (0x1EC8, 'M', 'ỉ'), - (0x1EC9, 'V'), - (0x1ECA, 'M', 'ị'), - (0x1ECB, 'V'), - (0x1ECC, 'M', 'ọ'), - (0x1ECD, 'V'), - (0x1ECE, 'M', 'ỏ'), - (0x1ECF, 'V'), - (0x1ED0, 'M', 'ố'), - (0x1ED1, 'V'), - (0x1ED2, 'M', 'ồ'), - (0x1ED3, 'V'), - (0x1ED4, 'M', 'ổ'), - (0x1ED5, 'V'), - (0x1ED6, 'M', 'ỗ'), - (0x1ED7, 'V'), - (0x1ED8, 'M', 'ộ'), - (0x1ED9, 'V'), - (0x1EDA, 'M', 'ớ'), - (0x1EDB, 'V'), - (0x1EDC, 'M', 'ờ'), - (0x1EDD, 'V'), - (0x1EDE, 'M', 'ở'), - (0x1EDF, 'V'), - (0x1EE0, 'M', 'ỡ'), - (0x1EE1, 'V'), - (0x1EE2, 'M', 'ợ'), - (0x1EE3, 'V'), - (0x1EE4, 'M', 'ụ'), - (0x1EE5, 'V'), - (0x1EE6, 'M', 'ủ'), - (0x1EE7, 'V'), - (0x1EE8, 'M', 'ứ'), - (0x1EE9, 'V'), - (0x1EEA, 'M', 'ừ'), - (0x1EEB, 'V'), - (0x1EEC, 'M', 'ử'), - (0x1EED, 'V'), - (0x1EEE, 'M', 'ữ'), - (0x1EEF, 'V'), - (0x1EF0, 'M', 'ự'), - ] - -def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EF1, 'V'), - (0x1EF2, 'M', 'ỳ'), - (0x1EF3, 'V'), - (0x1EF4, 'M', 'ỵ'), - (0x1EF5, 'V'), - (0x1EF6, 'M', 'ỷ'), - (0x1EF7, 'V'), - (0x1EF8, 'M', 'ỹ'), - (0x1EF9, 'V'), - (0x1EFA, 'M', 'ỻ'), - (0x1EFB, 'V'), - (0x1EFC, 'M', 'ỽ'), - (0x1EFD, 'V'), - (0x1EFE, 'M', 'ỿ'), - (0x1EFF, 'V'), - (0x1F08, 'M', 'ἀ'), - (0x1F09, 'M', 'ἁ'), - (0x1F0A, 'M', 'ἂ'), - (0x1F0B, 
'M', 'ἃ'), - (0x1F0C, 'M', 'ἄ'), - (0x1F0D, 'M', 'ἅ'), - (0x1F0E, 'M', 'ἆ'), - (0x1F0F, 'M', 'ἇ'), - (0x1F10, 'V'), - (0x1F16, 'X'), - (0x1F18, 'M', 'ἐ'), - (0x1F19, 'M', 'ἑ'), - (0x1F1A, 'M', 'ἒ'), - (0x1F1B, 'M', 'ἓ'), - (0x1F1C, 'M', 'ἔ'), - (0x1F1D, 'M', 'ἕ'), - (0x1F1E, 'X'), - (0x1F20, 'V'), - (0x1F28, 'M', 'ἠ'), - (0x1F29, 'M', 'ἡ'), - (0x1F2A, 'M', 'ἢ'), - (0x1F2B, 'M', 'ἣ'), - (0x1F2C, 'M', 'ἤ'), - (0x1F2D, 'M', 'ἥ'), - (0x1F2E, 'M', 'ἦ'), - (0x1F2F, 'M', 'ἧ'), - (0x1F30, 'V'), - (0x1F38, 'M', 'ἰ'), - (0x1F39, 'M', 'ἱ'), - (0x1F3A, 'M', 'ἲ'), - (0x1F3B, 'M', 'ἳ'), - (0x1F3C, 'M', 'ἴ'), - (0x1F3D, 'M', 'ἵ'), - (0x1F3E, 'M', 'ἶ'), - (0x1F3F, 'M', 'ἷ'), - (0x1F40, 'V'), - (0x1F46, 'X'), - (0x1F48, 'M', 'ὀ'), - (0x1F49, 'M', 'ὁ'), - (0x1F4A, 'M', 'ὂ'), - (0x1F4B, 'M', 'ὃ'), - (0x1F4C, 'M', 'ὄ'), - (0x1F4D, 'M', 'ὅ'), - (0x1F4E, 'X'), - (0x1F50, 'V'), - (0x1F58, 'X'), - (0x1F59, 'M', 'ὑ'), - (0x1F5A, 'X'), - (0x1F5B, 'M', 'ὓ'), - (0x1F5C, 'X'), - (0x1F5D, 'M', 'ὕ'), - (0x1F5E, 'X'), - (0x1F5F, 'M', 'ὗ'), - (0x1F60, 'V'), - (0x1F68, 'M', 'ὠ'), - (0x1F69, 'M', 'ὡ'), - (0x1F6A, 'M', 'ὢ'), - (0x1F6B, 'M', 'ὣ'), - (0x1F6C, 'M', 'ὤ'), - (0x1F6D, 'M', 'ὥ'), - (0x1F6E, 'M', 'ὦ'), - (0x1F6F, 'M', 'ὧ'), - (0x1F70, 'V'), - (0x1F71, 'M', 'ά'), - (0x1F72, 'V'), - (0x1F73, 'M', 'έ'), - (0x1F74, 'V'), - (0x1F75, 'M', 'ή'), - (0x1F76, 'V'), - (0x1F77, 'M', 'ί'), - (0x1F78, 'V'), - (0x1F79, 'M', 'ό'), - (0x1F7A, 'V'), - (0x1F7B, 'M', 'ύ'), - (0x1F7C, 'V'), - (0x1F7D, 'M', 'ώ'), - (0x1F7E, 'X'), - (0x1F80, 'M', 'ἀι'), - (0x1F81, 'M', 'ἁι'), - (0x1F82, 'M', 'ἂι'), - (0x1F83, 'M', 'ἃι'), - (0x1F84, 'M', 'ἄι'), - (0x1F85, 'M', 'ἅι'), - (0x1F86, 'M', 'ἆι'), - (0x1F87, 'M', 'ἇι'), - ] - -def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1F88, 'M', 'ἀι'), - (0x1F89, 'M', 'ἁι'), - (0x1F8A, 'M', 'ἂι'), - (0x1F8B, 'M', 'ἃι'), - (0x1F8C, 'M', 'ἄι'), - (0x1F8D, 'M', 'ἅι'), - (0x1F8E, 'M', 'ἆι'), - (0x1F8F, 'M', 'ἇι'), - (0x1F90, 'M', 'ἠι'), - (0x1F91, 'M', 'ἡι'), - (0x1F92, 'M', 'ἢι'), - (0x1F93, 'M', 'ἣι'), - (0x1F94, 'M', 'ἤι'), - (0x1F95, 'M', 'ἥι'), - (0x1F96, 'M', 'ἦι'), - (0x1F97, 'M', 'ἧι'), - (0x1F98, 'M', 'ἠι'), - (0x1F99, 'M', 'ἡι'), - (0x1F9A, 'M', 'ἢι'), - (0x1F9B, 'M', 'ἣι'), - (0x1F9C, 'M', 'ἤι'), - (0x1F9D, 'M', 'ἥι'), - (0x1F9E, 'M', 'ἦι'), - (0x1F9F, 'M', 'ἧι'), - (0x1FA0, 'M', 'ὠι'), - (0x1FA1, 'M', 'ὡι'), - (0x1FA2, 'M', 'ὢι'), - (0x1FA3, 'M', 'ὣι'), - (0x1FA4, 'M', 'ὤι'), - (0x1FA5, 'M', 'ὥι'), - (0x1FA6, 'M', 'ὦι'), - (0x1FA7, 'M', 'ὧι'), - (0x1FA8, 'M', 'ὠι'), - (0x1FA9, 'M', 'ὡι'), - (0x1FAA, 'M', 'ὢι'), - (0x1FAB, 'M', 'ὣι'), - (0x1FAC, 'M', 'ὤι'), - (0x1FAD, 'M', 'ὥι'), - (0x1FAE, 'M', 'ὦι'), - (0x1FAF, 'M', 'ὧι'), - (0x1FB0, 'V'), - (0x1FB2, 'M', 'ὰι'), - (0x1FB3, 'M', 'αι'), - (0x1FB4, 'M', 'άι'), - (0x1FB5, 'X'), - (0x1FB6, 'V'), - (0x1FB7, 'M', 'ᾶι'), - (0x1FB8, 'M', 'ᾰ'), - (0x1FB9, 'M', 'ᾱ'), - (0x1FBA, 'M', 'ὰ'), - (0x1FBB, 'M', 'ά'), - (0x1FBC, 'M', 'αι'), - (0x1FBD, '3', ' ̓'), - (0x1FBE, 'M', 'ι'), - (0x1FBF, '3', ' ̓'), - (0x1FC0, '3', ' ͂'), - (0x1FC1, '3', ' ̈͂'), - (0x1FC2, 'M', 'ὴι'), - (0x1FC3, 'M', 'ηι'), - (0x1FC4, 'M', 'ήι'), - (0x1FC5, 'X'), - (0x1FC6, 'V'), - (0x1FC7, 'M', 'ῆι'), - (0x1FC8, 'M', 'ὲ'), - (0x1FC9, 'M', 'έ'), - (0x1FCA, 'M', 'ὴ'), - (0x1FCB, 'M', 'ή'), - (0x1FCC, 'M', 'ηι'), - (0x1FCD, '3', ' ̓̀'), - (0x1FCE, '3', ' ̓́'), - (0x1FCF, '3', ' ̓͂'), - (0x1FD0, 'V'), - (0x1FD3, 'M', 'ΐ'), - (0x1FD4, 'X'), - (0x1FD6, 'V'), - (0x1FD8, 'M', 'ῐ'), - (0x1FD9, 'M', 'ῑ'), - (0x1FDA, 'M', 'ὶ'), - (0x1FDB, 'M', 'ί'), - (0x1FDC, 'X'), 
- (0x1FDD, '3', ' ̔̀'), - (0x1FDE, '3', ' ̔́'), - (0x1FDF, '3', ' ̔͂'), - (0x1FE0, 'V'), - (0x1FE3, 'M', 'ΰ'), - (0x1FE4, 'V'), - (0x1FE8, 'M', 'ῠ'), - (0x1FE9, 'M', 'ῡ'), - (0x1FEA, 'M', 'ὺ'), - (0x1FEB, 'M', 'ύ'), - (0x1FEC, 'M', 'ῥ'), - (0x1FED, '3', ' ̈̀'), - (0x1FEE, '3', ' ̈́'), - (0x1FEF, '3', '`'), - (0x1FF0, 'X'), - (0x1FF2, 'M', 'ὼι'), - (0x1FF3, 'M', 'ωι'), - (0x1FF4, 'M', 'ώι'), - (0x1FF5, 'X'), - (0x1FF6, 'V'), - ] - -def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1FF7, 'M', 'ῶι'), - (0x1FF8, 'M', 'ὸ'), - (0x1FF9, 'M', 'ό'), - (0x1FFA, 'M', 'ὼ'), - (0x1FFB, 'M', 'ώ'), - (0x1FFC, 'M', 'ωι'), - (0x1FFD, '3', ' ́'), - (0x1FFE, '3', ' ̔'), - (0x1FFF, 'X'), - (0x2000, '3', ' '), - (0x200B, 'I'), - (0x200C, 'D', ''), - (0x200E, 'X'), - (0x2010, 'V'), - (0x2011, 'M', '‐'), - (0x2012, 'V'), - (0x2017, '3', ' ̳'), - (0x2018, 'V'), - (0x2024, 'X'), - (0x2027, 'V'), - (0x2028, 'X'), - (0x202F, '3', ' '), - (0x2030, 'V'), - (0x2033, 'M', '′′'), - (0x2034, 'M', '′′′'), - (0x2035, 'V'), - (0x2036, 'M', '‵‵'), - (0x2037, 'M', '‵‵‵'), - (0x2038, 'V'), - (0x203C, '3', '!!'), - (0x203D, 'V'), - (0x203E, '3', ' ̅'), - (0x203F, 'V'), - (0x2047, '3', '??'), - (0x2048, '3', '?!'), - (0x2049, '3', '!?'), - (0x204A, 'V'), - (0x2057, 'M', '′′′′'), - (0x2058, 'V'), - (0x205F, '3', ' '), - (0x2060, 'I'), - (0x2061, 'X'), - (0x2064, 'I'), - (0x2065, 'X'), - (0x2070, 'M', '0'), - (0x2071, 'M', 'i'), - (0x2072, 'X'), - (0x2074, 'M', '4'), - (0x2075, 'M', '5'), - (0x2076, 'M', '6'), - (0x2077, 'M', '7'), - (0x2078, 'M', '8'), - (0x2079, 'M', '9'), - (0x207A, '3', '+'), - (0x207B, 'M', '−'), - (0x207C, '3', '='), - (0x207D, '3', '('), - (0x207E, '3', ')'), - (0x207F, 'M', 'n'), - (0x2080, 'M', '0'), - (0x2081, 'M', '1'), - (0x2082, 'M', '2'), - (0x2083, 'M', '3'), - (0x2084, 'M', '4'), - (0x2085, 'M', '5'), - (0x2086, 'M', '6'), - (0x2087, 'M', '7'), - (0x2088, 'M', '8'), - (0x2089, 'M', '9'), - (0x208A, '3', '+'), - (0x208B, 'M', '−'), - (0x208C, '3', '='), - (0x208D, '3', '('), - (0x208E, '3', ')'), - (0x208F, 'X'), - (0x2090, 'M', 'a'), - (0x2091, 'M', 'e'), - (0x2092, 'M', 'o'), - (0x2093, 'M', 'x'), - (0x2094, 'M', 'ə'), - (0x2095, 'M', 'h'), - (0x2096, 'M', 'k'), - (0x2097, 'M', 'l'), - (0x2098, 'M', 'm'), - (0x2099, 'M', 'n'), - (0x209A, 'M', 'p'), - (0x209B, 'M', 's'), - (0x209C, 'M', 't'), - (0x209D, 'X'), - (0x20A0, 'V'), - (0x20A8, 'M', 'rs'), - (0x20A9, 'V'), - (0x20C1, 'X'), - (0x20D0, 'V'), - (0x20F1, 'X'), - (0x2100, '3', 'a/c'), - (0x2101, '3', 'a/s'), - (0x2102, 'M', 'c'), - (0x2103, 'M', '°c'), - (0x2104, 'V'), - ] - -def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2105, '3', 'c/o'), - (0x2106, '3', 'c/u'), - (0x2107, 'M', 'ɛ'), - (0x2108, 'V'), - (0x2109, 'M', '°f'), - (0x210A, 'M', 'g'), - (0x210B, 'M', 'h'), - (0x210F, 'M', 'ħ'), - (0x2110, 'M', 'i'), - (0x2112, 'M', 'l'), - (0x2114, 'V'), - (0x2115, 'M', 'n'), - (0x2116, 'M', 'no'), - (0x2117, 'V'), - (0x2119, 'M', 'p'), - (0x211A, 'M', 'q'), - (0x211B, 'M', 'r'), - (0x211E, 'V'), - (0x2120, 'M', 'sm'), - (0x2121, 'M', 'tel'), - (0x2122, 'M', 'tm'), - (0x2123, 'V'), - (0x2124, 'M', 'z'), - (0x2125, 'V'), - (0x2126, 'M', 'ω'), - (0x2127, 'V'), - (0x2128, 'M', 'z'), - (0x2129, 'V'), - (0x212A, 'M', 'k'), - (0x212B, 'M', 'å'), - (0x212C, 'M', 'b'), - (0x212D, 'M', 'c'), - (0x212E, 'V'), - (0x212F, 'M', 'e'), - (0x2131, 'M', 'f'), - (0x2132, 'X'), - (0x2133, 'M', 'm'), - (0x2134, 'M', 'o'), - (0x2135, 'M', 'א'), - (0x2136, 'M', 'ב'), - (0x2137, 'M', 'ג'), - (0x2138, 
'M', 'ד'), - (0x2139, 'M', 'i'), - (0x213A, 'V'), - (0x213B, 'M', 'fax'), - (0x213C, 'M', 'π'), - (0x213D, 'M', 'γ'), - (0x213F, 'M', 'π'), - (0x2140, 'M', '∑'), - (0x2141, 'V'), - (0x2145, 'M', 'd'), - (0x2147, 'M', 'e'), - (0x2148, 'M', 'i'), - (0x2149, 'M', 'j'), - (0x214A, 'V'), - (0x2150, 'M', '1⁄7'), - (0x2151, 'M', '1⁄9'), - (0x2152, 'M', '1⁄10'), - (0x2153, 'M', '1⁄3'), - (0x2154, 'M', '2⁄3'), - (0x2155, 'M', '1⁄5'), - (0x2156, 'M', '2⁄5'), - (0x2157, 'M', '3⁄5'), - (0x2158, 'M', '4⁄5'), - (0x2159, 'M', '1⁄6'), - (0x215A, 'M', '5⁄6'), - (0x215B, 'M', '1⁄8'), - (0x215C, 'M', '3⁄8'), - (0x215D, 'M', '5⁄8'), - (0x215E, 'M', '7⁄8'), - (0x215F, 'M', '1⁄'), - (0x2160, 'M', 'i'), - (0x2161, 'M', 'ii'), - (0x2162, 'M', 'iii'), - (0x2163, 'M', 'iv'), - (0x2164, 'M', 'v'), - (0x2165, 'M', 'vi'), - (0x2166, 'M', 'vii'), - (0x2167, 'M', 'viii'), - (0x2168, 'M', 'ix'), - (0x2169, 'M', 'x'), - (0x216A, 'M', 'xi'), - (0x216B, 'M', 'xii'), - (0x216C, 'M', 'l'), - (0x216D, 'M', 'c'), - (0x216E, 'M', 'd'), - (0x216F, 'M', 'm'), - (0x2170, 'M', 'i'), - (0x2171, 'M', 'ii'), - (0x2172, 'M', 'iii'), - (0x2173, 'M', 'iv'), - (0x2174, 'M', 'v'), - (0x2175, 'M', 'vi'), - (0x2176, 'M', 'vii'), - (0x2177, 'M', 'viii'), - (0x2178, 'M', 'ix'), - (0x2179, 'M', 'x'), - (0x217A, 'M', 'xi'), - (0x217B, 'M', 'xii'), - (0x217C, 'M', 'l'), - ] - -def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x217D, 'M', 'c'), - (0x217E, 'M', 'd'), - (0x217F, 'M', 'm'), - (0x2180, 'V'), - (0x2183, 'X'), - (0x2184, 'V'), - (0x2189, 'M', '0⁄3'), - (0x218A, 'V'), - (0x218C, 'X'), - (0x2190, 'V'), - (0x222C, 'M', '∫∫'), - (0x222D, 'M', '∫∫∫'), - (0x222E, 'V'), - (0x222F, 'M', '∮∮'), - (0x2230, 'M', '∮∮∮'), - (0x2231, 'V'), - (0x2260, '3'), - (0x2261, 'V'), - (0x226E, '3'), - (0x2270, 'V'), - (0x2329, 'M', '〈'), - (0x232A, 'M', '〉'), - (0x232B, 'V'), - (0x2427, 'X'), - (0x2440, 'V'), - (0x244B, 'X'), - (0x2460, 'M', '1'), - (0x2461, 'M', '2'), - (0x2462, 'M', '3'), - (0x2463, 'M', '4'), - (0x2464, 'M', '5'), - (0x2465, 'M', '6'), - (0x2466, 'M', '7'), - (0x2467, 'M', '8'), - (0x2468, 'M', '9'), - (0x2469, 'M', '10'), - (0x246A, 'M', '11'), - (0x246B, 'M', '12'), - (0x246C, 'M', '13'), - (0x246D, 'M', '14'), - (0x246E, 'M', '15'), - (0x246F, 'M', '16'), - (0x2470, 'M', '17'), - (0x2471, 'M', '18'), - (0x2472, 'M', '19'), - (0x2473, 'M', '20'), - (0x2474, '3', '(1)'), - (0x2475, '3', '(2)'), - (0x2476, '3', '(3)'), - (0x2477, '3', '(4)'), - (0x2478, '3', '(5)'), - (0x2479, '3', '(6)'), - (0x247A, '3', '(7)'), - (0x247B, '3', '(8)'), - (0x247C, '3', '(9)'), - (0x247D, '3', '(10)'), - (0x247E, '3', '(11)'), - (0x247F, '3', '(12)'), - (0x2480, '3', '(13)'), - (0x2481, '3', '(14)'), - (0x2482, '3', '(15)'), - (0x2483, '3', '(16)'), - (0x2484, '3', '(17)'), - (0x2485, '3', '(18)'), - (0x2486, '3', '(19)'), - (0x2487, '3', '(20)'), - (0x2488, 'X'), - (0x249C, '3', '(a)'), - (0x249D, '3', '(b)'), - (0x249E, '3', '(c)'), - (0x249F, '3', '(d)'), - (0x24A0, '3', '(e)'), - (0x24A1, '3', '(f)'), - (0x24A2, '3', '(g)'), - (0x24A3, '3', '(h)'), - (0x24A4, '3', '(i)'), - (0x24A5, '3', '(j)'), - (0x24A6, '3', '(k)'), - (0x24A7, '3', '(l)'), - (0x24A8, '3', '(m)'), - (0x24A9, '3', '(n)'), - (0x24AA, '3', '(o)'), - (0x24AB, '3', '(p)'), - (0x24AC, '3', '(q)'), - (0x24AD, '3', '(r)'), - (0x24AE, '3', '(s)'), - (0x24AF, '3', '(t)'), - (0x24B0, '3', '(u)'), - (0x24B1, '3', '(v)'), - (0x24B2, '3', '(w)'), - (0x24B3, '3', '(x)'), - (0x24B4, '3', '(y)'), - (0x24B5, '3', '(z)'), - (0x24B6, 'M', 'a'), - (0x24B7, 'M', 'b'), - 
(0x24B8, 'M', 'c'), - (0x24B9, 'M', 'd'), - (0x24BA, 'M', 'e'), - (0x24BB, 'M', 'f'), - (0x24BC, 'M', 'g'), - ] - -def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x24BD, 'M', 'h'), - (0x24BE, 'M', 'i'), - (0x24BF, 'M', 'j'), - (0x24C0, 'M', 'k'), - (0x24C1, 'M', 'l'), - (0x24C2, 'M', 'm'), - (0x24C3, 'M', 'n'), - (0x24C4, 'M', 'o'), - (0x24C5, 'M', 'p'), - (0x24C6, 'M', 'q'), - (0x24C7, 'M', 'r'), - (0x24C8, 'M', 's'), - (0x24C9, 'M', 't'), - (0x24CA, 'M', 'u'), - (0x24CB, 'M', 'v'), - (0x24CC, 'M', 'w'), - (0x24CD, 'M', 'x'), - (0x24CE, 'M', 'y'), - (0x24CF, 'M', 'z'), - (0x24D0, 'M', 'a'), - (0x24D1, 'M', 'b'), - (0x24D2, 'M', 'c'), - (0x24D3, 'M', 'd'), - (0x24D4, 'M', 'e'), - (0x24D5, 'M', 'f'), - (0x24D6, 'M', 'g'), - (0x24D7, 'M', 'h'), - (0x24D8, 'M', 'i'), - (0x24D9, 'M', 'j'), - (0x24DA, 'M', 'k'), - (0x24DB, 'M', 'l'), - (0x24DC, 'M', 'm'), - (0x24DD, 'M', 'n'), - (0x24DE, 'M', 'o'), - (0x24DF, 'M', 'p'), - (0x24E0, 'M', 'q'), - (0x24E1, 'M', 'r'), - (0x24E2, 'M', 's'), - (0x24E3, 'M', 't'), - (0x24E4, 'M', 'u'), - (0x24E5, 'M', 'v'), - (0x24E6, 'M', 'w'), - (0x24E7, 'M', 'x'), - (0x24E8, 'M', 'y'), - (0x24E9, 'M', 'z'), - (0x24EA, 'M', '0'), - (0x24EB, 'V'), - (0x2A0C, 'M', '∫∫∫∫'), - (0x2A0D, 'V'), - (0x2A74, '3', '::='), - (0x2A75, '3', '=='), - (0x2A76, '3', '==='), - (0x2A77, 'V'), - (0x2ADC, 'M', '⫝̸'), - (0x2ADD, 'V'), - (0x2B74, 'X'), - (0x2B76, 'V'), - (0x2B96, 'X'), - (0x2B97, 'V'), - (0x2C00, 'M', 'ⰰ'), - (0x2C01, 'M', 'ⰱ'), - (0x2C02, 'M', 'ⰲ'), - (0x2C03, 'M', 'ⰳ'), - (0x2C04, 'M', 'ⰴ'), - (0x2C05, 'M', 'ⰵ'), - (0x2C06, 'M', 'ⰶ'), - (0x2C07, 'M', 'ⰷ'), - (0x2C08, 'M', 'ⰸ'), - (0x2C09, 'M', 'ⰹ'), - (0x2C0A, 'M', 'ⰺ'), - (0x2C0B, 'M', 'ⰻ'), - (0x2C0C, 'M', 'ⰼ'), - (0x2C0D, 'M', 'ⰽ'), - (0x2C0E, 'M', 'ⰾ'), - (0x2C0F, 'M', 'ⰿ'), - (0x2C10, 'M', 'ⱀ'), - (0x2C11, 'M', 'ⱁ'), - (0x2C12, 'M', 'ⱂ'), - (0x2C13, 'M', 'ⱃ'), - (0x2C14, 'M', 'ⱄ'), - (0x2C15, 'M', 'ⱅ'), - (0x2C16, 'M', 'ⱆ'), - (0x2C17, 'M', 'ⱇ'), - (0x2C18, 'M', 'ⱈ'), - (0x2C19, 'M', 'ⱉ'), - (0x2C1A, 'M', 'ⱊ'), - (0x2C1B, 'M', 'ⱋ'), - (0x2C1C, 'M', 'ⱌ'), - (0x2C1D, 'M', 'ⱍ'), - (0x2C1E, 'M', 'ⱎ'), - (0x2C1F, 'M', 'ⱏ'), - (0x2C20, 'M', 'ⱐ'), - (0x2C21, 'M', 'ⱑ'), - (0x2C22, 'M', 'ⱒ'), - (0x2C23, 'M', 'ⱓ'), - (0x2C24, 'M', 'ⱔ'), - (0x2C25, 'M', 'ⱕ'), - (0x2C26, 'M', 'ⱖ'), - (0x2C27, 'M', 'ⱗ'), - (0x2C28, 'M', 'ⱘ'), - ] - -def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2C29, 'M', 'ⱙ'), - (0x2C2A, 'M', 'ⱚ'), - (0x2C2B, 'M', 'ⱛ'), - (0x2C2C, 'M', 'ⱜ'), - (0x2C2D, 'M', 'ⱝ'), - (0x2C2E, 'M', 'ⱞ'), - (0x2C2F, 'M', 'ⱟ'), - (0x2C30, 'V'), - (0x2C60, 'M', 'ⱡ'), - (0x2C61, 'V'), - (0x2C62, 'M', 'ɫ'), - (0x2C63, 'M', 'ᵽ'), - (0x2C64, 'M', 'ɽ'), - (0x2C65, 'V'), - (0x2C67, 'M', 'ⱨ'), - (0x2C68, 'V'), - (0x2C69, 'M', 'ⱪ'), - (0x2C6A, 'V'), - (0x2C6B, 'M', 'ⱬ'), - (0x2C6C, 'V'), - (0x2C6D, 'M', 'ɑ'), - (0x2C6E, 'M', 'ɱ'), - (0x2C6F, 'M', 'ɐ'), - (0x2C70, 'M', 'ɒ'), - (0x2C71, 'V'), - (0x2C72, 'M', 'ⱳ'), - (0x2C73, 'V'), - (0x2C75, 'M', 'ⱶ'), - (0x2C76, 'V'), - (0x2C7C, 'M', 'j'), - (0x2C7D, 'M', 'v'), - (0x2C7E, 'M', 'ȿ'), - (0x2C7F, 'M', 'ɀ'), - (0x2C80, 'M', 'ⲁ'), - (0x2C81, 'V'), - (0x2C82, 'M', 'ⲃ'), - (0x2C83, 'V'), - (0x2C84, 'M', 'ⲅ'), - (0x2C85, 'V'), - (0x2C86, 'M', 'ⲇ'), - (0x2C87, 'V'), - (0x2C88, 'M', 'ⲉ'), - (0x2C89, 'V'), - (0x2C8A, 'M', 'ⲋ'), - (0x2C8B, 'V'), - (0x2C8C, 'M', 'ⲍ'), - (0x2C8D, 'V'), - (0x2C8E, 'M', 'ⲏ'), - (0x2C8F, 'V'), - (0x2C90, 'M', 'ⲑ'), - (0x2C91, 'V'), - (0x2C92, 'M', 'ⲓ'), - (0x2C93, 'V'), - (0x2C94, 'M', 'ⲕ'), - (0x2C95, 
'V'), - (0x2C96, 'M', 'ⲗ'), - (0x2C97, 'V'), - (0x2C98, 'M', 'ⲙ'), - (0x2C99, 'V'), - (0x2C9A, 'M', 'ⲛ'), - (0x2C9B, 'V'), - (0x2C9C, 'M', 'ⲝ'), - (0x2C9D, 'V'), - (0x2C9E, 'M', 'ⲟ'), - (0x2C9F, 'V'), - (0x2CA0, 'M', 'ⲡ'), - (0x2CA1, 'V'), - (0x2CA2, 'M', 'ⲣ'), - (0x2CA3, 'V'), - (0x2CA4, 'M', 'ⲥ'), - (0x2CA5, 'V'), - (0x2CA6, 'M', 'ⲧ'), - (0x2CA7, 'V'), - (0x2CA8, 'M', 'ⲩ'), - (0x2CA9, 'V'), - (0x2CAA, 'M', 'ⲫ'), - (0x2CAB, 'V'), - (0x2CAC, 'M', 'ⲭ'), - (0x2CAD, 'V'), - (0x2CAE, 'M', 'ⲯ'), - (0x2CAF, 'V'), - (0x2CB0, 'M', 'ⲱ'), - (0x2CB1, 'V'), - (0x2CB2, 'M', 'ⲳ'), - (0x2CB3, 'V'), - (0x2CB4, 'M', 'ⲵ'), - (0x2CB5, 'V'), - (0x2CB6, 'M', 'ⲷ'), - (0x2CB7, 'V'), - (0x2CB8, 'M', 'ⲹ'), - (0x2CB9, 'V'), - (0x2CBA, 'M', 'ⲻ'), - (0x2CBB, 'V'), - (0x2CBC, 'M', 'ⲽ'), - (0x2CBD, 'V'), - (0x2CBE, 'M', 'ⲿ'), - (0x2CBF, 'V'), - (0x2CC0, 'M', 'ⳁ'), - (0x2CC1, 'V'), - (0x2CC2, 'M', 'ⳃ'), - ] - -def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2CC3, 'V'), - (0x2CC4, 'M', 'ⳅ'), - (0x2CC5, 'V'), - (0x2CC6, 'M', 'ⳇ'), - (0x2CC7, 'V'), - (0x2CC8, 'M', 'ⳉ'), - (0x2CC9, 'V'), - (0x2CCA, 'M', 'ⳋ'), - (0x2CCB, 'V'), - (0x2CCC, 'M', 'ⳍ'), - (0x2CCD, 'V'), - (0x2CCE, 'M', 'ⳏ'), - (0x2CCF, 'V'), - (0x2CD0, 'M', 'ⳑ'), - (0x2CD1, 'V'), - (0x2CD2, 'M', 'ⳓ'), - (0x2CD3, 'V'), - (0x2CD4, 'M', 'ⳕ'), - (0x2CD5, 'V'), - (0x2CD6, 'M', 'ⳗ'), - (0x2CD7, 'V'), - (0x2CD8, 'M', 'ⳙ'), - (0x2CD9, 'V'), - (0x2CDA, 'M', 'ⳛ'), - (0x2CDB, 'V'), - (0x2CDC, 'M', 'ⳝ'), - (0x2CDD, 'V'), - (0x2CDE, 'M', 'ⳟ'), - (0x2CDF, 'V'), - (0x2CE0, 'M', 'ⳡ'), - (0x2CE1, 'V'), - (0x2CE2, 'M', 'ⳣ'), - (0x2CE3, 'V'), - (0x2CEB, 'M', 'ⳬ'), - (0x2CEC, 'V'), - (0x2CED, 'M', 'ⳮ'), - (0x2CEE, 'V'), - (0x2CF2, 'M', 'ⳳ'), - (0x2CF3, 'V'), - (0x2CF4, 'X'), - (0x2CF9, 'V'), - (0x2D26, 'X'), - (0x2D27, 'V'), - (0x2D28, 'X'), - (0x2D2D, 'V'), - (0x2D2E, 'X'), - (0x2D30, 'V'), - (0x2D68, 'X'), - (0x2D6F, 'M', 'ⵡ'), - (0x2D70, 'V'), - (0x2D71, 'X'), - (0x2D7F, 'V'), - (0x2D97, 'X'), - (0x2DA0, 'V'), - (0x2DA7, 'X'), - (0x2DA8, 'V'), - (0x2DAF, 'X'), - (0x2DB0, 'V'), - (0x2DB7, 'X'), - (0x2DB8, 'V'), - (0x2DBF, 'X'), - (0x2DC0, 'V'), - (0x2DC7, 'X'), - (0x2DC8, 'V'), - (0x2DCF, 'X'), - (0x2DD0, 'V'), - (0x2DD7, 'X'), - (0x2DD8, 'V'), - (0x2DDF, 'X'), - (0x2DE0, 'V'), - (0x2E5E, 'X'), - (0x2E80, 'V'), - (0x2E9A, 'X'), - (0x2E9B, 'V'), - (0x2E9F, 'M', '母'), - (0x2EA0, 'V'), - (0x2EF3, 'M', '龟'), - (0x2EF4, 'X'), - (0x2F00, 'M', '一'), - (0x2F01, 'M', '丨'), - (0x2F02, 'M', '丶'), - (0x2F03, 'M', '丿'), - (0x2F04, 'M', '乙'), - (0x2F05, 'M', '亅'), - (0x2F06, 'M', '二'), - (0x2F07, 'M', '亠'), - (0x2F08, 'M', '人'), - (0x2F09, 'M', '儿'), - (0x2F0A, 'M', '入'), - (0x2F0B, 'M', '八'), - (0x2F0C, 'M', '冂'), - (0x2F0D, 'M', '冖'), - (0x2F0E, 'M', '冫'), - (0x2F0F, 'M', '几'), - (0x2F10, 'M', '凵'), - (0x2F11, 'M', '刀'), - (0x2F12, 'M', '力'), - (0x2F13, 'M', '勹'), - (0x2F14, 'M', '匕'), - (0x2F15, 'M', '匚'), - ] - -def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F16, 'M', '匸'), - (0x2F17, 'M', '十'), - (0x2F18, 'M', '卜'), - (0x2F19, 'M', '卩'), - (0x2F1A, 'M', '厂'), - (0x2F1B, 'M', '厶'), - (0x2F1C, 'M', '又'), - (0x2F1D, 'M', '口'), - (0x2F1E, 'M', '囗'), - (0x2F1F, 'M', '土'), - (0x2F20, 'M', '士'), - (0x2F21, 'M', '夂'), - (0x2F22, 'M', '夊'), - (0x2F23, 'M', '夕'), - (0x2F24, 'M', '大'), - (0x2F25, 'M', '女'), - (0x2F26, 'M', '子'), - (0x2F27, 'M', '宀'), - (0x2F28, 'M', '寸'), - (0x2F29, 'M', '小'), - (0x2F2A, 'M', '尢'), - (0x2F2B, 'M', '尸'), - (0x2F2C, 'M', '屮'), - (0x2F2D, 'M', '山'), - (0x2F2E, 'M', '巛'), - (0x2F2F, 'M', '工'), - 
(0x2F30, 'M', '己'), - (0x2F31, 'M', '巾'), - (0x2F32, 'M', '干'), - (0x2F33, 'M', '幺'), - (0x2F34, 'M', '广'), - (0x2F35, 'M', '廴'), - (0x2F36, 'M', '廾'), - (0x2F37, 'M', '弋'), - (0x2F38, 'M', '弓'), - (0x2F39, 'M', '彐'), - (0x2F3A, 'M', '彡'), - (0x2F3B, 'M', '彳'), - (0x2F3C, 'M', '心'), - (0x2F3D, 'M', '戈'), - (0x2F3E, 'M', '戶'), - (0x2F3F, 'M', '手'), - (0x2F40, 'M', '支'), - (0x2F41, 'M', '攴'), - (0x2F42, 'M', '文'), - (0x2F43, 'M', '斗'), - (0x2F44, 'M', '斤'), - (0x2F45, 'M', '方'), - (0x2F46, 'M', '无'), - (0x2F47, 'M', '日'), - (0x2F48, 'M', '曰'), - (0x2F49, 'M', '月'), - (0x2F4A, 'M', '木'), - (0x2F4B, 'M', '欠'), - (0x2F4C, 'M', '止'), - (0x2F4D, 'M', '歹'), - (0x2F4E, 'M', '殳'), - (0x2F4F, 'M', '毋'), - (0x2F50, 'M', '比'), - (0x2F51, 'M', '毛'), - (0x2F52, 'M', '氏'), - (0x2F53, 'M', '气'), - (0x2F54, 'M', '水'), - (0x2F55, 'M', '火'), - (0x2F56, 'M', '爪'), - (0x2F57, 'M', '父'), - (0x2F58, 'M', '爻'), - (0x2F59, 'M', '爿'), - (0x2F5A, 'M', '片'), - (0x2F5B, 'M', '牙'), - (0x2F5C, 'M', '牛'), - (0x2F5D, 'M', '犬'), - (0x2F5E, 'M', '玄'), - (0x2F5F, 'M', '玉'), - (0x2F60, 'M', '瓜'), - (0x2F61, 'M', '瓦'), - (0x2F62, 'M', '甘'), - (0x2F63, 'M', '生'), - (0x2F64, 'M', '用'), - (0x2F65, 'M', '田'), - (0x2F66, 'M', '疋'), - (0x2F67, 'M', '疒'), - (0x2F68, 'M', '癶'), - (0x2F69, 'M', '白'), - (0x2F6A, 'M', '皮'), - (0x2F6B, 'M', '皿'), - (0x2F6C, 'M', '目'), - (0x2F6D, 'M', '矛'), - (0x2F6E, 'M', '矢'), - (0x2F6F, 'M', '石'), - (0x2F70, 'M', '示'), - (0x2F71, 'M', '禸'), - (0x2F72, 'M', '禾'), - (0x2F73, 'M', '穴'), - (0x2F74, 'M', '立'), - (0x2F75, 'M', '竹'), - (0x2F76, 'M', '米'), - (0x2F77, 'M', '糸'), - (0x2F78, 'M', '缶'), - (0x2F79, 'M', '网'), - ] - -def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F7A, 'M', '羊'), - (0x2F7B, 'M', '羽'), - (0x2F7C, 'M', '老'), - (0x2F7D, 'M', '而'), - (0x2F7E, 'M', '耒'), - (0x2F7F, 'M', '耳'), - (0x2F80, 'M', '聿'), - (0x2F81, 'M', '肉'), - (0x2F82, 'M', '臣'), - (0x2F83, 'M', '自'), - (0x2F84, 'M', '至'), - (0x2F85, 'M', '臼'), - (0x2F86, 'M', '舌'), - (0x2F87, 'M', '舛'), - (0x2F88, 'M', '舟'), - (0x2F89, 'M', '艮'), - (0x2F8A, 'M', '色'), - (0x2F8B, 'M', '艸'), - (0x2F8C, 'M', '虍'), - (0x2F8D, 'M', '虫'), - (0x2F8E, 'M', '血'), - (0x2F8F, 'M', '行'), - (0x2F90, 'M', '衣'), - (0x2F91, 'M', '襾'), - (0x2F92, 'M', '見'), - (0x2F93, 'M', '角'), - (0x2F94, 'M', '言'), - (0x2F95, 'M', '谷'), - (0x2F96, 'M', '豆'), - (0x2F97, 'M', '豕'), - (0x2F98, 'M', '豸'), - (0x2F99, 'M', '貝'), - (0x2F9A, 'M', '赤'), - (0x2F9B, 'M', '走'), - (0x2F9C, 'M', '足'), - (0x2F9D, 'M', '身'), - (0x2F9E, 'M', '車'), - (0x2F9F, 'M', '辛'), - (0x2FA0, 'M', '辰'), - (0x2FA1, 'M', '辵'), - (0x2FA2, 'M', '邑'), - (0x2FA3, 'M', '酉'), - (0x2FA4, 'M', '釆'), - (0x2FA5, 'M', '里'), - (0x2FA6, 'M', '金'), - (0x2FA7, 'M', '長'), - (0x2FA8, 'M', '門'), - (0x2FA9, 'M', '阜'), - (0x2FAA, 'M', '隶'), - (0x2FAB, 'M', '隹'), - (0x2FAC, 'M', '雨'), - (0x2FAD, 'M', '靑'), - (0x2FAE, 'M', '非'), - (0x2FAF, 'M', '面'), - (0x2FB0, 'M', '革'), - (0x2FB1, 'M', '韋'), - (0x2FB2, 'M', '韭'), - (0x2FB3, 'M', '音'), - (0x2FB4, 'M', '頁'), - (0x2FB5, 'M', '風'), - (0x2FB6, 'M', '飛'), - (0x2FB7, 'M', '食'), - (0x2FB8, 'M', '首'), - (0x2FB9, 'M', '香'), - (0x2FBA, 'M', '馬'), - (0x2FBB, 'M', '骨'), - (0x2FBC, 'M', '高'), - (0x2FBD, 'M', '髟'), - (0x2FBE, 'M', '鬥'), - (0x2FBF, 'M', '鬯'), - (0x2FC0, 'M', '鬲'), - (0x2FC1, 'M', '鬼'), - (0x2FC2, 'M', '魚'), - (0x2FC3, 'M', '鳥'), - (0x2FC4, 'M', '鹵'), - (0x2FC5, 'M', '鹿'), - (0x2FC6, 'M', '麥'), - (0x2FC7, 'M', '麻'), - (0x2FC8, 'M', '黃'), - (0x2FC9, 'M', '黍'), - (0x2FCA, 'M', '黑'), - (0x2FCB, 'M', '黹'), - (0x2FCC, 'M', '黽'), - (0x2FCD, 'M', 
'鼎'), - (0x2FCE, 'M', '鼓'), - (0x2FCF, 'M', '鼠'), - (0x2FD0, 'M', '鼻'), - (0x2FD1, 'M', '齊'), - (0x2FD2, 'M', '齒'), - (0x2FD3, 'M', '龍'), - (0x2FD4, 'M', '龜'), - (0x2FD5, 'M', '龠'), - (0x2FD6, 'X'), - (0x3000, '3', ' '), - (0x3001, 'V'), - (0x3002, 'M', '.'), - (0x3003, 'V'), - (0x3036, 'M', '〒'), - (0x3037, 'V'), - (0x3038, 'M', '十'), - ] - -def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3039, 'M', '卄'), - (0x303A, 'M', '卅'), - (0x303B, 'V'), - (0x3040, 'X'), - (0x3041, 'V'), - (0x3097, 'X'), - (0x3099, 'V'), - (0x309B, '3', ' ゙'), - (0x309C, '3', ' ゚'), - (0x309D, 'V'), - (0x309F, 'M', 'より'), - (0x30A0, 'V'), - (0x30FF, 'M', 'コト'), - (0x3100, 'X'), - (0x3105, 'V'), - (0x3130, 'X'), - (0x3131, 'M', 'ᄀ'), - (0x3132, 'M', 'ᄁ'), - (0x3133, 'M', 'ᆪ'), - (0x3134, 'M', 'ᄂ'), - (0x3135, 'M', 'ᆬ'), - (0x3136, 'M', 'ᆭ'), - (0x3137, 'M', 'ᄃ'), - (0x3138, 'M', 'ᄄ'), - (0x3139, 'M', 'ᄅ'), - (0x313A, 'M', 'ᆰ'), - (0x313B, 'M', 'ᆱ'), - (0x313C, 'M', 'ᆲ'), - (0x313D, 'M', 'ᆳ'), - (0x313E, 'M', 'ᆴ'), - (0x313F, 'M', 'ᆵ'), - (0x3140, 'M', 'ᄚ'), - (0x3141, 'M', 'ᄆ'), - (0x3142, 'M', 'ᄇ'), - (0x3143, 'M', 'ᄈ'), - (0x3144, 'M', 'ᄡ'), - (0x3145, 'M', 'ᄉ'), - (0x3146, 'M', 'ᄊ'), - (0x3147, 'M', 'ᄋ'), - (0x3148, 'M', 'ᄌ'), - (0x3149, 'M', 'ᄍ'), - (0x314A, 'M', 'ᄎ'), - (0x314B, 'M', 'ᄏ'), - (0x314C, 'M', 'ᄐ'), - (0x314D, 'M', 'ᄑ'), - (0x314E, 'M', 'ᄒ'), - (0x314F, 'M', 'ᅡ'), - (0x3150, 'M', 'ᅢ'), - (0x3151, 'M', 'ᅣ'), - (0x3152, 'M', 'ᅤ'), - (0x3153, 'M', 'ᅥ'), - (0x3154, 'M', 'ᅦ'), - (0x3155, 'M', 'ᅧ'), - (0x3156, 'M', 'ᅨ'), - (0x3157, 'M', 'ᅩ'), - (0x3158, 'M', 'ᅪ'), - (0x3159, 'M', 'ᅫ'), - (0x315A, 'M', 'ᅬ'), - (0x315B, 'M', 'ᅭ'), - (0x315C, 'M', 'ᅮ'), - (0x315D, 'M', 'ᅯ'), - (0x315E, 'M', 'ᅰ'), - (0x315F, 'M', 'ᅱ'), - (0x3160, 'M', 'ᅲ'), - (0x3161, 'M', 'ᅳ'), - (0x3162, 'M', 'ᅴ'), - (0x3163, 'M', 'ᅵ'), - (0x3164, 'X'), - (0x3165, 'M', 'ᄔ'), - (0x3166, 'M', 'ᄕ'), - (0x3167, 'M', 'ᇇ'), - (0x3168, 'M', 'ᇈ'), - (0x3169, 'M', 'ᇌ'), - (0x316A, 'M', 'ᇎ'), - (0x316B, 'M', 'ᇓ'), - (0x316C, 'M', 'ᇗ'), - (0x316D, 'M', 'ᇙ'), - (0x316E, 'M', 'ᄜ'), - (0x316F, 'M', 'ᇝ'), - (0x3170, 'M', 'ᇟ'), - (0x3171, 'M', 'ᄝ'), - (0x3172, 'M', 'ᄞ'), - (0x3173, 'M', 'ᄠ'), - (0x3174, 'M', 'ᄢ'), - (0x3175, 'M', 'ᄣ'), - (0x3176, 'M', 'ᄧ'), - (0x3177, 'M', 'ᄩ'), - (0x3178, 'M', 'ᄫ'), - (0x3179, 'M', 'ᄬ'), - (0x317A, 'M', 'ᄭ'), - (0x317B, 'M', 'ᄮ'), - (0x317C, 'M', 'ᄯ'), - (0x317D, 'M', 'ᄲ'), - (0x317E, 'M', 'ᄶ'), - (0x317F, 'M', 'ᅀ'), - (0x3180, 'M', 'ᅇ'), - (0x3181, 'M', 'ᅌ'), - (0x3182, 'M', 'ᇱ'), - (0x3183, 'M', 'ᇲ'), - (0x3184, 'M', 'ᅗ'), - ] - -def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3185, 'M', 'ᅘ'), - (0x3186, 'M', 'ᅙ'), - (0x3187, 'M', 'ᆄ'), - (0x3188, 'M', 'ᆅ'), - (0x3189, 'M', 'ᆈ'), - (0x318A, 'M', 'ᆑ'), - (0x318B, 'M', 'ᆒ'), - (0x318C, 'M', 'ᆔ'), - (0x318D, 'M', 'ᆞ'), - (0x318E, 'M', 'ᆡ'), - (0x318F, 'X'), - (0x3190, 'V'), - (0x3192, 'M', '一'), - (0x3193, 'M', '二'), - (0x3194, 'M', '三'), - (0x3195, 'M', '四'), - (0x3196, 'M', '上'), - (0x3197, 'M', '中'), - (0x3198, 'M', '下'), - (0x3199, 'M', '甲'), - (0x319A, 'M', '乙'), - (0x319B, 'M', '丙'), - (0x319C, 'M', '丁'), - (0x319D, 'M', '天'), - (0x319E, 'M', '地'), - (0x319F, 'M', '人'), - (0x31A0, 'V'), - (0x31E4, 'X'), - (0x31F0, 'V'), - (0x3200, '3', '(ᄀ)'), - (0x3201, '3', '(ᄂ)'), - (0x3202, '3', '(ᄃ)'), - (0x3203, '3', '(ᄅ)'), - (0x3204, '3', '(ᄆ)'), - (0x3205, '3', '(ᄇ)'), - (0x3206, '3', '(ᄉ)'), - (0x3207, '3', '(ᄋ)'), - (0x3208, '3', '(ᄌ)'), - (0x3209, '3', '(ᄎ)'), - (0x320A, '3', '(ᄏ)'), - (0x320B, '3', 
'(ᄐ)'), - (0x320C, '3', '(ᄑ)'), - (0x320D, '3', '(ᄒ)'), - (0x320E, '3', '(가)'), - (0x320F, '3', '(나)'), - (0x3210, '3', '(다)'), - (0x3211, '3', '(라)'), - (0x3212, '3', '(마)'), - (0x3213, '3', '(바)'), - (0x3214, '3', '(사)'), - (0x3215, '3', '(아)'), - (0x3216, '3', '(자)'), - (0x3217, '3', '(차)'), - (0x3218, '3', '(카)'), - (0x3219, '3', '(타)'), - (0x321A, '3', '(파)'), - (0x321B, '3', '(하)'), - (0x321C, '3', '(주)'), - (0x321D, '3', '(오전)'), - (0x321E, '3', '(오후)'), - (0x321F, 'X'), - (0x3220, '3', '(一)'), - (0x3221, '3', '(二)'), - (0x3222, '3', '(三)'), - (0x3223, '3', '(四)'), - (0x3224, '3', '(五)'), - (0x3225, '3', '(六)'), - (0x3226, '3', '(七)'), - (0x3227, '3', '(八)'), - (0x3228, '3', '(九)'), - (0x3229, '3', '(十)'), - (0x322A, '3', '(月)'), - (0x322B, '3', '(火)'), - (0x322C, '3', '(水)'), - (0x322D, '3', '(木)'), - (0x322E, '3', '(金)'), - (0x322F, '3', '(土)'), - (0x3230, '3', '(日)'), - (0x3231, '3', '(株)'), - (0x3232, '3', '(有)'), - (0x3233, '3', '(社)'), - (0x3234, '3', '(名)'), - (0x3235, '3', '(特)'), - (0x3236, '3', '(財)'), - (0x3237, '3', '(祝)'), - (0x3238, '3', '(労)'), - (0x3239, '3', '(代)'), - (0x323A, '3', '(呼)'), - (0x323B, '3', '(学)'), - (0x323C, '3', '(監)'), - (0x323D, '3', '(企)'), - (0x323E, '3', '(資)'), - (0x323F, '3', '(協)'), - (0x3240, '3', '(祭)'), - (0x3241, '3', '(休)'), - (0x3242, '3', '(自)'), - (0x3243, '3', '(至)'), - (0x3244, 'M', '問'), - (0x3245, 'M', '幼'), - (0x3246, 'M', '文'), - ] - -def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3247, 'M', '箏'), - (0x3248, 'V'), - (0x3250, 'M', 'pte'), - (0x3251, 'M', '21'), - (0x3252, 'M', '22'), - (0x3253, 'M', '23'), - (0x3254, 'M', '24'), - (0x3255, 'M', '25'), - (0x3256, 'M', '26'), - (0x3257, 'M', '27'), - (0x3258, 'M', '28'), - (0x3259, 'M', '29'), - (0x325A, 'M', '30'), - (0x325B, 'M', '31'), - (0x325C, 'M', '32'), - (0x325D, 'M', '33'), - (0x325E, 'M', '34'), - (0x325F, 'M', '35'), - (0x3260, 'M', 'ᄀ'), - (0x3261, 'M', 'ᄂ'), - (0x3262, 'M', 'ᄃ'), - (0x3263, 'M', 'ᄅ'), - (0x3264, 'M', 'ᄆ'), - (0x3265, 'M', 'ᄇ'), - (0x3266, 'M', 'ᄉ'), - (0x3267, 'M', 'ᄋ'), - (0x3268, 'M', 'ᄌ'), - (0x3269, 'M', 'ᄎ'), - (0x326A, 'M', 'ᄏ'), - (0x326B, 'M', 'ᄐ'), - (0x326C, 'M', 'ᄑ'), - (0x326D, 'M', 'ᄒ'), - (0x326E, 'M', '가'), - (0x326F, 'M', '나'), - (0x3270, 'M', '다'), - (0x3271, 'M', '라'), - (0x3272, 'M', '마'), - (0x3273, 'M', '바'), - (0x3274, 'M', '사'), - (0x3275, 'M', '아'), - (0x3276, 'M', '자'), - (0x3277, 'M', '차'), - (0x3278, 'M', '카'), - (0x3279, 'M', '타'), - (0x327A, 'M', '파'), - (0x327B, 'M', '하'), - (0x327C, 'M', '참고'), - (0x327D, 'M', '주의'), - (0x327E, 'M', '우'), - (0x327F, 'V'), - (0x3280, 'M', '一'), - (0x3281, 'M', '二'), - (0x3282, 'M', '三'), - (0x3283, 'M', '四'), - (0x3284, 'M', '五'), - (0x3285, 'M', '六'), - (0x3286, 'M', '七'), - (0x3287, 'M', '八'), - (0x3288, 'M', '九'), - (0x3289, 'M', '十'), - (0x328A, 'M', '月'), - (0x328B, 'M', '火'), - (0x328C, 'M', '水'), - (0x328D, 'M', '木'), - (0x328E, 'M', '金'), - (0x328F, 'M', '土'), - (0x3290, 'M', '日'), - (0x3291, 'M', '株'), - (0x3292, 'M', '有'), - (0x3293, 'M', '社'), - (0x3294, 'M', '名'), - (0x3295, 'M', '特'), - (0x3296, 'M', '財'), - (0x3297, 'M', '祝'), - (0x3298, 'M', '労'), - (0x3299, 'M', '秘'), - (0x329A, 'M', '男'), - (0x329B, 'M', '女'), - (0x329C, 'M', '適'), - (0x329D, 'M', '優'), - (0x329E, 'M', '印'), - (0x329F, 'M', '注'), - (0x32A0, 'M', '項'), - (0x32A1, 'M', '休'), - (0x32A2, 'M', '写'), - (0x32A3, 'M', '正'), - (0x32A4, 'M', '上'), - (0x32A5, 'M', '中'), - (0x32A6, 'M', '下'), - (0x32A7, 'M', '左'), - (0x32A8, 'M', '右'), - (0x32A9, 'M', '医'), - (0x32AA, 'M', '宗'), 
- (0x32AB, 'M', '学'), - (0x32AC, 'M', '監'), - (0x32AD, 'M', '企'), - (0x32AE, 'M', '資'), - (0x32AF, 'M', '協'), - (0x32B0, 'M', '夜'), - (0x32B1, 'M', '36'), - ] - -def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x32B2, 'M', '37'), - (0x32B3, 'M', '38'), - (0x32B4, 'M', '39'), - (0x32B5, 'M', '40'), - (0x32B6, 'M', '41'), - (0x32B7, 'M', '42'), - (0x32B8, 'M', '43'), - (0x32B9, 'M', '44'), - (0x32BA, 'M', '45'), - (0x32BB, 'M', '46'), - (0x32BC, 'M', '47'), - (0x32BD, 'M', '48'), - (0x32BE, 'M', '49'), - (0x32BF, 'M', '50'), - (0x32C0, 'M', '1月'), - (0x32C1, 'M', '2月'), - (0x32C2, 'M', '3月'), - (0x32C3, 'M', '4月'), - (0x32C4, 'M', '5月'), - (0x32C5, 'M', '6月'), - (0x32C6, 'M', '7月'), - (0x32C7, 'M', '8月'), - (0x32C8, 'M', '9月'), - (0x32C9, 'M', '10月'), - (0x32CA, 'M', '11月'), - (0x32CB, 'M', '12月'), - (0x32CC, 'M', 'hg'), - (0x32CD, 'M', 'erg'), - (0x32CE, 'M', 'ev'), - (0x32CF, 'M', 'ltd'), - (0x32D0, 'M', 'ア'), - (0x32D1, 'M', 'イ'), - (0x32D2, 'M', 'ウ'), - (0x32D3, 'M', 'エ'), - (0x32D4, 'M', 'オ'), - (0x32D5, 'M', 'カ'), - (0x32D6, 'M', 'キ'), - (0x32D7, 'M', 'ク'), - (0x32D8, 'M', 'ケ'), - (0x32D9, 'M', 'コ'), - (0x32DA, 'M', 'サ'), - (0x32DB, 'M', 'シ'), - (0x32DC, 'M', 'ス'), - (0x32DD, 'M', 'セ'), - (0x32DE, 'M', 'ソ'), - (0x32DF, 'M', 'タ'), - (0x32E0, 'M', 'チ'), - (0x32E1, 'M', 'ツ'), - (0x32E2, 'M', 'テ'), - (0x32E3, 'M', 'ト'), - (0x32E4, 'M', 'ナ'), - (0x32E5, 'M', 'ニ'), - (0x32E6, 'M', 'ヌ'), - (0x32E7, 'M', 'ネ'), - (0x32E8, 'M', 'ノ'), - (0x32E9, 'M', 'ハ'), - (0x32EA, 'M', 'ヒ'), - (0x32EB, 'M', 'フ'), - (0x32EC, 'M', 'ヘ'), - (0x32ED, 'M', 'ホ'), - (0x32EE, 'M', 'マ'), - (0x32EF, 'M', 'ミ'), - (0x32F0, 'M', 'ム'), - (0x32F1, 'M', 'メ'), - (0x32F2, 'M', 'モ'), - (0x32F3, 'M', 'ヤ'), - (0x32F4, 'M', 'ユ'), - (0x32F5, 'M', 'ヨ'), - (0x32F6, 'M', 'ラ'), - (0x32F7, 'M', 'リ'), - (0x32F8, 'M', 'ル'), - (0x32F9, 'M', 'レ'), - (0x32FA, 'M', 'ロ'), - (0x32FB, 'M', 'ワ'), - (0x32FC, 'M', 'ヰ'), - (0x32FD, 'M', 'ヱ'), - (0x32FE, 'M', 'ヲ'), - (0x32FF, 'M', '令和'), - (0x3300, 'M', 'アパート'), - (0x3301, 'M', 'アルファ'), - (0x3302, 'M', 'アンペア'), - (0x3303, 'M', 'アール'), - (0x3304, 'M', 'イニング'), - (0x3305, 'M', 'インチ'), - (0x3306, 'M', 'ウォン'), - (0x3307, 'M', 'エスクード'), - (0x3308, 'M', 'エーカー'), - (0x3309, 'M', 'オンス'), - (0x330A, 'M', 'オーム'), - (0x330B, 'M', 'カイリ'), - (0x330C, 'M', 'カラット'), - (0x330D, 'M', 'カロリー'), - (0x330E, 'M', 'ガロン'), - (0x330F, 'M', 'ガンマ'), - (0x3310, 'M', 'ギガ'), - (0x3311, 'M', 'ギニー'), - (0x3312, 'M', 'キュリー'), - (0x3313, 'M', 'ギルダー'), - (0x3314, 'M', 'キロ'), - (0x3315, 'M', 'キログラム'), - ] - -def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3316, 'M', 'キロメートル'), - (0x3317, 'M', 'キロワット'), - (0x3318, 'M', 'グラム'), - (0x3319, 'M', 'グラムトン'), - (0x331A, 'M', 'クルゼイロ'), - (0x331B, 'M', 'クローネ'), - (0x331C, 'M', 'ケース'), - (0x331D, 'M', 'コルナ'), - (0x331E, 'M', 'コーポ'), - (0x331F, 'M', 'サイクル'), - (0x3320, 'M', 'サンチーム'), - (0x3321, 'M', 'シリング'), - (0x3322, 'M', 'センチ'), - (0x3323, 'M', 'セント'), - (0x3324, 'M', 'ダース'), - (0x3325, 'M', 'デシ'), - (0x3326, 'M', 'ドル'), - (0x3327, 'M', 'トン'), - (0x3328, 'M', 'ナノ'), - (0x3329, 'M', 'ノット'), - (0x332A, 'M', 'ハイツ'), - (0x332B, 'M', 'パーセント'), - (0x332C, 'M', 'パーツ'), - (0x332D, 'M', 'バーレル'), - (0x332E, 'M', 'ピアストル'), - (0x332F, 'M', 'ピクル'), - (0x3330, 'M', 'ピコ'), - (0x3331, 'M', 'ビル'), - (0x3332, 'M', 'ファラッド'), - (0x3333, 'M', 'フィート'), - (0x3334, 'M', 'ブッシェル'), - (0x3335, 'M', 'フラン'), - (0x3336, 'M', 'ヘクタール'), - (0x3337, 'M', 'ペソ'), - (0x3338, 'M', 'ペニヒ'), - (0x3339, 'M', 'ヘルツ'), - (0x333A, 'M', 'ペンス'), - (0x333B, 'M', 'ページ'), - 
(0x333C, 'M', 'ベータ'), - (0x333D, 'M', 'ポイント'), - (0x333E, 'M', 'ボルト'), - (0x333F, 'M', 'ホン'), - (0x3340, 'M', 'ポンド'), - (0x3341, 'M', 'ホール'), - (0x3342, 'M', 'ホーン'), - (0x3343, 'M', 'マイクロ'), - (0x3344, 'M', 'マイル'), - (0x3345, 'M', 'マッハ'), - (0x3346, 'M', 'マルク'), - (0x3347, 'M', 'マンション'), - (0x3348, 'M', 'ミクロン'), - (0x3349, 'M', 'ミリ'), - (0x334A, 'M', 'ミリバール'), - (0x334B, 'M', 'メガ'), - (0x334C, 'M', 'メガトン'), - (0x334D, 'M', 'メートル'), - (0x334E, 'M', 'ヤード'), - (0x334F, 'M', 'ヤール'), - (0x3350, 'M', 'ユアン'), - (0x3351, 'M', 'リットル'), - (0x3352, 'M', 'リラ'), - (0x3353, 'M', 'ルピー'), - (0x3354, 'M', 'ルーブル'), - (0x3355, 'M', 'レム'), - (0x3356, 'M', 'レントゲン'), - (0x3357, 'M', 'ワット'), - (0x3358, 'M', '0点'), - (0x3359, 'M', '1点'), - (0x335A, 'M', '2点'), - (0x335B, 'M', '3点'), - (0x335C, 'M', '4点'), - (0x335D, 'M', '5点'), - (0x335E, 'M', '6点'), - (0x335F, 'M', '7点'), - (0x3360, 'M', '8点'), - (0x3361, 'M', '9点'), - (0x3362, 'M', '10点'), - (0x3363, 'M', '11点'), - (0x3364, 'M', '12点'), - (0x3365, 'M', '13点'), - (0x3366, 'M', '14点'), - (0x3367, 'M', '15点'), - (0x3368, 'M', '16点'), - (0x3369, 'M', '17点'), - (0x336A, 'M', '18点'), - (0x336B, 'M', '19点'), - (0x336C, 'M', '20点'), - (0x336D, 'M', '21点'), - (0x336E, 'M', '22点'), - (0x336F, 'M', '23点'), - (0x3370, 'M', '24点'), - (0x3371, 'M', 'hpa'), - (0x3372, 'M', 'da'), - (0x3373, 'M', 'au'), - (0x3374, 'M', 'bar'), - (0x3375, 'M', 'ov'), - (0x3376, 'M', 'pc'), - (0x3377, 'M', 'dm'), - (0x3378, 'M', 'dm2'), - (0x3379, 'M', 'dm3'), - ] - -def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x337A, 'M', 'iu'), - (0x337B, 'M', '平成'), - (0x337C, 'M', '昭和'), - (0x337D, 'M', '大正'), - (0x337E, 'M', '明治'), - (0x337F, 'M', '株式会社'), - (0x3380, 'M', 'pa'), - (0x3381, 'M', 'na'), - (0x3382, 'M', 'μa'), - (0x3383, 'M', 'ma'), - (0x3384, 'M', 'ka'), - (0x3385, 'M', 'kb'), - (0x3386, 'M', 'mb'), - (0x3387, 'M', 'gb'), - (0x3388, 'M', 'cal'), - (0x3389, 'M', 'kcal'), - (0x338A, 'M', 'pf'), - (0x338B, 'M', 'nf'), - (0x338C, 'M', 'μf'), - (0x338D, 'M', 'μg'), - (0x338E, 'M', 'mg'), - (0x338F, 'M', 'kg'), - (0x3390, 'M', 'hz'), - (0x3391, 'M', 'khz'), - (0x3392, 'M', 'mhz'), - (0x3393, 'M', 'ghz'), - (0x3394, 'M', 'thz'), - (0x3395, 'M', 'μl'), - (0x3396, 'M', 'ml'), - (0x3397, 'M', 'dl'), - (0x3398, 'M', 'kl'), - (0x3399, 'M', 'fm'), - (0x339A, 'M', 'nm'), - (0x339B, 'M', 'μm'), - (0x339C, 'M', 'mm'), - (0x339D, 'M', 'cm'), - (0x339E, 'M', 'km'), - (0x339F, 'M', 'mm2'), - (0x33A0, 'M', 'cm2'), - (0x33A1, 'M', 'm2'), - (0x33A2, 'M', 'km2'), - (0x33A3, 'M', 'mm3'), - (0x33A4, 'M', 'cm3'), - (0x33A5, 'M', 'm3'), - (0x33A6, 'M', 'km3'), - (0x33A7, 'M', 'm∕s'), - (0x33A8, 'M', 'm∕s2'), - (0x33A9, 'M', 'pa'), - (0x33AA, 'M', 'kpa'), - (0x33AB, 'M', 'mpa'), - (0x33AC, 'M', 'gpa'), - (0x33AD, 'M', 'rad'), - (0x33AE, 'M', 'rad∕s'), - (0x33AF, 'M', 'rad∕s2'), - (0x33B0, 'M', 'ps'), - (0x33B1, 'M', 'ns'), - (0x33B2, 'M', 'μs'), - (0x33B3, 'M', 'ms'), - (0x33B4, 'M', 'pv'), - (0x33B5, 'M', 'nv'), - (0x33B6, 'M', 'μv'), - (0x33B7, 'M', 'mv'), - (0x33B8, 'M', 'kv'), - (0x33B9, 'M', 'mv'), - (0x33BA, 'M', 'pw'), - (0x33BB, 'M', 'nw'), - (0x33BC, 'M', 'μw'), - (0x33BD, 'M', 'mw'), - (0x33BE, 'M', 'kw'), - (0x33BF, 'M', 'mw'), - (0x33C0, 'M', 'kω'), - (0x33C1, 'M', 'mω'), - (0x33C2, 'X'), - (0x33C3, 'M', 'bq'), - (0x33C4, 'M', 'cc'), - (0x33C5, 'M', 'cd'), - (0x33C6, 'M', 'c∕kg'), - (0x33C7, 'X'), - (0x33C8, 'M', 'db'), - (0x33C9, 'M', 'gy'), - (0x33CA, 'M', 'ha'), - (0x33CB, 'M', 'hp'), - (0x33CC, 'M', 'in'), - (0x33CD, 'M', 'kk'), - (0x33CE, 'M', 'km'), - (0x33CF, 
'M', 'kt'), - (0x33D0, 'M', 'lm'), - (0x33D1, 'M', 'ln'), - (0x33D2, 'M', 'log'), - (0x33D3, 'M', 'lx'), - (0x33D4, 'M', 'mb'), - (0x33D5, 'M', 'mil'), - (0x33D6, 'M', 'mol'), - (0x33D7, 'M', 'ph'), - (0x33D8, 'X'), - (0x33D9, 'M', 'ppm'), - (0x33DA, 'M', 'pr'), - (0x33DB, 'M', 'sr'), - (0x33DC, 'M', 'sv'), - (0x33DD, 'M', 'wb'), - ] - -def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x33DE, 'M', 'v∕m'), - (0x33DF, 'M', 'a∕m'), - (0x33E0, 'M', '1日'), - (0x33E1, 'M', '2日'), - (0x33E2, 'M', '3日'), - (0x33E3, 'M', '4日'), - (0x33E4, 'M', '5日'), - (0x33E5, 'M', '6日'), - (0x33E6, 'M', '7日'), - (0x33E7, 'M', '8日'), - (0x33E8, 'M', '9日'), - (0x33E9, 'M', '10日'), - (0x33EA, 'M', '11日'), - (0x33EB, 'M', '12日'), - (0x33EC, 'M', '13日'), - (0x33ED, 'M', '14日'), - (0x33EE, 'M', '15日'), - (0x33EF, 'M', '16日'), - (0x33F0, 'M', '17日'), - (0x33F1, 'M', '18日'), - (0x33F2, 'M', '19日'), - (0x33F3, 'M', '20日'), - (0x33F4, 'M', '21日'), - (0x33F5, 'M', '22日'), - (0x33F6, 'M', '23日'), - (0x33F7, 'M', '24日'), - (0x33F8, 'M', '25日'), - (0x33F9, 'M', '26日'), - (0x33FA, 'M', '27日'), - (0x33FB, 'M', '28日'), - (0x33FC, 'M', '29日'), - (0x33FD, 'M', '30日'), - (0x33FE, 'M', '31日'), - (0x33FF, 'M', 'gal'), - (0x3400, 'V'), - (0xA48D, 'X'), - (0xA490, 'V'), - (0xA4C7, 'X'), - (0xA4D0, 'V'), - (0xA62C, 'X'), - (0xA640, 'M', 'ꙁ'), - (0xA641, 'V'), - (0xA642, 'M', 'ꙃ'), - (0xA643, 'V'), - (0xA644, 'M', 'ꙅ'), - (0xA645, 'V'), - (0xA646, 'M', 'ꙇ'), - (0xA647, 'V'), - (0xA648, 'M', 'ꙉ'), - (0xA649, 'V'), - (0xA64A, 'M', 'ꙋ'), - (0xA64B, 'V'), - (0xA64C, 'M', 'ꙍ'), - (0xA64D, 'V'), - (0xA64E, 'M', 'ꙏ'), - (0xA64F, 'V'), - (0xA650, 'M', 'ꙑ'), - (0xA651, 'V'), - (0xA652, 'M', 'ꙓ'), - (0xA653, 'V'), - (0xA654, 'M', 'ꙕ'), - (0xA655, 'V'), - (0xA656, 'M', 'ꙗ'), - (0xA657, 'V'), - (0xA658, 'M', 'ꙙ'), - (0xA659, 'V'), - (0xA65A, 'M', 'ꙛ'), - (0xA65B, 'V'), - (0xA65C, 'M', 'ꙝ'), - (0xA65D, 'V'), - (0xA65E, 'M', 'ꙟ'), - (0xA65F, 'V'), - (0xA660, 'M', 'ꙡ'), - (0xA661, 'V'), - (0xA662, 'M', 'ꙣ'), - (0xA663, 'V'), - (0xA664, 'M', 'ꙥ'), - (0xA665, 'V'), - (0xA666, 'M', 'ꙧ'), - (0xA667, 'V'), - (0xA668, 'M', 'ꙩ'), - (0xA669, 'V'), - (0xA66A, 'M', 'ꙫ'), - (0xA66B, 'V'), - (0xA66C, 'M', 'ꙭ'), - (0xA66D, 'V'), - (0xA680, 'M', 'ꚁ'), - (0xA681, 'V'), - (0xA682, 'M', 'ꚃ'), - (0xA683, 'V'), - (0xA684, 'M', 'ꚅ'), - (0xA685, 'V'), - (0xA686, 'M', 'ꚇ'), - (0xA687, 'V'), - (0xA688, 'M', 'ꚉ'), - (0xA689, 'V'), - (0xA68A, 'M', 'ꚋ'), - (0xA68B, 'V'), - (0xA68C, 'M', 'ꚍ'), - (0xA68D, 'V'), - ] - -def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA68E, 'M', 'ꚏ'), - (0xA68F, 'V'), - (0xA690, 'M', 'ꚑ'), - (0xA691, 'V'), - (0xA692, 'M', 'ꚓ'), - (0xA693, 'V'), - (0xA694, 'M', 'ꚕ'), - (0xA695, 'V'), - (0xA696, 'M', 'ꚗ'), - (0xA697, 'V'), - (0xA698, 'M', 'ꚙ'), - (0xA699, 'V'), - (0xA69A, 'M', 'ꚛ'), - (0xA69B, 'V'), - (0xA69C, 'M', 'ъ'), - (0xA69D, 'M', 'ь'), - (0xA69E, 'V'), - (0xA6F8, 'X'), - (0xA700, 'V'), - (0xA722, 'M', 'ꜣ'), - (0xA723, 'V'), - (0xA724, 'M', 'ꜥ'), - (0xA725, 'V'), - (0xA726, 'M', 'ꜧ'), - (0xA727, 'V'), - (0xA728, 'M', 'ꜩ'), - (0xA729, 'V'), - (0xA72A, 'M', 'ꜫ'), - (0xA72B, 'V'), - (0xA72C, 'M', 'ꜭ'), - (0xA72D, 'V'), - (0xA72E, 'M', 'ꜯ'), - (0xA72F, 'V'), - (0xA732, 'M', 'ꜳ'), - (0xA733, 'V'), - (0xA734, 'M', 'ꜵ'), - (0xA735, 'V'), - (0xA736, 'M', 'ꜷ'), - (0xA737, 'V'), - (0xA738, 'M', 'ꜹ'), - (0xA739, 'V'), - (0xA73A, 'M', 'ꜻ'), - (0xA73B, 'V'), - (0xA73C, 'M', 'ꜽ'), - (0xA73D, 'V'), - (0xA73E, 'M', 'ꜿ'), - (0xA73F, 'V'), - (0xA740, 'M', 'ꝁ'), - (0xA741, 'V'), - (0xA742, 'M', 
'ꝃ'), - (0xA743, 'V'), - (0xA744, 'M', 'ꝅ'), - (0xA745, 'V'), - (0xA746, 'M', 'ꝇ'), - (0xA747, 'V'), - (0xA748, 'M', 'ꝉ'), - (0xA749, 'V'), - (0xA74A, 'M', 'ꝋ'), - (0xA74B, 'V'), - (0xA74C, 'M', 'ꝍ'), - (0xA74D, 'V'), - (0xA74E, 'M', 'ꝏ'), - (0xA74F, 'V'), - (0xA750, 'M', 'ꝑ'), - (0xA751, 'V'), - (0xA752, 'M', 'ꝓ'), - (0xA753, 'V'), - (0xA754, 'M', 'ꝕ'), - (0xA755, 'V'), - (0xA756, 'M', 'ꝗ'), - (0xA757, 'V'), - (0xA758, 'M', 'ꝙ'), - (0xA759, 'V'), - (0xA75A, 'M', 'ꝛ'), - (0xA75B, 'V'), - (0xA75C, 'M', 'ꝝ'), - (0xA75D, 'V'), - (0xA75E, 'M', 'ꝟ'), - (0xA75F, 'V'), - (0xA760, 'M', 'ꝡ'), - (0xA761, 'V'), - (0xA762, 'M', 'ꝣ'), - (0xA763, 'V'), - (0xA764, 'M', 'ꝥ'), - (0xA765, 'V'), - (0xA766, 'M', 'ꝧ'), - (0xA767, 'V'), - (0xA768, 'M', 'ꝩ'), - (0xA769, 'V'), - (0xA76A, 'M', 'ꝫ'), - (0xA76B, 'V'), - (0xA76C, 'M', 'ꝭ'), - (0xA76D, 'V'), - (0xA76E, 'M', 'ꝯ'), - (0xA76F, 'V'), - (0xA770, 'M', 'ꝯ'), - (0xA771, 'V'), - (0xA779, 'M', 'ꝺ'), - (0xA77A, 'V'), - (0xA77B, 'M', 'ꝼ'), - ] - -def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA77C, 'V'), - (0xA77D, 'M', 'ᵹ'), - (0xA77E, 'M', 'ꝿ'), - (0xA77F, 'V'), - (0xA780, 'M', 'ꞁ'), - (0xA781, 'V'), - (0xA782, 'M', 'ꞃ'), - (0xA783, 'V'), - (0xA784, 'M', 'ꞅ'), - (0xA785, 'V'), - (0xA786, 'M', 'ꞇ'), - (0xA787, 'V'), - (0xA78B, 'M', 'ꞌ'), - (0xA78C, 'V'), - (0xA78D, 'M', 'ɥ'), - (0xA78E, 'V'), - (0xA790, 'M', 'ꞑ'), - (0xA791, 'V'), - (0xA792, 'M', 'ꞓ'), - (0xA793, 'V'), - (0xA796, 'M', 'ꞗ'), - (0xA797, 'V'), - (0xA798, 'M', 'ꞙ'), - (0xA799, 'V'), - (0xA79A, 'M', 'ꞛ'), - (0xA79B, 'V'), - (0xA79C, 'M', 'ꞝ'), - (0xA79D, 'V'), - (0xA79E, 'M', 'ꞟ'), - (0xA79F, 'V'), - (0xA7A0, 'M', 'ꞡ'), - (0xA7A1, 'V'), - (0xA7A2, 'M', 'ꞣ'), - (0xA7A3, 'V'), - (0xA7A4, 'M', 'ꞥ'), - (0xA7A5, 'V'), - (0xA7A6, 'M', 'ꞧ'), - (0xA7A7, 'V'), - (0xA7A8, 'M', 'ꞩ'), - (0xA7A9, 'V'), - (0xA7AA, 'M', 'ɦ'), - (0xA7AB, 'M', 'ɜ'), - (0xA7AC, 'M', 'ɡ'), - (0xA7AD, 'M', 'ɬ'), - (0xA7AE, 'M', 'ɪ'), - (0xA7AF, 'V'), - (0xA7B0, 'M', 'ʞ'), - (0xA7B1, 'M', 'ʇ'), - (0xA7B2, 'M', 'ʝ'), - (0xA7B3, 'M', 'ꭓ'), - (0xA7B4, 'M', 'ꞵ'), - (0xA7B5, 'V'), - (0xA7B6, 'M', 'ꞷ'), - (0xA7B7, 'V'), - (0xA7B8, 'M', 'ꞹ'), - (0xA7B9, 'V'), - (0xA7BA, 'M', 'ꞻ'), - (0xA7BB, 'V'), - (0xA7BC, 'M', 'ꞽ'), - (0xA7BD, 'V'), - (0xA7BE, 'M', 'ꞿ'), - (0xA7BF, 'V'), - (0xA7C0, 'M', 'ꟁ'), - (0xA7C1, 'V'), - (0xA7C2, 'M', 'ꟃ'), - (0xA7C3, 'V'), - (0xA7C4, 'M', 'ꞔ'), - (0xA7C5, 'M', 'ʂ'), - (0xA7C6, 'M', 'ᶎ'), - (0xA7C7, 'M', 'ꟈ'), - (0xA7C8, 'V'), - (0xA7C9, 'M', 'ꟊ'), - (0xA7CA, 'V'), - (0xA7CB, 'X'), - (0xA7D0, 'M', 'ꟑ'), - (0xA7D1, 'V'), - (0xA7D2, 'X'), - (0xA7D3, 'V'), - (0xA7D4, 'X'), - (0xA7D5, 'V'), - (0xA7D6, 'M', 'ꟗ'), - (0xA7D7, 'V'), - (0xA7D8, 'M', 'ꟙ'), - (0xA7D9, 'V'), - (0xA7DA, 'X'), - (0xA7F2, 'M', 'c'), - (0xA7F3, 'M', 'f'), - (0xA7F4, 'M', 'q'), - (0xA7F5, 'M', 'ꟶ'), - (0xA7F6, 'V'), - (0xA7F8, 'M', 'ħ'), - (0xA7F9, 'M', 'œ'), - (0xA7FA, 'V'), - (0xA82D, 'X'), - (0xA830, 'V'), - (0xA83A, 'X'), - (0xA840, 'V'), - (0xA878, 'X'), - (0xA880, 'V'), - (0xA8C6, 'X'), - ] - -def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA8CE, 'V'), - (0xA8DA, 'X'), - (0xA8E0, 'V'), - (0xA954, 'X'), - (0xA95F, 'V'), - (0xA97D, 'X'), - (0xA980, 'V'), - (0xA9CE, 'X'), - (0xA9CF, 'V'), - (0xA9DA, 'X'), - (0xA9DE, 'V'), - (0xA9FF, 'X'), - (0xAA00, 'V'), - (0xAA37, 'X'), - (0xAA40, 'V'), - (0xAA4E, 'X'), - (0xAA50, 'V'), - (0xAA5A, 'X'), - (0xAA5C, 'V'), - (0xAAC3, 'X'), - (0xAADB, 'V'), - (0xAAF7, 'X'), - (0xAB01, 'V'), - (0xAB07, 'X'), - (0xAB09, 'V'), - (0xAB0F, 'X'), - 
(0xAB11, 'V'), - (0xAB17, 'X'), - (0xAB20, 'V'), - (0xAB27, 'X'), - (0xAB28, 'V'), - (0xAB2F, 'X'), - (0xAB30, 'V'), - (0xAB5C, 'M', 'ꜧ'), - (0xAB5D, 'M', 'ꬷ'), - (0xAB5E, 'M', 'ɫ'), - (0xAB5F, 'M', 'ꭒ'), - (0xAB60, 'V'), - (0xAB69, 'M', 'ʍ'), - (0xAB6A, 'V'), - (0xAB6C, 'X'), - (0xAB70, 'M', 'Ꭰ'), - (0xAB71, 'M', 'Ꭱ'), - (0xAB72, 'M', 'Ꭲ'), - (0xAB73, 'M', 'Ꭳ'), - (0xAB74, 'M', 'Ꭴ'), - (0xAB75, 'M', 'Ꭵ'), - (0xAB76, 'M', 'Ꭶ'), - (0xAB77, 'M', 'Ꭷ'), - (0xAB78, 'M', 'Ꭸ'), - (0xAB79, 'M', 'Ꭹ'), - (0xAB7A, 'M', 'Ꭺ'), - (0xAB7B, 'M', 'Ꭻ'), - (0xAB7C, 'M', 'Ꭼ'), - (0xAB7D, 'M', 'Ꭽ'), - (0xAB7E, 'M', 'Ꭾ'), - (0xAB7F, 'M', 'Ꭿ'), - (0xAB80, 'M', 'Ꮀ'), - (0xAB81, 'M', 'Ꮁ'), - (0xAB82, 'M', 'Ꮂ'), - (0xAB83, 'M', 'Ꮃ'), - (0xAB84, 'M', 'Ꮄ'), - (0xAB85, 'M', 'Ꮅ'), - (0xAB86, 'M', 'Ꮆ'), - (0xAB87, 'M', 'Ꮇ'), - (0xAB88, 'M', 'Ꮈ'), - (0xAB89, 'M', 'Ꮉ'), - (0xAB8A, 'M', 'Ꮊ'), - (0xAB8B, 'M', 'Ꮋ'), - (0xAB8C, 'M', 'Ꮌ'), - (0xAB8D, 'M', 'Ꮍ'), - (0xAB8E, 'M', 'Ꮎ'), - (0xAB8F, 'M', 'Ꮏ'), - (0xAB90, 'M', 'Ꮐ'), - (0xAB91, 'M', 'Ꮑ'), - (0xAB92, 'M', 'Ꮒ'), - (0xAB93, 'M', 'Ꮓ'), - (0xAB94, 'M', 'Ꮔ'), - (0xAB95, 'M', 'Ꮕ'), - (0xAB96, 'M', 'Ꮖ'), - (0xAB97, 'M', 'Ꮗ'), - (0xAB98, 'M', 'Ꮘ'), - (0xAB99, 'M', 'Ꮙ'), - (0xAB9A, 'M', 'Ꮚ'), - (0xAB9B, 'M', 'Ꮛ'), - (0xAB9C, 'M', 'Ꮜ'), - (0xAB9D, 'M', 'Ꮝ'), - (0xAB9E, 'M', 'Ꮞ'), - (0xAB9F, 'M', 'Ꮟ'), - (0xABA0, 'M', 'Ꮠ'), - (0xABA1, 'M', 'Ꮡ'), - (0xABA2, 'M', 'Ꮢ'), - (0xABA3, 'M', 'Ꮣ'), - (0xABA4, 'M', 'Ꮤ'), - (0xABA5, 'M', 'Ꮥ'), - (0xABA6, 'M', 'Ꮦ'), - (0xABA7, 'M', 'Ꮧ'), - (0xABA8, 'M', 'Ꮨ'), - (0xABA9, 'M', 'Ꮩ'), - (0xABAA, 'M', 'Ꮪ'), - ] - -def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xABAB, 'M', 'Ꮫ'), - (0xABAC, 'M', 'Ꮬ'), - (0xABAD, 'M', 'Ꮭ'), - (0xABAE, 'M', 'Ꮮ'), - (0xABAF, 'M', 'Ꮯ'), - (0xABB0, 'M', 'Ꮰ'), - (0xABB1, 'M', 'Ꮱ'), - (0xABB2, 'M', 'Ꮲ'), - (0xABB3, 'M', 'Ꮳ'), - (0xABB4, 'M', 'Ꮴ'), - (0xABB5, 'M', 'Ꮵ'), - (0xABB6, 'M', 'Ꮶ'), - (0xABB7, 'M', 'Ꮷ'), - (0xABB8, 'M', 'Ꮸ'), - (0xABB9, 'M', 'Ꮹ'), - (0xABBA, 'M', 'Ꮺ'), - (0xABBB, 'M', 'Ꮻ'), - (0xABBC, 'M', 'Ꮼ'), - (0xABBD, 'M', 'Ꮽ'), - (0xABBE, 'M', 'Ꮾ'), - (0xABBF, 'M', 'Ꮿ'), - (0xABC0, 'V'), - (0xABEE, 'X'), - (0xABF0, 'V'), - (0xABFA, 'X'), - (0xAC00, 'V'), - (0xD7A4, 'X'), - (0xD7B0, 'V'), - (0xD7C7, 'X'), - (0xD7CB, 'V'), - (0xD7FC, 'X'), - (0xF900, 'M', '豈'), - (0xF901, 'M', '更'), - (0xF902, 'M', '車'), - (0xF903, 'M', '賈'), - (0xF904, 'M', '滑'), - (0xF905, 'M', '串'), - (0xF906, 'M', '句'), - (0xF907, 'M', '龜'), - (0xF909, 'M', '契'), - (0xF90A, 'M', '金'), - (0xF90B, 'M', '喇'), - (0xF90C, 'M', '奈'), - (0xF90D, 'M', '懶'), - (0xF90E, 'M', '癩'), - (0xF90F, 'M', '羅'), - (0xF910, 'M', '蘿'), - (0xF911, 'M', '螺'), - (0xF912, 'M', '裸'), - (0xF913, 'M', '邏'), - (0xF914, 'M', '樂'), - (0xF915, 'M', '洛'), - (0xF916, 'M', '烙'), - (0xF917, 'M', '珞'), - (0xF918, 'M', '落'), - (0xF919, 'M', '酪'), - (0xF91A, 'M', '駱'), - (0xF91B, 'M', '亂'), - (0xF91C, 'M', '卵'), - (0xF91D, 'M', '欄'), - (0xF91E, 'M', '爛'), - (0xF91F, 'M', '蘭'), - (0xF920, 'M', '鸞'), - (0xF921, 'M', '嵐'), - (0xF922, 'M', '濫'), - (0xF923, 'M', '藍'), - (0xF924, 'M', '襤'), - (0xF925, 'M', '拉'), - (0xF926, 'M', '臘'), - (0xF927, 'M', '蠟'), - (0xF928, 'M', '廊'), - (0xF929, 'M', '朗'), - (0xF92A, 'M', '浪'), - (0xF92B, 'M', '狼'), - (0xF92C, 'M', '郎'), - (0xF92D, 'M', '來'), - (0xF92E, 'M', '冷'), - (0xF92F, 'M', '勞'), - (0xF930, 'M', '擄'), - (0xF931, 'M', '櫓'), - (0xF932, 'M', '爐'), - (0xF933, 'M', '盧'), - (0xF934, 'M', '老'), - (0xF935, 'M', '蘆'), - (0xF936, 'M', '虜'), - (0xF937, 'M', '路'), - (0xF938, 'M', '露'), - (0xF939, 'M', '魯'), - 
(0xF93A, 'M', '鷺'), - (0xF93B, 'M', '碌'), - (0xF93C, 'M', '祿'), - (0xF93D, 'M', '綠'), - (0xF93E, 'M', '菉'), - (0xF93F, 'M', '錄'), - (0xF940, 'M', '鹿'), - (0xF941, 'M', '論'), - (0xF942, 'M', '壟'), - (0xF943, 'M', '弄'), - (0xF944, 'M', '籠'), - (0xF945, 'M', '聾'), - ] - -def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xF946, 'M', '牢'), - (0xF947, 'M', '磊'), - (0xF948, 'M', '賂'), - (0xF949, 'M', '雷'), - (0xF94A, 'M', '壘'), - (0xF94B, 'M', '屢'), - (0xF94C, 'M', '樓'), - (0xF94D, 'M', '淚'), - (0xF94E, 'M', '漏'), - (0xF94F, 'M', '累'), - (0xF950, 'M', '縷'), - (0xF951, 'M', '陋'), - (0xF952, 'M', '勒'), - (0xF953, 'M', '肋'), - (0xF954, 'M', '凜'), - (0xF955, 'M', '凌'), - (0xF956, 'M', '稜'), - (0xF957, 'M', '綾'), - (0xF958, 'M', '菱'), - (0xF959, 'M', '陵'), - (0xF95A, 'M', '讀'), - (0xF95B, 'M', '拏'), - (0xF95C, 'M', '樂'), - (0xF95D, 'M', '諾'), - (0xF95E, 'M', '丹'), - (0xF95F, 'M', '寧'), - (0xF960, 'M', '怒'), - (0xF961, 'M', '率'), - (0xF962, 'M', '異'), - (0xF963, 'M', '北'), - (0xF964, 'M', '磻'), - (0xF965, 'M', '便'), - (0xF966, 'M', '復'), - (0xF967, 'M', '不'), - (0xF968, 'M', '泌'), - (0xF969, 'M', '數'), - (0xF96A, 'M', '索'), - (0xF96B, 'M', '參'), - (0xF96C, 'M', '塞'), - (0xF96D, 'M', '省'), - (0xF96E, 'M', '葉'), - (0xF96F, 'M', '說'), - (0xF970, 'M', '殺'), - (0xF971, 'M', '辰'), - (0xF972, 'M', '沈'), - (0xF973, 'M', '拾'), - (0xF974, 'M', '若'), - (0xF975, 'M', '掠'), - (0xF976, 'M', '略'), - (0xF977, 'M', '亮'), - (0xF978, 'M', '兩'), - (0xF979, 'M', '凉'), - (0xF97A, 'M', '梁'), - (0xF97B, 'M', '糧'), - (0xF97C, 'M', '良'), - (0xF97D, 'M', '諒'), - (0xF97E, 'M', '量'), - (0xF97F, 'M', '勵'), - (0xF980, 'M', '呂'), - (0xF981, 'M', '女'), - (0xF982, 'M', '廬'), - (0xF983, 'M', '旅'), - (0xF984, 'M', '濾'), - (0xF985, 'M', '礪'), - (0xF986, 'M', '閭'), - (0xF987, 'M', '驪'), - (0xF988, 'M', '麗'), - (0xF989, 'M', '黎'), - (0xF98A, 'M', '力'), - (0xF98B, 'M', '曆'), - (0xF98C, 'M', '歷'), - (0xF98D, 'M', '轢'), - (0xF98E, 'M', '年'), - (0xF98F, 'M', '憐'), - (0xF990, 'M', '戀'), - (0xF991, 'M', '撚'), - (0xF992, 'M', '漣'), - (0xF993, 'M', '煉'), - (0xF994, 'M', '璉'), - (0xF995, 'M', '秊'), - (0xF996, 'M', '練'), - (0xF997, 'M', '聯'), - (0xF998, 'M', '輦'), - (0xF999, 'M', '蓮'), - (0xF99A, 'M', '連'), - (0xF99B, 'M', '鍊'), - (0xF99C, 'M', '列'), - (0xF99D, 'M', '劣'), - (0xF99E, 'M', '咽'), - (0xF99F, 'M', '烈'), - (0xF9A0, 'M', '裂'), - (0xF9A1, 'M', '說'), - (0xF9A2, 'M', '廉'), - (0xF9A3, 'M', '念'), - (0xF9A4, 'M', '捻'), - (0xF9A5, 'M', '殮'), - (0xF9A6, 'M', '簾'), - (0xF9A7, 'M', '獵'), - (0xF9A8, 'M', '令'), - (0xF9A9, 'M', '囹'), - ] - -def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xF9AA, 'M', '寧'), - (0xF9AB, 'M', '嶺'), - (0xF9AC, 'M', '怜'), - (0xF9AD, 'M', '玲'), - (0xF9AE, 'M', '瑩'), - (0xF9AF, 'M', '羚'), - (0xF9B0, 'M', '聆'), - (0xF9B1, 'M', '鈴'), - (0xF9B2, 'M', '零'), - (0xF9B3, 'M', '靈'), - (0xF9B4, 'M', '領'), - (0xF9B5, 'M', '例'), - (0xF9B6, 'M', '禮'), - (0xF9B7, 'M', '醴'), - (0xF9B8, 'M', '隸'), - (0xF9B9, 'M', '惡'), - (0xF9BA, 'M', '了'), - (0xF9BB, 'M', '僚'), - (0xF9BC, 'M', '寮'), - (0xF9BD, 'M', '尿'), - (0xF9BE, 'M', '料'), - (0xF9BF, 'M', '樂'), - (0xF9C0, 'M', '燎'), - (0xF9C1, 'M', '療'), - (0xF9C2, 'M', '蓼'), - (0xF9C3, 'M', '遼'), - (0xF9C4, 'M', '龍'), - (0xF9C5, 'M', '暈'), - (0xF9C6, 'M', '阮'), - (0xF9C7, 'M', '劉'), - (0xF9C8, 'M', '杻'), - (0xF9C9, 'M', '柳'), - (0xF9CA, 'M', '流'), - (0xF9CB, 'M', '溜'), - (0xF9CC, 'M', '琉'), - (0xF9CD, 'M', '留'), - (0xF9CE, 'M', '硫'), - (0xF9CF, 'M', '紐'), - (0xF9D0, 'M', '類'), - (0xF9D1, 'M', '六'), - (0xF9D2, 'M', '戮'), - (0xF9D3, 'M', 
'陸'), - (0xF9D4, 'M', '倫'), - (0xF9D5, 'M', '崙'), - (0xF9D6, 'M', '淪'), - (0xF9D7, 'M', '輪'), - (0xF9D8, 'M', '律'), - (0xF9D9, 'M', '慄'), - (0xF9DA, 'M', '栗'), - (0xF9DB, 'M', '率'), - (0xF9DC, 'M', '隆'), - (0xF9DD, 'M', '利'), - (0xF9DE, 'M', '吏'), - (0xF9DF, 'M', '履'), - (0xF9E0, 'M', '易'), - (0xF9E1, 'M', '李'), - (0xF9E2, 'M', '梨'), - (0xF9E3, 'M', '泥'), - (0xF9E4, 'M', '理'), - (0xF9E5, 'M', '痢'), - (0xF9E6, 'M', '罹'), - (0xF9E7, 'M', '裏'), - (0xF9E8, 'M', '裡'), - (0xF9E9, 'M', '里'), - (0xF9EA, 'M', '離'), - (0xF9EB, 'M', '匿'), - (0xF9EC, 'M', '溺'), - (0xF9ED, 'M', '吝'), - (0xF9EE, 'M', '燐'), - (0xF9EF, 'M', '璘'), - (0xF9F0, 'M', '藺'), - (0xF9F1, 'M', '隣'), - (0xF9F2, 'M', '鱗'), - (0xF9F3, 'M', '麟'), - (0xF9F4, 'M', '林'), - (0xF9F5, 'M', '淋'), - (0xF9F6, 'M', '臨'), - (0xF9F7, 'M', '立'), - (0xF9F8, 'M', '笠'), - (0xF9F9, 'M', '粒'), - (0xF9FA, 'M', '狀'), - (0xF9FB, 'M', '炙'), - (0xF9FC, 'M', '識'), - (0xF9FD, 'M', '什'), - (0xF9FE, 'M', '茶'), - (0xF9FF, 'M', '刺'), - (0xFA00, 'M', '切'), - (0xFA01, 'M', '度'), - (0xFA02, 'M', '拓'), - (0xFA03, 'M', '糖'), - (0xFA04, 'M', '宅'), - (0xFA05, 'M', '洞'), - (0xFA06, 'M', '暴'), - (0xFA07, 'M', '輻'), - (0xFA08, 'M', '行'), - (0xFA09, 'M', '降'), - (0xFA0A, 'M', '見'), - (0xFA0B, 'M', '廓'), - (0xFA0C, 'M', '兀'), - (0xFA0D, 'M', '嗀'), - ] - -def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFA0E, 'V'), - (0xFA10, 'M', '塚'), - (0xFA11, 'V'), - (0xFA12, 'M', '晴'), - (0xFA13, 'V'), - (0xFA15, 'M', '凞'), - (0xFA16, 'M', '猪'), - (0xFA17, 'M', '益'), - (0xFA18, 'M', '礼'), - (0xFA19, 'M', '神'), - (0xFA1A, 'M', '祥'), - (0xFA1B, 'M', '福'), - (0xFA1C, 'M', '靖'), - (0xFA1D, 'M', '精'), - (0xFA1E, 'M', '羽'), - (0xFA1F, 'V'), - (0xFA20, 'M', '蘒'), - (0xFA21, 'V'), - (0xFA22, 'M', '諸'), - (0xFA23, 'V'), - (0xFA25, 'M', '逸'), - (0xFA26, 'M', '都'), - (0xFA27, 'V'), - (0xFA2A, 'M', '飯'), - (0xFA2B, 'M', '飼'), - (0xFA2C, 'M', '館'), - (0xFA2D, 'M', '鶴'), - (0xFA2E, 'M', '郞'), - (0xFA2F, 'M', '隷'), - (0xFA30, 'M', '侮'), - (0xFA31, 'M', '僧'), - (0xFA32, 'M', '免'), - (0xFA33, 'M', '勉'), - (0xFA34, 'M', '勤'), - (0xFA35, 'M', '卑'), - (0xFA36, 'M', '喝'), - (0xFA37, 'M', '嘆'), - (0xFA38, 'M', '器'), - (0xFA39, 'M', '塀'), - (0xFA3A, 'M', '墨'), - (0xFA3B, 'M', '層'), - (0xFA3C, 'M', '屮'), - (0xFA3D, 'M', '悔'), - (0xFA3E, 'M', '慨'), - (0xFA3F, 'M', '憎'), - (0xFA40, 'M', '懲'), - (0xFA41, 'M', '敏'), - (0xFA42, 'M', '既'), - (0xFA43, 'M', '暑'), - (0xFA44, 'M', '梅'), - (0xFA45, 'M', '海'), - (0xFA46, 'M', '渚'), - (0xFA47, 'M', '漢'), - (0xFA48, 'M', '煮'), - (0xFA49, 'M', '爫'), - (0xFA4A, 'M', '琢'), - (0xFA4B, 'M', '碑'), - (0xFA4C, 'M', '社'), - (0xFA4D, 'M', '祉'), - (0xFA4E, 'M', '祈'), - (0xFA4F, 'M', '祐'), - (0xFA50, 'M', '祖'), - (0xFA51, 'M', '祝'), - (0xFA52, 'M', '禍'), - (0xFA53, 'M', '禎'), - (0xFA54, 'M', '穀'), - (0xFA55, 'M', '突'), - (0xFA56, 'M', '節'), - (0xFA57, 'M', '練'), - (0xFA58, 'M', '縉'), - (0xFA59, 'M', '繁'), - (0xFA5A, 'M', '署'), - (0xFA5B, 'M', '者'), - (0xFA5C, 'M', '臭'), - (0xFA5D, 'M', '艹'), - (0xFA5F, 'M', '著'), - (0xFA60, 'M', '褐'), - (0xFA61, 'M', '視'), - (0xFA62, 'M', '謁'), - (0xFA63, 'M', '謹'), - (0xFA64, 'M', '賓'), - (0xFA65, 'M', '贈'), - (0xFA66, 'M', '辶'), - (0xFA67, 'M', '逸'), - (0xFA68, 'M', '難'), - (0xFA69, 'M', '響'), - (0xFA6A, 'M', '頻'), - (0xFA6B, 'M', '恵'), - (0xFA6C, 'M', '𤋮'), - (0xFA6D, 'M', '舘'), - (0xFA6E, 'X'), - (0xFA70, 'M', '並'), - (0xFA71, 'M', '况'), - (0xFA72, 'M', '全'), - (0xFA73, 'M', '侀'), - (0xFA74, 'M', '充'), - (0xFA75, 'M', '冀'), - (0xFA76, 'M', '勇'), - (0xFA77, 'M', '勺'), - (0xFA78, 'M', '喝'), - ] - -def _seg_43() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFA79, 'M', '啕'), - (0xFA7A, 'M', '喙'), - (0xFA7B, 'M', '嗢'), - (0xFA7C, 'M', '塚'), - (0xFA7D, 'M', '墳'), - (0xFA7E, 'M', '奄'), - (0xFA7F, 'M', '奔'), - (0xFA80, 'M', '婢'), - (0xFA81, 'M', '嬨'), - (0xFA82, 'M', '廒'), - (0xFA83, 'M', '廙'), - (0xFA84, 'M', '彩'), - (0xFA85, 'M', '徭'), - (0xFA86, 'M', '惘'), - (0xFA87, 'M', '慎'), - (0xFA88, 'M', '愈'), - (0xFA89, 'M', '憎'), - (0xFA8A, 'M', '慠'), - (0xFA8B, 'M', '懲'), - (0xFA8C, 'M', '戴'), - (0xFA8D, 'M', '揄'), - (0xFA8E, 'M', '搜'), - (0xFA8F, 'M', '摒'), - (0xFA90, 'M', '敖'), - (0xFA91, 'M', '晴'), - (0xFA92, 'M', '朗'), - (0xFA93, 'M', '望'), - (0xFA94, 'M', '杖'), - (0xFA95, 'M', '歹'), - (0xFA96, 'M', '殺'), - (0xFA97, 'M', '流'), - (0xFA98, 'M', '滛'), - (0xFA99, 'M', '滋'), - (0xFA9A, 'M', '漢'), - (0xFA9B, 'M', '瀞'), - (0xFA9C, 'M', '煮'), - (0xFA9D, 'M', '瞧'), - (0xFA9E, 'M', '爵'), - (0xFA9F, 'M', '犯'), - (0xFAA0, 'M', '猪'), - (0xFAA1, 'M', '瑱'), - (0xFAA2, 'M', '甆'), - (0xFAA3, 'M', '画'), - (0xFAA4, 'M', '瘝'), - (0xFAA5, 'M', '瘟'), - (0xFAA6, 'M', '益'), - (0xFAA7, 'M', '盛'), - (0xFAA8, 'M', '直'), - (0xFAA9, 'M', '睊'), - (0xFAAA, 'M', '着'), - (0xFAAB, 'M', '磌'), - (0xFAAC, 'M', '窱'), - (0xFAAD, 'M', '節'), - (0xFAAE, 'M', '类'), - (0xFAAF, 'M', '絛'), - (0xFAB0, 'M', '練'), - (0xFAB1, 'M', '缾'), - (0xFAB2, 'M', '者'), - (0xFAB3, 'M', '荒'), - (0xFAB4, 'M', '華'), - (0xFAB5, 'M', '蝹'), - (0xFAB6, 'M', '襁'), - (0xFAB7, 'M', '覆'), - (0xFAB8, 'M', '視'), - (0xFAB9, 'M', '調'), - (0xFABA, 'M', '諸'), - (0xFABB, 'M', '請'), - (0xFABC, 'M', '謁'), - (0xFABD, 'M', '諾'), - (0xFABE, 'M', '諭'), - (0xFABF, 'M', '謹'), - (0xFAC0, 'M', '變'), - (0xFAC1, 'M', '贈'), - (0xFAC2, 'M', '輸'), - (0xFAC3, 'M', '遲'), - (0xFAC4, 'M', '醙'), - (0xFAC5, 'M', '鉶'), - (0xFAC6, 'M', '陼'), - (0xFAC7, 'M', '難'), - (0xFAC8, 'M', '靖'), - (0xFAC9, 'M', '韛'), - (0xFACA, 'M', '響'), - (0xFACB, 'M', '頋'), - (0xFACC, 'M', '頻'), - (0xFACD, 'M', '鬒'), - (0xFACE, 'M', '龜'), - (0xFACF, 'M', '𢡊'), - (0xFAD0, 'M', '𢡄'), - (0xFAD1, 'M', '𣏕'), - (0xFAD2, 'M', '㮝'), - (0xFAD3, 'M', '䀘'), - (0xFAD4, 'M', '䀹'), - (0xFAD5, 'M', '𥉉'), - (0xFAD6, 'M', '𥳐'), - (0xFAD7, 'M', '𧻓'), - (0xFAD8, 'M', '齃'), - (0xFAD9, 'M', '龎'), - (0xFADA, 'X'), - (0xFB00, 'M', 'ff'), - (0xFB01, 'M', 'fi'), - ] - -def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFB02, 'M', 'fl'), - (0xFB03, 'M', 'ffi'), - (0xFB04, 'M', 'ffl'), - (0xFB05, 'M', 'st'), - (0xFB07, 'X'), - (0xFB13, 'M', 'մն'), - (0xFB14, 'M', 'մե'), - (0xFB15, 'M', 'մի'), - (0xFB16, 'M', 'վն'), - (0xFB17, 'M', 'մխ'), - (0xFB18, 'X'), - (0xFB1D, 'M', 'יִ'), - (0xFB1E, 'V'), - (0xFB1F, 'M', 'ײַ'), - (0xFB20, 'M', 'ע'), - (0xFB21, 'M', 'א'), - (0xFB22, 'M', 'ד'), - (0xFB23, 'M', 'ה'), - (0xFB24, 'M', 'כ'), - (0xFB25, 'M', 'ל'), - (0xFB26, 'M', 'ם'), - (0xFB27, 'M', 'ר'), - (0xFB28, 'M', 'ת'), - (0xFB29, '3', '+'), - (0xFB2A, 'M', 'שׁ'), - (0xFB2B, 'M', 'שׂ'), - (0xFB2C, 'M', 'שּׁ'), - (0xFB2D, 'M', 'שּׂ'), - (0xFB2E, 'M', 'אַ'), - (0xFB2F, 'M', 'אָ'), - (0xFB30, 'M', 'אּ'), - (0xFB31, 'M', 'בּ'), - (0xFB32, 'M', 'גּ'), - (0xFB33, 'M', 'דּ'), - (0xFB34, 'M', 'הּ'), - (0xFB35, 'M', 'וּ'), - (0xFB36, 'M', 'זּ'), - (0xFB37, 'X'), - (0xFB38, 'M', 'טּ'), - (0xFB39, 'M', 'יּ'), - (0xFB3A, 'M', 'ךּ'), - (0xFB3B, 'M', 'כּ'), - (0xFB3C, 'M', 'לּ'), - (0xFB3D, 'X'), - (0xFB3E, 'M', 'מּ'), - (0xFB3F, 'X'), - (0xFB40, 'M', 'נּ'), - (0xFB41, 'M', 'סּ'), - (0xFB42, 'X'), - (0xFB43, 'M', 'ףּ'), - (0xFB44, 'M', 'פּ'), - (0xFB45, 'X'), - (0xFB46, 'M', 'צּ'), - (0xFB47, 'M', 'קּ'), - (0xFB48, 'M', 
'רּ'), - (0xFB49, 'M', 'שּ'), - (0xFB4A, 'M', 'תּ'), - (0xFB4B, 'M', 'וֹ'), - (0xFB4C, 'M', 'בֿ'), - (0xFB4D, 'M', 'כֿ'), - (0xFB4E, 'M', 'פֿ'), - (0xFB4F, 'M', 'אל'), - (0xFB50, 'M', 'ٱ'), - (0xFB52, 'M', 'ٻ'), - (0xFB56, 'M', 'پ'), - (0xFB5A, 'M', 'ڀ'), - (0xFB5E, 'M', 'ٺ'), - (0xFB62, 'M', 'ٿ'), - (0xFB66, 'M', 'ٹ'), - (0xFB6A, 'M', 'ڤ'), - (0xFB6E, 'M', 'ڦ'), - (0xFB72, 'M', 'ڄ'), - (0xFB76, 'M', 'ڃ'), - (0xFB7A, 'M', 'چ'), - (0xFB7E, 'M', 'ڇ'), - (0xFB82, 'M', 'ڍ'), - (0xFB84, 'M', 'ڌ'), - (0xFB86, 'M', 'ڎ'), - (0xFB88, 'M', 'ڈ'), - (0xFB8A, 'M', 'ژ'), - (0xFB8C, 'M', 'ڑ'), - (0xFB8E, 'M', 'ک'), - (0xFB92, 'M', 'گ'), - (0xFB96, 'M', 'ڳ'), - (0xFB9A, 'M', 'ڱ'), - (0xFB9E, 'M', 'ں'), - (0xFBA0, 'M', 'ڻ'), - (0xFBA4, 'M', 'ۀ'), - (0xFBA6, 'M', 'ہ'), - (0xFBAA, 'M', 'ھ'), - (0xFBAE, 'M', 'ے'), - (0xFBB0, 'M', 'ۓ'), - (0xFBB2, 'V'), - (0xFBC3, 'X'), - (0xFBD3, 'M', 'ڭ'), - (0xFBD7, 'M', 'ۇ'), - (0xFBD9, 'M', 'ۆ'), - (0xFBDB, 'M', 'ۈ'), - (0xFBDD, 'M', 'ۇٴ'), - (0xFBDE, 'M', 'ۋ'), - ] - -def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFBE0, 'M', 'ۅ'), - (0xFBE2, 'M', 'ۉ'), - (0xFBE4, 'M', 'ې'), - (0xFBE8, 'M', 'ى'), - (0xFBEA, 'M', 'ئا'), - (0xFBEC, 'M', 'ئە'), - (0xFBEE, 'M', 'ئو'), - (0xFBF0, 'M', 'ئۇ'), - (0xFBF2, 'M', 'ئۆ'), - (0xFBF4, 'M', 'ئۈ'), - (0xFBF6, 'M', 'ئې'), - (0xFBF9, 'M', 'ئى'), - (0xFBFC, 'M', 'ی'), - (0xFC00, 'M', 'ئج'), - (0xFC01, 'M', 'ئح'), - (0xFC02, 'M', 'ئم'), - (0xFC03, 'M', 'ئى'), - (0xFC04, 'M', 'ئي'), - (0xFC05, 'M', 'بج'), - (0xFC06, 'M', 'بح'), - (0xFC07, 'M', 'بخ'), - (0xFC08, 'M', 'بم'), - (0xFC09, 'M', 'بى'), - (0xFC0A, 'M', 'بي'), - (0xFC0B, 'M', 'تج'), - (0xFC0C, 'M', 'تح'), - (0xFC0D, 'M', 'تخ'), - (0xFC0E, 'M', 'تم'), - (0xFC0F, 'M', 'تى'), - (0xFC10, 'M', 'تي'), - (0xFC11, 'M', 'ثج'), - (0xFC12, 'M', 'ثم'), - (0xFC13, 'M', 'ثى'), - (0xFC14, 'M', 'ثي'), - (0xFC15, 'M', 'جح'), - (0xFC16, 'M', 'جم'), - (0xFC17, 'M', 'حج'), - (0xFC18, 'M', 'حم'), - (0xFC19, 'M', 'خج'), - (0xFC1A, 'M', 'خح'), - (0xFC1B, 'M', 'خم'), - (0xFC1C, 'M', 'سج'), - (0xFC1D, 'M', 'سح'), - (0xFC1E, 'M', 'سخ'), - (0xFC1F, 'M', 'سم'), - (0xFC20, 'M', 'صح'), - (0xFC21, 'M', 'صم'), - (0xFC22, 'M', 'ضج'), - (0xFC23, 'M', 'ضح'), - (0xFC24, 'M', 'ضخ'), - (0xFC25, 'M', 'ضم'), - (0xFC26, 'M', 'طح'), - (0xFC27, 'M', 'طم'), - (0xFC28, 'M', 'ظم'), - (0xFC29, 'M', 'عج'), - (0xFC2A, 'M', 'عم'), - (0xFC2B, 'M', 'غج'), - (0xFC2C, 'M', 'غم'), - (0xFC2D, 'M', 'فج'), - (0xFC2E, 'M', 'فح'), - (0xFC2F, 'M', 'فخ'), - (0xFC30, 'M', 'فم'), - (0xFC31, 'M', 'فى'), - (0xFC32, 'M', 'في'), - (0xFC33, 'M', 'قح'), - (0xFC34, 'M', 'قم'), - (0xFC35, 'M', 'قى'), - (0xFC36, 'M', 'قي'), - (0xFC37, 'M', 'كا'), - (0xFC38, 'M', 'كج'), - (0xFC39, 'M', 'كح'), - (0xFC3A, 'M', 'كخ'), - (0xFC3B, 'M', 'كل'), - (0xFC3C, 'M', 'كم'), - (0xFC3D, 'M', 'كى'), - (0xFC3E, 'M', 'كي'), - (0xFC3F, 'M', 'لج'), - (0xFC40, 'M', 'لح'), - (0xFC41, 'M', 'لخ'), - (0xFC42, 'M', 'لم'), - (0xFC43, 'M', 'لى'), - (0xFC44, 'M', 'لي'), - (0xFC45, 'M', 'مج'), - (0xFC46, 'M', 'مح'), - (0xFC47, 'M', 'مخ'), - (0xFC48, 'M', 'مم'), - (0xFC49, 'M', 'مى'), - (0xFC4A, 'M', 'مي'), - (0xFC4B, 'M', 'نج'), - (0xFC4C, 'M', 'نح'), - (0xFC4D, 'M', 'نخ'), - (0xFC4E, 'M', 'نم'), - (0xFC4F, 'M', 'نى'), - (0xFC50, 'M', 'ني'), - (0xFC51, 'M', 'هج'), - (0xFC52, 'M', 'هم'), - (0xFC53, 'M', 'هى'), - (0xFC54, 'M', 'هي'), - (0xFC55, 'M', 'يج'), - (0xFC56, 'M', 'يح'), - ] - -def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFC57, 'M', 'يخ'), - (0xFC58, 'M', 'يم'), - (0xFC59, 'M', 'يى'), - (0xFC5A, 'M', 
'يي'), - (0xFC5B, 'M', 'ذٰ'), - (0xFC5C, 'M', 'رٰ'), - (0xFC5D, 'M', 'ىٰ'), - (0xFC5E, '3', ' ٌّ'), - (0xFC5F, '3', ' ٍّ'), - (0xFC60, '3', ' َّ'), - (0xFC61, '3', ' ُّ'), - (0xFC62, '3', ' ِّ'), - (0xFC63, '3', ' ّٰ'), - (0xFC64, 'M', 'ئر'), - (0xFC65, 'M', 'ئز'), - (0xFC66, 'M', 'ئم'), - (0xFC67, 'M', 'ئن'), - (0xFC68, 'M', 'ئى'), - (0xFC69, 'M', 'ئي'), - (0xFC6A, 'M', 'بر'), - (0xFC6B, 'M', 'بز'), - (0xFC6C, 'M', 'بم'), - (0xFC6D, 'M', 'بن'), - (0xFC6E, 'M', 'بى'), - (0xFC6F, 'M', 'بي'), - (0xFC70, 'M', 'تر'), - (0xFC71, 'M', 'تز'), - (0xFC72, 'M', 'تم'), - (0xFC73, 'M', 'تن'), - (0xFC74, 'M', 'تى'), - (0xFC75, 'M', 'تي'), - (0xFC76, 'M', 'ثر'), - (0xFC77, 'M', 'ثز'), - (0xFC78, 'M', 'ثم'), - (0xFC79, 'M', 'ثن'), - (0xFC7A, 'M', 'ثى'), - (0xFC7B, 'M', 'ثي'), - (0xFC7C, 'M', 'فى'), - (0xFC7D, 'M', 'في'), - (0xFC7E, 'M', 'قى'), - (0xFC7F, 'M', 'قي'), - (0xFC80, 'M', 'كا'), - (0xFC81, 'M', 'كل'), - (0xFC82, 'M', 'كم'), - (0xFC83, 'M', 'كى'), - (0xFC84, 'M', 'كي'), - (0xFC85, 'M', 'لم'), - (0xFC86, 'M', 'لى'), - (0xFC87, 'M', 'لي'), - (0xFC88, 'M', 'ما'), - (0xFC89, 'M', 'مم'), - (0xFC8A, 'M', 'نر'), - (0xFC8B, 'M', 'نز'), - (0xFC8C, 'M', 'نم'), - (0xFC8D, 'M', 'نن'), - (0xFC8E, 'M', 'نى'), - (0xFC8F, 'M', 'ني'), - (0xFC90, 'M', 'ىٰ'), - (0xFC91, 'M', 'ير'), - (0xFC92, 'M', 'يز'), - (0xFC93, 'M', 'يم'), - (0xFC94, 'M', 'ين'), - (0xFC95, 'M', 'يى'), - (0xFC96, 'M', 'يي'), - (0xFC97, 'M', 'ئج'), - (0xFC98, 'M', 'ئح'), - (0xFC99, 'M', 'ئخ'), - (0xFC9A, 'M', 'ئم'), - (0xFC9B, 'M', 'ئه'), - (0xFC9C, 'M', 'بج'), - (0xFC9D, 'M', 'بح'), - (0xFC9E, 'M', 'بخ'), - (0xFC9F, 'M', 'بم'), - (0xFCA0, 'M', 'به'), - (0xFCA1, 'M', 'تج'), - (0xFCA2, 'M', 'تح'), - (0xFCA3, 'M', 'تخ'), - (0xFCA4, 'M', 'تم'), - (0xFCA5, 'M', 'ته'), - (0xFCA6, 'M', 'ثم'), - (0xFCA7, 'M', 'جح'), - (0xFCA8, 'M', 'جم'), - (0xFCA9, 'M', 'حج'), - (0xFCAA, 'M', 'حم'), - (0xFCAB, 'M', 'خج'), - (0xFCAC, 'M', 'خم'), - (0xFCAD, 'M', 'سج'), - (0xFCAE, 'M', 'سح'), - (0xFCAF, 'M', 'سخ'), - (0xFCB0, 'M', 'سم'), - (0xFCB1, 'M', 'صح'), - (0xFCB2, 'M', 'صخ'), - (0xFCB3, 'M', 'صم'), - (0xFCB4, 'M', 'ضج'), - (0xFCB5, 'M', 'ضح'), - (0xFCB6, 'M', 'ضخ'), - (0xFCB7, 'M', 'ضم'), - (0xFCB8, 'M', 'طح'), - (0xFCB9, 'M', 'ظم'), - (0xFCBA, 'M', 'عج'), - ] - -def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFCBB, 'M', 'عم'), - (0xFCBC, 'M', 'غج'), - (0xFCBD, 'M', 'غم'), - (0xFCBE, 'M', 'فج'), - (0xFCBF, 'M', 'فح'), - (0xFCC0, 'M', 'فخ'), - (0xFCC1, 'M', 'فم'), - (0xFCC2, 'M', 'قح'), - (0xFCC3, 'M', 'قم'), - (0xFCC4, 'M', 'كج'), - (0xFCC5, 'M', 'كح'), - (0xFCC6, 'M', 'كخ'), - (0xFCC7, 'M', 'كل'), - (0xFCC8, 'M', 'كم'), - (0xFCC9, 'M', 'لج'), - (0xFCCA, 'M', 'لح'), - (0xFCCB, 'M', 'لخ'), - (0xFCCC, 'M', 'لم'), - (0xFCCD, 'M', 'له'), - (0xFCCE, 'M', 'مج'), - (0xFCCF, 'M', 'مح'), - (0xFCD0, 'M', 'مخ'), - (0xFCD1, 'M', 'مم'), - (0xFCD2, 'M', 'نج'), - (0xFCD3, 'M', 'نح'), - (0xFCD4, 'M', 'نخ'), - (0xFCD5, 'M', 'نم'), - (0xFCD6, 'M', 'نه'), - (0xFCD7, 'M', 'هج'), - (0xFCD8, 'M', 'هم'), - (0xFCD9, 'M', 'هٰ'), - (0xFCDA, 'M', 'يج'), - (0xFCDB, 'M', 'يح'), - (0xFCDC, 'M', 'يخ'), - (0xFCDD, 'M', 'يم'), - (0xFCDE, 'M', 'يه'), - (0xFCDF, 'M', 'ئم'), - (0xFCE0, 'M', 'ئه'), - (0xFCE1, 'M', 'بم'), - (0xFCE2, 'M', 'به'), - (0xFCE3, 'M', 'تم'), - (0xFCE4, 'M', 'ته'), - (0xFCE5, 'M', 'ثم'), - (0xFCE6, 'M', 'ثه'), - (0xFCE7, 'M', 'سم'), - (0xFCE8, 'M', 'سه'), - (0xFCE9, 'M', 'شم'), - (0xFCEA, 'M', 'شه'), - (0xFCEB, 'M', 'كل'), - (0xFCEC, 'M', 'كم'), - (0xFCED, 'M', 'لم'), - (0xFCEE, 'M', 'نم'), - (0xFCEF, 'M', 'نه'), - (0xFCF0, 'M', 'يم'), - 
(0xFCF1, 'M', 'يه'), - (0xFCF2, 'M', 'ـَّ'), - (0xFCF3, 'M', 'ـُّ'), - (0xFCF4, 'M', 'ـِّ'), - (0xFCF5, 'M', 'طى'), - (0xFCF6, 'M', 'طي'), - (0xFCF7, 'M', 'عى'), - (0xFCF8, 'M', 'عي'), - (0xFCF9, 'M', 'غى'), - (0xFCFA, 'M', 'غي'), - (0xFCFB, 'M', 'سى'), - (0xFCFC, 'M', 'سي'), - (0xFCFD, 'M', 'شى'), - (0xFCFE, 'M', 'شي'), - (0xFCFF, 'M', 'حى'), - (0xFD00, 'M', 'حي'), - (0xFD01, 'M', 'جى'), - (0xFD02, 'M', 'جي'), - (0xFD03, 'M', 'خى'), - (0xFD04, 'M', 'خي'), - (0xFD05, 'M', 'صى'), - (0xFD06, 'M', 'صي'), - (0xFD07, 'M', 'ضى'), - (0xFD08, 'M', 'ضي'), - (0xFD09, 'M', 'شج'), - (0xFD0A, 'M', 'شح'), - (0xFD0B, 'M', 'شخ'), - (0xFD0C, 'M', 'شم'), - (0xFD0D, 'M', 'شر'), - (0xFD0E, 'M', 'سر'), - (0xFD0F, 'M', 'صر'), - (0xFD10, 'M', 'ضر'), - (0xFD11, 'M', 'طى'), - (0xFD12, 'M', 'طي'), - (0xFD13, 'M', 'عى'), - (0xFD14, 'M', 'عي'), - (0xFD15, 'M', 'غى'), - (0xFD16, 'M', 'غي'), - (0xFD17, 'M', 'سى'), - (0xFD18, 'M', 'سي'), - (0xFD19, 'M', 'شى'), - (0xFD1A, 'M', 'شي'), - (0xFD1B, 'M', 'حى'), - (0xFD1C, 'M', 'حي'), - (0xFD1D, 'M', 'جى'), - (0xFD1E, 'M', 'جي'), - ] - -def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFD1F, 'M', 'خى'), - (0xFD20, 'M', 'خي'), - (0xFD21, 'M', 'صى'), - (0xFD22, 'M', 'صي'), - (0xFD23, 'M', 'ضى'), - (0xFD24, 'M', 'ضي'), - (0xFD25, 'M', 'شج'), - (0xFD26, 'M', 'شح'), - (0xFD27, 'M', 'شخ'), - (0xFD28, 'M', 'شم'), - (0xFD29, 'M', 'شر'), - (0xFD2A, 'M', 'سر'), - (0xFD2B, 'M', 'صر'), - (0xFD2C, 'M', 'ضر'), - (0xFD2D, 'M', 'شج'), - (0xFD2E, 'M', 'شح'), - (0xFD2F, 'M', 'شخ'), - (0xFD30, 'M', 'شم'), - (0xFD31, 'M', 'سه'), - (0xFD32, 'M', 'شه'), - (0xFD33, 'M', 'طم'), - (0xFD34, 'M', 'سج'), - (0xFD35, 'M', 'سح'), - (0xFD36, 'M', 'سخ'), - (0xFD37, 'M', 'شج'), - (0xFD38, 'M', 'شح'), - (0xFD39, 'M', 'شخ'), - (0xFD3A, 'M', 'طم'), - (0xFD3B, 'M', 'ظم'), - (0xFD3C, 'M', 'اً'), - (0xFD3E, 'V'), - (0xFD50, 'M', 'تجم'), - (0xFD51, 'M', 'تحج'), - (0xFD53, 'M', 'تحم'), - (0xFD54, 'M', 'تخم'), - (0xFD55, 'M', 'تمج'), - (0xFD56, 'M', 'تمح'), - (0xFD57, 'M', 'تمخ'), - (0xFD58, 'M', 'جمح'), - (0xFD5A, 'M', 'حمي'), - (0xFD5B, 'M', 'حمى'), - (0xFD5C, 'M', 'سحج'), - (0xFD5D, 'M', 'سجح'), - (0xFD5E, 'M', 'سجى'), - (0xFD5F, 'M', 'سمح'), - (0xFD61, 'M', 'سمج'), - (0xFD62, 'M', 'سمم'), - (0xFD64, 'M', 'صحح'), - (0xFD66, 'M', 'صمم'), - (0xFD67, 'M', 'شحم'), - (0xFD69, 'M', 'شجي'), - (0xFD6A, 'M', 'شمخ'), - (0xFD6C, 'M', 'شمم'), - (0xFD6E, 'M', 'ضحى'), - (0xFD6F, 'M', 'ضخم'), - (0xFD71, 'M', 'طمح'), - (0xFD73, 'M', 'طمم'), - (0xFD74, 'M', 'طمي'), - (0xFD75, 'M', 'عجم'), - (0xFD76, 'M', 'عمم'), - (0xFD78, 'M', 'عمى'), - (0xFD79, 'M', 'غمم'), - (0xFD7A, 'M', 'غمي'), - (0xFD7B, 'M', 'غمى'), - (0xFD7C, 'M', 'فخم'), - (0xFD7E, 'M', 'قمح'), - (0xFD7F, 'M', 'قمم'), - (0xFD80, 'M', 'لحم'), - (0xFD81, 'M', 'لحي'), - (0xFD82, 'M', 'لحى'), - (0xFD83, 'M', 'لجج'), - (0xFD85, 'M', 'لخم'), - (0xFD87, 'M', 'لمح'), - (0xFD89, 'M', 'محج'), - (0xFD8A, 'M', 'محم'), - (0xFD8B, 'M', 'محي'), - (0xFD8C, 'M', 'مجح'), - (0xFD8D, 'M', 'مجم'), - (0xFD8E, 'M', 'مخج'), - (0xFD8F, 'M', 'مخم'), - (0xFD90, 'X'), - (0xFD92, 'M', 'مجخ'), - (0xFD93, 'M', 'همج'), - (0xFD94, 'M', 'همم'), - (0xFD95, 'M', 'نحم'), - (0xFD96, 'M', 'نحى'), - (0xFD97, 'M', 'نجم'), - (0xFD99, 'M', 'نجى'), - (0xFD9A, 'M', 'نمي'), - (0xFD9B, 'M', 'نمى'), - (0xFD9C, 'M', 'يمم'), - (0xFD9E, 'M', 'بخي'), - (0xFD9F, 'M', 'تجي'), - (0xFDA0, 'M', 'تجى'), - (0xFDA1, 'M', 'تخي'), - (0xFDA2, 'M', 'تخى'), - (0xFDA3, 'M', 'تمي'), - (0xFDA4, 'M', 'تمى'), - (0xFDA5, 'M', 'جمي'), - (0xFDA6, 'M', 'جحى'), - ] - -def _seg_49() -> List[Union[Tuple[int, str], 
Tuple[int, str, str]]]: - return [ - (0xFDA7, 'M', 'جمى'), - (0xFDA8, 'M', 'سخى'), - (0xFDA9, 'M', 'صحي'), - (0xFDAA, 'M', 'شحي'), - (0xFDAB, 'M', 'ضحي'), - (0xFDAC, 'M', 'لجي'), - (0xFDAD, 'M', 'لمي'), - (0xFDAE, 'M', 'يحي'), - (0xFDAF, 'M', 'يجي'), - (0xFDB0, 'M', 'يمي'), - (0xFDB1, 'M', 'ممي'), - (0xFDB2, 'M', 'قمي'), - (0xFDB3, 'M', 'نحي'), - (0xFDB4, 'M', 'قمح'), - (0xFDB5, 'M', 'لحم'), - (0xFDB6, 'M', 'عمي'), - (0xFDB7, 'M', 'كمي'), - (0xFDB8, 'M', 'نجح'), - (0xFDB9, 'M', 'مخي'), - (0xFDBA, 'M', 'لجم'), - (0xFDBB, 'M', 'كمم'), - (0xFDBC, 'M', 'لجم'), - (0xFDBD, 'M', 'نجح'), - (0xFDBE, 'M', 'جحي'), - (0xFDBF, 'M', 'حجي'), - (0xFDC0, 'M', 'مجي'), - (0xFDC1, 'M', 'فمي'), - (0xFDC2, 'M', 'بحي'), - (0xFDC3, 'M', 'كمم'), - (0xFDC4, 'M', 'عجم'), - (0xFDC5, 'M', 'صمم'), - (0xFDC6, 'M', 'سخي'), - (0xFDC7, 'M', 'نجي'), - (0xFDC8, 'X'), - (0xFDCF, 'V'), - (0xFDD0, 'X'), - (0xFDF0, 'M', 'صلے'), - (0xFDF1, 'M', 'قلے'), - (0xFDF2, 'M', 'الله'), - (0xFDF3, 'M', 'اكبر'), - (0xFDF4, 'M', 'محمد'), - (0xFDF5, 'M', 'صلعم'), - (0xFDF6, 'M', 'رسول'), - (0xFDF7, 'M', 'عليه'), - (0xFDF8, 'M', 'وسلم'), - (0xFDF9, 'M', 'صلى'), - (0xFDFA, '3', 'صلى الله عليه وسلم'), - (0xFDFB, '3', 'جل جلاله'), - (0xFDFC, 'M', 'ریال'), - (0xFDFD, 'V'), - (0xFE00, 'I'), - (0xFE10, '3', ','), - (0xFE11, 'M', '、'), - (0xFE12, 'X'), - (0xFE13, '3', ':'), - (0xFE14, '3', ';'), - (0xFE15, '3', '!'), - (0xFE16, '3', '?'), - (0xFE17, 'M', '〖'), - (0xFE18, 'M', '〗'), - (0xFE19, 'X'), - (0xFE20, 'V'), - (0xFE30, 'X'), - (0xFE31, 'M', '—'), - (0xFE32, 'M', '–'), - (0xFE33, '3', '_'), - (0xFE35, '3', '('), - (0xFE36, '3', ')'), - (0xFE37, '3', '{'), - (0xFE38, '3', '}'), - (0xFE39, 'M', '〔'), - (0xFE3A, 'M', '〕'), - (0xFE3B, 'M', '【'), - (0xFE3C, 'M', '】'), - (0xFE3D, 'M', '《'), - (0xFE3E, 'M', '》'), - (0xFE3F, 'M', '〈'), - (0xFE40, 'M', '〉'), - (0xFE41, 'M', '「'), - (0xFE42, 'M', '」'), - (0xFE43, 'M', '『'), - (0xFE44, 'M', '』'), - (0xFE45, 'V'), - (0xFE47, '3', '['), - (0xFE48, '3', ']'), - (0xFE49, '3', ' ̅'), - (0xFE4D, '3', '_'), - (0xFE50, '3', ','), - (0xFE51, 'M', '、'), - (0xFE52, 'X'), - (0xFE54, '3', ';'), - (0xFE55, '3', ':'), - (0xFE56, '3', '?'), - (0xFE57, '3', '!'), - (0xFE58, 'M', '—'), - (0xFE59, '3', '('), - (0xFE5A, '3', ')'), - (0xFE5B, '3', '{'), - (0xFE5C, '3', '}'), - (0xFE5D, 'M', '〔'), - ] - -def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFE5E, 'M', '〕'), - (0xFE5F, '3', '#'), - (0xFE60, '3', '&'), - (0xFE61, '3', '*'), - (0xFE62, '3', '+'), - (0xFE63, 'M', '-'), - (0xFE64, '3', '<'), - (0xFE65, '3', '>'), - (0xFE66, '3', '='), - (0xFE67, 'X'), - (0xFE68, '3', '\\'), - (0xFE69, '3', '$'), - (0xFE6A, '3', '%'), - (0xFE6B, '3', '@'), - (0xFE6C, 'X'), - (0xFE70, '3', ' ً'), - (0xFE71, 'M', 'ـً'), - (0xFE72, '3', ' ٌ'), - (0xFE73, 'V'), - (0xFE74, '3', ' ٍ'), - (0xFE75, 'X'), - (0xFE76, '3', ' َ'), - (0xFE77, 'M', 'ـَ'), - (0xFE78, '3', ' ُ'), - (0xFE79, 'M', 'ـُ'), - (0xFE7A, '3', ' ِ'), - (0xFE7B, 'M', 'ـِ'), - (0xFE7C, '3', ' ّ'), - (0xFE7D, 'M', 'ـّ'), - (0xFE7E, '3', ' ْ'), - (0xFE7F, 'M', 'ـْ'), - (0xFE80, 'M', 'ء'), - (0xFE81, 'M', 'آ'), - (0xFE83, 'M', 'أ'), - (0xFE85, 'M', 'ؤ'), - (0xFE87, 'M', 'إ'), - (0xFE89, 'M', 'ئ'), - (0xFE8D, 'M', 'ا'), - (0xFE8F, 'M', 'ب'), - (0xFE93, 'M', 'ة'), - (0xFE95, 'M', 'ت'), - (0xFE99, 'M', 'ث'), - (0xFE9D, 'M', 'ج'), - (0xFEA1, 'M', 'ح'), - (0xFEA5, 'M', 'خ'), - (0xFEA9, 'M', 'د'), - (0xFEAB, 'M', 'ذ'), - (0xFEAD, 'M', 'ر'), - (0xFEAF, 'M', 'ز'), - (0xFEB1, 'M', 'س'), - (0xFEB5, 'M', 'ش'), - (0xFEB9, 'M', 'ص'), - (0xFEBD, 'M', 'ض'), - 
(0xFEC1, 'M', 'ط'), - (0xFEC5, 'M', 'ظ'), - (0xFEC9, 'M', 'ع'), - (0xFECD, 'M', 'غ'), - (0xFED1, 'M', 'ف'), - (0xFED5, 'M', 'ق'), - (0xFED9, 'M', 'ك'), - (0xFEDD, 'M', 'ل'), - (0xFEE1, 'M', 'م'), - (0xFEE5, 'M', 'ن'), - (0xFEE9, 'M', 'ه'), - (0xFEED, 'M', 'و'), - (0xFEEF, 'M', 'ى'), - (0xFEF1, 'M', 'ي'), - (0xFEF5, 'M', 'لآ'), - (0xFEF7, 'M', 'لأ'), - (0xFEF9, 'M', 'لإ'), - (0xFEFB, 'M', 'لا'), - (0xFEFD, 'X'), - (0xFEFF, 'I'), - (0xFF00, 'X'), - (0xFF01, '3', '!'), - (0xFF02, '3', '"'), - (0xFF03, '3', '#'), - (0xFF04, '3', '$'), - (0xFF05, '3', '%'), - (0xFF06, '3', '&'), - (0xFF07, '3', '\''), - (0xFF08, '3', '('), - (0xFF09, '3', ')'), - (0xFF0A, '3', '*'), - (0xFF0B, '3', '+'), - (0xFF0C, '3', ','), - (0xFF0D, 'M', '-'), - (0xFF0E, 'M', '.'), - (0xFF0F, '3', '/'), - (0xFF10, 'M', '0'), - (0xFF11, 'M', '1'), - (0xFF12, 'M', '2'), - (0xFF13, 'M', '3'), - (0xFF14, 'M', '4'), - (0xFF15, 'M', '5'), - (0xFF16, 'M', '6'), - (0xFF17, 'M', '7'), - (0xFF18, 'M', '8'), - (0xFF19, 'M', '9'), - (0xFF1A, '3', ':'), - ] - -def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFF1B, '3', ';'), - (0xFF1C, '3', '<'), - (0xFF1D, '3', '='), - (0xFF1E, '3', '>'), - (0xFF1F, '3', '?'), - (0xFF20, '3', '@'), - (0xFF21, 'M', 'a'), - (0xFF22, 'M', 'b'), - (0xFF23, 'M', 'c'), - (0xFF24, 'M', 'd'), - (0xFF25, 'M', 'e'), - (0xFF26, 'M', 'f'), - (0xFF27, 'M', 'g'), - (0xFF28, 'M', 'h'), - (0xFF29, 'M', 'i'), - (0xFF2A, 'M', 'j'), - (0xFF2B, 'M', 'k'), - (0xFF2C, 'M', 'l'), - (0xFF2D, 'M', 'm'), - (0xFF2E, 'M', 'n'), - (0xFF2F, 'M', 'o'), - (0xFF30, 'M', 'p'), - (0xFF31, 'M', 'q'), - (0xFF32, 'M', 'r'), - (0xFF33, 'M', 's'), - (0xFF34, 'M', 't'), - (0xFF35, 'M', 'u'), - (0xFF36, 'M', 'v'), - (0xFF37, 'M', 'w'), - (0xFF38, 'M', 'x'), - (0xFF39, 'M', 'y'), - (0xFF3A, 'M', 'z'), - (0xFF3B, '3', '['), - (0xFF3C, '3', '\\'), - (0xFF3D, '3', ']'), - (0xFF3E, '3', '^'), - (0xFF3F, '3', '_'), - (0xFF40, '3', '`'), - (0xFF41, 'M', 'a'), - (0xFF42, 'M', 'b'), - (0xFF43, 'M', 'c'), - (0xFF44, 'M', 'd'), - (0xFF45, 'M', 'e'), - (0xFF46, 'M', 'f'), - (0xFF47, 'M', 'g'), - (0xFF48, 'M', 'h'), - (0xFF49, 'M', 'i'), - (0xFF4A, 'M', 'j'), - (0xFF4B, 'M', 'k'), - (0xFF4C, 'M', 'l'), - (0xFF4D, 'M', 'm'), - (0xFF4E, 'M', 'n'), - (0xFF4F, 'M', 'o'), - (0xFF50, 'M', 'p'), - (0xFF51, 'M', 'q'), - (0xFF52, 'M', 'r'), - (0xFF53, 'M', 's'), - (0xFF54, 'M', 't'), - (0xFF55, 'M', 'u'), - (0xFF56, 'M', 'v'), - (0xFF57, 'M', 'w'), - (0xFF58, 'M', 'x'), - (0xFF59, 'M', 'y'), - (0xFF5A, 'M', 'z'), - (0xFF5B, '3', '{'), - (0xFF5C, '3', '|'), - (0xFF5D, '3', '}'), - (0xFF5E, '3', '~'), - (0xFF5F, 'M', '⦅'), - (0xFF60, 'M', '⦆'), - (0xFF61, 'M', '.'), - (0xFF62, 'M', '「'), - (0xFF63, 'M', '」'), - (0xFF64, 'M', '、'), - (0xFF65, 'M', '・'), - (0xFF66, 'M', 'ヲ'), - (0xFF67, 'M', 'ァ'), - (0xFF68, 'M', 'ィ'), - (0xFF69, 'M', 'ゥ'), - (0xFF6A, 'M', 'ェ'), - (0xFF6B, 'M', 'ォ'), - (0xFF6C, 'M', 'ャ'), - (0xFF6D, 'M', 'ュ'), - (0xFF6E, 'M', 'ョ'), - (0xFF6F, 'M', 'ッ'), - (0xFF70, 'M', 'ー'), - (0xFF71, 'M', 'ア'), - (0xFF72, 'M', 'イ'), - (0xFF73, 'M', 'ウ'), - (0xFF74, 'M', 'エ'), - (0xFF75, 'M', 'オ'), - (0xFF76, 'M', 'カ'), - (0xFF77, 'M', 'キ'), - (0xFF78, 'M', 'ク'), - (0xFF79, 'M', 'ケ'), - (0xFF7A, 'M', 'コ'), - (0xFF7B, 'M', 'サ'), - (0xFF7C, 'M', 'シ'), - (0xFF7D, 'M', 'ス'), - (0xFF7E, 'M', 'セ'), - ] - -def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFF7F, 'M', 'ソ'), - (0xFF80, 'M', 'タ'), - (0xFF81, 'M', 'チ'), - (0xFF82, 'M', 'ツ'), - (0xFF83, 'M', 'テ'), - (0xFF84, 'M', 'ト'), - (0xFF85, 'M', 'ナ'), - 
(0xFF86, 'M', 'ニ'), - (0xFF87, 'M', 'ヌ'), - (0xFF88, 'M', 'ネ'), - (0xFF89, 'M', 'ノ'), - (0xFF8A, 'M', 'ハ'), - (0xFF8B, 'M', 'ヒ'), - (0xFF8C, 'M', 'フ'), - (0xFF8D, 'M', 'ヘ'), - (0xFF8E, 'M', 'ホ'), - (0xFF8F, 'M', 'マ'), - (0xFF90, 'M', 'ミ'), - (0xFF91, 'M', 'ム'), - (0xFF92, 'M', 'メ'), - (0xFF93, 'M', 'モ'), - (0xFF94, 'M', 'ヤ'), - (0xFF95, 'M', 'ユ'), - (0xFF96, 'M', 'ヨ'), - (0xFF97, 'M', 'ラ'), - (0xFF98, 'M', 'リ'), - (0xFF99, 'M', 'ル'), - (0xFF9A, 'M', 'レ'), - (0xFF9B, 'M', 'ロ'), - (0xFF9C, 'M', 'ワ'), - (0xFF9D, 'M', 'ン'), - (0xFF9E, 'M', '゙'), - (0xFF9F, 'M', '゚'), - (0xFFA0, 'X'), - (0xFFA1, 'M', 'ᄀ'), - (0xFFA2, 'M', 'ᄁ'), - (0xFFA3, 'M', 'ᆪ'), - (0xFFA4, 'M', 'ᄂ'), - (0xFFA5, 'M', 'ᆬ'), - (0xFFA6, 'M', 'ᆭ'), - (0xFFA7, 'M', 'ᄃ'), - (0xFFA8, 'M', 'ᄄ'), - (0xFFA9, 'M', 'ᄅ'), - (0xFFAA, 'M', 'ᆰ'), - (0xFFAB, 'M', 'ᆱ'), - (0xFFAC, 'M', 'ᆲ'), - (0xFFAD, 'M', 'ᆳ'), - (0xFFAE, 'M', 'ᆴ'), - (0xFFAF, 'M', 'ᆵ'), - (0xFFB0, 'M', 'ᄚ'), - (0xFFB1, 'M', 'ᄆ'), - (0xFFB2, 'M', 'ᄇ'), - (0xFFB3, 'M', 'ᄈ'), - (0xFFB4, 'M', 'ᄡ'), - (0xFFB5, 'M', 'ᄉ'), - (0xFFB6, 'M', 'ᄊ'), - (0xFFB7, 'M', 'ᄋ'), - (0xFFB8, 'M', 'ᄌ'), - (0xFFB9, 'M', 'ᄍ'), - (0xFFBA, 'M', 'ᄎ'), - (0xFFBB, 'M', 'ᄏ'), - (0xFFBC, 'M', 'ᄐ'), - (0xFFBD, 'M', 'ᄑ'), - (0xFFBE, 'M', 'ᄒ'), - (0xFFBF, 'X'), - (0xFFC2, 'M', 'ᅡ'), - (0xFFC3, 'M', 'ᅢ'), - (0xFFC4, 'M', 'ᅣ'), - (0xFFC5, 'M', 'ᅤ'), - (0xFFC6, 'M', 'ᅥ'), - (0xFFC7, 'M', 'ᅦ'), - (0xFFC8, 'X'), - (0xFFCA, 'M', 'ᅧ'), - (0xFFCB, 'M', 'ᅨ'), - (0xFFCC, 'M', 'ᅩ'), - (0xFFCD, 'M', 'ᅪ'), - (0xFFCE, 'M', 'ᅫ'), - (0xFFCF, 'M', 'ᅬ'), - (0xFFD0, 'X'), - (0xFFD2, 'M', 'ᅭ'), - (0xFFD3, 'M', 'ᅮ'), - (0xFFD4, 'M', 'ᅯ'), - (0xFFD5, 'M', 'ᅰ'), - (0xFFD6, 'M', 'ᅱ'), - (0xFFD7, 'M', 'ᅲ'), - (0xFFD8, 'X'), - (0xFFDA, 'M', 'ᅳ'), - (0xFFDB, 'M', 'ᅴ'), - (0xFFDC, 'M', 'ᅵ'), - (0xFFDD, 'X'), - (0xFFE0, 'M', '¢'), - (0xFFE1, 'M', '£'), - (0xFFE2, 'M', '¬'), - (0xFFE3, '3', ' ̄'), - (0xFFE4, 'M', '¦'), - (0xFFE5, 'M', '¥'), - (0xFFE6, 'M', '₩'), - (0xFFE7, 'X'), - (0xFFE8, 'M', '│'), - (0xFFE9, 'M', '←'), - ] - -def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFFEA, 'M', '↑'), - (0xFFEB, 'M', '→'), - (0xFFEC, 'M', '↓'), - (0xFFED, 'M', '■'), - (0xFFEE, 'M', '○'), - (0xFFEF, 'X'), - (0x10000, 'V'), - (0x1000C, 'X'), - (0x1000D, 'V'), - (0x10027, 'X'), - (0x10028, 'V'), - (0x1003B, 'X'), - (0x1003C, 'V'), - (0x1003E, 'X'), - (0x1003F, 'V'), - (0x1004E, 'X'), - (0x10050, 'V'), - (0x1005E, 'X'), - (0x10080, 'V'), - (0x100FB, 'X'), - (0x10100, 'V'), - (0x10103, 'X'), - (0x10107, 'V'), - (0x10134, 'X'), - (0x10137, 'V'), - (0x1018F, 'X'), - (0x10190, 'V'), - (0x1019D, 'X'), - (0x101A0, 'V'), - (0x101A1, 'X'), - (0x101D0, 'V'), - (0x101FE, 'X'), - (0x10280, 'V'), - (0x1029D, 'X'), - (0x102A0, 'V'), - (0x102D1, 'X'), - (0x102E0, 'V'), - (0x102FC, 'X'), - (0x10300, 'V'), - (0x10324, 'X'), - (0x1032D, 'V'), - (0x1034B, 'X'), - (0x10350, 'V'), - (0x1037B, 'X'), - (0x10380, 'V'), - (0x1039E, 'X'), - (0x1039F, 'V'), - (0x103C4, 'X'), - (0x103C8, 'V'), - (0x103D6, 'X'), - (0x10400, 'M', '𐐨'), - (0x10401, 'M', '𐐩'), - (0x10402, 'M', '𐐪'), - (0x10403, 'M', '𐐫'), - (0x10404, 'M', '𐐬'), - (0x10405, 'M', '𐐭'), - (0x10406, 'M', '𐐮'), - (0x10407, 'M', '𐐯'), - (0x10408, 'M', '𐐰'), - (0x10409, 'M', '𐐱'), - (0x1040A, 'M', '𐐲'), - (0x1040B, 'M', '𐐳'), - (0x1040C, 'M', '𐐴'), - (0x1040D, 'M', '𐐵'), - (0x1040E, 'M', '𐐶'), - (0x1040F, 'M', '𐐷'), - (0x10410, 'M', '𐐸'), - (0x10411, 'M', '𐐹'), - (0x10412, 'M', '𐐺'), - (0x10413, 'M', '𐐻'), - (0x10414, 'M', '𐐼'), - (0x10415, 'M', '𐐽'), - (0x10416, 'M', '𐐾'), - 
(0x10417, 'M', '𐐿'), - (0x10418, 'M', '𐑀'), - (0x10419, 'M', '𐑁'), - (0x1041A, 'M', '𐑂'), - (0x1041B, 'M', '𐑃'), - (0x1041C, 'M', '𐑄'), - (0x1041D, 'M', '𐑅'), - (0x1041E, 'M', '𐑆'), - (0x1041F, 'M', '𐑇'), - (0x10420, 'M', '𐑈'), - (0x10421, 'M', '𐑉'), - (0x10422, 'M', '𐑊'), - (0x10423, 'M', '𐑋'), - (0x10424, 'M', '𐑌'), - (0x10425, 'M', '𐑍'), - (0x10426, 'M', '𐑎'), - (0x10427, 'M', '𐑏'), - (0x10428, 'V'), - (0x1049E, 'X'), - (0x104A0, 'V'), - (0x104AA, 'X'), - (0x104B0, 'M', '𐓘'), - (0x104B1, 'M', '𐓙'), - (0x104B2, 'M', '𐓚'), - (0x104B3, 'M', '𐓛'), - (0x104B4, 'M', '𐓜'), - (0x104B5, 'M', '𐓝'), - ] - -def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x104B6, 'M', '𐓞'), - (0x104B7, 'M', '𐓟'), - (0x104B8, 'M', '𐓠'), - (0x104B9, 'M', '𐓡'), - (0x104BA, 'M', '𐓢'), - (0x104BB, 'M', '𐓣'), - (0x104BC, 'M', '𐓤'), - (0x104BD, 'M', '𐓥'), - (0x104BE, 'M', '𐓦'), - (0x104BF, 'M', '𐓧'), - (0x104C0, 'M', '𐓨'), - (0x104C1, 'M', '𐓩'), - (0x104C2, 'M', '𐓪'), - (0x104C3, 'M', '𐓫'), - (0x104C4, 'M', '𐓬'), - (0x104C5, 'M', '𐓭'), - (0x104C6, 'M', '𐓮'), - (0x104C7, 'M', '𐓯'), - (0x104C8, 'M', '𐓰'), - (0x104C9, 'M', '𐓱'), - (0x104CA, 'M', '𐓲'), - (0x104CB, 'M', '𐓳'), - (0x104CC, 'M', '𐓴'), - (0x104CD, 'M', '𐓵'), - (0x104CE, 'M', '𐓶'), - (0x104CF, 'M', '𐓷'), - (0x104D0, 'M', '𐓸'), - (0x104D1, 'M', '𐓹'), - (0x104D2, 'M', '𐓺'), - (0x104D3, 'M', '𐓻'), - (0x104D4, 'X'), - (0x104D8, 'V'), - (0x104FC, 'X'), - (0x10500, 'V'), - (0x10528, 'X'), - (0x10530, 'V'), - (0x10564, 'X'), - (0x1056F, 'V'), - (0x10570, 'M', '𐖗'), - (0x10571, 'M', '𐖘'), - (0x10572, 'M', '𐖙'), - (0x10573, 'M', '𐖚'), - (0x10574, 'M', '𐖛'), - (0x10575, 'M', '𐖜'), - (0x10576, 'M', '𐖝'), - (0x10577, 'M', '𐖞'), - (0x10578, 'M', '𐖟'), - (0x10579, 'M', '𐖠'), - (0x1057A, 'M', '𐖡'), - (0x1057B, 'X'), - (0x1057C, 'M', '𐖣'), - (0x1057D, 'M', '𐖤'), - (0x1057E, 'M', '𐖥'), - (0x1057F, 'M', '𐖦'), - (0x10580, 'M', '𐖧'), - (0x10581, 'M', '𐖨'), - (0x10582, 'M', '𐖩'), - (0x10583, 'M', '𐖪'), - (0x10584, 'M', '𐖫'), - (0x10585, 'M', '𐖬'), - (0x10586, 'M', '𐖭'), - (0x10587, 'M', '𐖮'), - (0x10588, 'M', '𐖯'), - (0x10589, 'M', '𐖰'), - (0x1058A, 'M', '𐖱'), - (0x1058B, 'X'), - (0x1058C, 'M', '𐖳'), - (0x1058D, 'M', '𐖴'), - (0x1058E, 'M', '𐖵'), - (0x1058F, 'M', '𐖶'), - (0x10590, 'M', '𐖷'), - (0x10591, 'M', '𐖸'), - (0x10592, 'M', '𐖹'), - (0x10593, 'X'), - (0x10594, 'M', '𐖻'), - (0x10595, 'M', '𐖼'), - (0x10596, 'X'), - (0x10597, 'V'), - (0x105A2, 'X'), - (0x105A3, 'V'), - (0x105B2, 'X'), - (0x105B3, 'V'), - (0x105BA, 'X'), - (0x105BB, 'V'), - (0x105BD, 'X'), - (0x10600, 'V'), - (0x10737, 'X'), - (0x10740, 'V'), - (0x10756, 'X'), - (0x10760, 'V'), - (0x10768, 'X'), - (0x10780, 'V'), - (0x10781, 'M', 'ː'), - (0x10782, 'M', 'ˑ'), - (0x10783, 'M', 'æ'), - (0x10784, 'M', 'ʙ'), - (0x10785, 'M', 'ɓ'), - (0x10786, 'X'), - (0x10787, 'M', 'ʣ'), - (0x10788, 'M', 'ꭦ'), - ] - -def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x10789, 'M', 'ʥ'), - (0x1078A, 'M', 'ʤ'), - (0x1078B, 'M', 'ɖ'), - (0x1078C, 'M', 'ɗ'), - (0x1078D, 'M', 'ᶑ'), - (0x1078E, 'M', 'ɘ'), - (0x1078F, 'M', 'ɞ'), - (0x10790, 'M', 'ʩ'), - (0x10791, 'M', 'ɤ'), - (0x10792, 'M', 'ɢ'), - (0x10793, 'M', 'ɠ'), - (0x10794, 'M', 'ʛ'), - (0x10795, 'M', 'ħ'), - (0x10796, 'M', 'ʜ'), - (0x10797, 'M', 'ɧ'), - (0x10798, 'M', 'ʄ'), - (0x10799, 'M', 'ʪ'), - (0x1079A, 'M', 'ʫ'), - (0x1079B, 'M', 'ɬ'), - (0x1079C, 'M', '𝼄'), - (0x1079D, 'M', 'ꞎ'), - (0x1079E, 'M', 'ɮ'), - (0x1079F, 'M', '𝼅'), - (0x107A0, 'M', 'ʎ'), - (0x107A1, 'M', '𝼆'), - (0x107A2, 'M', 'ø'), - (0x107A3, 'M', 'ɶ'), 
- (0x107A4, 'M', 'ɷ'), - (0x107A5, 'M', 'q'), - (0x107A6, 'M', 'ɺ'), - (0x107A7, 'M', '𝼈'), - (0x107A8, 'M', 'ɽ'), - (0x107A9, 'M', 'ɾ'), - (0x107AA, 'M', 'ʀ'), - (0x107AB, 'M', 'ʨ'), - (0x107AC, 'M', 'ʦ'), - (0x107AD, 'M', 'ꭧ'), - (0x107AE, 'M', 'ʧ'), - (0x107AF, 'M', 'ʈ'), - (0x107B0, 'M', 'ⱱ'), - (0x107B1, 'X'), - (0x107B2, 'M', 'ʏ'), - (0x107B3, 'M', 'ʡ'), - (0x107B4, 'M', 'ʢ'), - (0x107B5, 'M', 'ʘ'), - (0x107B6, 'M', 'ǀ'), - (0x107B7, 'M', 'ǁ'), - (0x107B8, 'M', 'ǂ'), - (0x107B9, 'M', '𝼊'), - (0x107BA, 'M', '𝼞'), - (0x107BB, 'X'), - (0x10800, 'V'), - (0x10806, 'X'), - (0x10808, 'V'), - (0x10809, 'X'), - (0x1080A, 'V'), - (0x10836, 'X'), - (0x10837, 'V'), - (0x10839, 'X'), - (0x1083C, 'V'), - (0x1083D, 'X'), - (0x1083F, 'V'), - (0x10856, 'X'), - (0x10857, 'V'), - (0x1089F, 'X'), - (0x108A7, 'V'), - (0x108B0, 'X'), - (0x108E0, 'V'), - (0x108F3, 'X'), - (0x108F4, 'V'), - (0x108F6, 'X'), - (0x108FB, 'V'), - (0x1091C, 'X'), - (0x1091F, 'V'), - (0x1093A, 'X'), - (0x1093F, 'V'), - (0x10940, 'X'), - (0x10980, 'V'), - (0x109B8, 'X'), - (0x109BC, 'V'), - (0x109D0, 'X'), - (0x109D2, 'V'), - (0x10A04, 'X'), - (0x10A05, 'V'), - (0x10A07, 'X'), - (0x10A0C, 'V'), - (0x10A14, 'X'), - (0x10A15, 'V'), - (0x10A18, 'X'), - (0x10A19, 'V'), - (0x10A36, 'X'), - (0x10A38, 'V'), - (0x10A3B, 'X'), - (0x10A3F, 'V'), - (0x10A49, 'X'), - (0x10A50, 'V'), - (0x10A59, 'X'), - (0x10A60, 'V'), - (0x10AA0, 'X'), - (0x10AC0, 'V'), - ] - -def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x10AE7, 'X'), - (0x10AEB, 'V'), - (0x10AF7, 'X'), - (0x10B00, 'V'), - (0x10B36, 'X'), - (0x10B39, 'V'), - (0x10B56, 'X'), - (0x10B58, 'V'), - (0x10B73, 'X'), - (0x10B78, 'V'), - (0x10B92, 'X'), - (0x10B99, 'V'), - (0x10B9D, 'X'), - (0x10BA9, 'V'), - (0x10BB0, 'X'), - (0x10C00, 'V'), - (0x10C49, 'X'), - (0x10C80, 'M', '𐳀'), - (0x10C81, 'M', '𐳁'), - (0x10C82, 'M', '𐳂'), - (0x10C83, 'M', '𐳃'), - (0x10C84, 'M', '𐳄'), - (0x10C85, 'M', '𐳅'), - (0x10C86, 'M', '𐳆'), - (0x10C87, 'M', '𐳇'), - (0x10C88, 'M', '𐳈'), - (0x10C89, 'M', '𐳉'), - (0x10C8A, 'M', '𐳊'), - (0x10C8B, 'M', '𐳋'), - (0x10C8C, 'M', '𐳌'), - (0x10C8D, 'M', '𐳍'), - (0x10C8E, 'M', '𐳎'), - (0x10C8F, 'M', '𐳏'), - (0x10C90, 'M', '𐳐'), - (0x10C91, 'M', '𐳑'), - (0x10C92, 'M', '𐳒'), - (0x10C93, 'M', '𐳓'), - (0x10C94, 'M', '𐳔'), - (0x10C95, 'M', '𐳕'), - (0x10C96, 'M', '𐳖'), - (0x10C97, 'M', '𐳗'), - (0x10C98, 'M', '𐳘'), - (0x10C99, 'M', '𐳙'), - (0x10C9A, 'M', '𐳚'), - (0x10C9B, 'M', '𐳛'), - (0x10C9C, 'M', '𐳜'), - (0x10C9D, 'M', '𐳝'), - (0x10C9E, 'M', '𐳞'), - (0x10C9F, 'M', '𐳟'), - (0x10CA0, 'M', '𐳠'), - (0x10CA1, 'M', '𐳡'), - (0x10CA2, 'M', '𐳢'), - (0x10CA3, 'M', '𐳣'), - (0x10CA4, 'M', '𐳤'), - (0x10CA5, 'M', '𐳥'), - (0x10CA6, 'M', '𐳦'), - (0x10CA7, 'M', '𐳧'), - (0x10CA8, 'M', '𐳨'), - (0x10CA9, 'M', '𐳩'), - (0x10CAA, 'M', '𐳪'), - (0x10CAB, 'M', '𐳫'), - (0x10CAC, 'M', '𐳬'), - (0x10CAD, 'M', '𐳭'), - (0x10CAE, 'M', '𐳮'), - (0x10CAF, 'M', '𐳯'), - (0x10CB0, 'M', '𐳰'), - (0x10CB1, 'M', '𐳱'), - (0x10CB2, 'M', '𐳲'), - (0x10CB3, 'X'), - (0x10CC0, 'V'), - (0x10CF3, 'X'), - (0x10CFA, 'V'), - (0x10D28, 'X'), - (0x10D30, 'V'), - (0x10D3A, 'X'), - (0x10E60, 'V'), - (0x10E7F, 'X'), - (0x10E80, 'V'), - (0x10EAA, 'X'), - (0x10EAB, 'V'), - (0x10EAE, 'X'), - (0x10EB0, 'V'), - (0x10EB2, 'X'), - (0x10EFD, 'V'), - (0x10F28, 'X'), - (0x10F30, 'V'), - (0x10F5A, 'X'), - (0x10F70, 'V'), - (0x10F8A, 'X'), - (0x10FB0, 'V'), - (0x10FCC, 'X'), - (0x10FE0, 'V'), - (0x10FF7, 'X'), - (0x11000, 'V'), - (0x1104E, 'X'), - (0x11052, 'V'), - (0x11076, 'X'), - (0x1107F, 'V'), - (0x110BD, 'X'), - 
(0x110BE, 'V'), - ] - -def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x110C3, 'X'), - (0x110D0, 'V'), - (0x110E9, 'X'), - (0x110F0, 'V'), - (0x110FA, 'X'), - (0x11100, 'V'), - (0x11135, 'X'), - (0x11136, 'V'), - (0x11148, 'X'), - (0x11150, 'V'), - (0x11177, 'X'), - (0x11180, 'V'), - (0x111E0, 'X'), - (0x111E1, 'V'), - (0x111F5, 'X'), - (0x11200, 'V'), - (0x11212, 'X'), - (0x11213, 'V'), - (0x11242, 'X'), - (0x11280, 'V'), - (0x11287, 'X'), - (0x11288, 'V'), - (0x11289, 'X'), - (0x1128A, 'V'), - (0x1128E, 'X'), - (0x1128F, 'V'), - (0x1129E, 'X'), - (0x1129F, 'V'), - (0x112AA, 'X'), - (0x112B0, 'V'), - (0x112EB, 'X'), - (0x112F0, 'V'), - (0x112FA, 'X'), - (0x11300, 'V'), - (0x11304, 'X'), - (0x11305, 'V'), - (0x1130D, 'X'), - (0x1130F, 'V'), - (0x11311, 'X'), - (0x11313, 'V'), - (0x11329, 'X'), - (0x1132A, 'V'), - (0x11331, 'X'), - (0x11332, 'V'), - (0x11334, 'X'), - (0x11335, 'V'), - (0x1133A, 'X'), - (0x1133B, 'V'), - (0x11345, 'X'), - (0x11347, 'V'), - (0x11349, 'X'), - (0x1134B, 'V'), - (0x1134E, 'X'), - (0x11350, 'V'), - (0x11351, 'X'), - (0x11357, 'V'), - (0x11358, 'X'), - (0x1135D, 'V'), - (0x11364, 'X'), - (0x11366, 'V'), - (0x1136D, 'X'), - (0x11370, 'V'), - (0x11375, 'X'), - (0x11400, 'V'), - (0x1145C, 'X'), - (0x1145D, 'V'), - (0x11462, 'X'), - (0x11480, 'V'), - (0x114C8, 'X'), - (0x114D0, 'V'), - (0x114DA, 'X'), - (0x11580, 'V'), - (0x115B6, 'X'), - (0x115B8, 'V'), - (0x115DE, 'X'), - (0x11600, 'V'), - (0x11645, 'X'), - (0x11650, 'V'), - (0x1165A, 'X'), - (0x11660, 'V'), - (0x1166D, 'X'), - (0x11680, 'V'), - (0x116BA, 'X'), - (0x116C0, 'V'), - (0x116CA, 'X'), - (0x11700, 'V'), - (0x1171B, 'X'), - (0x1171D, 'V'), - (0x1172C, 'X'), - (0x11730, 'V'), - (0x11747, 'X'), - (0x11800, 'V'), - (0x1183C, 'X'), - (0x118A0, 'M', '𑣀'), - (0x118A1, 'M', '𑣁'), - (0x118A2, 'M', '𑣂'), - (0x118A3, 'M', '𑣃'), - (0x118A4, 'M', '𑣄'), - (0x118A5, 'M', '𑣅'), - (0x118A6, 'M', '𑣆'), - ] - -def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x118A7, 'M', '𑣇'), - (0x118A8, 'M', '𑣈'), - (0x118A9, 'M', '𑣉'), - (0x118AA, 'M', '𑣊'), - (0x118AB, 'M', '𑣋'), - (0x118AC, 'M', '𑣌'), - (0x118AD, 'M', '𑣍'), - (0x118AE, 'M', '𑣎'), - (0x118AF, 'M', '𑣏'), - (0x118B0, 'M', '𑣐'), - (0x118B1, 'M', '𑣑'), - (0x118B2, 'M', '𑣒'), - (0x118B3, 'M', '𑣓'), - (0x118B4, 'M', '𑣔'), - (0x118B5, 'M', '𑣕'), - (0x118B6, 'M', '𑣖'), - (0x118B7, 'M', '𑣗'), - (0x118B8, 'M', '𑣘'), - (0x118B9, 'M', '𑣙'), - (0x118BA, 'M', '𑣚'), - (0x118BB, 'M', '𑣛'), - (0x118BC, 'M', '𑣜'), - (0x118BD, 'M', '𑣝'), - (0x118BE, 'M', '𑣞'), - (0x118BF, 'M', '𑣟'), - (0x118C0, 'V'), - (0x118F3, 'X'), - (0x118FF, 'V'), - (0x11907, 'X'), - (0x11909, 'V'), - (0x1190A, 'X'), - (0x1190C, 'V'), - (0x11914, 'X'), - (0x11915, 'V'), - (0x11917, 'X'), - (0x11918, 'V'), - (0x11936, 'X'), - (0x11937, 'V'), - (0x11939, 'X'), - (0x1193B, 'V'), - (0x11947, 'X'), - (0x11950, 'V'), - (0x1195A, 'X'), - (0x119A0, 'V'), - (0x119A8, 'X'), - (0x119AA, 'V'), - (0x119D8, 'X'), - (0x119DA, 'V'), - (0x119E5, 'X'), - (0x11A00, 'V'), - (0x11A48, 'X'), - (0x11A50, 'V'), - (0x11AA3, 'X'), - (0x11AB0, 'V'), - (0x11AF9, 'X'), - (0x11B00, 'V'), - (0x11B0A, 'X'), - (0x11C00, 'V'), - (0x11C09, 'X'), - (0x11C0A, 'V'), - (0x11C37, 'X'), - (0x11C38, 'V'), - (0x11C46, 'X'), - (0x11C50, 'V'), - (0x11C6D, 'X'), - (0x11C70, 'V'), - (0x11C90, 'X'), - (0x11C92, 'V'), - (0x11CA8, 'X'), - (0x11CA9, 'V'), - (0x11CB7, 'X'), - (0x11D00, 'V'), - (0x11D07, 'X'), - (0x11D08, 'V'), - (0x11D0A, 'X'), - (0x11D0B, 'V'), - (0x11D37, 'X'), - (0x11D3A, 'V'), 
- (0x11D3B, 'X'), - (0x11D3C, 'V'), - (0x11D3E, 'X'), - (0x11D3F, 'V'), - (0x11D48, 'X'), - (0x11D50, 'V'), - (0x11D5A, 'X'), - (0x11D60, 'V'), - (0x11D66, 'X'), - (0x11D67, 'V'), - (0x11D69, 'X'), - (0x11D6A, 'V'), - (0x11D8F, 'X'), - (0x11D90, 'V'), - (0x11D92, 'X'), - (0x11D93, 'V'), - (0x11D99, 'X'), - (0x11DA0, 'V'), - (0x11DAA, 'X'), - (0x11EE0, 'V'), - (0x11EF9, 'X'), - (0x11F00, 'V'), - ] - -def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x11F11, 'X'), - (0x11F12, 'V'), - (0x11F3B, 'X'), - (0x11F3E, 'V'), - (0x11F5A, 'X'), - (0x11FB0, 'V'), - (0x11FB1, 'X'), - (0x11FC0, 'V'), - (0x11FF2, 'X'), - (0x11FFF, 'V'), - (0x1239A, 'X'), - (0x12400, 'V'), - (0x1246F, 'X'), - (0x12470, 'V'), - (0x12475, 'X'), - (0x12480, 'V'), - (0x12544, 'X'), - (0x12F90, 'V'), - (0x12FF3, 'X'), - (0x13000, 'V'), - (0x13430, 'X'), - (0x13440, 'V'), - (0x13456, 'X'), - (0x14400, 'V'), - (0x14647, 'X'), - (0x16800, 'V'), - (0x16A39, 'X'), - (0x16A40, 'V'), - (0x16A5F, 'X'), - (0x16A60, 'V'), - (0x16A6A, 'X'), - (0x16A6E, 'V'), - (0x16ABF, 'X'), - (0x16AC0, 'V'), - (0x16ACA, 'X'), - (0x16AD0, 'V'), - (0x16AEE, 'X'), - (0x16AF0, 'V'), - (0x16AF6, 'X'), - (0x16B00, 'V'), - (0x16B46, 'X'), - (0x16B50, 'V'), - (0x16B5A, 'X'), - (0x16B5B, 'V'), - (0x16B62, 'X'), - (0x16B63, 'V'), - (0x16B78, 'X'), - (0x16B7D, 'V'), - (0x16B90, 'X'), - (0x16E40, 'M', '𖹠'), - (0x16E41, 'M', '𖹡'), - (0x16E42, 'M', '𖹢'), - (0x16E43, 'M', '𖹣'), - (0x16E44, 'M', '𖹤'), - (0x16E45, 'M', '𖹥'), - (0x16E46, 'M', '𖹦'), - (0x16E47, 'M', '𖹧'), - (0x16E48, 'M', '𖹨'), - (0x16E49, 'M', '𖹩'), - (0x16E4A, 'M', '𖹪'), - (0x16E4B, 'M', '𖹫'), - (0x16E4C, 'M', '𖹬'), - (0x16E4D, 'M', '𖹭'), - (0x16E4E, 'M', '𖹮'), - (0x16E4F, 'M', '𖹯'), - (0x16E50, 'M', '𖹰'), - (0x16E51, 'M', '𖹱'), - (0x16E52, 'M', '𖹲'), - (0x16E53, 'M', '𖹳'), - (0x16E54, 'M', '𖹴'), - (0x16E55, 'M', '𖹵'), - (0x16E56, 'M', '𖹶'), - (0x16E57, 'M', '𖹷'), - (0x16E58, 'M', '𖹸'), - (0x16E59, 'M', '𖹹'), - (0x16E5A, 'M', '𖹺'), - (0x16E5B, 'M', '𖹻'), - (0x16E5C, 'M', '𖹼'), - (0x16E5D, 'M', '𖹽'), - (0x16E5E, 'M', '𖹾'), - (0x16E5F, 'M', '𖹿'), - (0x16E60, 'V'), - (0x16E9B, 'X'), - (0x16F00, 'V'), - (0x16F4B, 'X'), - (0x16F4F, 'V'), - (0x16F88, 'X'), - (0x16F8F, 'V'), - (0x16FA0, 'X'), - (0x16FE0, 'V'), - (0x16FE5, 'X'), - (0x16FF0, 'V'), - (0x16FF2, 'X'), - (0x17000, 'V'), - (0x187F8, 'X'), - (0x18800, 'V'), - (0x18CD6, 'X'), - (0x18D00, 'V'), - (0x18D09, 'X'), - (0x1AFF0, 'V'), - ] - -def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1AFF4, 'X'), - (0x1AFF5, 'V'), - (0x1AFFC, 'X'), - (0x1AFFD, 'V'), - (0x1AFFF, 'X'), - (0x1B000, 'V'), - (0x1B123, 'X'), - (0x1B132, 'V'), - (0x1B133, 'X'), - (0x1B150, 'V'), - (0x1B153, 'X'), - (0x1B155, 'V'), - (0x1B156, 'X'), - (0x1B164, 'V'), - (0x1B168, 'X'), - (0x1B170, 'V'), - (0x1B2FC, 'X'), - (0x1BC00, 'V'), - (0x1BC6B, 'X'), - (0x1BC70, 'V'), - (0x1BC7D, 'X'), - (0x1BC80, 'V'), - (0x1BC89, 'X'), - (0x1BC90, 'V'), - (0x1BC9A, 'X'), - (0x1BC9C, 'V'), - (0x1BCA0, 'I'), - (0x1BCA4, 'X'), - (0x1CF00, 'V'), - (0x1CF2E, 'X'), - (0x1CF30, 'V'), - (0x1CF47, 'X'), - (0x1CF50, 'V'), - (0x1CFC4, 'X'), - (0x1D000, 'V'), - (0x1D0F6, 'X'), - (0x1D100, 'V'), - (0x1D127, 'X'), - (0x1D129, 'V'), - (0x1D15E, 'M', '𝅗𝅥'), - (0x1D15F, 'M', '𝅘𝅥'), - (0x1D160, 'M', '𝅘𝅥𝅮'), - (0x1D161, 'M', '𝅘𝅥𝅯'), - (0x1D162, 'M', '𝅘𝅥𝅰'), - (0x1D163, 'M', '𝅘𝅥𝅱'), - (0x1D164, 'M', '𝅘𝅥𝅲'), - (0x1D165, 'V'), - (0x1D173, 'X'), - (0x1D17B, 'V'), - (0x1D1BB, 'M', '𝆹𝅥'), - (0x1D1BC, 'M', '𝆺𝅥'), - (0x1D1BD, 'M', '𝆹𝅥𝅮'), - (0x1D1BE, 'M', 
'𝆺𝅥𝅮'), - (0x1D1BF, 'M', '𝆹𝅥𝅯'), - (0x1D1C0, 'M', '𝆺𝅥𝅯'), - (0x1D1C1, 'V'), - (0x1D1EB, 'X'), - (0x1D200, 'V'), - (0x1D246, 'X'), - (0x1D2C0, 'V'), - (0x1D2D4, 'X'), - (0x1D2E0, 'V'), - (0x1D2F4, 'X'), - (0x1D300, 'V'), - (0x1D357, 'X'), - (0x1D360, 'V'), - (0x1D379, 'X'), - (0x1D400, 'M', 'a'), - (0x1D401, 'M', 'b'), - (0x1D402, 'M', 'c'), - (0x1D403, 'M', 'd'), - (0x1D404, 'M', 'e'), - (0x1D405, 'M', 'f'), - (0x1D406, 'M', 'g'), - (0x1D407, 'M', 'h'), - (0x1D408, 'M', 'i'), - (0x1D409, 'M', 'j'), - (0x1D40A, 'M', 'k'), - (0x1D40B, 'M', 'l'), - (0x1D40C, 'M', 'm'), - (0x1D40D, 'M', 'n'), - (0x1D40E, 'M', 'o'), - (0x1D40F, 'M', 'p'), - (0x1D410, 'M', 'q'), - (0x1D411, 'M', 'r'), - (0x1D412, 'M', 's'), - (0x1D413, 'M', 't'), - (0x1D414, 'M', 'u'), - (0x1D415, 'M', 'v'), - (0x1D416, 'M', 'w'), - (0x1D417, 'M', 'x'), - (0x1D418, 'M', 'y'), - (0x1D419, 'M', 'z'), - (0x1D41A, 'M', 'a'), - (0x1D41B, 'M', 'b'), - (0x1D41C, 'M', 'c'), - (0x1D41D, 'M', 'd'), - (0x1D41E, 'M', 'e'), - (0x1D41F, 'M', 'f'), - (0x1D420, 'M', 'g'), - ] - -def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D421, 'M', 'h'), - (0x1D422, 'M', 'i'), - (0x1D423, 'M', 'j'), - (0x1D424, 'M', 'k'), - (0x1D425, 'M', 'l'), - (0x1D426, 'M', 'm'), - (0x1D427, 'M', 'n'), - (0x1D428, 'M', 'o'), - (0x1D429, 'M', 'p'), - (0x1D42A, 'M', 'q'), - (0x1D42B, 'M', 'r'), - (0x1D42C, 'M', 's'), - (0x1D42D, 'M', 't'), - (0x1D42E, 'M', 'u'), - (0x1D42F, 'M', 'v'), - (0x1D430, 'M', 'w'), - (0x1D431, 'M', 'x'), - (0x1D432, 'M', 'y'), - (0x1D433, 'M', 'z'), - (0x1D434, 'M', 'a'), - (0x1D435, 'M', 'b'), - (0x1D436, 'M', 'c'), - (0x1D437, 'M', 'd'), - (0x1D438, 'M', 'e'), - (0x1D439, 'M', 'f'), - (0x1D43A, 'M', 'g'), - (0x1D43B, 'M', 'h'), - (0x1D43C, 'M', 'i'), - (0x1D43D, 'M', 'j'), - (0x1D43E, 'M', 'k'), - (0x1D43F, 'M', 'l'), - (0x1D440, 'M', 'm'), - (0x1D441, 'M', 'n'), - (0x1D442, 'M', 'o'), - (0x1D443, 'M', 'p'), - (0x1D444, 'M', 'q'), - (0x1D445, 'M', 'r'), - (0x1D446, 'M', 's'), - (0x1D447, 'M', 't'), - (0x1D448, 'M', 'u'), - (0x1D449, 'M', 'v'), - (0x1D44A, 'M', 'w'), - (0x1D44B, 'M', 'x'), - (0x1D44C, 'M', 'y'), - (0x1D44D, 'M', 'z'), - (0x1D44E, 'M', 'a'), - (0x1D44F, 'M', 'b'), - (0x1D450, 'M', 'c'), - (0x1D451, 'M', 'd'), - (0x1D452, 'M', 'e'), - (0x1D453, 'M', 'f'), - (0x1D454, 'M', 'g'), - (0x1D455, 'X'), - (0x1D456, 'M', 'i'), - (0x1D457, 'M', 'j'), - (0x1D458, 'M', 'k'), - (0x1D459, 'M', 'l'), - (0x1D45A, 'M', 'm'), - (0x1D45B, 'M', 'n'), - (0x1D45C, 'M', 'o'), - (0x1D45D, 'M', 'p'), - (0x1D45E, 'M', 'q'), - (0x1D45F, 'M', 'r'), - (0x1D460, 'M', 's'), - (0x1D461, 'M', 't'), - (0x1D462, 'M', 'u'), - (0x1D463, 'M', 'v'), - (0x1D464, 'M', 'w'), - (0x1D465, 'M', 'x'), - (0x1D466, 'M', 'y'), - (0x1D467, 'M', 'z'), - (0x1D468, 'M', 'a'), - (0x1D469, 'M', 'b'), - (0x1D46A, 'M', 'c'), - (0x1D46B, 'M', 'd'), - (0x1D46C, 'M', 'e'), - (0x1D46D, 'M', 'f'), - (0x1D46E, 'M', 'g'), - (0x1D46F, 'M', 'h'), - (0x1D470, 'M', 'i'), - (0x1D471, 'M', 'j'), - (0x1D472, 'M', 'k'), - (0x1D473, 'M', 'l'), - (0x1D474, 'M', 'm'), - (0x1D475, 'M', 'n'), - (0x1D476, 'M', 'o'), - (0x1D477, 'M', 'p'), - (0x1D478, 'M', 'q'), - (0x1D479, 'M', 'r'), - (0x1D47A, 'M', 's'), - (0x1D47B, 'M', 't'), - (0x1D47C, 'M', 'u'), - (0x1D47D, 'M', 'v'), - (0x1D47E, 'M', 'w'), - (0x1D47F, 'M', 'x'), - (0x1D480, 'M', 'y'), - (0x1D481, 'M', 'z'), - (0x1D482, 'M', 'a'), - (0x1D483, 'M', 'b'), - (0x1D484, 'M', 'c'), - ] - -def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D485, 'M', 'd'), - (0x1D486, 'M', 'e'), - 
(0x1D487, 'M', 'f'), - (0x1D488, 'M', 'g'), - (0x1D489, 'M', 'h'), - (0x1D48A, 'M', 'i'), - (0x1D48B, 'M', 'j'), - (0x1D48C, 'M', 'k'), - (0x1D48D, 'M', 'l'), - (0x1D48E, 'M', 'm'), - (0x1D48F, 'M', 'n'), - (0x1D490, 'M', 'o'), - (0x1D491, 'M', 'p'), - (0x1D492, 'M', 'q'), - (0x1D493, 'M', 'r'), - (0x1D494, 'M', 's'), - (0x1D495, 'M', 't'), - (0x1D496, 'M', 'u'), - (0x1D497, 'M', 'v'), - (0x1D498, 'M', 'w'), - (0x1D499, 'M', 'x'), - (0x1D49A, 'M', 'y'), - (0x1D49B, 'M', 'z'), - (0x1D49C, 'M', 'a'), - (0x1D49D, 'X'), - (0x1D49E, 'M', 'c'), - (0x1D49F, 'M', 'd'), - (0x1D4A0, 'X'), - (0x1D4A2, 'M', 'g'), - (0x1D4A3, 'X'), - (0x1D4A5, 'M', 'j'), - (0x1D4A6, 'M', 'k'), - (0x1D4A7, 'X'), - (0x1D4A9, 'M', 'n'), - (0x1D4AA, 'M', 'o'), - (0x1D4AB, 'M', 'p'), - (0x1D4AC, 'M', 'q'), - (0x1D4AD, 'X'), - (0x1D4AE, 'M', 's'), - (0x1D4AF, 'M', 't'), - (0x1D4B0, 'M', 'u'), - (0x1D4B1, 'M', 'v'), - (0x1D4B2, 'M', 'w'), - (0x1D4B3, 'M', 'x'), - (0x1D4B4, 'M', 'y'), - (0x1D4B5, 'M', 'z'), - (0x1D4B6, 'M', 'a'), - (0x1D4B7, 'M', 'b'), - (0x1D4B8, 'M', 'c'), - (0x1D4B9, 'M', 'd'), - (0x1D4BA, 'X'), - (0x1D4BB, 'M', 'f'), - (0x1D4BC, 'X'), - (0x1D4BD, 'M', 'h'), - (0x1D4BE, 'M', 'i'), - (0x1D4BF, 'M', 'j'), - (0x1D4C0, 'M', 'k'), - (0x1D4C1, 'M', 'l'), - (0x1D4C2, 'M', 'm'), - (0x1D4C3, 'M', 'n'), - (0x1D4C4, 'X'), - (0x1D4C5, 'M', 'p'), - (0x1D4C6, 'M', 'q'), - (0x1D4C7, 'M', 'r'), - (0x1D4C8, 'M', 's'), - (0x1D4C9, 'M', 't'), - (0x1D4CA, 'M', 'u'), - (0x1D4CB, 'M', 'v'), - (0x1D4CC, 'M', 'w'), - (0x1D4CD, 'M', 'x'), - (0x1D4CE, 'M', 'y'), - (0x1D4CF, 'M', 'z'), - (0x1D4D0, 'M', 'a'), - (0x1D4D1, 'M', 'b'), - (0x1D4D2, 'M', 'c'), - (0x1D4D3, 'M', 'd'), - (0x1D4D4, 'M', 'e'), - (0x1D4D5, 'M', 'f'), - (0x1D4D6, 'M', 'g'), - (0x1D4D7, 'M', 'h'), - (0x1D4D8, 'M', 'i'), - (0x1D4D9, 'M', 'j'), - (0x1D4DA, 'M', 'k'), - (0x1D4DB, 'M', 'l'), - (0x1D4DC, 'M', 'm'), - (0x1D4DD, 'M', 'n'), - (0x1D4DE, 'M', 'o'), - (0x1D4DF, 'M', 'p'), - (0x1D4E0, 'M', 'q'), - (0x1D4E1, 'M', 'r'), - (0x1D4E2, 'M', 's'), - (0x1D4E3, 'M', 't'), - (0x1D4E4, 'M', 'u'), - (0x1D4E5, 'M', 'v'), - (0x1D4E6, 'M', 'w'), - (0x1D4E7, 'M', 'x'), - (0x1D4E8, 'M', 'y'), - (0x1D4E9, 'M', 'z'), - (0x1D4EA, 'M', 'a'), - (0x1D4EB, 'M', 'b'), - ] - -def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D4EC, 'M', 'c'), - (0x1D4ED, 'M', 'd'), - (0x1D4EE, 'M', 'e'), - (0x1D4EF, 'M', 'f'), - (0x1D4F0, 'M', 'g'), - (0x1D4F1, 'M', 'h'), - (0x1D4F2, 'M', 'i'), - (0x1D4F3, 'M', 'j'), - (0x1D4F4, 'M', 'k'), - (0x1D4F5, 'M', 'l'), - (0x1D4F6, 'M', 'm'), - (0x1D4F7, 'M', 'n'), - (0x1D4F8, 'M', 'o'), - (0x1D4F9, 'M', 'p'), - (0x1D4FA, 'M', 'q'), - (0x1D4FB, 'M', 'r'), - (0x1D4FC, 'M', 's'), - (0x1D4FD, 'M', 't'), - (0x1D4FE, 'M', 'u'), - (0x1D4FF, 'M', 'v'), - (0x1D500, 'M', 'w'), - (0x1D501, 'M', 'x'), - (0x1D502, 'M', 'y'), - (0x1D503, 'M', 'z'), - (0x1D504, 'M', 'a'), - (0x1D505, 'M', 'b'), - (0x1D506, 'X'), - (0x1D507, 'M', 'd'), - (0x1D508, 'M', 'e'), - (0x1D509, 'M', 'f'), - (0x1D50A, 'M', 'g'), - (0x1D50B, 'X'), - (0x1D50D, 'M', 'j'), - (0x1D50E, 'M', 'k'), - (0x1D50F, 'M', 'l'), - (0x1D510, 'M', 'm'), - (0x1D511, 'M', 'n'), - (0x1D512, 'M', 'o'), - (0x1D513, 'M', 'p'), - (0x1D514, 'M', 'q'), - (0x1D515, 'X'), - (0x1D516, 'M', 's'), - (0x1D517, 'M', 't'), - (0x1D518, 'M', 'u'), - (0x1D519, 'M', 'v'), - (0x1D51A, 'M', 'w'), - (0x1D51B, 'M', 'x'), - (0x1D51C, 'M', 'y'), - (0x1D51D, 'X'), - (0x1D51E, 'M', 'a'), - (0x1D51F, 'M', 'b'), - (0x1D520, 'M', 'c'), - (0x1D521, 'M', 'd'), - (0x1D522, 'M', 'e'), - (0x1D523, 'M', 'f'), - 
(0x1D524, 'M', 'g'), - (0x1D525, 'M', 'h'), - (0x1D526, 'M', 'i'), - (0x1D527, 'M', 'j'), - (0x1D528, 'M', 'k'), - (0x1D529, 'M', 'l'), - (0x1D52A, 'M', 'm'), - (0x1D52B, 'M', 'n'), - (0x1D52C, 'M', 'o'), - (0x1D52D, 'M', 'p'), - (0x1D52E, 'M', 'q'), - (0x1D52F, 'M', 'r'), - (0x1D530, 'M', 's'), - (0x1D531, 'M', 't'), - (0x1D532, 'M', 'u'), - (0x1D533, 'M', 'v'), - (0x1D534, 'M', 'w'), - (0x1D535, 'M', 'x'), - (0x1D536, 'M', 'y'), - (0x1D537, 'M', 'z'), - (0x1D538, 'M', 'a'), - (0x1D539, 'M', 'b'), - (0x1D53A, 'X'), - (0x1D53B, 'M', 'd'), - (0x1D53C, 'M', 'e'), - (0x1D53D, 'M', 'f'), - (0x1D53E, 'M', 'g'), - (0x1D53F, 'X'), - (0x1D540, 'M', 'i'), - (0x1D541, 'M', 'j'), - (0x1D542, 'M', 'k'), - (0x1D543, 'M', 'l'), - (0x1D544, 'M', 'm'), - (0x1D545, 'X'), - (0x1D546, 'M', 'o'), - (0x1D547, 'X'), - (0x1D54A, 'M', 's'), - (0x1D54B, 'M', 't'), - (0x1D54C, 'M', 'u'), - (0x1D54D, 'M', 'v'), - (0x1D54E, 'M', 'w'), - (0x1D54F, 'M', 'x'), - (0x1D550, 'M', 'y'), - (0x1D551, 'X'), - (0x1D552, 'M', 'a'), - ] - -def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D553, 'M', 'b'), - (0x1D554, 'M', 'c'), - (0x1D555, 'M', 'd'), - (0x1D556, 'M', 'e'), - (0x1D557, 'M', 'f'), - (0x1D558, 'M', 'g'), - (0x1D559, 'M', 'h'), - (0x1D55A, 'M', 'i'), - (0x1D55B, 'M', 'j'), - (0x1D55C, 'M', 'k'), - (0x1D55D, 'M', 'l'), - (0x1D55E, 'M', 'm'), - (0x1D55F, 'M', 'n'), - (0x1D560, 'M', 'o'), - (0x1D561, 'M', 'p'), - (0x1D562, 'M', 'q'), - (0x1D563, 'M', 'r'), - (0x1D564, 'M', 's'), - (0x1D565, 'M', 't'), - (0x1D566, 'M', 'u'), - (0x1D567, 'M', 'v'), - (0x1D568, 'M', 'w'), - (0x1D569, 'M', 'x'), - (0x1D56A, 'M', 'y'), - (0x1D56B, 'M', 'z'), - (0x1D56C, 'M', 'a'), - (0x1D56D, 'M', 'b'), - (0x1D56E, 'M', 'c'), - (0x1D56F, 'M', 'd'), - (0x1D570, 'M', 'e'), - (0x1D571, 'M', 'f'), - (0x1D572, 'M', 'g'), - (0x1D573, 'M', 'h'), - (0x1D574, 'M', 'i'), - (0x1D575, 'M', 'j'), - (0x1D576, 'M', 'k'), - (0x1D577, 'M', 'l'), - (0x1D578, 'M', 'm'), - (0x1D579, 'M', 'n'), - (0x1D57A, 'M', 'o'), - (0x1D57B, 'M', 'p'), - (0x1D57C, 'M', 'q'), - (0x1D57D, 'M', 'r'), - (0x1D57E, 'M', 's'), - (0x1D57F, 'M', 't'), - (0x1D580, 'M', 'u'), - (0x1D581, 'M', 'v'), - (0x1D582, 'M', 'w'), - (0x1D583, 'M', 'x'), - (0x1D584, 'M', 'y'), - (0x1D585, 'M', 'z'), - (0x1D586, 'M', 'a'), - (0x1D587, 'M', 'b'), - (0x1D588, 'M', 'c'), - (0x1D589, 'M', 'd'), - (0x1D58A, 'M', 'e'), - (0x1D58B, 'M', 'f'), - (0x1D58C, 'M', 'g'), - (0x1D58D, 'M', 'h'), - (0x1D58E, 'M', 'i'), - (0x1D58F, 'M', 'j'), - (0x1D590, 'M', 'k'), - (0x1D591, 'M', 'l'), - (0x1D592, 'M', 'm'), - (0x1D593, 'M', 'n'), - (0x1D594, 'M', 'o'), - (0x1D595, 'M', 'p'), - (0x1D596, 'M', 'q'), - (0x1D597, 'M', 'r'), - (0x1D598, 'M', 's'), - (0x1D599, 'M', 't'), - (0x1D59A, 'M', 'u'), - (0x1D59B, 'M', 'v'), - (0x1D59C, 'M', 'w'), - (0x1D59D, 'M', 'x'), - (0x1D59E, 'M', 'y'), - (0x1D59F, 'M', 'z'), - (0x1D5A0, 'M', 'a'), - (0x1D5A1, 'M', 'b'), - (0x1D5A2, 'M', 'c'), - (0x1D5A3, 'M', 'd'), - (0x1D5A4, 'M', 'e'), - (0x1D5A5, 'M', 'f'), - (0x1D5A6, 'M', 'g'), - (0x1D5A7, 'M', 'h'), - (0x1D5A8, 'M', 'i'), - (0x1D5A9, 'M', 'j'), - (0x1D5AA, 'M', 'k'), - (0x1D5AB, 'M', 'l'), - (0x1D5AC, 'M', 'm'), - (0x1D5AD, 'M', 'n'), - (0x1D5AE, 'M', 'o'), - (0x1D5AF, 'M', 'p'), - (0x1D5B0, 'M', 'q'), - (0x1D5B1, 'M', 'r'), - (0x1D5B2, 'M', 's'), - (0x1D5B3, 'M', 't'), - (0x1D5B4, 'M', 'u'), - (0x1D5B5, 'M', 'v'), - (0x1D5B6, 'M', 'w'), - ] - -def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D5B7, 'M', 'x'), - (0x1D5B8, 'M', 'y'), - (0x1D5B9, 'M', 'z'), - 
(0x1D5BA, 'M', 'a'), - (0x1D5BB, 'M', 'b'), - (0x1D5BC, 'M', 'c'), - (0x1D5BD, 'M', 'd'), - (0x1D5BE, 'M', 'e'), - (0x1D5BF, 'M', 'f'), - (0x1D5C0, 'M', 'g'), - (0x1D5C1, 'M', 'h'), - (0x1D5C2, 'M', 'i'), - (0x1D5C3, 'M', 'j'), - (0x1D5C4, 'M', 'k'), - (0x1D5C5, 'M', 'l'), - (0x1D5C6, 'M', 'm'), - (0x1D5C7, 'M', 'n'), - (0x1D5C8, 'M', 'o'), - (0x1D5C9, 'M', 'p'), - (0x1D5CA, 'M', 'q'), - (0x1D5CB, 'M', 'r'), - (0x1D5CC, 'M', 's'), - (0x1D5CD, 'M', 't'), - (0x1D5CE, 'M', 'u'), - (0x1D5CF, 'M', 'v'), - (0x1D5D0, 'M', 'w'), - (0x1D5D1, 'M', 'x'), - (0x1D5D2, 'M', 'y'), - (0x1D5D3, 'M', 'z'), - (0x1D5D4, 'M', 'a'), - (0x1D5D5, 'M', 'b'), - (0x1D5D6, 'M', 'c'), - (0x1D5D7, 'M', 'd'), - (0x1D5D8, 'M', 'e'), - (0x1D5D9, 'M', 'f'), - (0x1D5DA, 'M', 'g'), - (0x1D5DB, 'M', 'h'), - (0x1D5DC, 'M', 'i'), - (0x1D5DD, 'M', 'j'), - (0x1D5DE, 'M', 'k'), - (0x1D5DF, 'M', 'l'), - (0x1D5E0, 'M', 'm'), - (0x1D5E1, 'M', 'n'), - (0x1D5E2, 'M', 'o'), - (0x1D5E3, 'M', 'p'), - (0x1D5E4, 'M', 'q'), - (0x1D5E5, 'M', 'r'), - (0x1D5E6, 'M', 's'), - (0x1D5E7, 'M', 't'), - (0x1D5E8, 'M', 'u'), - (0x1D5E9, 'M', 'v'), - (0x1D5EA, 'M', 'w'), - (0x1D5EB, 'M', 'x'), - (0x1D5EC, 'M', 'y'), - (0x1D5ED, 'M', 'z'), - (0x1D5EE, 'M', 'a'), - (0x1D5EF, 'M', 'b'), - (0x1D5F0, 'M', 'c'), - (0x1D5F1, 'M', 'd'), - (0x1D5F2, 'M', 'e'), - (0x1D5F3, 'M', 'f'), - (0x1D5F4, 'M', 'g'), - (0x1D5F5, 'M', 'h'), - (0x1D5F6, 'M', 'i'), - (0x1D5F7, 'M', 'j'), - (0x1D5F8, 'M', 'k'), - (0x1D5F9, 'M', 'l'), - (0x1D5FA, 'M', 'm'), - (0x1D5FB, 'M', 'n'), - (0x1D5FC, 'M', 'o'), - (0x1D5FD, 'M', 'p'), - (0x1D5FE, 'M', 'q'), - (0x1D5FF, 'M', 'r'), - (0x1D600, 'M', 's'), - (0x1D601, 'M', 't'), - (0x1D602, 'M', 'u'), - (0x1D603, 'M', 'v'), - (0x1D604, 'M', 'w'), - (0x1D605, 'M', 'x'), - (0x1D606, 'M', 'y'), - (0x1D607, 'M', 'z'), - (0x1D608, 'M', 'a'), - (0x1D609, 'M', 'b'), - (0x1D60A, 'M', 'c'), - (0x1D60B, 'M', 'd'), - (0x1D60C, 'M', 'e'), - (0x1D60D, 'M', 'f'), - (0x1D60E, 'M', 'g'), - (0x1D60F, 'M', 'h'), - (0x1D610, 'M', 'i'), - (0x1D611, 'M', 'j'), - (0x1D612, 'M', 'k'), - (0x1D613, 'M', 'l'), - (0x1D614, 'M', 'm'), - (0x1D615, 'M', 'n'), - (0x1D616, 'M', 'o'), - (0x1D617, 'M', 'p'), - (0x1D618, 'M', 'q'), - (0x1D619, 'M', 'r'), - (0x1D61A, 'M', 's'), - ] - -def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D61B, 'M', 't'), - (0x1D61C, 'M', 'u'), - (0x1D61D, 'M', 'v'), - (0x1D61E, 'M', 'w'), - (0x1D61F, 'M', 'x'), - (0x1D620, 'M', 'y'), - (0x1D621, 'M', 'z'), - (0x1D622, 'M', 'a'), - (0x1D623, 'M', 'b'), - (0x1D624, 'M', 'c'), - (0x1D625, 'M', 'd'), - (0x1D626, 'M', 'e'), - (0x1D627, 'M', 'f'), - (0x1D628, 'M', 'g'), - (0x1D629, 'M', 'h'), - (0x1D62A, 'M', 'i'), - (0x1D62B, 'M', 'j'), - (0x1D62C, 'M', 'k'), - (0x1D62D, 'M', 'l'), - (0x1D62E, 'M', 'm'), - (0x1D62F, 'M', 'n'), - (0x1D630, 'M', 'o'), - (0x1D631, 'M', 'p'), - (0x1D632, 'M', 'q'), - (0x1D633, 'M', 'r'), - (0x1D634, 'M', 's'), - (0x1D635, 'M', 't'), - (0x1D636, 'M', 'u'), - (0x1D637, 'M', 'v'), - (0x1D638, 'M', 'w'), - (0x1D639, 'M', 'x'), - (0x1D63A, 'M', 'y'), - (0x1D63B, 'M', 'z'), - (0x1D63C, 'M', 'a'), - (0x1D63D, 'M', 'b'), - (0x1D63E, 'M', 'c'), - (0x1D63F, 'M', 'd'), - (0x1D640, 'M', 'e'), - (0x1D641, 'M', 'f'), - (0x1D642, 'M', 'g'), - (0x1D643, 'M', 'h'), - (0x1D644, 'M', 'i'), - (0x1D645, 'M', 'j'), - (0x1D646, 'M', 'k'), - (0x1D647, 'M', 'l'), - (0x1D648, 'M', 'm'), - (0x1D649, 'M', 'n'), - (0x1D64A, 'M', 'o'), - (0x1D64B, 'M', 'p'), - (0x1D64C, 'M', 'q'), - (0x1D64D, 'M', 'r'), - (0x1D64E, 'M', 's'), - (0x1D64F, 'M', 't'), - (0x1D650, 'M', 
'u'), - (0x1D651, 'M', 'v'), - (0x1D652, 'M', 'w'), - (0x1D653, 'M', 'x'), - (0x1D654, 'M', 'y'), - (0x1D655, 'M', 'z'), - (0x1D656, 'M', 'a'), - (0x1D657, 'M', 'b'), - (0x1D658, 'M', 'c'), - (0x1D659, 'M', 'd'), - (0x1D65A, 'M', 'e'), - (0x1D65B, 'M', 'f'), - (0x1D65C, 'M', 'g'), - (0x1D65D, 'M', 'h'), - (0x1D65E, 'M', 'i'), - (0x1D65F, 'M', 'j'), - (0x1D660, 'M', 'k'), - (0x1D661, 'M', 'l'), - (0x1D662, 'M', 'm'), - (0x1D663, 'M', 'n'), - (0x1D664, 'M', 'o'), - (0x1D665, 'M', 'p'), - (0x1D666, 'M', 'q'), - (0x1D667, 'M', 'r'), - (0x1D668, 'M', 's'), - (0x1D669, 'M', 't'), - (0x1D66A, 'M', 'u'), - (0x1D66B, 'M', 'v'), - (0x1D66C, 'M', 'w'), - (0x1D66D, 'M', 'x'), - (0x1D66E, 'M', 'y'), - (0x1D66F, 'M', 'z'), - (0x1D670, 'M', 'a'), - (0x1D671, 'M', 'b'), - (0x1D672, 'M', 'c'), - (0x1D673, 'M', 'd'), - (0x1D674, 'M', 'e'), - (0x1D675, 'M', 'f'), - (0x1D676, 'M', 'g'), - (0x1D677, 'M', 'h'), - (0x1D678, 'M', 'i'), - (0x1D679, 'M', 'j'), - (0x1D67A, 'M', 'k'), - (0x1D67B, 'M', 'l'), - (0x1D67C, 'M', 'm'), - (0x1D67D, 'M', 'n'), - (0x1D67E, 'M', 'o'), - ] - -def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D67F, 'M', 'p'), - (0x1D680, 'M', 'q'), - (0x1D681, 'M', 'r'), - (0x1D682, 'M', 's'), - (0x1D683, 'M', 't'), - (0x1D684, 'M', 'u'), - (0x1D685, 'M', 'v'), - (0x1D686, 'M', 'w'), - (0x1D687, 'M', 'x'), - (0x1D688, 'M', 'y'), - (0x1D689, 'M', 'z'), - (0x1D68A, 'M', 'a'), - (0x1D68B, 'M', 'b'), - (0x1D68C, 'M', 'c'), - (0x1D68D, 'M', 'd'), - (0x1D68E, 'M', 'e'), - (0x1D68F, 'M', 'f'), - (0x1D690, 'M', 'g'), - (0x1D691, 'M', 'h'), - (0x1D692, 'M', 'i'), - (0x1D693, 'M', 'j'), - (0x1D694, 'M', 'k'), - (0x1D695, 'M', 'l'), - (0x1D696, 'M', 'm'), - (0x1D697, 'M', 'n'), - (0x1D698, 'M', 'o'), - (0x1D699, 'M', 'p'), - (0x1D69A, 'M', 'q'), - (0x1D69B, 'M', 'r'), - (0x1D69C, 'M', 's'), - (0x1D69D, 'M', 't'), - (0x1D69E, 'M', 'u'), - (0x1D69F, 'M', 'v'), - (0x1D6A0, 'M', 'w'), - (0x1D6A1, 'M', 'x'), - (0x1D6A2, 'M', 'y'), - (0x1D6A3, 'M', 'z'), - (0x1D6A4, 'M', 'ı'), - (0x1D6A5, 'M', 'ȷ'), - (0x1D6A6, 'X'), - (0x1D6A8, 'M', 'α'), - (0x1D6A9, 'M', 'β'), - (0x1D6AA, 'M', 'γ'), - (0x1D6AB, 'M', 'δ'), - (0x1D6AC, 'M', 'ε'), - (0x1D6AD, 'M', 'ζ'), - (0x1D6AE, 'M', 'η'), - (0x1D6AF, 'M', 'θ'), - (0x1D6B0, 'M', 'ι'), - (0x1D6B1, 'M', 'κ'), - (0x1D6B2, 'M', 'λ'), - (0x1D6B3, 'M', 'μ'), - (0x1D6B4, 'M', 'ν'), - (0x1D6B5, 'M', 'ξ'), - (0x1D6B6, 'M', 'ο'), - (0x1D6B7, 'M', 'π'), - (0x1D6B8, 'M', 'ρ'), - (0x1D6B9, 'M', 'θ'), - (0x1D6BA, 'M', 'σ'), - (0x1D6BB, 'M', 'τ'), - (0x1D6BC, 'M', 'υ'), - (0x1D6BD, 'M', 'φ'), - (0x1D6BE, 'M', 'χ'), - (0x1D6BF, 'M', 'ψ'), - (0x1D6C0, 'M', 'ω'), - (0x1D6C1, 'M', '∇'), - (0x1D6C2, 'M', 'α'), - (0x1D6C3, 'M', 'β'), - (0x1D6C4, 'M', 'γ'), - (0x1D6C5, 'M', 'δ'), - (0x1D6C6, 'M', 'ε'), - (0x1D6C7, 'M', 'ζ'), - (0x1D6C8, 'M', 'η'), - (0x1D6C9, 'M', 'θ'), - (0x1D6CA, 'M', 'ι'), - (0x1D6CB, 'M', 'κ'), - (0x1D6CC, 'M', 'λ'), - (0x1D6CD, 'M', 'μ'), - (0x1D6CE, 'M', 'ν'), - (0x1D6CF, 'M', 'ξ'), - (0x1D6D0, 'M', 'ο'), - (0x1D6D1, 'M', 'π'), - (0x1D6D2, 'M', 'ρ'), - (0x1D6D3, 'M', 'σ'), - (0x1D6D5, 'M', 'τ'), - (0x1D6D6, 'M', 'υ'), - (0x1D6D7, 'M', 'φ'), - (0x1D6D8, 'M', 'χ'), - (0x1D6D9, 'M', 'ψ'), - (0x1D6DA, 'M', 'ω'), - (0x1D6DB, 'M', '∂'), - (0x1D6DC, 'M', 'ε'), - (0x1D6DD, 'M', 'θ'), - (0x1D6DE, 'M', 'κ'), - (0x1D6DF, 'M', 'φ'), - (0x1D6E0, 'M', 'ρ'), - (0x1D6E1, 'M', 'π'), - (0x1D6E2, 'M', 'α'), - (0x1D6E3, 'M', 'β'), - (0x1D6E4, 'M', 'γ'), - ] - -def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D6E5, 'M', 
'δ'), - (0x1D6E6, 'M', 'ε'), - (0x1D6E7, 'M', 'ζ'), - (0x1D6E8, 'M', 'η'), - (0x1D6E9, 'M', 'θ'), - (0x1D6EA, 'M', 'ι'), - (0x1D6EB, 'M', 'κ'), - (0x1D6EC, 'M', 'λ'), - (0x1D6ED, 'M', 'μ'), - (0x1D6EE, 'M', 'ν'), - (0x1D6EF, 'M', 'ξ'), - (0x1D6F0, 'M', 'ο'), - (0x1D6F1, 'M', 'π'), - (0x1D6F2, 'M', 'ρ'), - (0x1D6F3, 'M', 'θ'), - (0x1D6F4, 'M', 'σ'), - (0x1D6F5, 'M', 'τ'), - (0x1D6F6, 'M', 'υ'), - (0x1D6F7, 'M', 'φ'), - (0x1D6F8, 'M', 'χ'), - (0x1D6F9, 'M', 'ψ'), - (0x1D6FA, 'M', 'ω'), - (0x1D6FB, 'M', '∇'), - (0x1D6FC, 'M', 'α'), - (0x1D6FD, 'M', 'β'), - (0x1D6FE, 'M', 'γ'), - (0x1D6FF, 'M', 'δ'), - (0x1D700, 'M', 'ε'), - (0x1D701, 'M', 'ζ'), - (0x1D702, 'M', 'η'), - (0x1D703, 'M', 'θ'), - (0x1D704, 'M', 'ι'), - (0x1D705, 'M', 'κ'), - (0x1D706, 'M', 'λ'), - (0x1D707, 'M', 'μ'), - (0x1D708, 'M', 'ν'), - (0x1D709, 'M', 'ξ'), - (0x1D70A, 'M', 'ο'), - (0x1D70B, 'M', 'π'), - (0x1D70C, 'M', 'ρ'), - (0x1D70D, 'M', 'σ'), - (0x1D70F, 'M', 'τ'), - (0x1D710, 'M', 'υ'), - (0x1D711, 'M', 'φ'), - (0x1D712, 'M', 'χ'), - (0x1D713, 'M', 'ψ'), - (0x1D714, 'M', 'ω'), - (0x1D715, 'M', '∂'), - (0x1D716, 'M', 'ε'), - (0x1D717, 'M', 'θ'), - (0x1D718, 'M', 'κ'), - (0x1D719, 'M', 'φ'), - (0x1D71A, 'M', 'ρ'), - (0x1D71B, 'M', 'π'), - (0x1D71C, 'M', 'α'), - (0x1D71D, 'M', 'β'), - (0x1D71E, 'M', 'γ'), - (0x1D71F, 'M', 'δ'), - (0x1D720, 'M', 'ε'), - (0x1D721, 'M', 'ζ'), - (0x1D722, 'M', 'η'), - (0x1D723, 'M', 'θ'), - (0x1D724, 'M', 'ι'), - (0x1D725, 'M', 'κ'), - (0x1D726, 'M', 'λ'), - (0x1D727, 'M', 'μ'), - (0x1D728, 'M', 'ν'), - (0x1D729, 'M', 'ξ'), - (0x1D72A, 'M', 'ο'), - (0x1D72B, 'M', 'π'), - (0x1D72C, 'M', 'ρ'), - (0x1D72D, 'M', 'θ'), - (0x1D72E, 'M', 'σ'), - (0x1D72F, 'M', 'τ'), - (0x1D730, 'M', 'υ'), - (0x1D731, 'M', 'φ'), - (0x1D732, 'M', 'χ'), - (0x1D733, 'M', 'ψ'), - (0x1D734, 'M', 'ω'), - (0x1D735, 'M', '∇'), - (0x1D736, 'M', 'α'), - (0x1D737, 'M', 'β'), - (0x1D738, 'M', 'γ'), - (0x1D739, 'M', 'δ'), - (0x1D73A, 'M', 'ε'), - (0x1D73B, 'M', 'ζ'), - (0x1D73C, 'M', 'η'), - (0x1D73D, 'M', 'θ'), - (0x1D73E, 'M', 'ι'), - (0x1D73F, 'M', 'κ'), - (0x1D740, 'M', 'λ'), - (0x1D741, 'M', 'μ'), - (0x1D742, 'M', 'ν'), - (0x1D743, 'M', 'ξ'), - (0x1D744, 'M', 'ο'), - (0x1D745, 'M', 'π'), - (0x1D746, 'M', 'ρ'), - (0x1D747, 'M', 'σ'), - (0x1D749, 'M', 'τ'), - (0x1D74A, 'M', 'υ'), - ] - -def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D74B, 'M', 'φ'), - (0x1D74C, 'M', 'χ'), - (0x1D74D, 'M', 'ψ'), - (0x1D74E, 'M', 'ω'), - (0x1D74F, 'M', '∂'), - (0x1D750, 'M', 'ε'), - (0x1D751, 'M', 'θ'), - (0x1D752, 'M', 'κ'), - (0x1D753, 'M', 'φ'), - (0x1D754, 'M', 'ρ'), - (0x1D755, 'M', 'π'), - (0x1D756, 'M', 'α'), - (0x1D757, 'M', 'β'), - (0x1D758, 'M', 'γ'), - (0x1D759, 'M', 'δ'), - (0x1D75A, 'M', 'ε'), - (0x1D75B, 'M', 'ζ'), - (0x1D75C, 'M', 'η'), - (0x1D75D, 'M', 'θ'), - (0x1D75E, 'M', 'ι'), - (0x1D75F, 'M', 'κ'), - (0x1D760, 'M', 'λ'), - (0x1D761, 'M', 'μ'), - (0x1D762, 'M', 'ν'), - (0x1D763, 'M', 'ξ'), - (0x1D764, 'M', 'ο'), - (0x1D765, 'M', 'π'), - (0x1D766, 'M', 'ρ'), - (0x1D767, 'M', 'θ'), - (0x1D768, 'M', 'σ'), - (0x1D769, 'M', 'τ'), - (0x1D76A, 'M', 'υ'), - (0x1D76B, 'M', 'φ'), - (0x1D76C, 'M', 'χ'), - (0x1D76D, 'M', 'ψ'), - (0x1D76E, 'M', 'ω'), - (0x1D76F, 'M', '∇'), - (0x1D770, 'M', 'α'), - (0x1D771, 'M', 'β'), - (0x1D772, 'M', 'γ'), - (0x1D773, 'M', 'δ'), - (0x1D774, 'M', 'ε'), - (0x1D775, 'M', 'ζ'), - (0x1D776, 'M', 'η'), - (0x1D777, 'M', 'θ'), - (0x1D778, 'M', 'ι'), - (0x1D779, 'M', 'κ'), - (0x1D77A, 'M', 'λ'), - (0x1D77B, 'M', 'μ'), - (0x1D77C, 'M', 'ν'), - (0x1D77D, 'M', 'ξ'), - (0x1D77E, 
'M', 'ο'), - (0x1D77F, 'M', 'π'), - (0x1D780, 'M', 'ρ'), - (0x1D781, 'M', 'σ'), - (0x1D783, 'M', 'τ'), - (0x1D784, 'M', 'υ'), - (0x1D785, 'M', 'φ'), - (0x1D786, 'M', 'χ'), - (0x1D787, 'M', 'ψ'), - (0x1D788, 'M', 'ω'), - (0x1D789, 'M', '∂'), - (0x1D78A, 'M', 'ε'), - (0x1D78B, 'M', 'θ'), - (0x1D78C, 'M', 'κ'), - (0x1D78D, 'M', 'φ'), - (0x1D78E, 'M', 'ρ'), - (0x1D78F, 'M', 'π'), - (0x1D790, 'M', 'α'), - (0x1D791, 'M', 'β'), - (0x1D792, 'M', 'γ'), - (0x1D793, 'M', 'δ'), - (0x1D794, 'M', 'ε'), - (0x1D795, 'M', 'ζ'), - (0x1D796, 'M', 'η'), - (0x1D797, 'M', 'θ'), - (0x1D798, 'M', 'ι'), - (0x1D799, 'M', 'κ'), - (0x1D79A, 'M', 'λ'), - (0x1D79B, 'M', 'μ'), - (0x1D79C, 'M', 'ν'), - (0x1D79D, 'M', 'ξ'), - (0x1D79E, 'M', 'ο'), - (0x1D79F, 'M', 'π'), - (0x1D7A0, 'M', 'ρ'), - (0x1D7A1, 'M', 'θ'), - (0x1D7A2, 'M', 'σ'), - (0x1D7A3, 'M', 'τ'), - (0x1D7A4, 'M', 'υ'), - (0x1D7A5, 'M', 'φ'), - (0x1D7A6, 'M', 'χ'), - (0x1D7A7, 'M', 'ψ'), - (0x1D7A8, 'M', 'ω'), - (0x1D7A9, 'M', '∇'), - (0x1D7AA, 'M', 'α'), - (0x1D7AB, 'M', 'β'), - (0x1D7AC, 'M', 'γ'), - (0x1D7AD, 'M', 'δ'), - (0x1D7AE, 'M', 'ε'), - (0x1D7AF, 'M', 'ζ'), - ] - -def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D7B0, 'M', 'η'), - (0x1D7B1, 'M', 'θ'), - (0x1D7B2, 'M', 'ι'), - (0x1D7B3, 'M', 'κ'), - (0x1D7B4, 'M', 'λ'), - (0x1D7B5, 'M', 'μ'), - (0x1D7B6, 'M', 'ν'), - (0x1D7B7, 'M', 'ξ'), - (0x1D7B8, 'M', 'ο'), - (0x1D7B9, 'M', 'π'), - (0x1D7BA, 'M', 'ρ'), - (0x1D7BB, 'M', 'σ'), - (0x1D7BD, 'M', 'τ'), - (0x1D7BE, 'M', 'υ'), - (0x1D7BF, 'M', 'φ'), - (0x1D7C0, 'M', 'χ'), - (0x1D7C1, 'M', 'ψ'), - (0x1D7C2, 'M', 'ω'), - (0x1D7C3, 'M', '∂'), - (0x1D7C4, 'M', 'ε'), - (0x1D7C5, 'M', 'θ'), - (0x1D7C6, 'M', 'κ'), - (0x1D7C7, 'M', 'φ'), - (0x1D7C8, 'M', 'ρ'), - (0x1D7C9, 'M', 'π'), - (0x1D7CA, 'M', 'ϝ'), - (0x1D7CC, 'X'), - (0x1D7CE, 'M', '0'), - (0x1D7CF, 'M', '1'), - (0x1D7D0, 'M', '2'), - (0x1D7D1, 'M', '3'), - (0x1D7D2, 'M', '4'), - (0x1D7D3, 'M', '5'), - (0x1D7D4, 'M', '6'), - (0x1D7D5, 'M', '7'), - (0x1D7D6, 'M', '8'), - (0x1D7D7, 'M', '9'), - (0x1D7D8, 'M', '0'), - (0x1D7D9, 'M', '1'), - (0x1D7DA, 'M', '2'), - (0x1D7DB, 'M', '3'), - (0x1D7DC, 'M', '4'), - (0x1D7DD, 'M', '5'), - (0x1D7DE, 'M', '6'), - (0x1D7DF, 'M', '7'), - (0x1D7E0, 'M', '8'), - (0x1D7E1, 'M', '9'), - (0x1D7E2, 'M', '0'), - (0x1D7E3, 'M', '1'), - (0x1D7E4, 'M', '2'), - (0x1D7E5, 'M', '3'), - (0x1D7E6, 'M', '4'), - (0x1D7E7, 'M', '5'), - (0x1D7E8, 'M', '6'), - (0x1D7E9, 'M', '7'), - (0x1D7EA, 'M', '8'), - (0x1D7EB, 'M', '9'), - (0x1D7EC, 'M', '0'), - (0x1D7ED, 'M', '1'), - (0x1D7EE, 'M', '2'), - (0x1D7EF, 'M', '3'), - (0x1D7F0, 'M', '4'), - (0x1D7F1, 'M', '5'), - (0x1D7F2, 'M', '6'), - (0x1D7F3, 'M', '7'), - (0x1D7F4, 'M', '8'), - (0x1D7F5, 'M', '9'), - (0x1D7F6, 'M', '0'), - (0x1D7F7, 'M', '1'), - (0x1D7F8, 'M', '2'), - (0x1D7F9, 'M', '3'), - (0x1D7FA, 'M', '4'), - (0x1D7FB, 'M', '5'), - (0x1D7FC, 'M', '6'), - (0x1D7FD, 'M', '7'), - (0x1D7FE, 'M', '8'), - (0x1D7FF, 'M', '9'), - (0x1D800, 'V'), - (0x1DA8C, 'X'), - (0x1DA9B, 'V'), - (0x1DAA0, 'X'), - (0x1DAA1, 'V'), - (0x1DAB0, 'X'), - (0x1DF00, 'V'), - (0x1DF1F, 'X'), - (0x1DF25, 'V'), - (0x1DF2B, 'X'), - (0x1E000, 'V'), - (0x1E007, 'X'), - (0x1E008, 'V'), - (0x1E019, 'X'), - (0x1E01B, 'V'), - (0x1E022, 'X'), - (0x1E023, 'V'), - (0x1E025, 'X'), - (0x1E026, 'V'), - (0x1E02B, 'X'), - (0x1E030, 'M', 'а'), - (0x1E031, 'M', 'б'), - (0x1E032, 'M', 'в'), - ] - -def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E033, 'M', 'г'), - (0x1E034, 'M', 'д'), - (0x1E035, 'M', 'е'), - 
(0x1E036, 'M', 'ж'), - (0x1E037, 'M', 'з'), - (0x1E038, 'M', 'и'), - (0x1E039, 'M', 'к'), - (0x1E03A, 'M', 'л'), - (0x1E03B, 'M', 'м'), - (0x1E03C, 'M', 'о'), - (0x1E03D, 'M', 'п'), - (0x1E03E, 'M', 'р'), - (0x1E03F, 'M', 'с'), - (0x1E040, 'M', 'т'), - (0x1E041, 'M', 'у'), - (0x1E042, 'M', 'ф'), - (0x1E043, 'M', 'х'), - (0x1E044, 'M', 'ц'), - (0x1E045, 'M', 'ч'), - (0x1E046, 'M', 'ш'), - (0x1E047, 'M', 'ы'), - (0x1E048, 'M', 'э'), - (0x1E049, 'M', 'ю'), - (0x1E04A, 'M', 'ꚉ'), - (0x1E04B, 'M', 'ә'), - (0x1E04C, 'M', 'і'), - (0x1E04D, 'M', 'ј'), - (0x1E04E, 'M', 'ө'), - (0x1E04F, 'M', 'ү'), - (0x1E050, 'M', 'ӏ'), - (0x1E051, 'M', 'а'), - (0x1E052, 'M', 'б'), - (0x1E053, 'M', 'в'), - (0x1E054, 'M', 'г'), - (0x1E055, 'M', 'д'), - (0x1E056, 'M', 'е'), - (0x1E057, 'M', 'ж'), - (0x1E058, 'M', 'з'), - (0x1E059, 'M', 'и'), - (0x1E05A, 'M', 'к'), - (0x1E05B, 'M', 'л'), - (0x1E05C, 'M', 'о'), - (0x1E05D, 'M', 'п'), - (0x1E05E, 'M', 'с'), - (0x1E05F, 'M', 'у'), - (0x1E060, 'M', 'ф'), - (0x1E061, 'M', 'х'), - (0x1E062, 'M', 'ц'), - (0x1E063, 'M', 'ч'), - (0x1E064, 'M', 'ш'), - (0x1E065, 'M', 'ъ'), - (0x1E066, 'M', 'ы'), - (0x1E067, 'M', 'ґ'), - (0x1E068, 'M', 'і'), - (0x1E069, 'M', 'ѕ'), - (0x1E06A, 'M', 'џ'), - (0x1E06B, 'M', 'ҫ'), - (0x1E06C, 'M', 'ꙑ'), - (0x1E06D, 'M', 'ұ'), - (0x1E06E, 'X'), - (0x1E08F, 'V'), - (0x1E090, 'X'), - (0x1E100, 'V'), - (0x1E12D, 'X'), - (0x1E130, 'V'), - (0x1E13E, 'X'), - (0x1E140, 'V'), - (0x1E14A, 'X'), - (0x1E14E, 'V'), - (0x1E150, 'X'), - (0x1E290, 'V'), - (0x1E2AF, 'X'), - (0x1E2C0, 'V'), - (0x1E2FA, 'X'), - (0x1E2FF, 'V'), - (0x1E300, 'X'), - (0x1E4D0, 'V'), - (0x1E4FA, 'X'), - (0x1E7E0, 'V'), - (0x1E7E7, 'X'), - (0x1E7E8, 'V'), - (0x1E7EC, 'X'), - (0x1E7ED, 'V'), - (0x1E7EF, 'X'), - (0x1E7F0, 'V'), - (0x1E7FF, 'X'), - (0x1E800, 'V'), - (0x1E8C5, 'X'), - (0x1E8C7, 'V'), - (0x1E8D7, 'X'), - (0x1E900, 'M', '𞤢'), - (0x1E901, 'M', '𞤣'), - (0x1E902, 'M', '𞤤'), - (0x1E903, 'M', '𞤥'), - (0x1E904, 'M', '𞤦'), - (0x1E905, 'M', '𞤧'), - (0x1E906, 'M', '𞤨'), - (0x1E907, 'M', '𞤩'), - (0x1E908, 'M', '𞤪'), - (0x1E909, 'M', '𞤫'), - ] - -def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E90A, 'M', '𞤬'), - (0x1E90B, 'M', '𞤭'), - (0x1E90C, 'M', '𞤮'), - (0x1E90D, 'M', '𞤯'), - (0x1E90E, 'M', '𞤰'), - (0x1E90F, 'M', '𞤱'), - (0x1E910, 'M', '𞤲'), - (0x1E911, 'M', '𞤳'), - (0x1E912, 'M', '𞤴'), - (0x1E913, 'M', '𞤵'), - (0x1E914, 'M', '𞤶'), - (0x1E915, 'M', '𞤷'), - (0x1E916, 'M', '𞤸'), - (0x1E917, 'M', '𞤹'), - (0x1E918, 'M', '𞤺'), - (0x1E919, 'M', '𞤻'), - (0x1E91A, 'M', '𞤼'), - (0x1E91B, 'M', '𞤽'), - (0x1E91C, 'M', '𞤾'), - (0x1E91D, 'M', '𞤿'), - (0x1E91E, 'M', '𞥀'), - (0x1E91F, 'M', '𞥁'), - (0x1E920, 'M', '𞥂'), - (0x1E921, 'M', '𞥃'), - (0x1E922, 'V'), - (0x1E94C, 'X'), - (0x1E950, 'V'), - (0x1E95A, 'X'), - (0x1E95E, 'V'), - (0x1E960, 'X'), - (0x1EC71, 'V'), - (0x1ECB5, 'X'), - (0x1ED01, 'V'), - (0x1ED3E, 'X'), - (0x1EE00, 'M', 'ا'), - (0x1EE01, 'M', 'ب'), - (0x1EE02, 'M', 'ج'), - (0x1EE03, 'M', 'د'), - (0x1EE04, 'X'), - (0x1EE05, 'M', 'و'), - (0x1EE06, 'M', 'ز'), - (0x1EE07, 'M', 'ح'), - (0x1EE08, 'M', 'ط'), - (0x1EE09, 'M', 'ي'), - (0x1EE0A, 'M', 'ك'), - (0x1EE0B, 'M', 'ل'), - (0x1EE0C, 'M', 'م'), - (0x1EE0D, 'M', 'ن'), - (0x1EE0E, 'M', 'س'), - (0x1EE0F, 'M', 'ع'), - (0x1EE10, 'M', 'ف'), - (0x1EE11, 'M', 'ص'), - (0x1EE12, 'M', 'ق'), - (0x1EE13, 'M', 'ر'), - (0x1EE14, 'M', 'ش'), - (0x1EE15, 'M', 'ت'), - (0x1EE16, 'M', 'ث'), - (0x1EE17, 'M', 'خ'), - (0x1EE18, 'M', 'ذ'), - (0x1EE19, 'M', 'ض'), - (0x1EE1A, 'M', 'ظ'), - (0x1EE1B, 'M', 'غ'), - (0x1EE1C, 'M', 'ٮ'), 
- (0x1EE1D, 'M', 'ں'), - (0x1EE1E, 'M', 'ڡ'), - (0x1EE1F, 'M', 'ٯ'), - (0x1EE20, 'X'), - (0x1EE21, 'M', 'ب'), - (0x1EE22, 'M', 'ج'), - (0x1EE23, 'X'), - (0x1EE24, 'M', 'ه'), - (0x1EE25, 'X'), - (0x1EE27, 'M', 'ح'), - (0x1EE28, 'X'), - (0x1EE29, 'M', 'ي'), - (0x1EE2A, 'M', 'ك'), - (0x1EE2B, 'M', 'ل'), - (0x1EE2C, 'M', 'م'), - (0x1EE2D, 'M', 'ن'), - (0x1EE2E, 'M', 'س'), - (0x1EE2F, 'M', 'ع'), - (0x1EE30, 'M', 'ف'), - (0x1EE31, 'M', 'ص'), - (0x1EE32, 'M', 'ق'), - (0x1EE33, 'X'), - (0x1EE34, 'M', 'ش'), - (0x1EE35, 'M', 'ت'), - (0x1EE36, 'M', 'ث'), - (0x1EE37, 'M', 'خ'), - (0x1EE38, 'X'), - (0x1EE39, 'M', 'ض'), - (0x1EE3A, 'X'), - (0x1EE3B, 'M', 'غ'), - (0x1EE3C, 'X'), - (0x1EE42, 'M', 'ج'), - (0x1EE43, 'X'), - (0x1EE47, 'M', 'ح'), - (0x1EE48, 'X'), - (0x1EE49, 'M', 'ي'), - (0x1EE4A, 'X'), - ] - -def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EE4B, 'M', 'ل'), - (0x1EE4C, 'X'), - (0x1EE4D, 'M', 'ن'), - (0x1EE4E, 'M', 'س'), - (0x1EE4F, 'M', 'ع'), - (0x1EE50, 'X'), - (0x1EE51, 'M', 'ص'), - (0x1EE52, 'M', 'ق'), - (0x1EE53, 'X'), - (0x1EE54, 'M', 'ش'), - (0x1EE55, 'X'), - (0x1EE57, 'M', 'خ'), - (0x1EE58, 'X'), - (0x1EE59, 'M', 'ض'), - (0x1EE5A, 'X'), - (0x1EE5B, 'M', 'غ'), - (0x1EE5C, 'X'), - (0x1EE5D, 'M', 'ں'), - (0x1EE5E, 'X'), - (0x1EE5F, 'M', 'ٯ'), - (0x1EE60, 'X'), - (0x1EE61, 'M', 'ب'), - (0x1EE62, 'M', 'ج'), - (0x1EE63, 'X'), - (0x1EE64, 'M', 'ه'), - (0x1EE65, 'X'), - (0x1EE67, 'M', 'ح'), - (0x1EE68, 'M', 'ط'), - (0x1EE69, 'M', 'ي'), - (0x1EE6A, 'M', 'ك'), - (0x1EE6B, 'X'), - (0x1EE6C, 'M', 'م'), - (0x1EE6D, 'M', 'ن'), - (0x1EE6E, 'M', 'س'), - (0x1EE6F, 'M', 'ع'), - (0x1EE70, 'M', 'ف'), - (0x1EE71, 'M', 'ص'), - (0x1EE72, 'M', 'ق'), - (0x1EE73, 'X'), - (0x1EE74, 'M', 'ش'), - (0x1EE75, 'M', 'ت'), - (0x1EE76, 'M', 'ث'), - (0x1EE77, 'M', 'خ'), - (0x1EE78, 'X'), - (0x1EE79, 'M', 'ض'), - (0x1EE7A, 'M', 'ظ'), - (0x1EE7B, 'M', 'غ'), - (0x1EE7C, 'M', 'ٮ'), - (0x1EE7D, 'X'), - (0x1EE7E, 'M', 'ڡ'), - (0x1EE7F, 'X'), - (0x1EE80, 'M', 'ا'), - (0x1EE81, 'M', 'ب'), - (0x1EE82, 'M', 'ج'), - (0x1EE83, 'M', 'د'), - (0x1EE84, 'M', 'ه'), - (0x1EE85, 'M', 'و'), - (0x1EE86, 'M', 'ز'), - (0x1EE87, 'M', 'ح'), - (0x1EE88, 'M', 'ط'), - (0x1EE89, 'M', 'ي'), - (0x1EE8A, 'X'), - (0x1EE8B, 'M', 'ل'), - (0x1EE8C, 'M', 'م'), - (0x1EE8D, 'M', 'ن'), - (0x1EE8E, 'M', 'س'), - (0x1EE8F, 'M', 'ع'), - (0x1EE90, 'M', 'ف'), - (0x1EE91, 'M', 'ص'), - (0x1EE92, 'M', 'ق'), - (0x1EE93, 'M', 'ر'), - (0x1EE94, 'M', 'ش'), - (0x1EE95, 'M', 'ت'), - (0x1EE96, 'M', 'ث'), - (0x1EE97, 'M', 'خ'), - (0x1EE98, 'M', 'ذ'), - (0x1EE99, 'M', 'ض'), - (0x1EE9A, 'M', 'ظ'), - (0x1EE9B, 'M', 'غ'), - (0x1EE9C, 'X'), - (0x1EEA1, 'M', 'ب'), - (0x1EEA2, 'M', 'ج'), - (0x1EEA3, 'M', 'د'), - (0x1EEA4, 'X'), - (0x1EEA5, 'M', 'و'), - (0x1EEA6, 'M', 'ز'), - (0x1EEA7, 'M', 'ح'), - (0x1EEA8, 'M', 'ط'), - (0x1EEA9, 'M', 'ي'), - (0x1EEAA, 'X'), - (0x1EEAB, 'M', 'ل'), - (0x1EEAC, 'M', 'م'), - (0x1EEAD, 'M', 'ن'), - (0x1EEAE, 'M', 'س'), - (0x1EEAF, 'M', 'ع'), - (0x1EEB0, 'M', 'ف'), - (0x1EEB1, 'M', 'ص'), - (0x1EEB2, 'M', 'ق'), - (0x1EEB3, 'M', 'ر'), - (0x1EEB4, 'M', 'ش'), - ] - -def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EEB5, 'M', 'ت'), - (0x1EEB6, 'M', 'ث'), - (0x1EEB7, 'M', 'خ'), - (0x1EEB8, 'M', 'ذ'), - (0x1EEB9, 'M', 'ض'), - (0x1EEBA, 'M', 'ظ'), - (0x1EEBB, 'M', 'غ'), - (0x1EEBC, 'X'), - (0x1EEF0, 'V'), - (0x1EEF2, 'X'), - (0x1F000, 'V'), - (0x1F02C, 'X'), - (0x1F030, 'V'), - (0x1F094, 'X'), - (0x1F0A0, 'V'), - (0x1F0AF, 'X'), - (0x1F0B1, 'V'), - (0x1F0C0, 'X'), - (0x1F0C1, 'V'), - 
(0x1F0D0, 'X'), - (0x1F0D1, 'V'), - (0x1F0F6, 'X'), - (0x1F101, '3', '0,'), - (0x1F102, '3', '1,'), - (0x1F103, '3', '2,'), - (0x1F104, '3', '3,'), - (0x1F105, '3', '4,'), - (0x1F106, '3', '5,'), - (0x1F107, '3', '6,'), - (0x1F108, '3', '7,'), - (0x1F109, '3', '8,'), - (0x1F10A, '3', '9,'), - (0x1F10B, 'V'), - (0x1F110, '3', '(a)'), - (0x1F111, '3', '(b)'), - (0x1F112, '3', '(c)'), - (0x1F113, '3', '(d)'), - (0x1F114, '3', '(e)'), - (0x1F115, '3', '(f)'), - (0x1F116, '3', '(g)'), - (0x1F117, '3', '(h)'), - (0x1F118, '3', '(i)'), - (0x1F119, '3', '(j)'), - (0x1F11A, '3', '(k)'), - (0x1F11B, '3', '(l)'), - (0x1F11C, '3', '(m)'), - (0x1F11D, '3', '(n)'), - (0x1F11E, '3', '(o)'), - (0x1F11F, '3', '(p)'), - (0x1F120, '3', '(q)'), - (0x1F121, '3', '(r)'), - (0x1F122, '3', '(s)'), - (0x1F123, '3', '(t)'), - (0x1F124, '3', '(u)'), - (0x1F125, '3', '(v)'), - (0x1F126, '3', '(w)'), - (0x1F127, '3', '(x)'), - (0x1F128, '3', '(y)'), - (0x1F129, '3', '(z)'), - (0x1F12A, 'M', '〔s〕'), - (0x1F12B, 'M', 'c'), - (0x1F12C, 'M', 'r'), - (0x1F12D, 'M', 'cd'), - (0x1F12E, 'M', 'wz'), - (0x1F12F, 'V'), - (0x1F130, 'M', 'a'), - (0x1F131, 'M', 'b'), - (0x1F132, 'M', 'c'), - (0x1F133, 'M', 'd'), - (0x1F134, 'M', 'e'), - (0x1F135, 'M', 'f'), - (0x1F136, 'M', 'g'), - (0x1F137, 'M', 'h'), - (0x1F138, 'M', 'i'), - (0x1F139, 'M', 'j'), - (0x1F13A, 'M', 'k'), - (0x1F13B, 'M', 'l'), - (0x1F13C, 'M', 'm'), - (0x1F13D, 'M', 'n'), - (0x1F13E, 'M', 'o'), - (0x1F13F, 'M', 'p'), - (0x1F140, 'M', 'q'), - (0x1F141, 'M', 'r'), - (0x1F142, 'M', 's'), - (0x1F143, 'M', 't'), - (0x1F144, 'M', 'u'), - (0x1F145, 'M', 'v'), - (0x1F146, 'M', 'w'), - (0x1F147, 'M', 'x'), - (0x1F148, 'M', 'y'), - (0x1F149, 'M', 'z'), - (0x1F14A, 'M', 'hv'), - (0x1F14B, 'M', 'mv'), - (0x1F14C, 'M', 'sd'), - (0x1F14D, 'M', 'ss'), - (0x1F14E, 'M', 'ppv'), - (0x1F14F, 'M', 'wc'), - (0x1F150, 'V'), - (0x1F16A, 'M', 'mc'), - (0x1F16B, 'M', 'md'), - ] - -def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1F16C, 'M', 'mr'), - (0x1F16D, 'V'), - (0x1F190, 'M', 'dj'), - (0x1F191, 'V'), - (0x1F1AE, 'X'), - (0x1F1E6, 'V'), - (0x1F200, 'M', 'ほか'), - (0x1F201, 'M', 'ココ'), - (0x1F202, 'M', 'サ'), - (0x1F203, 'X'), - (0x1F210, 'M', '手'), - (0x1F211, 'M', '字'), - (0x1F212, 'M', '双'), - (0x1F213, 'M', 'デ'), - (0x1F214, 'M', '二'), - (0x1F215, 'M', '多'), - (0x1F216, 'M', '解'), - (0x1F217, 'M', '天'), - (0x1F218, 'M', '交'), - (0x1F219, 'M', '映'), - (0x1F21A, 'M', '無'), - (0x1F21B, 'M', '料'), - (0x1F21C, 'M', '前'), - (0x1F21D, 'M', '後'), - (0x1F21E, 'M', '再'), - (0x1F21F, 'M', '新'), - (0x1F220, 'M', '初'), - (0x1F221, 'M', '終'), - (0x1F222, 'M', '生'), - (0x1F223, 'M', '販'), - (0x1F224, 'M', '声'), - (0x1F225, 'M', '吹'), - (0x1F226, 'M', '演'), - (0x1F227, 'M', '投'), - (0x1F228, 'M', '捕'), - (0x1F229, 'M', '一'), - (0x1F22A, 'M', '三'), - (0x1F22B, 'M', '遊'), - (0x1F22C, 'M', '左'), - (0x1F22D, 'M', '中'), - (0x1F22E, 'M', '右'), - (0x1F22F, 'M', '指'), - (0x1F230, 'M', '走'), - (0x1F231, 'M', '打'), - (0x1F232, 'M', '禁'), - (0x1F233, 'M', '空'), - (0x1F234, 'M', '合'), - (0x1F235, 'M', '満'), - (0x1F236, 'M', '有'), - (0x1F237, 'M', '月'), - (0x1F238, 'M', '申'), - (0x1F239, 'M', '割'), - (0x1F23A, 'M', '営'), - (0x1F23B, 'M', '配'), - (0x1F23C, 'X'), - (0x1F240, 'M', '〔本〕'), - (0x1F241, 'M', '〔三〕'), - (0x1F242, 'M', '〔二〕'), - (0x1F243, 'M', '〔安〕'), - (0x1F244, 'M', '〔点〕'), - (0x1F245, 'M', '〔打〕'), - (0x1F246, 'M', '〔盗〕'), - (0x1F247, 'M', '〔勝〕'), - (0x1F248, 'M', '〔敗〕'), - (0x1F249, 'X'), - (0x1F250, 'M', '得'), - (0x1F251, 'M', '可'), - (0x1F252, 'X'), - (0x1F260, 
'V'), - (0x1F266, 'X'), - (0x1F300, 'V'), - (0x1F6D8, 'X'), - (0x1F6DC, 'V'), - (0x1F6ED, 'X'), - (0x1F6F0, 'V'), - (0x1F6FD, 'X'), - (0x1F700, 'V'), - (0x1F777, 'X'), - (0x1F77B, 'V'), - (0x1F7DA, 'X'), - (0x1F7E0, 'V'), - (0x1F7EC, 'X'), - (0x1F7F0, 'V'), - (0x1F7F1, 'X'), - (0x1F800, 'V'), - (0x1F80C, 'X'), - (0x1F810, 'V'), - (0x1F848, 'X'), - (0x1F850, 'V'), - (0x1F85A, 'X'), - (0x1F860, 'V'), - (0x1F888, 'X'), - (0x1F890, 'V'), - (0x1F8AE, 'X'), - (0x1F8B0, 'V'), - (0x1F8B2, 'X'), - (0x1F900, 'V'), - (0x1FA54, 'X'), - (0x1FA60, 'V'), - (0x1FA6E, 'X'), - ] - -def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1FA70, 'V'), - (0x1FA7D, 'X'), - (0x1FA80, 'V'), - (0x1FA89, 'X'), - (0x1FA90, 'V'), - (0x1FABE, 'X'), - (0x1FABF, 'V'), - (0x1FAC6, 'X'), - (0x1FACE, 'V'), - (0x1FADC, 'X'), - (0x1FAE0, 'V'), - (0x1FAE9, 'X'), - (0x1FAF0, 'V'), - (0x1FAF9, 'X'), - (0x1FB00, 'V'), - (0x1FB93, 'X'), - (0x1FB94, 'V'), - (0x1FBCB, 'X'), - (0x1FBF0, 'M', '0'), - (0x1FBF1, 'M', '1'), - (0x1FBF2, 'M', '2'), - (0x1FBF3, 'M', '3'), - (0x1FBF4, 'M', '4'), - (0x1FBF5, 'M', '5'), - (0x1FBF6, 'M', '6'), - (0x1FBF7, 'M', '7'), - (0x1FBF8, 'M', '8'), - (0x1FBF9, 'M', '9'), - (0x1FBFA, 'X'), - (0x20000, 'V'), - (0x2A6E0, 'X'), - (0x2A700, 'V'), - (0x2B73A, 'X'), - (0x2B740, 'V'), - (0x2B81E, 'X'), - (0x2B820, 'V'), - (0x2CEA2, 'X'), - (0x2CEB0, 'V'), - (0x2EBE1, 'X'), - (0x2F800, 'M', '丽'), - (0x2F801, 'M', '丸'), - (0x2F802, 'M', '乁'), - (0x2F803, 'M', '𠄢'), - (0x2F804, 'M', '你'), - (0x2F805, 'M', '侮'), - (0x2F806, 'M', '侻'), - (0x2F807, 'M', '倂'), - (0x2F808, 'M', '偺'), - (0x2F809, 'M', '備'), - (0x2F80A, 'M', '僧'), - (0x2F80B, 'M', '像'), - (0x2F80C, 'M', '㒞'), - (0x2F80D, 'M', '𠘺'), - (0x2F80E, 'M', '免'), - (0x2F80F, 'M', '兔'), - (0x2F810, 'M', '兤'), - (0x2F811, 'M', '具'), - (0x2F812, 'M', '𠔜'), - (0x2F813, 'M', '㒹'), - (0x2F814, 'M', '內'), - (0x2F815, 'M', '再'), - (0x2F816, 'M', '𠕋'), - (0x2F817, 'M', '冗'), - (0x2F818, 'M', '冤'), - (0x2F819, 'M', '仌'), - (0x2F81A, 'M', '冬'), - (0x2F81B, 'M', '况'), - (0x2F81C, 'M', '𩇟'), - (0x2F81D, 'M', '凵'), - (0x2F81E, 'M', '刃'), - (0x2F81F, 'M', '㓟'), - (0x2F820, 'M', '刻'), - (0x2F821, 'M', '剆'), - (0x2F822, 'M', '割'), - (0x2F823, 'M', '剷'), - (0x2F824, 'M', '㔕'), - (0x2F825, 'M', '勇'), - (0x2F826, 'M', '勉'), - (0x2F827, 'M', '勤'), - (0x2F828, 'M', '勺'), - (0x2F829, 'M', '包'), - (0x2F82A, 'M', '匆'), - (0x2F82B, 'M', '北'), - (0x2F82C, 'M', '卉'), - (0x2F82D, 'M', '卑'), - (0x2F82E, 'M', '博'), - (0x2F82F, 'M', '即'), - (0x2F830, 'M', '卽'), - (0x2F831, 'M', '卿'), - (0x2F834, 'M', '𠨬'), - (0x2F835, 'M', '灰'), - (0x2F836, 'M', '及'), - (0x2F837, 'M', '叟'), - (0x2F838, 'M', '𠭣'), - (0x2F839, 'M', '叫'), - (0x2F83A, 'M', '叱'), - (0x2F83B, 'M', '吆'), - (0x2F83C, 'M', '咞'), - (0x2F83D, 'M', '吸'), - (0x2F83E, 'M', '呈'), - ] - -def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F83F, 'M', '周'), - (0x2F840, 'M', '咢'), - (0x2F841, 'M', '哶'), - (0x2F842, 'M', '唐'), - (0x2F843, 'M', '啓'), - (0x2F844, 'M', '啣'), - (0x2F845, 'M', '善'), - (0x2F847, 'M', '喙'), - (0x2F848, 'M', '喫'), - (0x2F849, 'M', '喳'), - (0x2F84A, 'M', '嗂'), - (0x2F84B, 'M', '圖'), - (0x2F84C, 'M', '嘆'), - (0x2F84D, 'M', '圗'), - (0x2F84E, 'M', '噑'), - (0x2F84F, 'M', '噴'), - (0x2F850, 'M', '切'), - (0x2F851, 'M', '壮'), - (0x2F852, 'M', '城'), - (0x2F853, 'M', '埴'), - (0x2F854, 'M', '堍'), - (0x2F855, 'M', '型'), - (0x2F856, 'M', '堲'), - (0x2F857, 'M', '報'), - (0x2F858, 'M', '墬'), - (0x2F859, 'M', '𡓤'), - (0x2F85A, 'M', '売'), - (0x2F85B, 'M', '壷'), - (0x2F85C, 'M', 
'夆'), - (0x2F85D, 'M', '多'), - (0x2F85E, 'M', '夢'), - (0x2F85F, 'M', '奢'), - (0x2F860, 'M', '𡚨'), - (0x2F861, 'M', '𡛪'), - (0x2F862, 'M', '姬'), - (0x2F863, 'M', '娛'), - (0x2F864, 'M', '娧'), - (0x2F865, 'M', '姘'), - (0x2F866, 'M', '婦'), - (0x2F867, 'M', '㛮'), - (0x2F868, 'X'), - (0x2F869, 'M', '嬈'), - (0x2F86A, 'M', '嬾'), - (0x2F86C, 'M', '𡧈'), - (0x2F86D, 'M', '寃'), - (0x2F86E, 'M', '寘'), - (0x2F86F, 'M', '寧'), - (0x2F870, 'M', '寳'), - (0x2F871, 'M', '𡬘'), - (0x2F872, 'M', '寿'), - (0x2F873, 'M', '将'), - (0x2F874, 'X'), - (0x2F875, 'M', '尢'), - (0x2F876, 'M', '㞁'), - (0x2F877, 'M', '屠'), - (0x2F878, 'M', '屮'), - (0x2F879, 'M', '峀'), - (0x2F87A, 'M', '岍'), - (0x2F87B, 'M', '𡷤'), - (0x2F87C, 'M', '嵃'), - (0x2F87D, 'M', '𡷦'), - (0x2F87E, 'M', '嵮'), - (0x2F87F, 'M', '嵫'), - (0x2F880, 'M', '嵼'), - (0x2F881, 'M', '巡'), - (0x2F882, 'M', '巢'), - (0x2F883, 'M', '㠯'), - (0x2F884, 'M', '巽'), - (0x2F885, 'M', '帨'), - (0x2F886, 'M', '帽'), - (0x2F887, 'M', '幩'), - (0x2F888, 'M', '㡢'), - (0x2F889, 'M', '𢆃'), - (0x2F88A, 'M', '㡼'), - (0x2F88B, 'M', '庰'), - (0x2F88C, 'M', '庳'), - (0x2F88D, 'M', '庶'), - (0x2F88E, 'M', '廊'), - (0x2F88F, 'M', '𪎒'), - (0x2F890, 'M', '廾'), - (0x2F891, 'M', '𢌱'), - (0x2F893, 'M', '舁'), - (0x2F894, 'M', '弢'), - (0x2F896, 'M', '㣇'), - (0x2F897, 'M', '𣊸'), - (0x2F898, 'M', '𦇚'), - (0x2F899, 'M', '形'), - (0x2F89A, 'M', '彫'), - (0x2F89B, 'M', '㣣'), - (0x2F89C, 'M', '徚'), - (0x2F89D, 'M', '忍'), - (0x2F89E, 'M', '志'), - (0x2F89F, 'M', '忹'), - (0x2F8A0, 'M', '悁'), - (0x2F8A1, 'M', '㤺'), - (0x2F8A2, 'M', '㤜'), - (0x2F8A3, 'M', '悔'), - (0x2F8A4, 'M', '𢛔'), - (0x2F8A5, 'M', '惇'), - (0x2F8A6, 'M', '慈'), - ] - -def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F8A7, 'M', '慌'), - (0x2F8A8, 'M', '慎'), - (0x2F8A9, 'M', '慌'), - (0x2F8AA, 'M', '慺'), - (0x2F8AB, 'M', '憎'), - (0x2F8AC, 'M', '憲'), - (0x2F8AD, 'M', '憤'), - (0x2F8AE, 'M', '憯'), - (0x2F8AF, 'M', '懞'), - (0x2F8B0, 'M', '懲'), - (0x2F8B1, 'M', '懶'), - (0x2F8B2, 'M', '成'), - (0x2F8B3, 'M', '戛'), - (0x2F8B4, 'M', '扝'), - (0x2F8B5, 'M', '抱'), - (0x2F8B6, 'M', '拔'), - (0x2F8B7, 'M', '捐'), - (0x2F8B8, 'M', '𢬌'), - (0x2F8B9, 'M', '挽'), - (0x2F8BA, 'M', '拼'), - (0x2F8BB, 'M', '捨'), - (0x2F8BC, 'M', '掃'), - (0x2F8BD, 'M', '揤'), - (0x2F8BE, 'M', '𢯱'), - (0x2F8BF, 'M', '搢'), - (0x2F8C0, 'M', '揅'), - (0x2F8C1, 'M', '掩'), - (0x2F8C2, 'M', '㨮'), - (0x2F8C3, 'M', '摩'), - (0x2F8C4, 'M', '摾'), - (0x2F8C5, 'M', '撝'), - (0x2F8C6, 'M', '摷'), - (0x2F8C7, 'M', '㩬'), - (0x2F8C8, 'M', '敏'), - (0x2F8C9, 'M', '敬'), - (0x2F8CA, 'M', '𣀊'), - (0x2F8CB, 'M', '旣'), - (0x2F8CC, 'M', '書'), - (0x2F8CD, 'M', '晉'), - (0x2F8CE, 'M', '㬙'), - (0x2F8CF, 'M', '暑'), - (0x2F8D0, 'M', '㬈'), - (0x2F8D1, 'M', '㫤'), - (0x2F8D2, 'M', '冒'), - (0x2F8D3, 'M', '冕'), - (0x2F8D4, 'M', '最'), - (0x2F8D5, 'M', '暜'), - (0x2F8D6, 'M', '肭'), - (0x2F8D7, 'M', '䏙'), - (0x2F8D8, 'M', '朗'), - (0x2F8D9, 'M', '望'), - (0x2F8DA, 'M', '朡'), - (0x2F8DB, 'M', '杞'), - (0x2F8DC, 'M', '杓'), - (0x2F8DD, 'M', '𣏃'), - (0x2F8DE, 'M', '㭉'), - (0x2F8DF, 'M', '柺'), - (0x2F8E0, 'M', '枅'), - (0x2F8E1, 'M', '桒'), - (0x2F8E2, 'M', '梅'), - (0x2F8E3, 'M', '𣑭'), - (0x2F8E4, 'M', '梎'), - (0x2F8E5, 'M', '栟'), - (0x2F8E6, 'M', '椔'), - (0x2F8E7, 'M', '㮝'), - (0x2F8E8, 'M', '楂'), - (0x2F8E9, 'M', '榣'), - (0x2F8EA, 'M', '槪'), - (0x2F8EB, 'M', '檨'), - (0x2F8EC, 'M', '𣚣'), - (0x2F8ED, 'M', '櫛'), - (0x2F8EE, 'M', '㰘'), - (0x2F8EF, 'M', '次'), - (0x2F8F0, 'M', '𣢧'), - (0x2F8F1, 'M', '歔'), - (0x2F8F2, 'M', '㱎'), - (0x2F8F3, 'M', '歲'), - (0x2F8F4, 'M', '殟'), - (0x2F8F5, 'M', '殺'), - (0x2F8F6, 'M', 
'殻'), - (0x2F8F7, 'M', '𣪍'), - (0x2F8F8, 'M', '𡴋'), - (0x2F8F9, 'M', '𣫺'), - (0x2F8FA, 'M', '汎'), - (0x2F8FB, 'M', '𣲼'), - (0x2F8FC, 'M', '沿'), - (0x2F8FD, 'M', '泍'), - (0x2F8FE, 'M', '汧'), - (0x2F8FF, 'M', '洖'), - (0x2F900, 'M', '派'), - (0x2F901, 'M', '海'), - (0x2F902, 'M', '流'), - (0x2F903, 'M', '浩'), - (0x2F904, 'M', '浸'), - (0x2F905, 'M', '涅'), - (0x2F906, 'M', '𣴞'), - (0x2F907, 'M', '洴'), - (0x2F908, 'M', '港'), - (0x2F909, 'M', '湮'), - (0x2F90A, 'M', '㴳'), - ] - -def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F90B, 'M', '滋'), - (0x2F90C, 'M', '滇'), - (0x2F90D, 'M', '𣻑'), - (0x2F90E, 'M', '淹'), - (0x2F90F, 'M', '潮'), - (0x2F910, 'M', '𣽞'), - (0x2F911, 'M', '𣾎'), - (0x2F912, 'M', '濆'), - (0x2F913, 'M', '瀹'), - (0x2F914, 'M', '瀞'), - (0x2F915, 'M', '瀛'), - (0x2F916, 'M', '㶖'), - (0x2F917, 'M', '灊'), - (0x2F918, 'M', '災'), - (0x2F919, 'M', '灷'), - (0x2F91A, 'M', '炭'), - (0x2F91B, 'M', '𠔥'), - (0x2F91C, 'M', '煅'), - (0x2F91D, 'M', '𤉣'), - (0x2F91E, 'M', '熜'), - (0x2F91F, 'X'), - (0x2F920, 'M', '爨'), - (0x2F921, 'M', '爵'), - (0x2F922, 'M', '牐'), - (0x2F923, 'M', '𤘈'), - (0x2F924, 'M', '犀'), - (0x2F925, 'M', '犕'), - (0x2F926, 'M', '𤜵'), - (0x2F927, 'M', '𤠔'), - (0x2F928, 'M', '獺'), - (0x2F929, 'M', '王'), - (0x2F92A, 'M', '㺬'), - (0x2F92B, 'M', '玥'), - (0x2F92C, 'M', '㺸'), - (0x2F92E, 'M', '瑇'), - (0x2F92F, 'M', '瑜'), - (0x2F930, 'M', '瑱'), - (0x2F931, 'M', '璅'), - (0x2F932, 'M', '瓊'), - (0x2F933, 'M', '㼛'), - (0x2F934, 'M', '甤'), - (0x2F935, 'M', '𤰶'), - (0x2F936, 'M', '甾'), - (0x2F937, 'M', '𤲒'), - (0x2F938, 'M', '異'), - (0x2F939, 'M', '𢆟'), - (0x2F93A, 'M', '瘐'), - (0x2F93B, 'M', '𤾡'), - (0x2F93C, 'M', '𤾸'), - (0x2F93D, 'M', '𥁄'), - (0x2F93E, 'M', '㿼'), - (0x2F93F, 'M', '䀈'), - (0x2F940, 'M', '直'), - (0x2F941, 'M', '𥃳'), - (0x2F942, 'M', '𥃲'), - (0x2F943, 'M', '𥄙'), - (0x2F944, 'M', '𥄳'), - (0x2F945, 'M', '眞'), - (0x2F946, 'M', '真'), - (0x2F948, 'M', '睊'), - (0x2F949, 'M', '䀹'), - (0x2F94A, 'M', '瞋'), - (0x2F94B, 'M', '䁆'), - (0x2F94C, 'M', '䂖'), - (0x2F94D, 'M', '𥐝'), - (0x2F94E, 'M', '硎'), - (0x2F94F, 'M', '碌'), - (0x2F950, 'M', '磌'), - (0x2F951, 'M', '䃣'), - (0x2F952, 'M', '𥘦'), - (0x2F953, 'M', '祖'), - (0x2F954, 'M', '𥚚'), - (0x2F955, 'M', '𥛅'), - (0x2F956, 'M', '福'), - (0x2F957, 'M', '秫'), - (0x2F958, 'M', '䄯'), - (0x2F959, 'M', '穀'), - (0x2F95A, 'M', '穊'), - (0x2F95B, 'M', '穏'), - (0x2F95C, 'M', '𥥼'), - (0x2F95D, 'M', '𥪧'), - (0x2F95F, 'X'), - (0x2F960, 'M', '䈂'), - (0x2F961, 'M', '𥮫'), - (0x2F962, 'M', '篆'), - (0x2F963, 'M', '築'), - (0x2F964, 'M', '䈧'), - (0x2F965, 'M', '𥲀'), - (0x2F966, 'M', '糒'), - (0x2F967, 'M', '䊠'), - (0x2F968, 'M', '糨'), - (0x2F969, 'M', '糣'), - (0x2F96A, 'M', '紀'), - (0x2F96B, 'M', '𥾆'), - (0x2F96C, 'M', '絣'), - (0x2F96D, 'M', '䌁'), - (0x2F96E, 'M', '緇'), - (0x2F96F, 'M', '縂'), - (0x2F970, 'M', '繅'), - (0x2F971, 'M', '䌴'), - ] - -def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F972, 'M', '𦈨'), - (0x2F973, 'M', '𦉇'), - (0x2F974, 'M', '䍙'), - (0x2F975, 'M', '𦋙'), - (0x2F976, 'M', '罺'), - (0x2F977, 'M', '𦌾'), - (0x2F978, 'M', '羕'), - (0x2F979, 'M', '翺'), - (0x2F97A, 'M', '者'), - (0x2F97B, 'M', '𦓚'), - (0x2F97C, 'M', '𦔣'), - (0x2F97D, 'M', '聠'), - (0x2F97E, 'M', '𦖨'), - (0x2F97F, 'M', '聰'), - (0x2F980, 'M', '𣍟'), - (0x2F981, 'M', '䏕'), - (0x2F982, 'M', '育'), - (0x2F983, 'M', '脃'), - (0x2F984, 'M', '䐋'), - (0x2F985, 'M', '脾'), - (0x2F986, 'M', '媵'), - (0x2F987, 'M', '𦞧'), - (0x2F988, 'M', '𦞵'), - (0x2F989, 'M', '𣎓'), - (0x2F98A, 'M', '𣎜'), - (0x2F98B, 'M', '舁'), - (0x2F98C, 'M', '舄'), - 
(0x2F98D, 'M', '辞'), - (0x2F98E, 'M', '䑫'), - (0x2F98F, 'M', '芑'), - (0x2F990, 'M', '芋'), - (0x2F991, 'M', '芝'), - (0x2F992, 'M', '劳'), - (0x2F993, 'M', '花'), - (0x2F994, 'M', '芳'), - (0x2F995, 'M', '芽'), - (0x2F996, 'M', '苦'), - (0x2F997, 'M', '𦬼'), - (0x2F998, 'M', '若'), - (0x2F999, 'M', '茝'), - (0x2F99A, 'M', '荣'), - (0x2F99B, 'M', '莭'), - (0x2F99C, 'M', '茣'), - (0x2F99D, 'M', '莽'), - (0x2F99E, 'M', '菧'), - (0x2F99F, 'M', '著'), - (0x2F9A0, 'M', '荓'), - (0x2F9A1, 'M', '菊'), - (0x2F9A2, 'M', '菌'), - (0x2F9A3, 'M', '菜'), - (0x2F9A4, 'M', '𦰶'), - (0x2F9A5, 'M', '𦵫'), - (0x2F9A6, 'M', '𦳕'), - (0x2F9A7, 'M', '䔫'), - (0x2F9A8, 'M', '蓱'), - (0x2F9A9, 'M', '蓳'), - (0x2F9AA, 'M', '蔖'), - (0x2F9AB, 'M', '𧏊'), - (0x2F9AC, 'M', '蕤'), - (0x2F9AD, 'M', '𦼬'), - (0x2F9AE, 'M', '䕝'), - (0x2F9AF, 'M', '䕡'), - (0x2F9B0, 'M', '𦾱'), - (0x2F9B1, 'M', '𧃒'), - (0x2F9B2, 'M', '䕫'), - (0x2F9B3, 'M', '虐'), - (0x2F9B4, 'M', '虜'), - (0x2F9B5, 'M', '虧'), - (0x2F9B6, 'M', '虩'), - (0x2F9B7, 'M', '蚩'), - (0x2F9B8, 'M', '蚈'), - (0x2F9B9, 'M', '蜎'), - (0x2F9BA, 'M', '蛢'), - (0x2F9BB, 'M', '蝹'), - (0x2F9BC, 'M', '蜨'), - (0x2F9BD, 'M', '蝫'), - (0x2F9BE, 'M', '螆'), - (0x2F9BF, 'X'), - (0x2F9C0, 'M', '蟡'), - (0x2F9C1, 'M', '蠁'), - (0x2F9C2, 'M', '䗹'), - (0x2F9C3, 'M', '衠'), - (0x2F9C4, 'M', '衣'), - (0x2F9C5, 'M', '𧙧'), - (0x2F9C6, 'M', '裗'), - (0x2F9C7, 'M', '裞'), - (0x2F9C8, 'M', '䘵'), - (0x2F9C9, 'M', '裺'), - (0x2F9CA, 'M', '㒻'), - (0x2F9CB, 'M', '𧢮'), - (0x2F9CC, 'M', '𧥦'), - (0x2F9CD, 'M', '䚾'), - (0x2F9CE, 'M', '䛇'), - (0x2F9CF, 'M', '誠'), - (0x2F9D0, 'M', '諭'), - (0x2F9D1, 'M', '變'), - (0x2F9D2, 'M', '豕'), - (0x2F9D3, 'M', '𧲨'), - (0x2F9D4, 'M', '貫'), - (0x2F9D5, 'M', '賁'), - ] - -def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F9D6, 'M', '贛'), - (0x2F9D7, 'M', '起'), - (0x2F9D8, 'M', '𧼯'), - (0x2F9D9, 'M', '𠠄'), - (0x2F9DA, 'M', '跋'), - (0x2F9DB, 'M', '趼'), - (0x2F9DC, 'M', '跰'), - (0x2F9DD, 'M', '𠣞'), - (0x2F9DE, 'M', '軔'), - (0x2F9DF, 'M', '輸'), - (0x2F9E0, 'M', '𨗒'), - (0x2F9E1, 'M', '𨗭'), - (0x2F9E2, 'M', '邔'), - (0x2F9E3, 'M', '郱'), - (0x2F9E4, 'M', '鄑'), - (0x2F9E5, 'M', '𨜮'), - (0x2F9E6, 'M', '鄛'), - (0x2F9E7, 'M', '鈸'), - (0x2F9E8, 'M', '鋗'), - (0x2F9E9, 'M', '鋘'), - (0x2F9EA, 'M', '鉼'), - (0x2F9EB, 'M', '鏹'), - (0x2F9EC, 'M', '鐕'), - (0x2F9ED, 'M', '𨯺'), - (0x2F9EE, 'M', '開'), - (0x2F9EF, 'M', '䦕'), - (0x2F9F0, 'M', '閷'), - (0x2F9F1, 'M', '𨵷'), - (0x2F9F2, 'M', '䧦'), - (0x2F9F3, 'M', '雃'), - (0x2F9F4, 'M', '嶲'), - (0x2F9F5, 'M', '霣'), - (0x2F9F6, 'M', '𩅅'), - (0x2F9F7, 'M', '𩈚'), - (0x2F9F8, 'M', '䩮'), - (0x2F9F9, 'M', '䩶'), - (0x2F9FA, 'M', '韠'), - (0x2F9FB, 'M', '𩐊'), - (0x2F9FC, 'M', '䪲'), - (0x2F9FD, 'M', '𩒖'), - (0x2F9FE, 'M', '頋'), - (0x2FA00, 'M', '頩'), - (0x2FA01, 'M', '𩖶'), - (0x2FA02, 'M', '飢'), - (0x2FA03, 'M', '䬳'), - (0x2FA04, 'M', '餩'), - (0x2FA05, 'M', '馧'), - (0x2FA06, 'M', '駂'), - (0x2FA07, 'M', '駾'), - (0x2FA08, 'M', '䯎'), - (0x2FA09, 'M', '𩬰'), - (0x2FA0A, 'M', '鬒'), - (0x2FA0B, 'M', '鱀'), - (0x2FA0C, 'M', '鳽'), - (0x2FA0D, 'M', '䳎'), - (0x2FA0E, 'M', '䳭'), - (0x2FA0F, 'M', '鵧'), - (0x2FA10, 'M', '𪃎'), - (0x2FA11, 'M', '䳸'), - (0x2FA12, 'M', '𪄅'), - (0x2FA13, 'M', '𪈎'), - (0x2FA14, 'M', '𪊑'), - (0x2FA15, 'M', '麻'), - (0x2FA16, 'M', '䵖'), - (0x2FA17, 'M', '黹'), - (0x2FA18, 'M', '黾'), - (0x2FA19, 'M', '鼅'), - (0x2FA1A, 'M', '鼏'), - (0x2FA1B, 'M', '鼖'), - (0x2FA1C, 'M', '鼻'), - (0x2FA1D, 'M', '𪘀'), - (0x2FA1E, 'X'), - (0x30000, 'V'), - (0x3134B, 'X'), - (0x31350, 'V'), - (0x323B0, 'X'), - (0xE0100, 'I'), - (0xE01F0, 'X'), - ] - -uts46data = tuple( - _seg_0() 
- + _seg_1() - + _seg_2() - + _seg_3() - + _seg_4() - + _seg_5() - + _seg_6() - + _seg_7() - + _seg_8() - + _seg_9() - + _seg_10() - + _seg_11() - + _seg_12() - + _seg_13() - + _seg_14() - + _seg_15() - + _seg_16() - + _seg_17() - + _seg_18() - + _seg_19() - + _seg_20() - + _seg_21() - + _seg_22() - + _seg_23() - + _seg_24() - + _seg_25() - + _seg_26() - + _seg_27() - + _seg_28() - + _seg_29() - + _seg_30() - + _seg_31() - + _seg_32() - + _seg_33() - + _seg_34() - + _seg_35() - + _seg_36() - + _seg_37() - + _seg_38() - + _seg_39() - + _seg_40() - + _seg_41() - + _seg_42() - + _seg_43() - + _seg_44() - + _seg_45() - + _seg_46() - + _seg_47() - + _seg_48() - + _seg_49() - + _seg_50() - + _seg_51() - + _seg_52() - + _seg_53() - + _seg_54() - + _seg_55() - + _seg_56() - + _seg_57() - + _seg_58() - + _seg_59() - + _seg_60() - + _seg_61() - + _seg_62() - + _seg_63() - + _seg_64() - + _seg_65() - + _seg_66() - + _seg_67() - + _seg_68() - + _seg_69() - + _seg_70() - + _seg_71() - + _seg_72() - + _seg_73() - + _seg_74() - + _seg_75() - + _seg_76() - + _seg_77() - + _seg_78() - + _seg_79() - + _seg_80() - + _seg_81() -) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/server/venv/Lib/site-packages/isapi/PyISAPI_loader.dll b/server/venv/Lib/site-packages/isapi/PyISAPI_loader.dll deleted file mode 100644 index fab6e55..0000000 Binary files a/server/venv/Lib/site-packages/isapi/PyISAPI_loader.dll and /dev/null differ diff --git a/server/venv/Lib/site-packages/isapi/README.txt b/server/venv/Lib/site-packages/isapi/README.txt deleted file mode 100644 index dc52862..0000000 --- a/server/venv/Lib/site-packages/isapi/README.txt +++ /dev/null @@ -1,7 +0,0 @@ -A Python ISAPI extension. Contributed by Phillip Frantz, and is -Copyright 2002-2003 by Blackdog Software Pty Ltd. - -See the 'samples' directory, and particularly samples\README.txt - -You can find documentation in the PyWin32.chm file that comes with pywin32 - -you can open this from Pythonwin->Help, or from the start menu. \ No newline at end of file diff --git a/server/venv/Lib/site-packages/isapi/__init__.py b/server/venv/Lib/site-packages/isapi/__init__.py deleted file mode 100644 index 7182361..0000000 --- a/server/venv/Lib/site-packages/isapi/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -# The Python ISAPI package. - - -# Exceptions thrown by the DLL framework. -class ISAPIError(Exception): - def __init__(self, errno, strerror=None, funcname=None): - # named attributes match IOError etc. - self.errno = errno - self.strerror = strerror - self.funcname = funcname - Exception.__init__(self, errno, strerror, funcname) - - def __str__(self): - if self.strerror is None: - try: - import win32api - - self.strerror = win32api.FormatMessage(self.errno).strip() - except: - self.strerror = "no error message is available" - # str() looks like a win32api error. - return str((self.errno, self.strerror, self.funcname)) - - -class FilterError(ISAPIError): - pass - - -class ExtensionError(ISAPIError): - pass - - -# A little development aid - a filter or extension callback function can -# raise one of these exceptions, and the handler module will be reloaded. -# This means you can change your code without restarting IIS. -# After a reload, your filter/extension will have the GetFilterVersion/ -# GetExtensionVersion function called, but with None as the first arg. 
-class InternalReloadException(Exception): - pass diff --git a/server/venv/Lib/site-packages/isapi/doc/isapi.html b/server/venv/Lib/site-packages/isapi/doc/isapi.html deleted file mode 100644 index 03001a1..0000000 --- a/server/venv/Lib/site-packages/isapi/doc/isapi.html +++ /dev/null @@ -1,92 +0,0 @@ - - - -Introduction to Python ISAPI support - -

Introduction to Python ISAPI support


See also


Note: if you are viewing this documentation directly from disk, -most links in this document will fail. You can also find this document in the -CHM file that comes with pywin32, where the links will work.

Introduction

-This document describes Python support for hosting ISAPI extensions and filters inside -Microsoft Internet Information Server (IIS). It assumes a basic understanding -of the ISAPI filter and extension mechanism. -

-In summary, to implement a filter or extension, you provide a Python module -which defines a Filter and/or Extension class. Once your class has been -loaded, IIS/ISAPI will, via an extension DLL, call methods on your class. -

-A filter or extension class instance need only provide 3 methods - for filters they -are called GetFilterVersion, HttpFilterProc and -TerminateFilter. For extensions they -are named GetExtensionVersion, HttpExtensionProc and -TerminateExtension. If you are familiar with writing ISAPI -extensions in C/C++, these names and their purpose will be familiar.

-Most of the work is done in the HttpFilterProc and -HttpExtensionProc methods. These both take a single -parameter - an HTTP_FILTER_CONTEXT and -EXTENSION_CONTROL_BLOCK -object respectively. -
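For concreteness, here is a minimal sketch of the extension side of that contract. The method names are the ones documented above; the response calls (SendResponseHeaders, WriteClient) are the ECB methods used throughout the samples in this package, and the body is illustrative only:

    from isapi import isapicon

    class Extension:
        "A minimal, illustrative ISAPI extension"

        def GetExtensionVersion(self, vi):
            # vi is None when the framework reloads the module (see the
            # reload support demonstrated in the samples).
            if vi is not None:
                vi.ExtensionDesc = self.__doc__

        def HttpExtensionProc(self, ecb):
            # ecb is the EXTENSION_CONTROL_BLOCK object described above.
            ecb.SendResponseHeaders("200 OK", "Content-Type: text/plain\r\n\r\n", 0)
            ecb.WriteClient(b"Hello from a Python ISAPI extension")
            ecb.close()
            return isapicon.HSE_STATUS_SUCCESS

        def TerminateExtension(self, status):
            pass

A filter has the same shape, with GetFilterVersion, HttpFilterProc and TerminateFilter in place of the three methods above.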

-In addition to these components, there is an 'isapi' package, containing -support facilities (base-classes, exceptions, etc) which can be leveraged -by the extension. - -

Base classes

-There are a number of base classes provided to make writing extensions a little -simpler. Of particular note is isapi.threaded_extension.ThreadPoolExtension. -This implements a thread-pool and informs IIS that the request is progressing -in the background. Your sub-class need only provide a Dispatch -method, which is called on one of the worker threads rather than the thread -that the request came in on. -

-There is a base-class for a filter in isapi.simple, but there is no -equivalent threaded filter - filters work under a different model, where -background processing is not possible. -
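For example, a thread-pooled extension is little more than a Dispatch method; this sketch follows the pattern used by the redirector samples later in this package (the response body is illustrative):

    from isapi import threaded_extension

    class Extension(threaded_extension.ThreadPoolExtension):
        "Illustrative thread-pooled extension"

        def Dispatch(self, ecb):
            # Called on a worker thread; the base class has already told IIS
            # the request is progressing in the background.
            ecb.SendResponseHeaders("200 OK", "Content-Type: text/plain\r\n\r\n", 0)
            ecb.WriteClient(b"handled on a worker thread")
            ecb.DoneWithSession()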

Samples

-Please see the isapi/samples directory for some sample filters -and extensions. - -

Implementation

-A Python ISAPI filter extension consists of 2 main components: -
  • A DLL used by ISAPI to interface with Python.
  • A Python script used by that DLL to implement the filter or extension -functionality.

Extension DLL

-The DLL is usually managed automatically by the isapi.install module. As the -Python script for the extension is installed, a generic DLL provided with -the isapi package is installed next to the script, and IIS is configured to -use this DLL. -

-The name of the DLL always has the same base name as the Python script, but -with a leading underscore (_), and an extension of .dll. For example, the -sample "redirector.py" will, when installed, have "_redirector.dll" created -in the same directory. -

-The Python script may provide 2 entry points - methods named __FilterFactory__ -and __ExtensionFactory__, both taking no arguments and returning a filter or -extension object. - -
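Putting this together, an extension script typically ends the way the samples below do: a factory entry point, plus an install block that registers the script when it is executed directly. The sketch below assumes the script defines an Extension class as above; the virtual directory name is arbitrary:

    def __ExtensionFactory__():
        return Extension()

    if __name__ == "__main__":
        # Executing the script installs (or removes) the extension; see
        # isapi.install.HandleCommandLine for the supported arguments.
        from isapi.install import *

        params = ISAPIParameters()
        sm = [ScriptMapParams(Extension="*", Flags=0)]
        vd = VirtualDirParameters(
            Name="MyPythonSample",  # arbitrary name for this sketch
            Description=Extension.__doc__,
            ScriptMaps=sm,
            ScriptMapUpdate="replace",
        )
        params.VirtualDirs = [vd]
        HandleCommandLine(params)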

Using py2exe and the isapi package

-You can instruct py2exe to create a 'frozen' Python ISAPI filter/extension. -In this case, py2exe will create a package with everything you need in one -directory, and the Python source file embedded in the .zip file. -

-In general, you will want to build a separate installation executable along -with the ISAPI extension. This executable will be built from the same script. -See the ISAPI sample in the py2exe distribution. diff --git a/server/venv/Lib/site-packages/isapi/install.py b/server/venv/Lib/site-packages/isapi/install.py deleted file mode 100644 index 154f82a..0000000 --- a/server/venv/Lib/site-packages/isapi/install.py +++ /dev/null @@ -1,815 +0,0 @@ -"""Installation utilities for Python ISAPI filters and extensions.""" - -# this code adapted from "Tomcat JK2 ISAPI redirector", part of Apache -# Created July 2004, Mark Hammond. -import imp -import os -import shutil -import stat -import sys -import traceback - -import pythoncom -import win32api -import winerror -from win32com.client import Dispatch, GetObject -from win32com.client.gencache import EnsureDispatch, EnsureModule - -_APP_INPROC = 0 -_APP_OUTPROC = 1 -_APP_POOLED = 2 -_IIS_OBJECT = "IIS://LocalHost/W3SVC" -_IIS_SERVER = "IIsWebServer" -_IIS_WEBDIR = "IIsWebDirectory" -_IIS_WEBVIRTUALDIR = "IIsWebVirtualDir" -_IIS_FILTERS = "IIsFilters" -_IIS_FILTER = "IIsFilter" - -_DEFAULT_SERVER_NAME = "Default Web Site" -_DEFAULT_HEADERS = "X-Powered-By: Python" -_DEFAULT_PROTECTION = _APP_POOLED - -# Default is for 'execute' only access - ie, only the extension -# can be used. This can be overridden via your install script. -_DEFAULT_ACCESS_EXECUTE = True -_DEFAULT_ACCESS_READ = False -_DEFAULT_ACCESS_WRITE = False -_DEFAULT_ACCESS_SCRIPT = False -_DEFAULT_CONTENT_INDEXED = False -_DEFAULT_ENABLE_DIR_BROWSING = False -_DEFAULT_ENABLE_DEFAULT_DOC = False - -_extensions = [ext for ext, _, _ in imp.get_suffixes()] -is_debug_build = "_d.pyd" in _extensions - -this_dir = os.path.abspath(os.path.dirname(__file__)) - - -class FilterParameters: - Name = None - Description = None - Path = None - Server = None - # Params that control if/how AddExtensionFile is called. - AddExtensionFile = True - AddExtensionFile_Enabled = True - AddExtensionFile_GroupID = None # defaults to Name - AddExtensionFile_CanDelete = True - AddExtensionFile_Description = None # defaults to Description. - - def __init__(self, **kw): - self.__dict__.update(kw) - - -class VirtualDirParameters: - Name = None # Must be provided. - Description = None # defaults to Name - AppProtection = _DEFAULT_PROTECTION - Headers = _DEFAULT_HEADERS - Path = None # defaults to WWW root. - Type = _IIS_WEBVIRTUALDIR - AccessExecute = _DEFAULT_ACCESS_EXECUTE - AccessRead = _DEFAULT_ACCESS_READ - AccessWrite = _DEFAULT_ACCESS_WRITE - AccessScript = _DEFAULT_ACCESS_SCRIPT - ContentIndexed = _DEFAULT_CONTENT_INDEXED - EnableDirBrowsing = _DEFAULT_ENABLE_DIR_BROWSING - EnableDefaultDoc = _DEFAULT_ENABLE_DEFAULT_DOC - DefaultDoc = None # Only set in IIS if not None - ScriptMaps = [] - ScriptMapUpdate = "end" # can be 'start', 'end', 'replace' - Server = None - - def __init__(self, **kw): - self.__dict__.update(kw) - - def is_root(self): - "This virtual directory is a root directory if parent and name are blank" - parent, name = self.split_path() - return not parent and not name - - def split_path(self): - return split_path(self.Name) - - -class ScriptMapParams: - Extension = None - Module = None - Flags = 5 - Verbs = "" - # Params that control if/how AddExtensionFile is called. - AddExtensionFile = True - AddExtensionFile_Enabled = True - AddExtensionFile_GroupID = None # defaults to Name - AddExtensionFile_CanDelete = True - AddExtensionFile_Description = None # defaults to Description.
- - def __init__(self, **kw): - self.__dict__.update(kw) - - def __str__(self): - "Format this parameter suitably for IIS" - items = [self.Extension, self.Module, self.Flags] - # IIS gets upset if there is a trailing verb comma, but no verbs - if self.Verbs: - items.append(self.Verbs) - items = [str(item) for item in items] - return ",".join(items) - - -class ISAPIParameters: - ServerName = _DEFAULT_SERVER_NAME - # Description = None - Filters = [] - VirtualDirs = [] - - def __init__(self, **kw): - self.__dict__.update(kw) - - -verbose = 1 # The level - 0 is quiet. - - -def log(level, what): - if verbose >= level: - print(what) - - -# Convert an ADSI COM exception to the Win32 error code embedded in it. -def _GetWin32ErrorCode(com_exc): - hr = com_exc.hresult - # If we have more details in the 'excepinfo' struct, use it. - if com_exc.excepinfo: - hr = com_exc.excepinfo[-1] - if winerror.HRESULT_FACILITY(hr) != winerror.FACILITY_WIN32: - raise - return winerror.SCODE_CODE(hr) - - -class InstallationError(Exception): - pass - - -class ItemNotFound(InstallationError): - pass - - -class ConfigurationError(InstallationError): - pass - - -def FindPath(options, server, name): - if name.lower().startswith("iis://"): - return name - else: - if name and name[0] != "/": - name = "/" + name - return FindWebServer(options, server) + "/ROOT" + name - - -def LocateWebServerPath(description): - """ - Find an IIS web server whose name or comment matches the provided - description (case-insensitive). - - >>> LocateWebServerPath('Default Web Site') # doctest: +SKIP - - or - - >>> LocateWebServerPath('1') #doctest: +SKIP - """ - assert len(description) >= 1, "Server name or comment is required" - iis = GetObject(_IIS_OBJECT) - description = description.lower().strip() - for site in iis: - # Name is generally a number, but no need to assume that. - site_attributes = [ - getattr(site, attr, "").lower().strip() - for attr in ("Name", "ServerComment") - ] - if description in site_attributes: - return site.AdsPath - msg = "No web sites match the description '%s'" % description - raise ItemNotFound(msg) - - -def GetWebServer(description=None): - """ - Load the web server instance (COM object) for a given instance - or description. - If None is specified, the default website is retrieved (indicated - by the identifier 1). - """ - description = description or "1" - path = LocateWebServerPath(description) - server = LoadWebServer(path) - return server - - -def LoadWebServer(path): - try: - server = GetObject(path) - except pythoncom.com_error as details: - msg = details.strerror - if details.excepinfo and details.excepinfo[2]: - msg = details.excepinfo[2] - msg = "WebServer %s: %s" % (path, msg) - raise ItemNotFound(msg) - return server - - -def FindWebServer(options, server_desc): - """ - Legacy function to allow options to define a .server property - to override the other parameter. Use GetWebServer instead. - """ - # options takes precedence - server_desc = options.server or server_desc - # make sure server_desc is unicode (could be mbcs if passed in - # sys.argv). - if server_desc and not isinstance(server_desc, str): - server_desc = server_desc.decode("mbcs") - - # get the server (if server_desc is None, the default site is acquired) - server = GetWebServer(server_desc) - return server.adsPath - - -def split_path(path): - """ - Get the parent path and basename.
- - >>> split_path('/') - ['', ''] - - >>> split_path('') - ['', ''] - - >>> split_path('foo') - ['', 'foo'] - - >>> split_path('/foo') - ['', 'foo'] - - >>> split_path('/foo/bar') - ['/foo', 'bar'] - - >>> split_path('foo/bar') - ['/foo', 'bar'] - """ - - if not path.startswith("/"): - path = "/" + path - return path.rsplit("/", 1) - - -def _CreateDirectory(iis_dir, name, params): - # We used to go to lengths to keep an existing virtual directory - # in place. However, in some cases the existing directories got - # into a bad state, and an update failed to get them working. - # So we nuke it first. If this is a problem, we could consider adding - # a --keep-existing option. - try: - # Also seen the Class change to a generic IISObject - so nuke - # *any* existing object, regardless of Class - assert name.strip("/"), "mustn't delete the root!" - iis_dir.Delete("", name) - log(2, "Deleted old directory '%s'" % (name,)) - except pythoncom.com_error: - pass - - newDir = iis_dir.Create(params.Type, name) - log(2, "Creating new directory '%s' in %s..." % (name, iis_dir.Name)) - - friendly = params.Description or params.Name - newDir.AppFriendlyName = friendly - - # Note that the new directory won't be visible in the IIS UI - # unless the directory exists on the filesystem. - try: - path = params.Path or iis_dir.Path - newDir.Path = path - except AttributeError: - # If params.Type is IIS_WEBDIRECTORY, an exception is thrown - pass - newDir.AppCreate2(params.AppProtection) - # XXX - note that these Headers only work in IIS6 and earlier. IIS7 - # only supports them on the w3svc node - not even on individual sites, - # let alone individual extensions in the site! - if params.Headers: - newDir.HttpCustomHeaders = params.Headers - - log(2, "Setting directory options...") - newDir.AccessExecute = params.AccessExecute - newDir.AccessRead = params.AccessRead - newDir.AccessWrite = params.AccessWrite - newDir.AccessScript = params.AccessScript - newDir.ContentIndexed = params.ContentIndexed - newDir.EnableDirBrowsing = params.EnableDirBrowsing - newDir.EnableDefaultDoc = params.EnableDefaultDoc - if params.DefaultDoc is not None: - newDir.DefaultDoc = params.DefaultDoc - newDir.SetInfo() - return newDir - - -def CreateDirectory(params, options): - _CallHook(params, "PreInstall", options) - if not params.Name: - raise ConfigurationError("No Name param") - parent, name = params.split_path() - target_dir = GetObject(FindPath(options, params.Server, parent)) - - if not params.is_root(): - target_dir = _CreateDirectory(target_dir, name, params) - - AssignScriptMaps(params.ScriptMaps, target_dir, params.ScriptMapUpdate) - - _CallHook(params, "PostInstall", options, target_dir) - log(1, "Configured Virtual Directory: %s" % (params.Name,)) - return target_dir - - -def AssignScriptMaps(script_maps, target, update="replace"): - """Updates IIS with the supplied script map information.
- - script_maps is a list of ScriptMapParameter objects - - target is an IIS Virtual Directory to assign the script maps to - - update is a string indicating how to update the maps, one of ('start', - 'end', or 'replace') - """ - # determine which function to use to assign script maps - script_map_func = "_AssignScriptMaps" + update.capitalize() - try: - script_map_func = eval(script_map_func) - except NameError: - msg = "Unknown ScriptMapUpdate option '%s'" % update - raise ConfigurationError(msg) - # use the str method to format the script maps for IIS - script_maps = [str(s) for s in script_maps] - # call the correct function - script_map_func(target, script_maps) - target.SetInfo() - - -def get_unique_items(sequence, reference): - "Return items in sequence that can't be found in reference." - return tuple([item for item in sequence if item not in reference]) - - -def _AssignScriptMapsReplace(target, script_maps): - target.ScriptMaps = script_maps - - -def _AssignScriptMapsEnd(target, script_maps): - unique_new_maps = get_unique_items(script_maps, target.ScriptMaps) - target.ScriptMaps = target.ScriptMaps + unique_new_maps - - -def _AssignScriptMapsStart(target, script_maps): - unique_new_maps = get_unique_items(script_maps, target.ScriptMaps) - target.ScriptMaps = unique_new_maps + target.ScriptMaps - - -def CreateISAPIFilter(filterParams, options): - server = FindWebServer(options, filterParams.Server) - _CallHook(filterParams, "PreInstall", options) - try: - filters = GetObject(server + "/Filters") - except pythoncom.com_error as exc: - # Brand new sites don't have the '/Filters' collection - create it. - # Any errors other than 'not found' we shouldn't ignore. - if ( - winerror.HRESULT_FACILITY(exc.hresult) != winerror.FACILITY_WIN32 - or winerror.HRESULT_CODE(exc.hresult) != winerror.ERROR_PATH_NOT_FOUND - ): - raise - server_ob = GetObject(server) - filters = server_ob.Create(_IIS_FILTERS, "Filters") - filters.FilterLoadOrder = "" - filters.SetInfo() - - # As for VirtualDir, delete an existing one. - assert filterParams.Name.strip("/"), "mustn't delete the root!" - try: - filters.Delete(_IIS_FILTER, filterParams.Name) - log(2, "Deleted old filter '%s'" % (filterParams.Name,)) - except pythoncom.com_error: - pass - newFilter = filters.Create(_IIS_FILTER, filterParams.Name) - log(2, "Created new ISAPI filter...") - assert os.path.isfile(filterParams.Path) - newFilter.FilterPath = filterParams.Path - newFilter.FilterDescription = filterParams.Description - newFilter.SetInfo() - load_order = [b.strip() for b in filters.FilterLoadOrder.split(",") if b] - if filterParams.Name not in load_order: - load_order.append(filterParams.Name) - filters.FilterLoadOrder = ",".join(load_order) - filters.SetInfo() - _CallHook(filterParams, "PostInstall", options, newFilter) - log(1, "Configured Filter: %s" % (filterParams.Name,)) - return newFilter - - -def DeleteISAPIFilter(filterParams, options): - _CallHook(filterParams, "PreRemove", options) - server = FindWebServer(options, filterParams.Server) - ob_path = server + "/Filters" - try: - filters = GetObject(ob_path) - except pythoncom.com_error as details: - # failure to open the filters just means a totally clean IIS install - # (IIS5 at least has no 'Filters' key when freshly installed). - log(2, "ISAPI filter path '%s' did not exist." % (ob_path,)) - return - try: - assert filterParams.Name.strip("/"), "mustn't delete the root!" 
- filters.Delete(_IIS_FILTER, filterParams.Name) - log(2, "Deleted ISAPI filter '%s'" % (filterParams.Name,)) - except pythoncom.com_error as details: - rc = _GetWin32ErrorCode(details) - if rc != winerror.ERROR_PATH_NOT_FOUND: - raise - log(2, "ISAPI filter '%s' did not exist." % (filterParams.Name,)) - # Remove from the load order - load_order = [b.strip() for b in filters.FilterLoadOrder.split(",") if b] - if filterParams.Name in load_order: - load_order.remove(filterParams.Name) - filters.FilterLoadOrder = ",".join(load_order) - filters.SetInfo() - _CallHook(filterParams, "PostRemove", options) - log(1, "Deleted Filter: %s" % (filterParams.Name,)) - - -def _AddExtensionFile(module, def_groupid, def_desc, params, options): - group_id = params.AddExtensionFile_GroupID or def_groupid - desc = params.AddExtensionFile_Description or def_desc - try: - ob = GetObject(_IIS_OBJECT) - ob.AddExtensionFile( - module, - params.AddExtensionFile_Enabled, - group_id, - params.AddExtensionFile_CanDelete, - desc, - ) - log(2, "Added extension file '%s' (%s)" % (module, desc)) - except (pythoncom.com_error, AttributeError) as details: - # IIS5 always fails. Probably should upgrade this to - # complain more loudly if IIS6 fails. - log(2, "Failed to add extension file '%s': %s" % (module, details)) - - -def AddExtensionFiles(params, options): - """Register the modules used by the filters/extensions as a trusted - 'extension module' - required by the default IIS6 security settings.""" - # Add each module only once. - added = {} - for vd in params.VirtualDirs: - for smp in vd.ScriptMaps: - if smp.Module not in added and smp.AddExtensionFile: - _AddExtensionFile(smp.Module, vd.Name, vd.Description, smp, options) - added[smp.Module] = True - - for fd in params.Filters: - if fd.Path not in added and fd.AddExtensionFile: - _AddExtensionFile(fd.Path, fd.Name, fd.Description, fd, options) - added[fd.Path] = True - - -def _DeleteExtensionFileRecord(module, options): - try: - ob = GetObject(_IIS_OBJECT) - ob.DeleteExtensionFileRecord(module) - log(2, "Deleted extension file record for '%s'" % module) - except (pythoncom.com_error, AttributeError) as details: - log(2, "Failed to remove extension file '%s': %s" % (module, details)) - - -def DeleteExtensionFileRecords(params, options): - deleted = {} # only remove each .dll once. - for vd in params.VirtualDirs: - for smp in vd.ScriptMaps: - if smp.Module not in deleted and smp.AddExtensionFile: - _DeleteExtensionFileRecord(smp.Module, options) - deleted[smp.Module] = True - - for filter_def in params.Filters: - if filter_def.Path not in deleted and filter_def.AddExtensionFile: - _DeleteExtensionFileRecord(filter_def.Path, options) - deleted[filter_def.Path] = True - - -def CheckLoaderModule(dll_name): - suffix = "" - if is_debug_build: - suffix = "_d" - template = os.path.join(this_dir, "PyISAPI_loader" + suffix + ".dll") - if not os.path.isfile(template): - raise ConfigurationError("Template loader '%s' does not exist" % (template,)) - # We can't do a simple "is newer" check, as the DLL is specific to the - # Python version. So we check the date-time and size are identical, - # and skip the copy in that case. 
- src_stat = os.stat(template) - try: - dest_stat = os.stat(dll_name) - except os.error: - same = 0 - else: - same = ( - src_stat[stat.ST_SIZE] == dest_stat[stat.ST_SIZE] - and src_stat[stat.ST_MTIME] == dest_stat[stat.ST_MTIME] - ) - if not same: - log(2, "Updating %s->%s" % (template, dll_name)) - shutil.copyfile(template, dll_name) - shutil.copystat(template, dll_name) - else: - log(2, "%s is up to date." % (dll_name,)) - - -def _CallHook(ob, hook_name, options, *extra_args): - func = getattr(ob, hook_name, None) - if func is not None: - args = (ob, options) + extra_args - func(*args) - - -def Install(params, options): - _CallHook(params, "PreInstall", options) - for vd in params.VirtualDirs: - CreateDirectory(vd, options) - - for filter_def in params.Filters: - CreateISAPIFilter(filter_def, options) - - AddExtensionFiles(params, options) - - _CallHook(params, "PostInstall", options) - - -def RemoveDirectory(params, options): - if params.is_root(): - return - try: - directory = GetObject(FindPath(options, params.Server, params.Name)) - except pythoncom.com_error as details: - rc = _GetWin32ErrorCode(details) - if rc != winerror.ERROR_PATH_NOT_FOUND: - raise - log(2, "VirtualDirectory '%s' did not exist" % params.Name) - directory = None - if directory is not None: - # Be robust should IIS get upset about unloading. - try: - directory.AppUnLoad() - except: - exc_val = sys.exc_info()[1] - log(2, "AppUnLoad() for %s failed: %s" % (params.Name, exc_val)) - # Continue trying to delete it. - try: - parent = GetObject(directory.Parent) - parent.Delete(directory.Class, directory.Name) - log(1, "Deleted Virtual Directory: %s" % (params.Name,)) - except: - exc_val = sys.exc_info()[1] - log(1, "Failed to remove directory %s: %s" % (params.Name, exc_val)) - - -def RemoveScriptMaps(vd_params, options): - "Remove script maps from the already installed virtual directory" - parent, name = vd_params.split_path() - target_dir = GetObject(FindPath(options, vd_params.Server, parent)) - installed_maps = list(target_dir.ScriptMaps) - for _map in map(str, vd_params.ScriptMaps): - if _map in installed_maps: - installed_maps.remove(_map) - target_dir.ScriptMaps = installed_maps - target_dir.SetInfo() - - -def Uninstall(params, options): - _CallHook(params, "PreRemove", options) - - DeleteExtensionFileRecords(params, options) - - for vd in params.VirtualDirs: - _CallHook(vd, "PreRemove", options) - - RemoveDirectory(vd, options) - if vd.is_root(): - # if this is installed to the root virtual directory, we can't delete it - # so remove the script maps. - RemoveScriptMaps(vd, options) - - _CallHook(vd, "PostRemove", options) - - for filter_def in params.Filters: - DeleteISAPIFilter(filter_def, options) - _CallHook(params, "PostRemove", options) - - -# Patch up any missing module names in the params, replacing them with -# the DLL name that hosts this extension/filter. -def _PatchParamsModule(params, dll_name, file_must_exist=True): - if file_must_exist: - if not os.path.isfile(dll_name): - raise ConfigurationError("%s does not exist" % (dll_name,)) - - # Patch up all references to the DLL. - for f in params.Filters: - if f.Path is None: - f.Path = dll_name - for d in params.VirtualDirs: - for sm in d.ScriptMaps: - if sm.Module is None: - sm.Module = dll_name - - -def GetLoaderModuleName(mod_name, check_module=None): - # find the name of the DLL hosting us. - # By default, this is "_{module_base_name}.dll" - if hasattr(sys, "frozen"): - # What to do? 
The .dll knows its name, but this is likely to be - # executed via a .exe, which does not know. - base, ext = os.path.splitext(mod_name) - path, base = os.path.split(base) - # handle the common case of 'foo.exe'/'foow.exe' - if base.endswith("w"): - base = base[:-1] - # For py2exe, we have '_foo.dll' as the standard pyisapi loader - but - # 'foo.dll' is what we use (it just delegates). - # So no leading '_' on the installed name. - dll_name = os.path.abspath(os.path.join(path, base + ".dll")) - else: - base, ext = os.path.splitext(mod_name) - path, base = os.path.split(base) - dll_name = os.path.abspath(os.path.join(path, "_" + base + ".dll")) - # Check we actually have it. - if check_module is None: - check_module = not hasattr(sys, "frozen") - if check_module: - CheckLoaderModule(dll_name) - return dll_name - - -# Note the 'log' params to these 'builtin' args - old versions of pywin32 -# didn't log at all in this function (by intent; anyone calling this was -# responsible). So existing code that calls this function with the old -# signature (ie, without a 'log' param) still gets the same behaviour as -# before... - - -def InstallModule(conf_module_name, params, options, log=lambda *args: None): - "Install the extension" - if not hasattr(sys, "frozen"): - conf_module_name = os.path.abspath(conf_module_name) - if not os.path.isfile(conf_module_name): - raise ConfigurationError("%s does not exist" % (conf_module_name,)) - - loader_dll = GetLoaderModuleName(conf_module_name) - _PatchParamsModule(params, loader_dll) - Install(params, options) - log(1, "Installation complete.") - - -def UninstallModule(conf_module_name, params, options, log=lambda *args: None): - "Remove the extension" - loader_dll = GetLoaderModuleName(conf_module_name, False) - _PatchParamsModule(params, loader_dll, False) - Uninstall(params, options) - log(1, "Uninstallation complete.") - - -standard_arguments = { - "install": InstallModule, - "remove": UninstallModule, -} - - -def build_usage(handler_map): - docstrings = [handler.__doc__ for handler in handler_map.values()] - all_args = dict(zip(iter(handler_map.keys()), docstrings)) - arg_names = "|".join(iter(all_args.keys())) - usage_string = "%prog [options] [" + arg_names + "]\n" - usage_string += "commands:\n" - for arg, desc in all_args.items(): - usage_string += " %-10s: %s" % (arg, desc) + "\n" - return usage_string[:-1] - - -def MergeStandardOptions(options, params): - """ - Take an options object generated by the command line and merge - the values into the IISParameters object. - """ - pass - - -# We support 2 ways of extending our command-line/install support. -# * Many of the installation items allow you to specify "PreInstall", -# "PostInstall", "PreRemove" and "PostRemove" hooks -# All hooks are called with the 'params' object being operated on, and -# the 'optparser' options for this session (ie, the command-line options) -# PostInstall for VirtualDirectories and Filters both have an additional -# param - the ADSI object just created. -# * You can pass your own option parser for us to use, and/or define a map -# with your own custom arg handlers. It is a map of 'arg'->function. -# The function is called with (options, log_fn, arg). The function's -# docstring is used in the usage output. -def HandleCommandLine( - params, - argv=None, - conf_module_name=None, - default_arg="install", - opt_parser=None, - custom_arg_handlers={}, -): - """Perform installation or removal of an ISAPI filter or extension. 
- - This module handles standard command-line options and configuration - information, and installs, removes or updates the configuration of an - ISAPI filter or extension. - - You must pass your configuration information in params - all other - arguments are optional, and allow you to configure the installation - process. - """ - global verbose - from optparse import OptionParser - - argv = argv or sys.argv - if not conf_module_name: - conf_module_name = sys.argv[0] - # convert to a long name so that if we were somehow registered with - # the "short" version but unregistered with the "long" version we - # still work (that will depend on exactly how the installer was - # started) - try: - conf_module_name = win32api.GetLongPathName(conf_module_name) - except win32api.error as exc: - log( - 2, - "Couldn't determine the long name for %r: %s" % (conf_module_name, exc), - ) - - if opt_parser is None: - # Build our own parser. - parser = OptionParser(usage="") - else: - # The caller is providing their own filter, presumably with their - # own options all setup. - parser = opt_parser - - # build a usage string if we don't have one. - if not parser.get_usage(): - all_handlers = standard_arguments.copy() - all_handlers.update(custom_arg_handlers) - parser.set_usage(build_usage(all_handlers)) - - # allow the user to use uninstall as a synonym for remove if it wasn't - # defined by the custom arg handlers. - all_handlers.setdefault("uninstall", all_handlers["remove"]) - - parser.add_option( - "-q", - "--quiet", - action="store_false", - dest="verbose", - default=True, - help="don't print status messages to stdout", - ) - parser.add_option( - "-v", - "--verbosity", - action="count", - dest="verbose", - default=1, - help="increase the verbosity of status messages", - ) - parser.add_option( - "", - "--server", - action="store", - help="Specifies the IIS server to install/uninstall on." - " Default is '%s/1'" % (_IIS_OBJECT,), - ) - - (options, args) = parser.parse_args(argv[1:]) - MergeStandardOptions(options, params) - verbose = options.verbose - if not args: - args = [default_arg] - try: - for arg in args: - handler = all_handlers[arg] - handler(conf_module_name, params, options, log) - except (ItemNotFound, InstallationError) as details: - if options.verbose > 1: - traceback.print_exc() - print("%s: %s" % (details.__class__.__name__, details)) - except KeyError: - parser.error("Invalid arg '%s'" % arg) diff --git a/server/venv/Lib/site-packages/isapi/isapicon.py b/server/venv/Lib/site-packages/isapi/isapicon.py deleted file mode 100644 index 20de1a4..0000000 --- a/server/venv/Lib/site-packages/isapi/isapicon.py +++ /dev/null @@ -1,120 +0,0 @@ -"""Constants needed by ISAPI filters and extensions.""" -# ====================================================================== -# Copyright 2002-2003 by Blackdog Software Pty Ltd. -# -# All Rights Reserved -# -# Permission to use, copy, modify, and distribute this software and -# its documentation for any purpose and without fee is hereby -# granted, provided that the above copyright notice appear in all -# copies and that both that copyright notice and this permission -# notice appear in supporting documentation, and that the name of -# Blackdog Software not be used in advertising or publicity pertaining to -# distribution of the software without specific, written prior -# permission. 
-# -# BLACKDOG SOFTWARE DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, -# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN -# NO EVENT SHALL BLACKDOG SOFTWARE BE LIABLE FOR ANY SPECIAL, INDIRECT OR -# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, -# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN -# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. -# ====================================================================== - -# HTTP reply codes - -HTTP_CONTINUE = 100 -HTTP_SWITCHING_PROTOCOLS = 101 -HTTP_PROCESSING = 102 -HTTP_OK = 200 -HTTP_CREATED = 201 -HTTP_ACCEPTED = 202 -HTTP_NON_AUTHORITATIVE = 203 -HTTP_NO_CONTENT = 204 -HTTP_RESET_CONTENT = 205 -HTTP_PARTIAL_CONTENT = 206 -HTTP_MULTI_STATUS = 207 -HTTP_MULTIPLE_CHOICES = 300 -HTTP_MOVED_PERMANENTLY = 301 -HTTP_MOVED_TEMPORARILY = 302 -HTTP_SEE_OTHER = 303 -HTTP_NOT_MODIFIED = 304 -HTTP_USE_PROXY = 305 -HTTP_TEMPORARY_REDIRECT = 307 -HTTP_BAD_REQUEST = 400 -HTTP_UNAUTHORIZED = 401 -HTTP_PAYMENT_REQUIRED = 402 -HTTP_FORBIDDEN = 403 -HTTP_NOT_FOUND = 404 -HTTP_METHOD_NOT_ALLOWED = 405 -HTTP_NOT_ACCEPTABLE = 406 -HTTP_PROXY_AUTHENTICATION_REQUIRED = 407 -HTTP_REQUEST_TIME_OUT = 408 -HTTP_CONFLICT = 409 -HTTP_GONE = 410 -HTTP_LENGTH_REQUIRED = 411 -HTTP_PRECONDITION_FAILED = 412 -HTTP_REQUEST_ENTITY_TOO_LARGE = 413 -HTTP_REQUEST_URI_TOO_LARGE = 414 -HTTP_UNSUPPORTED_MEDIA_TYPE = 415 -HTTP_RANGE_NOT_SATISFIABLE = 416 -HTTP_EXPECTATION_FAILED = 417 -HTTP_UNPROCESSABLE_ENTITY = 422 -HTTP_INTERNAL_SERVER_ERROR = 500 -HTTP_NOT_IMPLEMENTED = 501 -HTTP_BAD_GATEWAY = 502 -HTTP_SERVICE_UNAVAILABLE = 503 -HTTP_GATEWAY_TIME_OUT = 504 -HTTP_VERSION_NOT_SUPPORTED = 505 -HTTP_VARIANT_ALSO_VARIES = 506 - -HSE_STATUS_SUCCESS = 1 -HSE_STATUS_SUCCESS_AND_KEEP_CONN = 2 -HSE_STATUS_PENDING = 3 -HSE_STATUS_ERROR = 4 - -SF_NOTIFY_SECURE_PORT = 0x00000001 -SF_NOTIFY_NONSECURE_PORT = 0x00000002 -SF_NOTIFY_READ_RAW_DATA = 0x00008000 -SF_NOTIFY_PREPROC_HEADERS = 0x00004000 -SF_NOTIFY_AUTHENTICATION = 0x00002000 -SF_NOTIFY_URL_MAP = 0x00001000 -SF_NOTIFY_ACCESS_DENIED = 0x00000800 -SF_NOTIFY_SEND_RESPONSE = 0x00000040 -SF_NOTIFY_SEND_RAW_DATA = 0x00000400 -SF_NOTIFY_LOG = 0x00000200 -SF_NOTIFY_END_OF_REQUEST = 0x00000080 -SF_NOTIFY_END_OF_NET_SESSION = 0x00000100 - -SF_NOTIFY_ORDER_HIGH = 0x00080000 -SF_NOTIFY_ORDER_MEDIUM = 0x00040000 -SF_NOTIFY_ORDER_LOW = 0x00020000 -SF_NOTIFY_ORDER_DEFAULT = SF_NOTIFY_ORDER_LOW - -SF_NOTIFY_ORDER_MASK = ( - SF_NOTIFY_ORDER_HIGH | SF_NOTIFY_ORDER_MEDIUM | SF_NOTIFY_ORDER_LOW -) - -SF_STATUS_REQ_FINISHED = 134217728 # 0x8000000 -SF_STATUS_REQ_FINISHED_KEEP_CONN = 134217728 + 1 -SF_STATUS_REQ_NEXT_NOTIFICATION = 134217728 + 2 -SF_STATUS_REQ_HANDLED_NOTIFICATION = 134217728 + 3 -SF_STATUS_REQ_ERROR = 134217728 + 4 -SF_STATUS_REQ_READ_NEXT = 134217728 + 5 - -HSE_IO_SYNC = 0x00000001 # for WriteClient -HSE_IO_ASYNC = 0x00000002 # for WriteClient/TF/EU -HSE_IO_DISCONNECT_AFTER_SEND = 0x00000004 # for TF -HSE_IO_SEND_HEADERS = 0x00000008 # for TF -HSE_IO_NODELAY = 0x00001000 # turn off nagling -# These two are only used by VectorSend -HSE_IO_FINAL_SEND = 0x00000010 -HSE_IO_CACHE_RESPONSE = 0x00000020 - -HSE_EXEC_URL_NO_HEADERS = 0x02 -HSE_EXEC_URL_IGNORE_CURRENT_INTERCEPTOR = 0x04 -HSE_EXEC_URL_IGNORE_VALIDATION_AND_RANGE = 0x10 -HSE_EXEC_URL_DISABLE_CUSTOM_ERROR = 0x20 -HSE_EXEC_URL_SSI_CMD = 0x40 -HSE_EXEC_URL_HTTP_CACHE_ELIGIBLE = 0x80 diff --git a/server/venv/Lib/site-packages/isapi/samples/README.txt 
b/server/venv/Lib/site-packages/isapi/samples/README.txt deleted file mode 100644 index cff8758..0000000 --- a/server/venv/Lib/site-packages/isapi/samples/README.txt +++ /dev/null @@ -1,20 +0,0 @@ -In this directory you will find examples of ISAPI filters and extensions. - -The filter loading mechanism works like this: -* IIS loads the special Python "loader" DLL. This DLL will generally have a - leading underscore as part of its name. -* This loader DLL looks for a Python module, by removing the first letter of - the DLL base name. - -This means that an ISAPI extension module consists of 2 key files - the loader -DLL (eg, "_MyIISModule.dll"), and a Python module (which for this example -would be "MyIISModule.py"). - -When you install an ISAPI extension, the installation code checks to see if -there is a loader DLL for your implementation file - if one does not exist, -or the standard loader is different, it is copied and renamed accordingly. - -We use this mechanism to provide the maximum separation between different -Python extensions installed on the same server - otherwise filter order and -other tricky IIS semantics would need to be replicated. Also, each filter -gets its own thread-pool, etc. diff --git a/server/venv/Lib/site-packages/isapi/samples/advanced.py b/server/venv/Lib/site-packages/isapi/samples/advanced.py deleted file mode 100644 index c10d0c8..0000000 --- a/server/venv/Lib/site-packages/isapi/samples/advanced.py +++ /dev/null @@ -1,218 +0,0 @@ -# This extension demonstrates some advanced features of the Python ISAPI -# framework. -# We demonstrate: -# * Reloading your Python module without shutting down IIS (eg, when your -# .py implementation file changes.) -# * Custom command-line handling - both additional options and commands. -# * Using a query string - any part of the URL after a '?' is assumed to -# be "variable names" separated by '&' - we will print the values of -# these server variables. -# * If the tail portion of the URL is "ReportUnhealthy", IIS will be -# notified we are unhealthy via a HSE_REQ_REPORT_UNHEALTHY request. -# Whether this is acted upon depends on if the IIS health-checking -# tools are installed, but you should always see the reason written -# to the Windows event log - see the IIS documentation for more. - -import os -import stat -import sys - -from isapi import isapicon -from isapi.simple import SimpleExtension - -if hasattr(sys, "isapidllhandle"): - import win32traceutil - -# Notes on reloading -# If your HttpFilterProc or HttpExtensionProc functions raise -# 'isapi.InternalReloadException', the framework will not treat it -# as an error but instead will terminate your extension, reload your -# extension module, re-initialize the instance, and re-issue the request. -# The Initialize functions are called with None as their param. The -# return code from the terminate function is ignored. -# -# This is all the framework does to help you. It is up to your code -# when you raise this exception. This sample uses a Win32 "find -# notification". Whenever windows tells us one of the files in the -# directory has changed, we check if the time of our source-file has -# changed, and set a flag. Next incoming request, we check the flag and -# raise the special exception if set. -# -# The end result is that the module is automatically reloaded whenever -# the source-file changes - you need take no further action to see your -# changes reflected in the running server.
- -# The framework only reloads your module - if you have libraries you -# depend on and also want reloaded, you must arrange for this yourself. -# One way of doing this would be to special case the import of these -# modules. Eg: -# -- -# try: -# my_module = reload(my_module) # module already imported - reload it -# except NameError: -# import my_module # first time around - import it. -# -- -# When your module is imported for the first time, the NameError will -# be raised, and the module imported. When the ISAPI framework reloads -# your module, the existing module will avoid the NameError, and allow -# you to reload that module. - -import threading - -import win32con -import win32event -import win32file -import winerror - -from isapi import InternalReloadException - -try: - reload_counter += 1 -except NameError: - reload_counter = 0 - - -# A watcher thread that checks for __file__ changing. -# When it detects it, it simply sets "change_detected" to true. -class ReloadWatcherThread(threading.Thread): - def __init__(self): - self.change_detected = False - self.filename = __file__ - if self.filename.endswith("c") or self.filename.endswith("o"): - self.filename = self.filename[:-1] - self.handle = win32file.FindFirstChangeNotification( - os.path.dirname(self.filename), - False, # watch tree? - win32con.FILE_NOTIFY_CHANGE_LAST_WRITE, - ) - threading.Thread.__init__(self) - - def run(self): - last_time = os.stat(self.filename)[stat.ST_MTIME] - while 1: - try: - rc = win32event.WaitForSingleObject(self.handle, win32event.INFINITE) - win32file.FindNextChangeNotification(self.handle) - except win32event.error as details: - # handle closed - thread should terminate. - if details.winerror != winerror.ERROR_INVALID_HANDLE: - raise - break - this_time = os.stat(self.filename)[stat.ST_MTIME] - if this_time != last_time: - print("Detected file change - flagging for reload.") - self.change_detected = True - last_time = this_time - - def stop(self): - win32file.FindCloseChangeNotification(self.handle) - - -# The ISAPI extension - handles requests in our virtual dir, and sends the -# response to the client. -class Extension(SimpleExtension): - "Python advanced sample Extension" - - def __init__(self): - self.reload_watcher = ReloadWatcherThread() - self.reload_watcher.start() - - def HttpExtensionProc(self, ecb): - # NOTE: If you use a ThreadPoolExtension, you must still perform - # this check in HttpExtensionProc - raising the exception from - # The "Dispatch" method will just cause the exception to be - # rendered to the browser. - if self.reload_watcher.change_detected: - print("Doing reload") - raise InternalReloadException - - url = ecb.GetServerVariable("UNICODE_URL") - if url.endswith("ReportUnhealthy"): - ecb.ReportUnhealthy("I'm a little sick") - - ecb.SendResponseHeaders("200 OK", "Content-Type: text/html\r\n\r\n", 0) - print("", file=ecb) - - qs = ecb.GetServerVariable("QUERY_STRING") - if qs: - queries = qs.split("&") - print("

", file=ecb)
-            for q in queries:
-                val = ecb.GetServerVariable(q, "<no such variable>")
-                print("%s=%r" % (q, val), file=ecb)
-            print("

", file=ecb) - - print("This module has been imported", file=ecb) - print("%d times" % (reload_counter,), file=ecb) - print("", file=ecb) - ecb.close() - return isapicon.HSE_STATUS_SUCCESS - - def TerminateExtension(self, status): - self.reload_watcher.stop() - - -# The entry points for the ISAPI extension. -def __ExtensionFactory__(): - return Extension() - - -# Our special command line customization. -# Pre-install hook for our virtual directory. -def PreInstallDirectory(params, options): - # If the user used our special '--description' option, - # then we override our default. - if options.description: - params.Description = options.description - - -# Post install hook for our entire script -def PostInstall(params, options): - print() - print("The sample has been installed.") - print("Point your browser to /AdvancedPythonSample") - print("If you modify the source file and reload the page,") - print("you should see the reload counter increment") - - -# Handler for our custom 'status' argument. -def status_handler(options, log, arg): - "Query the status of something" - print("Everything seems to be fine!") - - -custom_arg_handlers = {"status": status_handler} - -if __name__ == "__main__": - # If run from the command-line, install ourselves. - from isapi.install import * - - params = ISAPIParameters(PostInstall=PostInstall) - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions. - sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name="AdvancedPythonSample", - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - # specify the pre-install hook. - PreInstall=PreInstallDirectory, - ) - params.VirtualDirs = [vd] - # Setup our custom option parser. - from optparse import OptionParser - - parser = OptionParser("") # blank usage, so isapi sets it. - parser.add_option( - "", - "--description", - action="store", - help="custom description to use for the virtual directory", - ) - - HandleCommandLine( - params, opt_parser=parser, custom_arg_handlers=custom_arg_handlers - ) diff --git a/server/venv/Lib/site-packages/isapi/samples/redirector.py b/server/venv/Lib/site-packages/isapi/samples/redirector.py deleted file mode 100644 index 40698bb..0000000 --- a/server/venv/Lib/site-packages/isapi/samples/redirector.py +++ /dev/null @@ -1,125 +0,0 @@ -# This is a sample ISAPI extension written in Python. -# -# Please see README.txt in this directory, and specifically the -# information about the "loader" DLL - installing this sample will create -# "_redirector.dll" in the current directory. The readme explains this. - -# Executing this script (or any server config script) will install the extension -# into your web server. As the server executes, the PyISAPI framework will load -# this module and create your Extension and Filter objects. - -# This is the simplest possible redirector (or proxy) we can write. The -# extension installs with a mask of '*' in the root of the site. -# As an added bonus though, we optionally show how, on IIS6 and later, we -# can use HSE_ERQ_EXEC_URL to ignore certain requests - in IIS5 and earlier -# we can only do this with an ISAPI filter - see redirector_with_filter for -# an example. If this sample is run on IIS5 or earlier it simply ignores -# any excludes. 
- -import sys - -from isapi import isapicon, threaded_extension - -from urllib.request import urlopen - -import win32api - -# sys.isapidllhandle will exist when we are loaded by the IIS framework. -# In this case we redirect our output to the win32traceutil collector. -if hasattr(sys, "isapidllhandle"): - import win32traceutil - -# The site we are proxying. -proxy = "http://www.python.org" - -# Urls we exclude (ie, allow IIS to handle itself) - all are lowered, -# and these entries exist by default on Vista... -excludes = ["/iisstart.htm", "/welcome.png"] - - -# An "io completion" function, called when ecb.ExecURL completes... -def io_callback(ecb, url, cbIO, errcode): - # Get the status of our ExecURL - httpstatus, substatus, win32 = ecb.GetExecURLStatus() - print( - "ExecURL of %r finished with http status %d.%d, win32 status %d (%s)" - % (url, httpstatus, substatus, win32, win32api.FormatMessage(win32).strip()) - ) - # nothing more to do! - ecb.DoneWithSession() - - -# The ISAPI extension - handles all requests in the site. -class Extension(threaded_extension.ThreadPoolExtension): - "Python sample Extension" - - def Dispatch(self, ecb): - # Note that our ThreadPoolExtension base class will catch exceptions - # in our Dispatch method, and write the traceback to the client. - # That is perfect for this sample, so we don't catch our own. - # print 'IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),) - url = ecb.GetServerVariable("URL").decode("ascii") - for exclude in excludes: - if url.lower().startswith(exclude): - print("excluding %s" % url) - if ecb.Version < 0x60000: - print("(but this is IIS5 or earlier - can't do 'excludes')") - else: - ecb.IOCompletion(io_callback, url) - ecb.ExecURL( - None, - None, - None, - None, - None, - isapicon.HSE_EXEC_URL_IGNORE_CURRENT_INTERCEPTOR, - ) - return isapicon.HSE_STATUS_PENDING - - new_url = proxy + url - print("Opening %s" % new_url) - fp = urlopen(new_url) - headers = fp.info() - # subtle py3k breakage: in py3k, str(headers) has normalized \r\n - # back to \n and also stuck an extra \n term. py2k leaves the - # \r\n from the server intact and finishes with a single term. - if sys.version_info < (3, 0): - header_text = str(headers) + "\r\n" - else: - # take *all* trailing \n off, replace remaining with - # \r\n, then add the 2 trailing \r\n. - header_text = str(headers).rstrip("\n").replace("\n", "\r\n") + "\r\n\r\n" - ecb.SendResponseHeaders("200 OK", header_text, False) - ecb.WriteClient(fp.read()) - ecb.DoneWithSession() - print("Returned data from '%s'" % (new_url,)) - return isapicon.HSE_STATUS_SUCCESS - - -# The entry points for the ISAPI extension. -def __ExtensionFactory__(): - return Extension() - - -if __name__ == "__main__": - # If run from the command-line, install ourselves. - from isapi.install import * - - params = ISAPIParameters() - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions.
- sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name="/", - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - ) - params.VirtualDirs = [vd] - HandleCommandLine(params) diff --git a/server/venv/Lib/site-packages/isapi/samples/redirector_asynch.py b/server/venv/Lib/site-packages/isapi/samples/redirector_asynch.py deleted file mode 100644 index 3c4b5e4..0000000 --- a/server/venv/Lib/site-packages/isapi/samples/redirector_asynch.py +++ /dev/null @@ -1,85 +0,0 @@ -# This is a sample ISAPI extension written in Python. - -# This is like the other 'redirector' samples, but uses asynch IO when writing -# back to the client (it does *not* use asynch io talking to the remote -# server!) - -import sys -import urllib.error -import urllib.parse -import urllib.request - -from isapi import isapicon, threaded_extension - -# sys.isapidllhandle will exist when we are loaded by the IIS framework. -# In this case we redirect our output to the win32traceutil collector. -if hasattr(sys, "isapidllhandle"): - import win32traceutil - -# The site we are proxying. -proxy = "http://www.python.org" - -# We synchronously read chunks of this size then asynchronously write them. -CHUNK_SIZE = 8192 - - -# The callback made when IIS completes the asynch write. -def io_callback(ecb, fp, cbIO, errcode): - print("IO callback", ecb, fp, cbIO, errcode) - chunk = fp.read(CHUNK_SIZE) - if chunk: - ecb.WriteClient(chunk, isapicon.HSE_IO_ASYNC) - # and wait for the next callback to say this chunk is done. - else: - # eof - say we are complete. - fp.close() - ecb.DoneWithSession() - - -# The ISAPI extension - handles all requests in the site. -class Extension(threaded_extension.ThreadPoolExtension): - "Python sample proxy server - asynch version." - - def Dispatch(self, ecb): - print('IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),)) - url = ecb.GetServerVariable("URL") - - new_url = proxy + url - print("Opening %s" % new_url) - fp = urllib.request.urlopen(new_url) - headers = fp.info() - ecb.SendResponseHeaders("200 OK", str(headers) + "\r\n", False) - # now send the first chunk asynchronously - ecb.ReqIOCompletion(io_callback, fp) - chunk = fp.read(CHUNK_SIZE) - if chunk: - ecb.WriteClient(chunk, isapicon.HSE_IO_ASYNC) - return isapicon.HSE_STATUS_PENDING - # no data - just close things now. - ecb.DoneWithSession() - return isapicon.HSE_STATUS_SUCCESS - - -# The entry points for the ISAPI extension. -def __ExtensionFactory__(): - return Extension() - - -if __name__ == "__main__": - # If run from the command-line, install ourselves. - from isapi.install import * - - params = ISAPIParameters() - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions. - sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name="/", - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - ) - params.VirtualDirs = [vd] - HandleCommandLine(params) diff --git a/server/venv/Lib/site-packages/isapi/samples/redirector_with_filter.py b/server/venv/Lib/site-packages/isapi/samples/redirector_with_filter.py deleted file mode 100644 index a63b1db..0000000 --- a/server/venv/Lib/site-packages/isapi/samples/redirector_with_filter.py +++ /dev/null @@ -1,161 +0,0 @@ -# This is a sample configuration file for an ISAPI filter and extension -# written in Python.
-# -# Please see README.txt in this directory, and specifically the -# information about the "loader" DLL - installing this sample will create -# "_redirector_with_filter.dll" in the current directory. The readme explains -# this. - -# Executing this script (or any server config script) will install the extension -# into your web server. As the server executes, the PyISAPI framework will load -# this module and create your Extension and Filter objects. - -# This sample provides a redirector: -# It is implemented by a filter and an extension, so that some requests can -# be ignored. Compare with 'redirector_simple' which avoids the filter, but -# is unable to selectively ignore certain requests. -# The process this sample uses is: -# * The filter is installed globally, as all filters are. -# * A Virtual Directory named "python" is setup. This dir has our ISAPI -# extension as the only application, mapped to file-extension '*'. Thus, our -# extension handles *all* requests in this directory. -# The basic process is that the filter does URL rewriting, redirecting every -# URL to our Virtual Directory. Our extension then handles this request, -# forwarding the data from the proxied site. -# For example: -# * URL of "index.html" comes in. -# * Filter rewrites this to "/python/index.html" -# * Our extension sees the full "/python/index.html", removes the leading -# portion, and opens and forwards the remote URL. - - -# This sample is very small - it avoids most error handling, etc. It is for -# demonstration purposes only. - -import sys -import urllib.error -import urllib.parse -import urllib.request - -from isapi import isapicon, threaded_extension -from isapi.simple import SimpleFilter - -# sys.isapidllhandle will exist when we are loaded by the IIS framework. -# In this case we redirect our output to the win32traceutil collector. -if hasattr(sys, "isapidllhandle"): - import win32traceutil - -# The site we are proxying. -proxy = "http://www.python.org" -# The name of the virtual directory we install in, and redirect from. -virtualdir = "/python" - -# The key feature of this redirector over the simple redirector is that it -# can choose to ignore certain responses by having the filter not rewrite them -# to our virtual dir. For this sample, we just exclude the IIS help directory. - - -# The ISAPI extension - handles requests in our virtual dir, and sends the -# response to the client. -class Extension(threaded_extension.ThreadPoolExtension): - "Python sample Extension" - - def Dispatch(self, ecb): - # Note that our ThreadPoolExtension base class will catch exceptions - # in our Dispatch method, and write the traceback to the client. - # That is perfect for this sample, so we don't catch our own. - # print 'IIS dispatching "%s"' % (ecb.GetServerVariable("URL"),) - url = ecb.GetServerVariable("URL") - if url.startswith(virtualdir): - new_url = proxy + url[len(virtualdir) :] - print("Opening", new_url) - fp = urllib.request.urlopen(new_url) - headers = fp.info() - ecb.SendResponseHeaders("200 OK", str(headers) + "\r\n", False) - ecb.WriteClient(fp.read()) - ecb.DoneWithSession() - print("Returned data from '%s'!" % (new_url,)) - else: - # this should never happen - we should only see requests that - # start with our virtual directory name. - print("Not proxying '%s'" % (url,)) - - -# The ISAPI filter.
-class Filter(SimpleFilter): - "Sample Python Redirector" - filter_flags = isapicon.SF_NOTIFY_PREPROC_HEADERS | isapicon.SF_NOTIFY_ORDER_DEFAULT - - def HttpFilterProc(self, fc): - # print "Filter Dispatch" - nt = fc.NotificationType - if nt != isapicon.SF_NOTIFY_PREPROC_HEADERS: - return isapicon.SF_STATUS_REQ_NEXT_NOTIFICATION - - pp = fc.GetData() - url = pp.GetHeader("url") - # print "URL is '%s'" % (url,) - prefix = virtualdir - if not url.startswith(prefix): - new_url = prefix + url - print("New proxied URL is '%s'" % (new_url,)) - pp.SetHeader("url", new_url) - # For the sake of demonstration, show how the FilterContext - # attribute is used. It always starts out life as None, and - # any assignments made are automatically decref'd by the - # framework during a SF_NOTIFY_END_OF_NET_SESSION notification. - if fc.FilterContext is None: - fc.FilterContext = 0 - fc.FilterContext += 1 - print("This is request number %d on this connection" % fc.FilterContext) - return isapicon.SF_STATUS_REQ_HANDLED_NOTIFICATION - else: - print("Filter ignoring URL '%s'" % (url,)) - - # Some older code that handled SF_NOTIFY_URL_MAP. - # ~ print "Have URL_MAP notify" - # ~ urlmap = fc.GetData() - # ~ print "URI is", urlmap.URL - # ~ print "Path is", urlmap.PhysicalPath - # ~ if urlmap.URL.startswith("/UC/"): - # ~ # Find the /UC/ in the physical path, and nuke it (except - # ~ # as the path is physical, it is \) - # ~ p = urlmap.PhysicalPath - # ~ pos = p.index("\\UC\\") - # ~ p = p[:pos] + p[pos+3:] - # ~ p = r"E:\src\pyisapi\webroot\PyTest\formTest.htm" - # ~ print "New path is", p - # ~ urlmap.PhysicalPath = p - - -# The entry points for the ISAPI extension. -def __FilterFactory__(): - return Filter() - - -def __ExtensionFactory__(): - return Extension() - - -if __name__ == "__main__": - # If run from the command-line, install ourselves. - from isapi.install import * - - params = ISAPIParameters() - # Setup all filters - these are global to the site. - params.Filters = [ - FilterParameters(Name="PythonRedirector", Description=Filter.__doc__), - ] - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions. - sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name=virtualdir[1:], - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - ) - params.VirtualDirs = [vd] - HandleCommandLine(params) diff --git a/server/venv/Lib/site-packages/isapi/samples/test.py b/server/venv/Lib/site-packages/isapi/samples/test.py deleted file mode 100644 index 5e4d899..0000000 --- a/server/venv/Lib/site-packages/isapi/samples/test.py +++ /dev/null @@ -1,195 +0,0 @@ -# This extension is used mainly for testing purposes - it is not -# designed to be a simple sample, but instead is a hotch-potch of things -# that attempts to exercise the framework. - -import os -import stat -import sys - -from isapi import isapicon -from isapi.simple import SimpleExtension - -if hasattr(sys, "isapidllhandle"): - import win32traceutil - -# We use the same reload support as 'advanced.py' demonstrates. -import threading - -import win32con -import win32event -import win32file -import winerror - -from isapi import InternalReloadException - - -# A watcher thread that checks for __file__ changing. -# When it detects it, it simply sets "change_detected" to true. 
-class ReloadWatcherThread(threading.Thread): - def __init__(self): - self.change_detected = False - self.filename = __file__ - if self.filename.endswith("c") or self.filename.endswith("o"): - self.filename = self.filename[:-1] - self.handle = win32file.FindFirstChangeNotification( - os.path.dirname(self.filename), - False, # watch tree? - win32con.FILE_NOTIFY_CHANGE_LAST_WRITE, - ) - threading.Thread.__init__(self) - - def run(self): - last_time = os.stat(self.filename)[stat.ST_MTIME] - while 1: - try: - rc = win32event.WaitForSingleObject(self.handle, win32event.INFINITE) - win32file.FindNextChangeNotification(self.handle) - except win32event.error as details: - # handle closed - thread should terminate. - if details.winerror != winerror.ERROR_INVALID_HANDLE: - raise - break - this_time = os.stat(self.filename)[stat.ST_MTIME] - if this_time != last_time: - print("Detected file change - flagging for reload.") - self.change_detected = True - last_time = this_time - - def stop(self): - win32file.FindCloseChangeNotification(self.handle) - - -def TransmitFileCallback(ecb, hFile, cbIO, errCode): - print("Transmit complete!") - ecb.close() - - -# The ISAPI extension - handles requests in our virtual dir, and sends the -# response to the client. -class Extension(SimpleExtension): - "Python test Extension" - - def __init__(self): - self.reload_watcher = ReloadWatcherThread() - self.reload_watcher.start() - - def HttpExtensionProc(self, ecb): - # NOTE: If you use a ThreadPoolExtension, you must still perform - # this check in HttpExtensionProc - raising the exception from - # The "Dispatch" method will just cause the exception to be - # rendered to the browser. - if self.reload_watcher.change_detected: - print("Doing reload") - raise InternalReloadException - - if ecb.GetServerVariable("UNICODE_URL").endswith("test.py"): - file_flags = ( - win32con.FILE_FLAG_SEQUENTIAL_SCAN | win32con.FILE_FLAG_OVERLAPPED - ) - hfile = win32file.CreateFile( - __file__, - win32con.GENERIC_READ, - 0, - None, - win32con.OPEN_EXISTING, - file_flags, - None, - ) - flags = ( - isapicon.HSE_IO_ASYNC - | isapicon.HSE_IO_DISCONNECT_AFTER_SEND - | isapicon.HSE_IO_SEND_HEADERS - ) - # We pass hFile to the callback simply as a way of keeping it alive - # for the duration of the transmission - try: - ecb.TransmitFile( - TransmitFileCallback, - hfile, - int(hfile), - "200 OK", - 0, - 0, - None, - None, - flags, - ) - except: - # Errors keep this source file open! - hfile.Close() - raise - else: - # default response - ecb.SendResponseHeaders("200 OK", "Content-Type: text/html\r\n\r\n", 0) - print("", file=ecb) - print("The root of this site is at", ecb.MapURLToPath("/"), file=ecb) - print("", file=ecb) - ecb.close() - return isapicon.HSE_STATUS_SUCCESS - - def TerminateExtension(self, status): - self.reload_watcher.stop() - - -# The entry points for the ISAPI extension. -def __ExtensionFactory__(): - return Extension() - - -# Our special command line customization. -# Pre-install hook for our virtual directory. -def PreInstallDirectory(params, options): - # If the user used our special '--description' option, - # then we override our default. - if options.description: - params.Description = options.description - - -# Post install hook for our entire script -def PostInstall(params, options): - print() - print("The sample has been installed.") - print("Point your browser to /PyISAPITest") - - -# Handler for our custom 'status' argument. 
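``ReloadWatcherThread`` above detects edits by comparing ``st_mtime`` before and after each change notification. The same idea can be sketched portably with nothing but the standard library, polling in place of the win32 ``FindFirstChangeNotification`` API (``MTimeWatcher`` is an illustrative name; the sample's custom ``status`` argument handler continues below):

.. code-block:: python

    import os
    import threading

    class MTimeWatcher(threading.Thread):
        """Polling stand-in for the change-notification watcher above."""

        def __init__(self, filename, interval=1.0):
            super().__init__(daemon=True)
            self.filename = filename
            self.interval = interval
            self.change_detected = False
            self._stop = threading.Event()

        def run(self):
            last = os.stat(self.filename).st_mtime
            while not self._stop.wait(self.interval):
                mtime = os.stat(self.filename).st_mtime
                if mtime != last:
                    self.change_detected = True
                    last = mtime

        def stop(self):
            self._stop.set()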
-def status_handler(options, log, arg): - "Query the status of something" - print("Everything seems to be fine!") - - -custom_arg_handlers = {"status": status_handler} - -if __name__ == "__main__": - # If run from the command-line, install ourselves. - from isapi.install import * - - params = ISAPIParameters(PostInstall=PostInstall) - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions. - sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name="PyISAPITest", - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - # specify the pre-install hook. - PreInstall=PreInstallDirectory, - ) - params.VirtualDirs = [vd] - # Setup our custom option parser. - from optparse import OptionParser - - parser = OptionParser("") # blank usage, so isapi sets it. - parser.add_option( - "", - "--description", - action="store", - help="custom description to use for the virtual directory", - ) - - HandleCommandLine( - params, opt_parser=parser, custom_arg_handlers=custom_arg_handlers - ) diff --git a/server/venv/Lib/site-packages/isapi/simple.py b/server/venv/Lib/site-packages/isapi/simple.py deleted file mode 100644 index b453bba..0000000 --- a/server/venv/Lib/site-packages/isapi/simple.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Simple base-classes for extensions and filters. - -None of the filter and extension functions are considered 'optional' by the -framework. These base-classes provide simple implementations for the -Initialize and Terminate functions, allowing you to omit them, - -It is not necessary to use these base-classes - but if you don't, you -must ensure each of the required methods are implemented. -""" - - -class SimpleExtension: - "Base class for a simple ISAPI extension" - - def __init__(self): - pass - - def GetExtensionVersion(self, vi): - """Called by the ISAPI framework to get the extension version - - The default implementation uses the classes docstring to - set the extension description.""" - # nod to our reload capability - vi is None when we are reloaded. - if vi is not None: - vi.ExtensionDesc = self.__doc__ - - def HttpExtensionProc(self, control_block): - """Called by the ISAPI framework for each extension request. - - sub-classes must provide an implementation for this method. - """ - raise NotImplementedError("sub-classes should override HttpExtensionProc") - - def TerminateExtension(self, status): - """Called by the ISAPI framework as the extension terminates.""" - pass - - -class SimpleFilter: - "Base class for a a simple ISAPI filter" - filter_flags = None - - def __init__(self): - pass - - def GetFilterVersion(self, fv): - """Called by the ISAPI framework to get the extension version - - The default implementation uses the classes docstring to - set the extension description, and uses the classes - filter_flags attribute to set the ISAPI filter flags - you - must specify filter_flags in your class. - """ - if self.filter_flags is None: - raise RuntimeError("You must specify the filter flags") - # nod to our reload capability - fv is None when we are reloaded. - if fv is not None: - fv.Flags = self.filter_flags - fv.FilterDesc = self.__doc__ - - def HttpFilterProc(self, fc): - """Called by the ISAPI framework for each filter request. - - sub-classes must provide an implementation for this method. 
- """ - raise NotImplementedError("sub-classes should override HttpExtensionProc") - - def TerminateFilter(self, status): - """Called by the ISAPI framework as the filter terminates.""" - pass diff --git a/server/venv/Lib/site-packages/isapi/test/README.txt b/server/venv/Lib/site-packages/isapi/test/README.txt deleted file mode 100644 index 18643dd..0000000 --- a/server/venv/Lib/site-packages/isapi/test/README.txt +++ /dev/null @@ -1,3 +0,0 @@ -This is a directory for tests of the PyISAPI framework. - -For demos, please see the pyisapi 'samples' directory. \ No newline at end of file diff --git a/server/venv/Lib/site-packages/isapi/test/extension_simple.py b/server/venv/Lib/site-packages/isapi/test/extension_simple.py deleted file mode 100644 index 64bd71f..0000000 --- a/server/venv/Lib/site-packages/isapi/test/extension_simple.py +++ /dev/null @@ -1,119 +0,0 @@ -# This is an ISAPI extension purely for testing purposes. It is NOT -# a 'demo' (even though it may be useful!) -# -# Install this extension, then point your browser to: -# "http://localhost/pyisapi_test/test1" -# This will execute the method 'test1' below. See below for the list of -# test methods that are acceptable. - -import urllib.error -import urllib.parse -import urllib.request - -# If we have no console (eg, am running from inside IIS), redirect output -# somewhere useful - in this case, the standard win32 trace collector. -import win32api -import winerror - -from isapi import ExtensionError, isapicon, threaded_extension -from isapi.simple import SimpleFilter - -try: - win32api.GetConsoleTitle() -except win32api.error: - # No console - redirect - import win32traceutil - - -# The ISAPI extension - handles requests in our virtual dir, and sends the -# response to the client. -class Extension(threaded_extension.ThreadPoolExtension): - "Python ISAPI Tester" - - def Dispatch(self, ecb): - print('Tester dispatching "%s"' % (ecb.GetServerVariable("URL"),)) - url = ecb.GetServerVariable("URL") - test_name = url.split("/")[-1] - meth = getattr(self, test_name, None) - if meth is None: - raise AttributeError("No test named '%s'" % (test_name,)) - result = meth(ecb) - if result is None: - # This means the test finalized everything - return - ecb.SendResponseHeaders("200 OK", "Content-type: text/html\r\n\r\n", False) - print("Finished running test ", test_name, "", file=ecb) - print("

", file=ecb)
-        print(result, file=ecb)
-        print("
", file=ecb) - print("", file=ecb) - ecb.DoneWithSession() - - def test1(self, ecb): - try: - ecb.GetServerVariable("foo bar") - raise RuntimeError("should have failed!") - except ExtensionError as err: - assert err.errno == winerror.ERROR_INVALID_INDEX, err - return "worked!" - - def test_long_vars(self, ecb): - qs = ecb.GetServerVariable("QUERY_STRING") - # Our implementation has a default buffer size of 8k - so we test - # the code that handles an overflow by ensuring there are more - # than 8k worth of chars in the URL. - expected_query = "x" * 8500 - if len(qs) == 0: - # Just the URL with no query part - redirect to myself, but with - # a huge query portion. - me = ecb.GetServerVariable("URL") - headers = "Location: " + me + "?" + expected_query + "\r\n\r\n" - ecb.SendResponseHeaders("301 Moved", headers) - ecb.DoneWithSession() - return None - if qs == expected_query: - return "Total length of variable is %d - test worked!" % (len(qs),) - else: - return "Unexpected query portion! Got %d chars, expected %d" % ( - len(qs), - len(expected_query), - ) - - def test_unicode_vars(self, ecb): - # We need to check that we are running IIS6! This seems the only - # effective way from an extension. - ver = float(ecb.GetServerVariable("SERVER_SOFTWARE").split("/")[1]) - if ver < 6.0: - return "This is IIS version %g - unicode only works in IIS6 and later" % ver - - us = ecb.GetServerVariable("UNICODE_SERVER_NAME") - if not isinstance(us, str): - raise RuntimeError("unexpected type!") - if us != str(ecb.GetServerVariable("SERVER_NAME")): - raise RuntimeError("Unicode and non-unicode values were not the same") - return "worked!" - - -# The entry points for the ISAPI extension. -def __ExtensionFactory__(): - return Extension() - - -if __name__ == "__main__": - # If run from the command-line, install ourselves. - from isapi.install import * - - params = ISAPIParameters() - # Setup the virtual directories - this is a list of directories our - # extension uses - in this case only 1. - # Each extension has a "script map" - this is the mapping of ISAPI - # extensions. - sm = [ScriptMapParams(Extension="*", Flags=0)] - vd = VirtualDirParameters( - Name="pyisapi_test", - Description=Extension.__doc__, - ScriptMaps=sm, - ScriptMapUpdate="replace", - ) - params.VirtualDirs = [vd] - HandleCommandLine(params) diff --git a/server/venv/Lib/site-packages/isapi/threaded_extension.py b/server/venv/Lib/site-packages/isapi/threaded_extension.py deleted file mode 100644 index b31c8c9..0000000 --- a/server/venv/Lib/site-packages/isapi/threaded_extension.py +++ /dev/null @@ -1,189 +0,0 @@ -"""An ISAPI extension base class implemented using a thread-pool.""" -# $Id$ - -import sys -import threading -import time -import traceback - -from pywintypes import OVERLAPPED -from win32event import INFINITE -from win32file import ( - CloseHandle, - CreateIoCompletionPort, - GetQueuedCompletionStatus, - PostQueuedCompletionStatus, -) -from win32security import SetThreadToken - -import isapi.simple -from isapi import ExtensionError, isapicon - -ISAPI_REQUEST = 1 -ISAPI_SHUTDOWN = 2 - - -class WorkerThread(threading.Thread): - def __init__(self, extension, io_req_port): - self.running = False - self.io_req_port = io_req_port - self.extension = extension - threading.Thread.__init__(self) - # We wait 15 seconds for a thread to terminate, but if it fails to, - # we don't want the process to hang at exit waiting for it... 
- self.setDaemon(True) - - def run(self): - self.running = True - while self.running: - errCode, bytes, key, overlapped = GetQueuedCompletionStatus( - self.io_req_port, INFINITE - ) - if key == ISAPI_SHUTDOWN and overlapped is None: - break - - # Let the parent extension handle the command. - dispatcher = self.extension.dispatch_map.get(key) - if dispatcher is None: - raise RuntimeError("Bad request '%s'" % (key,)) - - dispatcher(errCode, bytes, key, overlapped) - - def call_handler(self, cblock): - self.extension.Dispatch(cblock) - - -# A generic thread-pool based extension, using IO Completion Ports. -# Sub-classes can override one method to implement a simple extension, or -# may leverage the CompletionPort to queue their own requests, and implement a -# fully asynch extension. -class ThreadPoolExtension(isapi.simple.SimpleExtension): - "Base class for an ISAPI extension based around a thread-pool" - max_workers = 20 - worker_shutdown_wait = 15000 # 15 seconds for workers to quit... - - def __init__(self): - self.workers = [] - # extensible dispatch map, for sub-classes that need to post their - # own requests to the completion port. - # Each of these functions is called with the result of - # GetQueuedCompletionStatus for our port. - self.dispatch_map = { - ISAPI_REQUEST: self.DispatchConnection, - } - - def GetExtensionVersion(self, vi): - isapi.simple.SimpleExtension.GetExtensionVersion(self, vi) - # As per Q192800, the CompletionPort should be created with the number - # of processors, even if the number of worker threads is much larger. - # Passing 0 means the system picks the number. - self.io_req_port = CreateIoCompletionPort(-1, None, 0, 0) - # start up the workers - self.workers = [] - for i in range(self.max_workers): - worker = WorkerThread(self, self.io_req_port) - worker.start() - self.workers.append(worker) - - def HttpExtensionProc(self, control_block): - overlapped = OVERLAPPED() - overlapped.object = control_block - PostQueuedCompletionStatus(self.io_req_port, 0, ISAPI_REQUEST, overlapped) - return isapicon.HSE_STATUS_PENDING - - def TerminateExtension(self, status): - for worker in self.workers: - worker.running = False - for worker in self.workers: - PostQueuedCompletionStatus(self.io_req_port, 0, ISAPI_SHUTDOWN, None) - # wait for them to terminate - pity we aren't using 'native' threads - # as then we could do a smart wait - but now we need to poll.... - end_time = time.time() + self.worker_shutdown_wait / 1000 - alive = self.workers - while alive: - if time.time() > end_time: - # xxx - might be nice to log something here. - break - time.sleep(0.2) - alive = [w for w in alive if w.is_alive()] - self.dispatch_map = {} # break circles - CloseHandle(self.io_req_port) - - # This is the one operation the base class supports - a simple - # Connection request. We setup the thread-token, and dispatch to the - # sub-class's 'Dispatch' method. - def DispatchConnection(self, errCode, bytes, key, overlapped): - control_block = overlapped.object - # setup the correct user for this request - hRequestToken = control_block.GetImpersonationToken() - SetThreadToken(None, hRequestToken) - try: - try: - self.Dispatch(control_block) - except: - self.HandleDispatchError(control_block) - finally: - # reset the security context - SetThreadToken(None, None) - - def Dispatch(self, ecb): - """Overridden by the sub-class to handle connection requests. - - This class creates a thread-pool using a Windows completion port, - and dispatches requests via this port. 
Sub-classes can generally - implement each connection request using blocking reads and writes, and - the thread-pool will still provide decent response to the end user. - - The sub-class can set a max_workers attribute (default is 20). Note - that this generally does *not* mean 20 threads will all be concurrently - running, via the magic of Windows completion ports. - - There is no default implementation - sub-classes must implement this. - """ - raise NotImplementedError("sub-classes should override Dispatch") - - def HandleDispatchError(self, ecb): - """Handles errors in the Dispatch method. - - When a Dispatch method call fails, this method is called to handle - the exception. The default implementation formats the traceback - in the browser. - """ - ecb.HttpStatusCode = isapicon.HSE_STATUS_ERROR - # control_block.LogData = "we failed!" - exc_typ, exc_val, exc_tb = sys.exc_info() - limit = None - try: - try: - import cgi - - ecb.SendResponseHeaders( - "200 OK", "Content-type: text/html\r\n\r\n", False - ) - print(file=ecb) - print("

<H3>Traceback (most recent call last):</H3>
", file=ecb) - list = traceback.format_tb( - exc_tb, limit - ) + traceback.format_exception_only(exc_typ, exc_val) - print( - "
<PRE>%s<B>%s</B></PRE>
" - % ( - cgi.escape("".join(list[:-1])), - cgi.escape(list[-1]), - ), - file=ecb, - ) - except ExtensionError: - # The client disconnected without reading the error body - - # its probably not a real browser at the other end, ignore it. - pass - except: - print("FAILED to render the error message!") - traceback.print_exc() - print("ORIGINAL extension error:") - traceback.print_exception(exc_typ, exc_val, exc_tb) - finally: - # holding tracebacks in a local of a frame that may itself be - # part of a traceback used to be evil and cause leaks! - exc_tb = None - ecb.DoneWithSession() diff --git a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/INSTALLER b/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/LICENSE.rst b/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/LICENSE.rst deleted file mode 100644 index 7b190ca..0000000 --- a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2011 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/METADATA b/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/METADATA deleted file mode 100644 index 1d935ed..0000000 --- a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/METADATA +++ /dev/null @@ -1,97 +0,0 @@ -Metadata-Version: 2.1 -Name: itsdangerous -Version: 2.1.2 -Summary: Safely pass data to untrusted environments and back. 
-Home-page: https://palletsprojects.com/p/itsdangerous/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://itsdangerous.palletsprojects.com/ -Project-URL: Changes, https://itsdangerous.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/itsdangerous/ -Project-URL: Issue Tracker, https://github.com/pallets/itsdangerous/issues/ -Project-URL: Twitter, https://twitter.com/PalletsTeam -Project-URL: Chat, https://discord.gg/pallets -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst - -ItsDangerous -============ - -... so better sign this - -Various helpers to pass data to untrusted environments and to get it -back safe and sound. Data is cryptographically signed to ensure that a -token has not been tampered with. - -It's possible to customize how data is serialized. Data is compressed as -needed. A timestamp can be added and verified automatically while -loading a token. - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - pip install -U itsdangerous - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -Here's how you could generate a token for transmitting a user's id and -name between web requests. - -.. code-block:: python - - from itsdangerous import URLSafeSerializer - auth_s = URLSafeSerializer("secret key", "auth") - token = auth_s.dumps({"id": 5, "name": "itsdangerous"}) - - print(token) - # eyJpZCI6NSwibmFtZSI6Iml0c2Rhbmdlcm91cyJ9.6YP6T0BaO67XP--9UzTrmurXSmg - - data = auth_s.loads(token) - print(data["name"]) - # itsdangerous - - -Donate ------- - -The Pallets organization develops and supports ItsDangerous and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -`please donate today`_. - -.. 
_please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://itsdangerous.palletsprojects.com/ -- Changes: https://itsdangerous.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/ItsDangerous/ -- Source Code: https://github.com/pallets/itsdangerous/ -- Issue Tracker: https://github.com/pallets/itsdangerous/issues/ -- Website: https://palletsprojects.com/p/itsdangerous/ -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - - diff --git a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/RECORD b/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/RECORD deleted file mode 100644 index 162284d..0000000 --- a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/RECORD +++ /dev/null @@ -1,24 +0,0 @@ -itsdangerous-2.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -itsdangerous-2.1.2.dist-info/LICENSE.rst,sha256=Y68JiRtr6K0aQlLtQ68PTvun_JSOIoNnvtfzxa4LCdc,1475 -itsdangerous-2.1.2.dist-info/METADATA,sha256=ThrHIJQ_6XlfbDMCAVe_hawT7IXiIxnTBIDrwxxtucQ,2928 -itsdangerous-2.1.2.dist-info/RECORD,, -itsdangerous-2.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -itsdangerous-2.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -itsdangerous-2.1.2.dist-info/top_level.txt,sha256=gKN1OKLk81i7fbWWildJA88EQ9NhnGMSvZqhfz9ICjk,13 -itsdangerous/__init__.py,sha256=n4mkyjlIVn23pgsgCIw0MJKPdcHIetyeRpe5Fwsn8qg,876 -itsdangerous/__pycache__/__init__.cpython-311.pyc,, -itsdangerous/__pycache__/_json.cpython-311.pyc,, -itsdangerous/__pycache__/encoding.cpython-311.pyc,, -itsdangerous/__pycache__/exc.cpython-311.pyc,, -itsdangerous/__pycache__/serializer.cpython-311.pyc,, -itsdangerous/__pycache__/signer.cpython-311.pyc,, -itsdangerous/__pycache__/timed.cpython-311.pyc,, -itsdangerous/__pycache__/url_safe.cpython-311.pyc,, -itsdangerous/_json.py,sha256=wIhs_7-_XZolmyr-JvKNiy_LgAcfevYR0qhCVdlIhg8,450 -itsdangerous/encoding.py,sha256=pgh86snHC76dPLNCnPlrjR5SaYL_M8H-gWRiiLNbhCU,1419 -itsdangerous/exc.py,sha256=VFxmP2lMoSJFqxNMzWonqs35ROII4-fvCBfG0v1Tkbs,3206 -itsdangerous/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -itsdangerous/serializer.py,sha256=zgZ1-U705jHDpt62x_pmLJdryEKDNAbt5UkJtnkcCSw,11144 -itsdangerous/signer.py,sha256=QUH0iX0in-OTptMAXKU5zWMwmOCXn1fsDsubXiGdFN4,9367 -itsdangerous/timed.py,sha256=5CBWLds4Nm8-3bFVC8RxNzFjx6PSwjch8wuZ5cwcHFI,8174 -itsdangerous/url_safe.py,sha256=5bC4jSKOjWNRkWrFseifWVXUnHnPgwOLROjiOwb-eeo,2402 diff --git a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/REQUESTED b/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/WHEEL b/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/WHEEL deleted file mode 100644 index becc9a6..0000000 --- a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/top_level.txt b/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/top_level.txt deleted file mode 100644 index e163955..0000000 --- a/server/venv/Lib/site-packages/itsdangerous-2.1.2.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -itsdangerous diff --git a/server/venv/Lib/site-packages/itsdangerous/__init__.py 
b/server/venv/Lib/site-packages/itsdangerous/__init__.py deleted file mode 100644 index fdb2dfd..0000000 --- a/server/venv/Lib/site-packages/itsdangerous/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -from .encoding import base64_decode as base64_decode -from .encoding import base64_encode as base64_encode -from .encoding import want_bytes as want_bytes -from .exc import BadData as BadData -from .exc import BadHeader as BadHeader -from .exc import BadPayload as BadPayload -from .exc import BadSignature as BadSignature -from .exc import BadTimeSignature as BadTimeSignature -from .exc import SignatureExpired as SignatureExpired -from .serializer import Serializer as Serializer -from .signer import HMACAlgorithm as HMACAlgorithm -from .signer import NoneAlgorithm as NoneAlgorithm -from .signer import Signer as Signer -from .timed import TimedSerializer as TimedSerializer -from .timed import TimestampSigner as TimestampSigner -from .url_safe import URLSafeSerializer as URLSafeSerializer -from .url_safe import URLSafeTimedSerializer as URLSafeTimedSerializer - -__version__ = "2.1.2" diff --git a/server/venv/Lib/site-packages/itsdangerous/_json.py b/server/venv/Lib/site-packages/itsdangerous/_json.py deleted file mode 100644 index c70d37a..0000000 --- a/server/venv/Lib/site-packages/itsdangerous/_json.py +++ /dev/null @@ -1,16 +0,0 @@ -import json as _json -import typing as _t - - -class _CompactJSON: - """Wrapper around json module that strips whitespace.""" - - @staticmethod - def loads(payload: _t.Union[str, bytes]) -> _t.Any: - return _json.loads(payload) - - @staticmethod - def dumps(obj: _t.Any, **kwargs: _t.Any) -> str: - kwargs.setdefault("ensure_ascii", False) - kwargs.setdefault("separators", (",", ":")) - return _json.dumps(obj, **kwargs) diff --git a/server/venv/Lib/site-packages/itsdangerous/encoding.py b/server/venv/Lib/site-packages/itsdangerous/encoding.py deleted file mode 100644 index edb04d1..0000000 --- a/server/venv/Lib/site-packages/itsdangerous/encoding.py +++ /dev/null @@ -1,54 +0,0 @@ -import base64 -import string -import struct -import typing as _t - -from .exc import BadData - -_t_str_bytes = _t.Union[str, bytes] - - -def want_bytes( - s: _t_str_bytes, encoding: str = "utf-8", errors: str = "strict" -) -> bytes: - if isinstance(s, str): - s = s.encode(encoding, errors) - - return s - - -def base64_encode(string: _t_str_bytes) -> bytes: - """Base64 encode a string of bytes or text. The resulting bytes are - safe to use in URLs. - """ - string = want_bytes(string) - return base64.urlsafe_b64encode(string).rstrip(b"=") - - -def base64_decode(string: _t_str_bytes) -> bytes: - """Base64 decode a URL-safe string of bytes or text. The result is - bytes. 
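``base64_encode`` strips the ``=`` padding and ``base64_decode`` (whose body continues below) restores it; ``-len(string) % 4`` is exactly the number of ``=`` characters that were removed. A self-contained round trip of the same arithmetic:

.. code-block:: python

    import base64

    def b64encode_nopad(data: bytes) -> bytes:
        # URL-safe alphabet, padding stripped - as in base64_encode above.
        return base64.urlsafe_b64encode(data).rstrip(b"=")

    def b64decode_nopad(data: bytes) -> bytes:
        # -len(data) % 4 restores exactly the padding that was stripped.
        return base64.urlsafe_b64decode(data + b"=" * (-len(data) % 4))

    assert b64decode_nopad(b64encode_nopad(b"hello")) == b"hello"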
- """ - string = want_bytes(string, encoding="ascii", errors="ignore") - string += b"=" * (-len(string) % 4) - - try: - return base64.urlsafe_b64decode(string) - except (TypeError, ValueError) as e: - raise BadData("Invalid base64-encoded data") from e - - -# The alphabet used by base64.urlsafe_* -_base64_alphabet = f"{string.ascii_letters}{string.digits}-_=".encode("ascii") - -_int64_struct = struct.Struct(">Q") -_int_to_bytes = _int64_struct.pack -_bytes_to_int = _t.cast("_t.Callable[[bytes], _t.Tuple[int]]", _int64_struct.unpack) - - -def int_to_bytes(num: int) -> bytes: - return _int_to_bytes(num).lstrip(b"\x00") - - -def bytes_to_int(bytestr: bytes) -> int: - return _bytes_to_int(bytestr.rjust(8, b"\x00"))[0] diff --git a/server/venv/Lib/site-packages/itsdangerous/exc.py b/server/venv/Lib/site-packages/itsdangerous/exc.py deleted file mode 100644 index c38a6af..0000000 --- a/server/venv/Lib/site-packages/itsdangerous/exc.py +++ /dev/null @@ -1,107 +0,0 @@ -import typing as _t -from datetime import datetime - -_t_opt_any = _t.Optional[_t.Any] -_t_opt_exc = _t.Optional[Exception] - - -class BadData(Exception): - """Raised if bad data of any sort was encountered. This is the base - for all exceptions that ItsDangerous defines. - - .. versionadded:: 0.15 - """ - - def __init__(self, message: str): - super().__init__(message) - self.message = message - - def __str__(self) -> str: - return self.message - - -class BadSignature(BadData): - """Raised if a signature does not match.""" - - def __init__(self, message: str, payload: _t_opt_any = None): - super().__init__(message) - - #: The payload that failed the signature test. In some - #: situations you might still want to inspect this, even if - #: you know it was tampered with. - #: - #: .. versionadded:: 0.14 - self.payload: _t_opt_any = payload - - -class BadTimeSignature(BadSignature): - """Raised if a time-based signature is invalid. This is a subclass - of :class:`BadSignature`. - """ - - def __init__( - self, - message: str, - payload: _t_opt_any = None, - date_signed: _t.Optional[datetime] = None, - ): - super().__init__(message, payload) - - #: If the signature expired this exposes the date of when the - #: signature was created. This can be helpful in order to - #: tell the user how long a link has been gone stale. - #: - #: .. versionchanged:: 2.0 - #: The datetime value is timezone-aware rather than naive. - #: - #: .. versionadded:: 0.14 - self.date_signed = date_signed - - -class SignatureExpired(BadTimeSignature): - """Raised if a signature timestamp is older than ``max_age``. This - is a subclass of :exc:`BadTimeSignature`. - """ - - -class BadHeader(BadSignature): - """Raised if a signed header is invalid in some form. This only - happens for serializers that have a header that goes with the - signature. - - .. versionadded:: 0.24 - """ - - def __init__( - self, - message: str, - payload: _t_opt_any = None, - header: _t_opt_any = None, - original_error: _t_opt_exc = None, - ): - super().__init__(message, payload) - - #: If the header is actually available but just malformed it - #: might be stored here. - self.header: _t_opt_any = header - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. - self.original_error: _t_opt_exc = original_error - - -class BadPayload(BadData): - """Raised if a payload is invalid. This could happen if the payload - is loaded despite an invalid signature, or if there is a mismatch - between the serializer and deserializer. 
The original exception - that occurred during loading is stored on as :attr:`original_error`. - - .. versionadded:: 0.15 - """ - - def __init__(self, message: str, original_error: _t_opt_exc = None): - super().__init__(message) - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. - self.original_error: _t_opt_exc = original_error diff --git a/server/venv/Lib/site-packages/itsdangerous/py.typed b/server/venv/Lib/site-packages/itsdangerous/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/server/venv/Lib/site-packages/itsdangerous/serializer.py b/server/venv/Lib/site-packages/itsdangerous/serializer.py deleted file mode 100644 index 9f4a84a..0000000 --- a/server/venv/Lib/site-packages/itsdangerous/serializer.py +++ /dev/null @@ -1,295 +0,0 @@ -import json -import typing as _t - -from .encoding import want_bytes -from .exc import BadPayload -from .exc import BadSignature -from .signer import _make_keys_list -from .signer import Signer - -_t_str_bytes = _t.Union[str, bytes] -_t_opt_str_bytes = _t.Optional[_t_str_bytes] -_t_kwargs = _t.Dict[str, _t.Any] -_t_opt_kwargs = _t.Optional[_t_kwargs] -_t_signer = _t.Type[Signer] -_t_fallbacks = _t.List[_t.Union[_t_kwargs, _t.Tuple[_t_signer, _t_kwargs], _t_signer]] -_t_load_unsafe = _t.Tuple[bool, _t.Any] -_t_secret_key = _t.Union[_t.Iterable[_t_str_bytes], _t_str_bytes] - - -def is_text_serializer(serializer: _t.Any) -> bool: - """Checks whether a serializer generates text or binary.""" - return isinstance(serializer.dumps({}), str) - - -class Serializer: - """A serializer wraps a :class:`~itsdangerous.signer.Signer` to - enable serializing and securely signing data other than bytes. It - can unsign to verify that the data hasn't been changed. - - The serializer provides :meth:`dumps` and :meth:`loads`, similar to - :mod:`json`, and by default uses :mod:`json` internally to serialize - the data to bytes. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param serializer: An object that provides ``dumps`` and ``loads`` - methods for serializing data to a string. Defaults to - :attr:`default_serializer`, which defaults to :mod:`json`. - :param serializer_kwargs: Keyword arguments to pass when calling - ``serializer.dumps``. - :param signer: A ``Signer`` class to instantiate when signing data. - Defaults to :attr:`default_signer`, which defaults to - :class:`~itsdangerous.signer.Signer`. - :param signer_kwargs: Keyword arguments to pass when instantiating - the ``Signer`` class. - :param fallback_signers: List of signer parameters to try when - unsigning with the default signer fails. Each item can be a dict - of ``signer_kwargs``, a ``Signer`` class, or a tuple of - ``(signer, signer_kwargs)``. Defaults to - :attr:`default_fallback_signers`. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 2.0 - Removed the default SHA-512 fallback signer from - ``default_fallback_signers``. - - .. 
versionchanged:: 1.1 - Added support for ``fallback_signers`` and configured a default - SHA-512 fallback. This fallback is for users who used the yanked - 1.0.0 release which defaulted to SHA-512. - - .. versionchanged:: 0.14 - The ``signer`` and ``signer_kwargs`` parameters were added to - the constructor. - """ - - #: The default serialization module to use to serialize data to a - #: string internally. The default is :mod:`json`, but can be changed - #: to any object that provides ``dumps`` and ``loads`` methods. - default_serializer: _t.Any = json - - #: The default ``Signer`` class to instantiate when signing data. - #: The default is :class:`itsdangerous.signer.Signer`. - default_signer: _t_signer = Signer - - #: The default fallback signers to try when unsigning fails. - default_fallback_signers: _t_fallbacks = [] - - def __init__( - self, - secret_key: _t_secret_key, - salt: _t_opt_str_bytes = b"itsdangerous", - serializer: _t.Any = None, - serializer_kwargs: _t_opt_kwargs = None, - signer: _t.Optional[_t_signer] = None, - signer_kwargs: _t_opt_kwargs = None, - fallback_signers: _t.Optional[_t_fallbacks] = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. - self.secret_keys: _t.List[bytes] = _make_keys_list(secret_key) - - if salt is not None: - salt = want_bytes(salt) - # if salt is None then the signer's default is used - - self.salt = salt - - if serializer is None: - serializer = self.default_serializer - - self.serializer: _t.Any = serializer - self.is_text_serializer: bool = is_text_serializer(serializer) - - if signer is None: - signer = self.default_signer - - self.signer: _t_signer = signer - self.signer_kwargs: _t_kwargs = signer_kwargs or {} - - if fallback_signers is None: - fallback_signers = list(self.default_fallback_signers or ()) - - self.fallback_signers: _t_fallbacks = fallback_signers - self.serializer_kwargs: _t_kwargs = serializer_kwargs or {} - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def load_payload( - self, payload: bytes, serializer: _t.Optional[_t.Any] = None - ) -> _t.Any: - """Loads the encoded object. This function raises - :class:`.BadPayload` if the payload is not valid. The - ``serializer`` parameter can be used to override the serializer - stored on the class. The encoded ``payload`` should always be - bytes. - """ - if serializer is None: - serializer = self.serializer - is_text = self.is_text_serializer - else: - is_text = is_text_serializer(serializer) - - try: - if is_text: - return serializer.loads(payload.decode("utf-8")) - - return serializer.loads(payload) - except Exception as e: - raise BadPayload( - "Could not load the payload because an exception" - " occurred on unserializing the data.", - original_error=e, - ) from e - - def dump_payload(self, obj: _t.Any) -> bytes: - """Dumps the encoded object. The return value is always bytes. - If the internal serializer returns text, the value will be - encoded as UTF-8. - """ - return want_bytes(self.serializer.dumps(obj, **self.serializer_kwargs)) - - def make_signer(self, salt: _t_opt_str_bytes = None) -> Signer: - """Creates a new instance of the signer to be used. 
The default - implementation uses the :class:`.Signer` base class. - """ - if salt is None: - salt = self.salt - - return self.signer(self.secret_keys, salt=salt, **self.signer_kwargs) - - def iter_unsigners(self, salt: _t_opt_str_bytes = None) -> _t.Iterator[Signer]: - """Iterates over all signers to be tried for unsigning. Starts - with the configured signer, then constructs each signer - specified in ``fallback_signers``. - """ - if salt is None: - salt = self.salt - - yield self.make_signer(salt) - - for fallback in self.fallback_signers: - if isinstance(fallback, dict): - kwargs = fallback - fallback = self.signer - elif isinstance(fallback, tuple): - fallback, kwargs = fallback - else: - kwargs = self.signer_kwargs - - for secret_key in self.secret_keys: - yield fallback(secret_key, salt=salt, **kwargs) - - def dumps(self, obj: _t.Any, salt: _t_opt_str_bytes = None) -> _t_str_bytes: - """Returns a signed string serialized with the internal - serializer. The return value can be either a byte or unicode - string depending on the format of the internal serializer. - """ - payload = want_bytes(self.dump_payload(obj)) - rv = self.make_signer(salt).sign(payload) - - if self.is_text_serializer: - return rv.decode("utf-8") - - return rv - - def dump(self, obj: _t.Any, f: _t.IO, salt: _t_opt_str_bytes = None) -> None: - """Like :meth:`dumps` but dumps into a file. The file handle has - to be compatible with what the internal serializer expects. - """ - f.write(self.dumps(obj, salt)) - - def loads( - self, s: _t_str_bytes, salt: _t_opt_str_bytes = None, **kwargs: _t.Any - ) -> _t.Any: - """Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the - signature validation fails. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - return self.load_payload(signer.unsign(s)) - except BadSignature as err: - last_exception = err - - raise _t.cast(BadSignature, last_exception) - - def load(self, f: _t.IO, salt: _t_opt_str_bytes = None) -> _t.Any: - """Like :meth:`loads` but loads from a file.""" - return self.loads(f.read(), salt) - - def loads_unsafe( - self, s: _t_str_bytes, salt: _t_opt_str_bytes = None - ) -> _t_load_unsafe: - """Like :meth:`loads` but without verifying the signature. This - is potentially very dangerous to use depending on how your - serializer works. The return value is ``(signature_valid, - payload)`` instead of just the payload. The first item will be a - boolean that indicates if the signature is valid. This function - never fails. - - Use it for debugging only and if you know that your serializer - module is not exploitable (for example, do not use it with a - pickle serializer). - - .. versionadded:: 0.15 - """ - return self._loads_unsafe_impl(s, salt) - - def _loads_unsafe_impl( - self, - s: _t_str_bytes, - salt: _t_opt_str_bytes, - load_kwargs: _t_opt_kwargs = None, - load_payload_kwargs: _t_opt_kwargs = None, - ) -> _t_load_unsafe: - """Low level helper function to implement :meth:`loads_unsafe` - in serializer subclasses. 
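``iter_unsigners`` above is what makes the documented key rotation work: unsigning tries the configured signer and every fallback for each entry in ``secret_keys``, while signing always uses the newest key. A usage sketch, assuming itsdangerous 2.x is installed:

.. code-block:: python

    from itsdangerous import Serializer

    old = Serializer("key-2023")
    rotated = Serializer(["key-2023", "key-2024"])  # oldest to newest

    token = old.dumps({"user": 42})
    # Tokens signed with the retired key still verify after rotation...
    assert rotated.loads(token) == {"user": 42}
    # ...while new tokens are signed with the newest key only.
    assert rotated.dumps({"user": 42}) != token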
- """ - if load_kwargs is None: - load_kwargs = {} - - try: - return True, self.loads(s, salt=salt, **load_kwargs) - except BadSignature as e: - if e.payload is None: - return False, None - - if load_payload_kwargs is None: - load_payload_kwargs = {} - - try: - return ( - False, - self.load_payload(e.payload, **load_payload_kwargs), - ) - except BadPayload: - return False, None - - def load_unsafe(self, f: _t.IO, salt: _t_opt_str_bytes = None) -> _t_load_unsafe: - """Like :meth:`loads_unsafe` but loads from a file. - - .. versionadded:: 0.15 - """ - return self.loads_unsafe(f.read(), salt=salt) diff --git a/server/venv/Lib/site-packages/itsdangerous/signer.py b/server/venv/Lib/site-packages/itsdangerous/signer.py deleted file mode 100644 index aa12005..0000000 --- a/server/venv/Lib/site-packages/itsdangerous/signer.py +++ /dev/null @@ -1,257 +0,0 @@ -import hashlib -import hmac -import typing as _t - -from .encoding import _base64_alphabet -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import want_bytes -from .exc import BadSignature - -_t_str_bytes = _t.Union[str, bytes] -_t_opt_str_bytes = _t.Optional[_t_str_bytes] -_t_secret_key = _t.Union[_t.Iterable[_t_str_bytes], _t_str_bytes] - - -class SigningAlgorithm: - """Subclasses must implement :meth:`get_signature` to provide - signature generation functionality. - """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - """Returns the signature for the given key and value.""" - raise NotImplementedError() - - def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool: - """Verifies the given signature matches the expected - signature. - """ - return hmac.compare_digest(sig, self.get_signature(key, value)) - - -class NoneAlgorithm(SigningAlgorithm): - """Provides an algorithm that does not perform any signing and - returns an empty signature. - """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - return b"" - - -class HMACAlgorithm(SigningAlgorithm): - """Provides signature generation using HMACs.""" - - #: The digest method to use with the MAC algorithm. This defaults to - #: SHA1, but can be changed to any other function in the hashlib - #: module. - default_digest_method: _t.Any = staticmethod(hashlib.sha1) - - def __init__(self, digest_method: _t.Any = None): - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: _t.Any = digest_method - - def get_signature(self, key: bytes, value: bytes) -> bytes: - mac = hmac.new(key, msg=value, digestmod=self.digest_method) - return mac.digest() - - -def _make_keys_list(secret_key: _t_secret_key) -> _t.List[bytes]: - if isinstance(secret_key, (str, bytes)): - return [want_bytes(secret_key)] - - return [want_bytes(s) for s in secret_key] - - -class Signer: - """A signer securely signs bytes, then unsigns them to verify that - the value hasn't been changed. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param sep: Separator between the signature and value. 
- :param key_derivation: How to derive the signing key from the secret - key and salt. Possible values are ``concat``, ``django-concat``, - or ``hmac``. Defaults to :attr:`default_key_derivation`, which - defaults to ``django-concat``. - :param digest_method: Hash function to use when generating the HMAC - signature. Defaults to :attr:`default_digest_method`, which - defaults to :func:`hashlib.sha1`. Note that the security of the - hash alone doesn't apply when used intermediately in HMAC. - :param algorithm: A :class:`SigningAlgorithm` instance to use - instead of building a default :class:`HMACAlgorithm` with the - ``digest_method``. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 0.18 - ``algorithm`` was added as an argument to the class constructor. - - .. versionchanged:: 0.14 - ``key_derivation`` and ``digest_method`` were added as arguments - to the class constructor. - """ - - #: The default digest method to use for the signer. The default is - #: :func:`hashlib.sha1`, but can be changed to any :mod:`hashlib` or - #: compatible object. Note that the security of the hash alone - #: doesn't apply when used intermediately in HMAC. - #: - #: .. versionadded:: 0.14 - default_digest_method: _t.Any = staticmethod(hashlib.sha1) - - #: The default scheme to use to derive the signing key from the - #: secret key and salt. The default is ``django-concat``. Possible - #: values are ``concat``, ``django-concat``, and ``hmac``. - #: - #: .. versionadded:: 0.14 - default_key_derivation: str = "django-concat" - - def __init__( - self, - secret_key: _t_secret_key, - salt: _t_opt_str_bytes = b"itsdangerous.Signer", - sep: _t_str_bytes = b".", - key_derivation: _t.Optional[str] = None, - digest_method: _t.Optional[_t.Any] = None, - algorithm: _t.Optional[SigningAlgorithm] = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. - self.secret_keys: _t.List[bytes] = _make_keys_list(secret_key) - self.sep: bytes = want_bytes(sep) - - if self.sep in _base64_alphabet: - raise ValueError( - "The given separator cannot be used because it may be" - " contained in the signature itself. ASCII letters," - " digits, and '-_=' must not be used." - ) - - if salt is not None: - salt = want_bytes(salt) - else: - salt = b"itsdangerous.Signer" - - self.salt = salt - - if key_derivation is None: - key_derivation = self.default_key_derivation - - self.key_derivation: str = key_derivation - - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: _t.Any = digest_method - - if algorithm is None: - algorithm = HMACAlgorithm(self.digest_method) - - self.algorithm: SigningAlgorithm = algorithm - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def derive_key(self, secret_key: _t_opt_str_bytes = None) -> bytes: - """This method is called to derive the key. The default key - derivation choices can be overridden here. Key derivation is not - intended to be used as a security method to make a complex key - out of a short password. Instead you should use large random - secret keys. - - :param secret_key: A specific secret key to derive from. 
- Defaults to the last item in :attr:`secret_keys`. - - .. versionchanged:: 2.0 - Added the ``secret_key`` parameter. - """ - if secret_key is None: - secret_key = self.secret_keys[-1] - else: - secret_key = want_bytes(secret_key) - - if self.key_derivation == "concat": - return _t.cast(bytes, self.digest_method(self.salt + secret_key).digest()) - elif self.key_derivation == "django-concat": - return _t.cast( - bytes, self.digest_method(self.salt + b"signer" + secret_key).digest() - ) - elif self.key_derivation == "hmac": - mac = hmac.new(secret_key, digestmod=self.digest_method) - mac.update(self.salt) - return mac.digest() - elif self.key_derivation == "none": - return secret_key - else: - raise TypeError("Unknown key derivation method") - - def get_signature(self, value: _t_str_bytes) -> bytes: - """Returns the signature for the given value.""" - value = want_bytes(value) - key = self.derive_key() - sig = self.algorithm.get_signature(key, value) - return base64_encode(sig) - - def sign(self, value: _t_str_bytes) -> bytes: - """Signs the given string.""" - value = want_bytes(value) - return value + self.sep + self.get_signature(value) - - def verify_signature(self, value: _t_str_bytes, sig: _t_str_bytes) -> bool: - """Verifies the signature for the given value.""" - try: - sig = base64_decode(sig) - except Exception: - return False - - value = want_bytes(value) - - for secret_key in reversed(self.secret_keys): - key = self.derive_key(secret_key) - - if self.algorithm.verify_signature(key, value, sig): - return True - - return False - - def unsign(self, signed_value: _t_str_bytes) -> bytes: - """Unsigns the given string.""" - signed_value = want_bytes(signed_value) - - if self.sep not in signed_value: - raise BadSignature(f"No {self.sep!r} found in value") - - value, sig = signed_value.rsplit(self.sep, 1) - - if self.verify_signature(value, sig): - return value - - raise BadSignature(f"Signature {sig!r} does not match", payload=value) - - def validate(self, signed_value: _t_str_bytes) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid. - """ - try: - self.unsign(signed_value) - return True - except BadSignature: - return False diff --git a/server/venv/Lib/site-packages/itsdangerous/timed.py b/server/venv/Lib/site-packages/itsdangerous/timed.py deleted file mode 100644 index cad8da3..0000000 --- a/server/venv/Lib/site-packages/itsdangerous/timed.py +++ /dev/null @@ -1,234 +0,0 @@ -import time -import typing -import typing as _t -from datetime import datetime -from datetime import timezone - -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import bytes_to_int -from .encoding import int_to_bytes -from .encoding import want_bytes -from .exc import BadSignature -from .exc import BadTimeSignature -from .exc import SignatureExpired -from .serializer import Serializer -from .signer import Signer - -_t_str_bytes = _t.Union[str, bytes] -_t_opt_str_bytes = _t.Optional[_t_str_bytes] -_t_opt_int = _t.Optional[int] - -if _t.TYPE_CHECKING: - import typing_extensions as _te - - -class TimestampSigner(Signer): - """Works like the regular :class:`.Signer` but also records the time - of the signing and can be used to expire signatures. The - :meth:`unsign` method can raise :exc:`.SignatureExpired` if the - unsigning failed because the signature is expired. - """ - - def get_timestamp(self) -> int: - """Returns the current timestamp. The function must return an - integer. 
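Putting ``derive_key``, ``get_signature``, and ``verify_signature`` above together: the default pipeline is django-concat key derivation, HMAC-SHA1, and a constant-time comparison. A standalone sketch of that pipeline (function names are illustrative):

.. code-block:: python

    import base64
    import hashlib
    import hmac

    def derive(secret: bytes, salt: bytes = b"itsdangerous.Signer") -> bytes:
        # The default "django-concat" scheme from derive_key above.
        return hashlib.sha1(salt + b"signer" + secret).digest()

    def sign(secret: bytes, value: bytes) -> bytes:
        mac = hmac.new(derive(secret), msg=value, digestmod=hashlib.sha1)
        sig = base64.urlsafe_b64encode(mac.digest()).rstrip(b"=")
        return value + b"." + sig

    def verify(secret: bytes, signed: bytes) -> bool:
        value, _, sig = signed.rpartition(b".")
        mac = hmac.new(derive(secret), msg=value, digestmod=hashlib.sha1)
        expected = base64.urlsafe_b64encode(mac.digest()).rstrip(b"=")
        # compare_digest keeps the comparison constant-time.
        return hmac.compare_digest(sig, expected)

    assert verify(b"secret", sign(b"secret", b"payload"))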
- """ - return int(time.time()) - - def timestamp_to_datetime(self, ts: int) -> datetime: - """Convert the timestamp from :meth:`get_timestamp` into an - aware :class`datetime.datetime` in UTC. - - .. versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - return datetime.fromtimestamp(ts, tz=timezone.utc) - - def sign(self, value: _t_str_bytes) -> bytes: - """Signs the given string and also attaches time information.""" - value = want_bytes(value) - timestamp = base64_encode(int_to_bytes(self.get_timestamp())) - sep = want_bytes(self.sep) - value = value + sep + timestamp - return value + sep + self.get_signature(value) - - # Ignore overlapping signatures check, return_timestamp is the only - # parameter that affects the return type. - - @typing.overload - def unsign( # type: ignore - self, - signed_value: _t_str_bytes, - max_age: _t_opt_int = None, - return_timestamp: "_te.Literal[False]" = False, - ) -> bytes: - ... - - @typing.overload - def unsign( - self, - signed_value: _t_str_bytes, - max_age: _t_opt_int = None, - return_timestamp: "_te.Literal[True]" = True, - ) -> _t.Tuple[bytes, datetime]: - ... - - def unsign( - self, - signed_value: _t_str_bytes, - max_age: _t_opt_int = None, - return_timestamp: bool = False, - ) -> _t.Union[_t.Tuple[bytes, datetime], bytes]: - """Works like the regular :meth:`.Signer.unsign` but can also - validate the time. See the base docstring of the class for - the general behavior. If ``return_timestamp`` is ``True`` the - timestamp of the signature will be returned as an aware - :class:`datetime.datetime` object in UTC. - - .. versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - try: - result = super().unsign(signed_value) - sig_error = None - except BadSignature as e: - sig_error = e - result = e.payload or b"" - - sep = want_bytes(self.sep) - - # If there is no timestamp in the result there is something - # seriously wrong. In case there was a signature error, we raise - # that one directly, otherwise we have a weird situation in - # which we shouldn't have come except someone uses a time-based - # serializer on non-timestamp data, so catch that. - if sep not in result: - if sig_error: - raise sig_error - - raise BadTimeSignature("timestamp missing", payload=result) - - value, ts_bytes = result.rsplit(sep, 1) - ts_int: _t_opt_int = None - ts_dt: _t.Optional[datetime] = None - - try: - ts_int = bytes_to_int(base64_decode(ts_bytes)) - except Exception: - pass - - # Signature is *not* okay. Raise a proper error now that we have - # split the value and the timestamp. - if sig_error is not None: - if ts_int is not None: - try: - ts_dt = self.timestamp_to_datetime(ts_int) - except (ValueError, OSError, OverflowError) as exc: - # Windows raises OSError - # 32-bit raises OverflowError - raise BadTimeSignature( - "Malformed timestamp", payload=value - ) from exc - - raise BadTimeSignature(str(sig_error), payload=value, date_signed=ts_dt) - - # Signature was okay but the timestamp is actually not there or - # malformed. Should not happen, but we handle it anyway. 
- if ts_int is None: - raise BadTimeSignature("Malformed timestamp", payload=value) - - # Check timestamp is not older than max_age - if max_age is not None: - age = self.get_timestamp() - ts_int - - if age > max_age: - raise SignatureExpired( - f"Signature age {age} > {max_age} seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if age < 0: - raise SignatureExpired( - f"Signature age {age} < 0 seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if return_timestamp: - return value, self.timestamp_to_datetime(ts_int) - - return value - - def validate(self, signed_value: _t_str_bytes, max_age: _t_opt_int = None) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid.""" - try: - self.unsign(signed_value, max_age=max_age) - return True - except BadSignature: - return False - - -class TimedSerializer(Serializer): - """Uses :class:`TimestampSigner` instead of the default - :class:`.Signer`. - """ - - default_signer: _t.Type[TimestampSigner] = TimestampSigner - - def iter_unsigners( - self, salt: _t_opt_str_bytes = None - ) -> _t.Iterator[TimestampSigner]: - return _t.cast("_t.Iterator[TimestampSigner]", super().iter_unsigners(salt)) - - # TODO: Signature is incompatible because parameters were added - # before salt. - - def loads( # type: ignore - self, - s: _t_str_bytes, - max_age: _t_opt_int = None, - return_timestamp: bool = False, - salt: _t_opt_str_bytes = None, - ) -> _t.Any: - """Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the - signature validation fails. If a ``max_age`` is provided it will - ensure the signature is not older than that time in seconds. In - case the signature is outdated, :exc:`.SignatureExpired` is - raised. All arguments are forwarded to the signer's - :meth:`~TimestampSigner.unsign` method. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - base64d, timestamp = signer.unsign( - s, max_age=max_age, return_timestamp=True - ) - payload = self.load_payload(base64d) - - if return_timestamp: - return payload, timestamp - - return payload - except SignatureExpired: - # The signature was unsigned successfully but was - # expired. Do not try the next signer. - raise - except BadSignature as err: - last_exception = err - - raise _t.cast(BadSignature, last_exception) - - def loads_unsafe( # type: ignore - self, - s: _t_str_bytes, - max_age: _t_opt_int = None, - salt: _t_opt_str_bytes = None, - ) -> _t.Tuple[bool, _t.Any]: - return self._loads_unsafe_impl(s, salt, load_kwargs={"max_age": max_age}) diff --git a/server/venv/Lib/site-packages/itsdangerous/url_safe.py b/server/venv/Lib/site-packages/itsdangerous/url_safe.py deleted file mode 100644 index d5a9b0c..0000000 --- a/server/venv/Lib/site-packages/itsdangerous/url_safe.py +++ /dev/null @@ -1,80 +0,0 @@ -import typing as _t -import zlib - -from ._json import _CompactJSON -from .encoding import base64_decode -from .encoding import base64_encode -from .exc import BadPayload -from .serializer import Serializer -from .timed import TimedSerializer - - -class URLSafeSerializerMixin(Serializer): - """Mixed in with a regular serializer it will attempt to zlib - compress the string to make it shorter if necessary. It will also - base64 encode the string so that it can safely be placed in a URL. 
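The mixin whose definition continues below compresses only when compression actually shortens the payload, and flags that case with a leading ``.`` (a character outside the URL-safe base64 alphabet). A standalone sketch of the scheme (``pack``/``unpack`` are illustrative names):

.. code-block:: python

    import base64
    import zlib

    def pack(json_bytes: bytes) -> bytes:
        compressed = zlib.compress(json_bytes)
        if len(compressed) < len(json_bytes) - 1:
            # The leading "." marks the payload as compressed.
            return b"." + base64.urlsafe_b64encode(compressed).rstrip(b"=")
        return base64.urlsafe_b64encode(json_bytes).rstrip(b"=")

    def unpack(payload: bytes) -> bytes:
        compressed = payload.startswith(b".")
        if compressed:
            payload = payload[1:]
        raw = base64.urlsafe_b64decode(payload + b"=" * (-len(payload) % 4))
        return zlib.decompress(raw) if compressed else raw

    blob = b'{"k":"' + b"x" * 100 + b'"}'
    assert unpack(pack(blob)) == blob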
- """ - - default_serializer = _CompactJSON - - def load_payload( - self, - payload: bytes, - *args: _t.Any, - serializer: _t.Optional[_t.Any] = None, - **kwargs: _t.Any, - ) -> _t.Any: - decompress = False - - if payload.startswith(b"."): - payload = payload[1:] - decompress = True - - try: - json = base64_decode(payload) - except Exception as e: - raise BadPayload( - "Could not base64 decode the payload because of an exception", - original_error=e, - ) from e - - if decompress: - try: - json = zlib.decompress(json) - except Exception as e: - raise BadPayload( - "Could not zlib decompress the payload before decoding the payload", - original_error=e, - ) from e - - return super().load_payload(json, *args, **kwargs) - - def dump_payload(self, obj: _t.Any) -> bytes: - json = super().dump_payload(obj) - is_compressed = False - compressed = zlib.compress(json) - - if len(compressed) < (len(json) - 1): - json = compressed - is_compressed = True - - base64d = base64_encode(json) - - if is_compressed: - base64d = b"." + base64d - - return base64d - - -class URLSafeSerializer(URLSafeSerializerMixin, Serializer): - """Works like :class:`.Serializer` but dumps and loads into a URL - safe string consisting of the upper and lowercase character of the - alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ - - -class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer): - """Works like :class:`.TimedSerializer` but dumps and loads into a - URL safe string consisting of the upper and lowercase character of - the alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ diff --git a/server/venv/Lib/site-packages/jinja2/__init__.py b/server/venv/Lib/site-packages/jinja2/__init__.py deleted file mode 100644 index e323926..0000000 --- a/server/venv/Lib/site-packages/jinja2/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Jinja is a template engine written in pure Python. It provides a -non-XML syntax that supports inline expressions and an optional -sandboxed environment. 
-""" -from .bccache import BytecodeCache as BytecodeCache -from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache -from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache -from .environment import Environment as Environment -from .environment import Template as Template -from .exceptions import TemplateAssertionError as TemplateAssertionError -from .exceptions import TemplateError as TemplateError -from .exceptions import TemplateNotFound as TemplateNotFound -from .exceptions import TemplateRuntimeError as TemplateRuntimeError -from .exceptions import TemplatesNotFound as TemplatesNotFound -from .exceptions import TemplateSyntaxError as TemplateSyntaxError -from .exceptions import UndefinedError as UndefinedError -from .loaders import BaseLoader as BaseLoader -from .loaders import ChoiceLoader as ChoiceLoader -from .loaders import DictLoader as DictLoader -from .loaders import FileSystemLoader as FileSystemLoader -from .loaders import FunctionLoader as FunctionLoader -from .loaders import ModuleLoader as ModuleLoader -from .loaders import PackageLoader as PackageLoader -from .loaders import PrefixLoader as PrefixLoader -from .runtime import ChainableUndefined as ChainableUndefined -from .runtime import DebugUndefined as DebugUndefined -from .runtime import make_logging_undefined as make_logging_undefined -from .runtime import StrictUndefined as StrictUndefined -from .runtime import Undefined as Undefined -from .utils import clear_caches as clear_caches -from .utils import is_undefined as is_undefined -from .utils import pass_context as pass_context -from .utils import pass_environment as pass_environment -from .utils import pass_eval_context as pass_eval_context -from .utils import select_autoescape as select_autoescape - -__version__ = "3.1.2" diff --git a/server/venv/Lib/site-packages/jinja2/_identifier.py b/server/venv/Lib/site-packages/jinja2/_identifier.py deleted file mode 100644 index 928c150..0000000 --- a/server/venv/Lib/site-packages/jinja2/_identifier.py +++ /dev/null @@ -1,6 +0,0 @@ -import re - -# generated by scripts/generate_identifier_pattern.py -pattern = re.compile( - r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 -) diff --git a/server/venv/Lib/site-packages/jinja2/async_utils.py b/server/venv/Lib/site-packages/jinja2/async_utils.py deleted file mode 100644 index 1a4f389..0000000 --- a/server/venv/Lib/site-packages/jinja2/async_utils.py +++ /dev/null @@ -1,84 +0,0 @@ -import inspect -import typing as t -from functools import WRAPPER_ASSIGNMENTS -from functools import wraps - -from .utils import _PassArg -from .utils import pass_eval_context - -V = t.TypeVar("V") - - -def async_variant(normal_func): # type: ignore - def decorator(async_func): # type: ignore - pass_arg = _PassArg.from_obj(normal_func) - need_eval_context = pass_arg is None - - if pass_arg is _PassArg.environment: - - def 
is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].is_async) - - else: - - def is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].environment.is_async) - - # Take the doc and annotations from the sync function, but the - # name from the async function. Pallets-Sphinx-Themes - # build_function_directive expects __wrapped__ to point to the - # sync function. - async_func_attrs = ("__module__", "__name__", "__qualname__") - normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs)) - - @wraps(normal_func, assigned=normal_func_attrs) - @wraps(async_func, assigned=async_func_attrs, updated=()) - def wrapper(*args, **kwargs): # type: ignore - b = is_async(args) - - if need_eval_context: - args = args[1:] - - if b: - return async_func(*args, **kwargs) - - return normal_func(*args, **kwargs) - - if need_eval_context: - wrapper = pass_eval_context(wrapper) - - wrapper.jinja_async_variant = True - return wrapper - - return decorator - - -_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} - - -async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": - # Avoid a costly call to isawaitable - if type(value) in _common_primitives: - return t.cast("V", value) - - if inspect.isawaitable(value): - return await t.cast("t.Awaitable[V]", value) - - return t.cast("V", value) - - -async def auto_aiter( - iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> "t.AsyncIterator[V]": - if hasattr(iterable, "__aiter__"): - async for item in t.cast("t.AsyncIterable[V]", iterable): - yield item - else: - for item in t.cast("t.Iterable[V]", iterable): - yield item - - -async def auto_to_list( - value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> t.List["V"]: - return [x async for x in auto_aiter(value)] diff --git a/server/venv/Lib/site-packages/jinja2/bccache.py b/server/venv/Lib/site-packages/jinja2/bccache.py deleted file mode 100644 index d0ddf56..0000000 --- a/server/venv/Lib/site-packages/jinja2/bccache.py +++ /dev/null @@ -1,406 +0,0 @@ -"""The optional bytecode cache system. This is useful if you have very -complex template situations and the compilation of all those templates -slows down your application too much. - -Situations where this is useful are often forking web applications that -are initialized on the first request. -""" -import errno -import fnmatch -import marshal -import os -import pickle -import stat -import sys -import tempfile -import typing as t -from hashlib import sha1 -from io import BytesIO -from types import CodeType - -if t.TYPE_CHECKING: - import typing_extensions as te - from .environment import Environment - - class _MemcachedClient(te.Protocol): - def get(self, key: str) -> bytes: - ... - - def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None: - ... - - -bc_version = 5 -# Magic bytes to identify Jinja bytecode cache files. Contains the -# Python major and minor version to avoid loading incompatible bytecode -# if a project upgrades its Python version. -bc_magic = ( - b"j2" - + pickle.dumps(bc_version, 2) - + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) -) - - -class Bucket: - """Buckets are used to store the bytecode for one template. It's created - and initialized by the bytecode cache and passed to the loading functions. - - The buckets get an internal checksum from the cache assigned and use this - to automatically reject outdated cache material. Individual bytecode - cache subclasses don't have to care about cache invalidation. 
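A small, self-contained sketch of the bucket lifecycle, using the filesystem cache defined later in this file (the template name and source are made up):

    import tempfile

    from jinja2 import Environment
    from jinja2.bccache import FileSystemBytecodeCache

    env = Environment()
    cache = FileSystemBytecodeCache(tempfile.mkdtemp())
    bucket = cache.get_bucket(env, "hello.txt", None, "Hello {{ name }}!")
    print(bucket.code)  # None on a cold cache; the compiler fills it, then set_bucket()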
- """ - - def __init__(self, environment: "Environment", key: str, checksum: str) -> None: - self.environment = environment - self.key = key - self.checksum = checksum - self.reset() - - def reset(self) -> None: - """Resets the bucket (unloads the bytecode).""" - self.code: t.Optional[CodeType] = None - - def load_bytecode(self, f: t.BinaryIO) -> None: - """Loads bytecode from a file or file like object.""" - # make sure the magic header is correct - magic = f.read(len(bc_magic)) - if magic != bc_magic: - self.reset() - return - # the source code of the file changed, we need to reload - checksum = pickle.load(f) - if self.checksum != checksum: - self.reset() - return - # if marshal_load fails then we need to reload - try: - self.code = marshal.load(f) - except (EOFError, ValueError, TypeError): - self.reset() - return - - def write_bytecode(self, f: t.IO[bytes]) -> None: - """Dump the bytecode into the file or file like object passed.""" - if self.code is None: - raise TypeError("can't write empty bucket") - f.write(bc_magic) - pickle.dump(self.checksum, f, 2) - marshal.dump(self.code, f) - - def bytecode_from_string(self, string: bytes) -> None: - """Load bytecode from bytes.""" - self.load_bytecode(BytesIO(string)) - - def bytecode_to_string(self) -> bytes: - """Return the bytecode as bytes.""" - out = BytesIO() - self.write_bytecode(out) - return out.getvalue() - - -class BytecodeCache: - """To implement your own bytecode cache you have to subclass this class - and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of - these methods are passed a :class:`~jinja2.bccache.Bucket`. - - A very basic bytecode cache that saves the bytecode on the file system:: - - from os import path - - class MyCache(BytecodeCache): - - def __init__(self, directory): - self.directory = directory - - def load_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - if path.exists(filename): - with open(filename, 'rb') as f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - with open(filename, 'wb') as f: - bucket.write_bytecode(f) - - A more advanced version of a filesystem based bytecode cache is part of - Jinja. - """ - - def load_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to load bytecode into a - bucket. If they are not able to find code in the cache for the - bucket, it must not do anything. - """ - raise NotImplementedError() - - def dump_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to write the bytecode - from a bucket back to the cache. If it unable to do so it must not - fail silently but raise an exception. - """ - raise NotImplementedError() - - def clear(self) -> None: - """Clears the cache. This method is not used by Jinja but should be - implemented to allow applications to clear the bytecode cache used - by a particular environment. 
- """ - - def get_cache_key( - self, name: str, filename: t.Optional[t.Union[str]] = None - ) -> str: - """Returns the unique hash key for this template name.""" - hash = sha1(name.encode("utf-8")) - - if filename is not None: - hash.update(f"|{filename}".encode()) - - return hash.hexdigest() - - def get_source_checksum(self, source: str) -> str: - """Returns a checksum for the source.""" - return sha1(source.encode("utf-8")).hexdigest() - - def get_bucket( - self, - environment: "Environment", - name: str, - filename: t.Optional[str], - source: str, - ) -> Bucket: - """Return a cache bucket for the given template. All arguments are - mandatory but filename may be `None`. - """ - key = self.get_cache_key(name, filename) - checksum = self.get_source_checksum(source) - bucket = Bucket(environment, key, checksum) - self.load_bytecode(bucket) - return bucket - - def set_bucket(self, bucket: Bucket) -> None: - """Put the bucket into the cache.""" - self.dump_bytecode(bucket) - - -class FileSystemBytecodeCache(BytecodeCache): - """A bytecode cache that stores bytecode on the filesystem. It accepts - two arguments: The directory where the cache items are stored and a - pattern string that is used to build the filename. - - If no directory is specified a default cache directory is selected. On - Windows the user's temp directory is used, on UNIX systems a directory - is created for the user in the system temp directory. - - The pattern can be used to have multiple separate caches operate on the - same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` - is replaced with the cache key. - - >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') - - This bytecode cache supports clearing of the cache using the clear method. - """ - - def __init__( - self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" - ) -> None: - if directory is None: - directory = self._get_default_cache_dir() - self.directory = directory - self.pattern = pattern - - def _get_default_cache_dir(self) -> str: - def _unsafe_dir() -> "te.NoReturn": - raise RuntimeError( - "Cannot determine safe temp directory. You " - "need to explicitly provide one." - ) - - tmpdir = tempfile.gettempdir() - - # On windows the temporary directory is used specific unless - # explicitly forced otherwise. We can just use that. - if os.name == "nt": - return tmpdir - if not hasattr(os, "getuid"): - _unsafe_dir() - - dirname = f"_jinja2-cache-{os.getuid()}" - actual_dir = os.path.join(tmpdir, dirname) - - try: - os.mkdir(actual_dir, stat.S_IRWXU) - except OSError as e: - if e.errno != errno.EEXIST: - raise - try: - os.chmod(actual_dir, stat.S_IRWXU) - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - except OSError as e: - if e.errno != errno.EEXIST: - raise - - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - - return actual_dir - - def _get_cache_filename(self, bucket: Bucket) -> str: - return os.path.join(self.directory, self.pattern % (bucket.key,)) - - def load_bytecode(self, bucket: Bucket) -> None: - filename = self._get_cache_filename(bucket) - - # Don't test for existence before opening the file, since the - # file could disappear after the test before the open. 
- try: - f = open(filename, "rb") - except (FileNotFoundError, IsADirectoryError, PermissionError): - # PermissionError can occur on Windows when an operation is - # in progress, such as calling clear(). - return - - with f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket: Bucket) -> None: - # Write to a temporary file, then rename to the real name after - # writing. This avoids another process reading the file before - # it is fully written. - name = self._get_cache_filename(bucket) - f = tempfile.NamedTemporaryFile( - mode="wb", - dir=os.path.dirname(name), - prefix=os.path.basename(name), - suffix=".tmp", - delete=False, - ) - - def remove_silent() -> None: - try: - os.remove(f.name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - pass - - try: - with f: - bucket.write_bytecode(f) - except BaseException: - remove_silent() - raise - - try: - os.replace(f.name, name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - remove_silent() - except BaseException: - remove_silent() - raise - - def clear(self) -> None: - # imported lazily here because google app-engine doesn't support - # write access on the file system and the function does not exist - # normally. - from os import remove - - files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) - for filename in files: - try: - remove(os.path.join(self.directory, filename)) - except OSError: - pass - - -class MemcachedBytecodeCache(BytecodeCache): - """This class implements a bytecode cache that uses a memcache cache for - storing the information. It does not enforce a specific memcache library - (tummy's memcache or cmemcache) but will accept any class that provides - the minimal interface required. - - Libraries compatible with this class: - - - `cachelib `_ - - `python-memcached `_ - - (Unfortunately the django cache interface is not compatible because it - does not support storing binary data, only text. You can however pass - the underlying cache client to the bytecode cache which is available - as `django.core.cache.cache._client`.) - - The minimal interface for the client passed to the constructor is this: - - .. class:: MinimalClientInterface - - .. method:: set(key, value[, timeout]) - - Stores the bytecode in the cache. `value` is a string and - `timeout` the timeout of the key. If timeout is not provided - a default timeout or no timeout should be assumed, if it's - provided it's an integer with the number of seconds the cache - item should exist. - - .. method:: get(key) - - Returns the value for the cache key. If the item does not - exist in the cache the return value must be `None`. - - The other arguments to the constructor are the prefix for all keys that - is added before the actual cache key and the timeout for the bytecode in - the cache system. We recommend a high (or no) timeout. - - This bytecode cache does not support clearing of used items in the cache. - The clear method is a no-operation function. - - .. versionadded:: 2.7 - Added support for ignoring memcache errors through the - `ignore_memcache_errors` parameter. 
- """ - - def __init__( - self, - client: "_MemcachedClient", - prefix: str = "jinja2/bytecode/", - timeout: t.Optional[int] = None, - ignore_memcache_errors: bool = True, - ): - self.client = client - self.prefix = prefix - self.timeout = timeout - self.ignore_memcache_errors = ignore_memcache_errors - - def load_bytecode(self, bucket: Bucket) -> None: - try: - code = self.client.get(self.prefix + bucket.key) - except Exception: - if not self.ignore_memcache_errors: - raise - else: - bucket.bytecode_from_string(code) - - def dump_bytecode(self, bucket: Bucket) -> None: - key = self.prefix + bucket.key - value = bucket.bytecode_to_string() - - try: - if self.timeout is not None: - self.client.set(key, value, self.timeout) - else: - self.client.set(key, value) - except Exception: - if not self.ignore_memcache_errors: - raise diff --git a/server/venv/Lib/site-packages/jinja2/compiler.py b/server/venv/Lib/site-packages/jinja2/compiler.py deleted file mode 100644 index 3458095..0000000 --- a/server/venv/Lib/site-packages/jinja2/compiler.py +++ /dev/null @@ -1,1957 +0,0 @@ -"""Compiles nodes from the parser into Python code.""" -import typing as t -from contextlib import contextmanager -from functools import update_wrapper -from io import StringIO -from itertools import chain -from keyword import iskeyword as is_python_keyword - -from markupsafe import escape -from markupsafe import Markup - -from . import nodes -from .exceptions import TemplateAssertionError -from .idtracking import Symbols -from .idtracking import VAR_LOAD_ALIAS -from .idtracking import VAR_LOAD_PARAMETER -from .idtracking import VAR_LOAD_RESOLVE -from .idtracking import VAR_LOAD_UNDEFINED -from .nodes import EvalContext -from .optimizer import Optimizer -from .utils import _PassArg -from .utils import concat -from .visitor import NodeVisitor - -if t.TYPE_CHECKING: - import typing_extensions as te - from .environment import Environment - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -operators = { - "eq": "==", - "ne": "!=", - "gt": ">", - "gteq": ">=", - "lt": "<", - "lteq": "<=", - "in": "in", - "notin": "not in", -} - - -def optimizeconst(f: F) -> F: - def new_func( - self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any - ) -> t.Any: - # Only optimize if the frame is not volatile - if self.optimizer is not None and not frame.eval_ctx.volatile: - new_node = self.optimizer.visit(node, frame.eval_ctx) - - if new_node != node: - return self.visit(new_node, frame) - - return f(self, node, frame, **kwargs) - - return update_wrapper(t.cast(F, new_func), f) - - -def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed - and op in self.environment.intercepted_binops # type: ignore - ): - self.write(f"environment.call_binop(context, {op!r}, ") - self.visit(node.left, frame) - self.write(", ") - self.visit(node.right, frame) - else: - self.write("(") - self.visit(node.left, frame) - self.write(f" {op} ") - self.visit(node.right, frame) - - self.write(")") - - return visitor - - -def _make_unop( - op: str, -) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed - and op in self.environment.intercepted_unops # type: ignore - ): - self.write(f"environment.call_unop(context, {op!r}, ") - 
self.visit(node.node, frame) - else: - self.write("(" + op) - self.visit(node.node, frame) - - self.write(")") - - return visitor - - -def generate( - node: nodes.Template, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, -) -> t.Optional[str]: - """Generate the python source for a node tree.""" - if not isinstance(node, nodes.Template): - raise TypeError("Can't compile non template nodes") - - generator = environment.code_generator_class( - environment, name, filename, stream, defer_init, optimized - ) - generator.visit(node) - - if stream is None: - return generator.stream.getvalue() # type: ignore - - return None - - -def has_safe_repr(value: t.Any) -> bool: - """Does the node have a safe representation?""" - if value is None or value is NotImplemented or value is Ellipsis: - return True - - if type(value) in {bool, int, float, complex, range, str, Markup}: - return True - - if type(value) in {tuple, list, set, frozenset}: - return all(has_safe_repr(v) for v in value) - - if type(value) is dict: - return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) - - return False - - -def find_undeclared( - nodes: t.Iterable[nodes.Node], names: t.Iterable[str] -) -> t.Set[str]: - """Check if the names passed are accessed undeclared. The return value - is a set of all the undeclared names from the sequence of names found. - """ - visitor = UndeclaredNameVisitor(names) - try: - for node in nodes: - visitor.visit(node) - except VisitorExit: - pass - return visitor.undeclared - - -class MacroRef: - def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: - self.node = node - self.accesses_caller = False - self.accesses_kwargs = False - self.accesses_varargs = False - - -class Frame: - """Holds compile time information for us.""" - - def __init__( - self, - eval_ctx: EvalContext, - parent: t.Optional["Frame"] = None, - level: t.Optional[int] = None, - ) -> None: - self.eval_ctx = eval_ctx - - # the parent of this frame - self.parent = parent - - if parent is None: - self.symbols = Symbols(level=level) - - # in some dynamic inheritance situations the compiler needs to add - # write tests around output statements. - self.require_output_check = False - - # inside some tags we are using a buffer rather than yield statements. - # this for example affects {% filter %} or {% macro %}. If a frame - # is buffered this variable points to the name of the list used as - # buffer. - self.buffer: t.Optional[str] = None - - # the name of the block we're in, otherwise None. - self.block: t.Optional[str] = None - - else: - self.symbols = Symbols(parent.symbols, level=level) - self.require_output_check = parent.require_output_check - self.buffer = parent.buffer - self.block = parent.block - - # a toplevel frame is the root + soft frames such as if conditions. - self.toplevel = False - - # the root frame is basically just the outermost frame, so no if - # conditions. This information is used to optimize inheritance - # situations. - self.rootlevel = False - - # variables set inside of loops and blocks should not affect outer frames, - # but they still needs to be kept track of as part of the active context. - self.loop_frame = False - self.block_frame = False - - # track whether the frame is being used in an if-statement or conditional - # expression as it determines which errors should be raised during runtime - # or compile time. 
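The distinction drawn by these flags can be seen with the frame helpers defined just below (an illustrative sketch; the template name is made up):

    from jinja2 import Environment
    from jinja2.compiler import Frame
    from jinja2.nodes import EvalContext

    root = Frame(EvalContext(Environment(), "example.html"))
    root.toplevel = root.rootlevel = True

    if_frame = root.soft()     # shares symbols; used for {% if %} bodies
    loop_frame = root.inner()  # child symbol table; used for {% for %} bodies
    assert if_frame.soft_frame and not if_frame.rootlevel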
- self.soft_frame = False - - def copy(self) -> "Frame": - """Create a copy of the current one.""" - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.symbols = self.symbols.copy() - return rv - - def inner(self, isolated: bool = False) -> "Frame": - """Return an inner frame.""" - if isolated: - return Frame(self.eval_ctx, level=self.symbols.level + 1) - return Frame(self.eval_ctx, self) - - def soft(self) -> "Frame": - """Return a soft frame. A soft frame may not be modified as - standalone thing as it shares the resources with the frame it - was created of, but it's not a rootlevel frame any longer. - - This is only used to implement if-statements and conditional - expressions. - """ - rv = self.copy() - rv.rootlevel = False - rv.soft_frame = True - return rv - - __copy__ = copy - - -class VisitorExit(RuntimeError): - """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" - - -class DependencyFinderVisitor(NodeVisitor): - """A visitor that collects filter and test calls.""" - - def __init__(self) -> None: - self.filters: t.Set[str] = set() - self.tests: t.Set[str] = set() - - def visit_Filter(self, node: nodes.Filter) -> None: - self.generic_visit(node) - self.filters.add(node.name) - - def visit_Test(self, node: nodes.Test) -> None: - self.generic_visit(node) - self.tests.add(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting at blocks.""" - - -class UndeclaredNameVisitor(NodeVisitor): - """A visitor that checks if a name is accessed without being - declared. This is different from the frame visitor as it will - not stop at closure frames. - """ - - def __init__(self, names: t.Iterable[str]) -> None: - self.names = set(names) - self.undeclared: t.Set[str] = set() - - def visit_Name(self, node: nodes.Name) -> None: - if node.ctx == "load" and node.name in self.names: - self.undeclared.add(node.name) - if self.undeclared == self.names: - raise VisitorExit() - else: - self.names.discard(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting a blocks.""" - - -class CompilerExit(Exception): - """Raised if the compiler encountered a situation where it just - doesn't make sense to further process the code. Any block that - raises such an exception is not further processed. - """ - - -class CodeGenerator(NodeVisitor): - def __init__( - self, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, - ) -> None: - if stream is None: - stream = StringIO() - self.environment = environment - self.name = name - self.filename = filename - self.stream = stream - self.created_block_context = False - self.defer_init = defer_init - self.optimizer: t.Optional[Optimizer] = None - - if optimized: - self.optimizer = Optimizer(environment) - - # aliases for imports - self.import_aliases: t.Dict[str, str] = {} - - # a registry for all blocks. Because blocks are moved out - # into the global python scope they are registered here - self.blocks: t.Dict[str, nodes.Block] = {} - - # the number of extends statements so far - self.extends_so_far = 0 - - # some templates have a rootlevel extends. In this case we - # can safely assume that we're a child template and do some - # more optimizations. 
- self.has_known_extends = False - - # the current line number - self.code_lineno = 1 - - # registry of all filters and tests (global, not block local) - self.tests: t.Dict[str, str] = {} - self.filters: t.Dict[str, str] = {} - - # the debug information - self.debug_info: t.List[t.Tuple[int, int]] = [] - self._write_debug_info: t.Optional[int] = None - - # the number of new lines before the next write() - self._new_lines = 0 - - # the line number of the last written statement - self._last_line = 0 - - # true if nothing was written so far. - self._first_write = True - - # used by the `temporary_identifier` method to get new - # unique, temporary identifier - self._last_identifier = 0 - - # the current indentation - self._indentation = 0 - - # Tracks toplevel assignments - self._assign_stack: t.List[t.Set[str]] = [] - - # Tracks parameter definition blocks - self._param_def_block: t.List[t.Set[str]] = [] - - # Tracks the current context. - self._context_reference_stack = ["context"] - - @property - def optimized(self) -> bool: - return self.optimizer is not None - - # -- Various compilation helpers - - def fail(self, msg: str, lineno: int) -> "te.NoReturn": - """Fail with a :exc:`TemplateAssertionError`.""" - raise TemplateAssertionError(msg, lineno, self.name, self.filename) - - def temporary_identifier(self) -> str: - """Get a new unique identifier.""" - self._last_identifier += 1 - return f"t_{self._last_identifier}" - - def buffer(self, frame: Frame) -> None: - """Enable buffering for the frame from that point onwards.""" - frame.buffer = self.temporary_identifier() - self.writeline(f"{frame.buffer} = []") - - def return_buffer_contents( - self, frame: Frame, force_unescaped: bool = False - ) -> None: - """Return the buffer contents of the frame.""" - if not force_unescaped: - if frame.eval_ctx.volatile: - self.writeline("if context.eval_ctx.autoescape:") - self.indent() - self.writeline(f"return Markup(concat({frame.buffer}))") - self.outdent() - self.writeline("else:") - self.indent() - self.writeline(f"return concat({frame.buffer})") - self.outdent() - return - elif frame.eval_ctx.autoescape: - self.writeline(f"return Markup(concat({frame.buffer}))") - return - self.writeline(f"return concat({frame.buffer})") - - def indent(self) -> None: - """Indent by one.""" - self._indentation += 1 - - def outdent(self, step: int = 1) -> None: - """Outdent by step.""" - self._indentation -= step - - def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: - """Yield or write into the frame buffer.""" - if frame.buffer is None: - self.writeline("yield ", node) - else: - self.writeline(f"{frame.buffer}.append(", node) - - def end_write(self, frame: Frame) -> None: - """End the writing process started by `start_write`.""" - if frame.buffer is not None: - self.write(")") - - def simple_write( - self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None - ) -> None: - """Simple shortcut for start_write + write + end_write.""" - self.start_write(frame, node) - self.write(s) - self.end_write(frame) - - def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: - """Visit a list of nodes as block in a frame. If the current frame - is no buffer a dummy ``if 0: yield None`` is written automatically. 
- """ - try: - self.writeline("pass") - for node in nodes: - self.visit(node, frame) - except CompilerExit: - pass - - def write(self, x: str) -> None: - """Write a string into the output stream.""" - if self._new_lines: - if not self._first_write: - self.stream.write("\n" * self._new_lines) - self.code_lineno += self._new_lines - if self._write_debug_info is not None: - self.debug_info.append((self._write_debug_info, self.code_lineno)) - self._write_debug_info = None - self._first_write = False - self.stream.write(" " * self._indentation) - self._new_lines = 0 - self.stream.write(x) - - def writeline( - self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 - ) -> None: - """Combination of newline and write.""" - self.newline(node, extra) - self.write(x) - - def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: - """Add one or more newlines before the next write.""" - self._new_lines = max(self._new_lines, 1 + extra) - if node is not None and node.lineno != self._last_line: - self._write_debug_info = node.lineno - self._last_line = node.lineno - - def signature( - self, - node: t.Union[nodes.Call, nodes.Filter, nodes.Test], - frame: Frame, - extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - ) -> None: - """Writes a function call to the stream for the current node. - A leading comma is added automatically. The extra keyword - arguments may not include python keywords otherwise a syntax - error could occur. The extra keyword arguments should be given - as python dict. - """ - # if any of the given keyword arguments is a python keyword - # we have to make sure that no invalid call is created. - kwarg_workaround = any( - is_python_keyword(t.cast(str, k)) - for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) - ) - - for arg in node.args: - self.write(", ") - self.visit(arg, frame) - - if not kwarg_workaround: - for kwarg in node.kwargs: - self.write(", ") - self.visit(kwarg, frame) - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f", {key}={value}") - if node.dyn_args: - self.write(", *") - self.visit(node.dyn_args, frame) - - if kwarg_workaround: - if node.dyn_kwargs is not None: - self.write(", **dict({") - else: - self.write(", **{") - for kwarg in node.kwargs: - self.write(f"{kwarg.key!r}: ") - self.visit(kwarg.value, frame) - self.write(", ") - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f"{key!r}: {value}, ") - if node.dyn_kwargs is not None: - self.write("}, **") - self.visit(node.dyn_kwargs, frame) - self.write(")") - else: - self.write("}") - - elif node.dyn_kwargs is not None: - self.write(", **") - self.visit(node.dyn_kwargs, frame) - - def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: - """Find all filter and test names used in the template and - assign them to variables in the compiled namespace. Checking - that the names are registered with the environment is done when - compiling the Filter and Test nodes. If the node is in an If or - CondExpr node, the check is done at runtime instead. - - .. versionchanged:: 3.0 - Filters and tests in If and CondExpr nodes are checked at - runtime instead of compile time. 
- """ - visitor = DependencyFinderVisitor() - - for node in nodes: - visitor.visit(node) - - for id_map, names, dependency in (self.filters, visitor.filters, "filters"), ( - self.tests, - visitor.tests, - "tests", - ): - for name in sorted(names): - if name not in id_map: - id_map[name] = self.temporary_identifier() - - # add check during runtime that dependencies used inside of executed - # blocks are defined, as this step may be skipped during compile time - self.writeline("try:") - self.indent() - self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") - self.outdent() - self.writeline("except KeyError:") - self.indent() - self.writeline("@internalcode") - self.writeline(f"def {id_map[name]}(*unused):") - self.indent() - self.writeline( - f'raise TemplateRuntimeError("No {dependency[:-1]}' - f' named {name!r} found.")' - ) - self.outdent() - self.outdent() - - def enter_frame(self, frame: Frame) -> None: - undefs = [] - for target, (action, param) in frame.symbols.loads.items(): - if action == VAR_LOAD_PARAMETER: - pass - elif action == VAR_LOAD_RESOLVE: - self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") - elif action == VAR_LOAD_ALIAS: - self.writeline(f"{target} = {param}") - elif action == VAR_LOAD_UNDEFINED: - undefs.append(target) - else: - raise NotImplementedError("unknown load instruction") - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: - if not with_python_scope: - undefs = [] - for target in frame.symbols.loads: - undefs.append(target) - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: - return async_value if self.environment.is_async else sync_value - - def func(self, name: str) -> str: - return f"{self.choose_async()}def {name}" - - def macro_body( - self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame - ) -> t.Tuple[Frame, MacroRef]: - """Dump the function def of a macro or call block.""" - frame = frame.inner() - frame.symbols.analyze_node(node) - macro_ref = MacroRef(node) - - explicit_caller = None - skip_special_params = set() - args = [] - - for idx, arg in enumerate(node.args): - if arg.name == "caller": - explicit_caller = idx - if arg.name in ("kwargs", "varargs"): - skip_special_params.add(arg.name) - args.append(frame.symbols.ref(arg.name)) - - undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) - - if "caller" in undeclared: - # In older Jinja versions there was a bug that allowed caller - # to retain the special behavior even if it was mentioned in - # the argument list. However thankfully this was only really - # working if it was the last argument. So we are explicitly - # checking this now and error out if it is anywhere else in - # the argument list. 
- if explicit_caller is not None: - try: - node.defaults[explicit_caller - len(node.args)] - except IndexError: - self.fail( - "When defining macros or call blocks the " - 'special "caller" argument must be omitted ' - "or be given a default.", - node.lineno, - ) - else: - args.append(frame.symbols.declare_parameter("caller")) - macro_ref.accesses_caller = True - if "kwargs" in undeclared and "kwargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("kwargs")) - macro_ref.accesses_kwargs = True - if "varargs" in undeclared and "varargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("varargs")) - macro_ref.accesses_varargs = True - - # macros are delayed, they never require output checks - frame.require_output_check = False - frame.symbols.analyze_node(node) - self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) - self.indent() - - self.buffer(frame) - self.enter_frame(frame) - - self.push_parameter_definitions(frame) - for idx, arg in enumerate(node.args): - ref = frame.symbols.ref(arg.name) - self.writeline(f"if {ref} is missing:") - self.indent() - try: - default = node.defaults[idx - len(node.args)] - except IndexError: - self.writeline( - f'{ref} = undefined("parameter {arg.name!r} was not provided",' - f" name={arg.name!r})" - ) - else: - self.writeline(f"{ref} = ") - self.visit(default, frame) - self.mark_parameter_stored(ref) - self.outdent() - self.pop_parameter_definitions() - - self.blockvisit(node.body, frame) - self.return_buffer_contents(frame, force_unescaped=True) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - return frame, macro_ref - - def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: - """Dump the macro definition for the def created by macro_body.""" - arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) - name = getattr(macro_ref.node, "name", None) - if len(macro_ref.node.args) == 1: - arg_tuple += "," - self.write( - f"Macro(environment, macro, {name!r}, ({arg_tuple})," - f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," - f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" - ) - - def position(self, node: nodes.Node) -> str: - """Return a human readable position for the node.""" - rv = f"line {node.lineno}" - if self.name is not None: - rv = f"{rv} in {self.name!r}" - return rv - - def dump_local_context(self, frame: Frame) -> str: - items_kv = ", ".join( - f"{name!r}: {target}" - for name, target in frame.symbols.dump_stores().items() - ) - return f"{{{items_kv}}}" - - def write_commons(self) -> None: - """Writes a common preamble that is used by root and block functions. - Primarily this sets up common local helpers and enforces a generator - through a dead branch. - """ - self.writeline("resolve = context.resolve_or_missing") - self.writeline("undefined = environment.undefined") - self.writeline("concat = environment.concat") - # always use the standard Undefined class for the implicit else of - # conditional expressions - self.writeline("cond_expr_undefined = Undefined") - self.writeline("if 0: yield None") - - def push_parameter_definitions(self, frame: Frame) -> None: - """Pushes all parameter targets from the given frame into a local - stack that permits tracking of yet to be assigned parameters. In - particular this enables the optimization from `visit_Name` to skip - undefined expressions for parameters in macros as macros can reference - otherwise unbound parameters. 
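Concretely, this is what lets a macro parameter's default refer to an earlier parameter without tripping the undefined check (a sketch; behavior inferred from the surrounding code):

    from jinja2 import Environment

    t = Environment().from_string(
        "{% macro m(x, y=x) %}{{ x }}-{{ y }}{% endmacro %}{{ m(1) }}"
    )
    print(t.render())  # 1-1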
- """ - self._param_def_block.append(frame.symbols.dump_param_targets()) - - def pop_parameter_definitions(self) -> None: - """Pops the current parameter definitions set.""" - self._param_def_block.pop() - - def mark_parameter_stored(self, target: str) -> None: - """Marks a parameter in the current parameter definitions as stored. - This will skip the enforced undefined checks. - """ - if self._param_def_block: - self._param_def_block[-1].discard(target) - - def push_context_reference(self, target: str) -> None: - self._context_reference_stack.append(target) - - def pop_context_reference(self) -> None: - self._context_reference_stack.pop() - - def get_context_ref(self) -> str: - return self._context_reference_stack[-1] - - def get_resolve_func(self) -> str: - target = self._context_reference_stack[-1] - if target == "context": - return "resolve" - return f"{target}.resolve" - - def derive_context(self, frame: Frame) -> str: - return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" - - def parameter_is_undeclared(self, target: str) -> bool: - """Checks if a given target is an undeclared parameter.""" - if not self._param_def_block: - return False - return target in self._param_def_block[-1] - - def push_assign_tracking(self) -> None: - """Pushes a new layer for assignment tracking.""" - self._assign_stack.append(set()) - - def pop_assign_tracking(self, frame: Frame) -> None: - """Pops the topmost level for assignment tracking and updates the - context variables if necessary. - """ - vars = self._assign_stack.pop() - if ( - not frame.block_frame - and not frame.loop_frame - and not frame.toplevel - or not vars - ): - return - public_names = [x for x in vars if x[:1] != "_"] - if len(vars) == 1: - name = next(iter(vars)) - ref = frame.symbols.ref(name) - if frame.loop_frame: - self.writeline(f"_loop_vars[{name!r}] = {ref}") - return - if frame.block_frame: - self.writeline(f"_block_vars[{name!r}] = {ref}") - return - self.writeline(f"context.vars[{name!r}] = {ref}") - else: - if frame.loop_frame: - self.writeline("_loop_vars.update({") - elif frame.block_frame: - self.writeline("_block_vars.update({") - else: - self.writeline("context.vars.update({") - for idx, name in enumerate(vars): - if idx: - self.write(", ") - ref = frame.symbols.ref(name) - self.write(f"{name!r}: {ref}") - self.write("})") - if not frame.block_frame and not frame.loop_frame and public_names: - if len(public_names) == 1: - self.writeline(f"context.exported_vars.add({public_names[0]!r})") - else: - names_str = ", ".join(map(repr, public_names)) - self.writeline(f"context.exported_vars.update(({names_str}))") - - # -- Statement Visitors - - def visit_Template( - self, node: nodes.Template, frame: t.Optional[Frame] = None - ) -> None: - assert frame is None, "no root frame allowed" - eval_ctx = EvalContext(self.environment, self.name) - - from .runtime import exported, async_exported - - if self.environment.is_async: - exported_names = sorted(exported + async_exported) - else: - exported_names = sorted(exported) - - self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) - - # if we want a deferred initialization we cannot move the - # environment into a local name - envenv = "" if self.defer_init else ", environment=environment" - - # do we have an extends tag at all? If not, we can save some - # overhead by just not processing any inheritance code. 
- have_extends = node.find(nodes.Extends) is not None - - # find all blocks - for block in node.find_all(nodes.Block): - if block.name in self.blocks: - self.fail(f"block {block.name!r} defined twice", block.lineno) - self.blocks[block.name] = block - - # find all imports and import them - for import_ in node.find_all(nodes.ImportedName): - if import_.importname not in self.import_aliases: - imp = import_.importname - self.import_aliases[imp] = alias = self.temporary_identifier() - if "." in imp: - module, obj = imp.rsplit(".", 1) - self.writeline(f"from {module} import {obj} as {alias}") - else: - self.writeline(f"import {imp} as {alias}") - - # add the load name - self.writeline(f"name = {self.name!r}") - - # generate the root render function. - self.writeline( - f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 - ) - self.indent() - self.write_commons() - - # process the root - frame = Frame(eval_ctx) - if "self" in find_undeclared(node.body, ("self",)): - ref = frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - frame.symbols.analyze_node(node) - frame.toplevel = frame.rootlevel = True - frame.require_output_check = have_extends and not self.has_known_extends - if have_extends: - self.writeline("parent_template = None") - self.enter_frame(frame) - self.pull_dependencies(node.body) - self.blockvisit(node.body, frame) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - # make sure that the parent root is called. - if have_extends: - if not self.has_known_extends: - self.indent() - self.writeline("if parent_template is not None:") - self.indent() - if not self.environment.is_async: - self.writeline("yield from parent_template.root_render_func(context)") - else: - self.writeline( - "async for event in parent_template.root_render_func(context):" - ) - self.indent() - self.writeline("yield event") - self.outdent() - self.outdent(1 + (not self.has_known_extends)) - - # at this point we now have the blocks collected and can visit them too. - for name, block in self.blocks.items(): - self.writeline( - f"{self.func('block_' + name)}(context, missing=missing{envenv}):", - block, - 1, - ) - self.indent() - self.write_commons() - # It's important that we do not make this frame a child of the - # toplevel template. This would cause a variety of - # interesting issues with identifier tracking. 
- block_frame = Frame(eval_ctx) - block_frame.block_frame = True - undeclared = find_undeclared(block.body, ("self", "super")) - if "self" in undeclared: - ref = block_frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - if "super" in undeclared: - ref = block_frame.symbols.declare_parameter("super") - self.writeline(f"{ref} = context.super({name!r}, block_{name})") - block_frame.symbols.analyze_node(block) - block_frame.block = name - self.writeline("_block_vars = {}") - self.enter_frame(block_frame) - self.pull_dependencies(block.body) - self.blockvisit(block.body, block_frame) - self.leave_frame(block_frame, with_python_scope=True) - self.outdent() - - blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) - self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) - debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) - self.writeline(f"debug_info = {debug_kv_str!r}") - - def visit_Block(self, node: nodes.Block, frame: Frame) -> None: - """Call a block and register it for the template.""" - level = 0 - if frame.toplevel: - # if we know that we are a child template, there is no need to - # check if we are one - if self.has_known_extends: - return - if self.extends_so_far > 0: - self.writeline("if parent_template is None:") - self.indent() - level += 1 - - if node.scoped: - context = self.derive_context(frame) - else: - context = self.get_context_ref() - - if node.required: - self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) - self.indent() - self.writeline( - f'raise TemplateRuntimeError("Required block {node.name!r} not found")', - node, - ) - self.outdent() - - if not self.environment.is_async and frame.buffer is None: - self.writeline( - f"yield from context.blocks[{node.name!r}][0]({context})", node - ) - else: - self.writeline( - f"{self.choose_async()}for event in" - f" context.blocks[{node.name!r}][0]({context}):", - node, - ) - self.indent() - self.simple_write("event", frame) - self.outdent() - - self.outdent(level) - - def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: - """Calls the extender.""" - if not frame.toplevel: - self.fail("cannot use extend from a non top-level scope", node.lineno) - - # if the number of extends statements in general is zero so - # far, we don't have to add a check if something extended - # the template before this one. - if self.extends_so_far > 0: - - # if we have a known extends we just add a template runtime - # error into the generated code. We could catch that at compile - # time too, but i welcome it not to confuse users by throwing the - # same error at different times just "because we can". - if not self.has_known_extends: - self.writeline("if parent_template is not None:") - self.indent() - self.writeline('raise TemplateRuntimeError("extended multiple times")') - - # if we have a known extends already we don't need that code here - # as we know that the template execution will end here. 
- if self.has_known_extends: - raise CompilerExit() - else: - self.outdent() - - self.writeline("parent_template = environment.get_template(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - self.writeline("for name, parent_block in parent_template.blocks.items():") - self.indent() - self.writeline("context.blocks.setdefault(name, []).append(parent_block)") - self.outdent() - - # if this extends statement was in the root level we can take - # advantage of that information and simplify the generated code - # in the top level from this point onwards - if frame.rootlevel: - self.has_known_extends = True - - # and now we have one more - self.extends_so_far += 1 - - def visit_Include(self, node: nodes.Include, frame: Frame) -> None: - """Handles includes.""" - if node.ignore_missing: - self.writeline("try:") - self.indent() - - func_name = "get_or_select_template" - if isinstance(node.template, nodes.Const): - if isinstance(node.template.value, str): - func_name = "get_template" - elif isinstance(node.template.value, (tuple, list)): - func_name = "select_template" - elif isinstance(node.template, (nodes.Tuple, nodes.List)): - func_name = "select_template" - - self.writeline(f"template = environment.{func_name}(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - if node.ignore_missing: - self.outdent() - self.writeline("except TemplateNotFound:") - self.indent() - self.writeline("pass") - self.outdent() - self.writeline("else:") - self.indent() - - skip_event_yield = False - if node.with_context: - self.writeline( - f"{self.choose_async()}for event in template.root_render_func(" - "template.new_context(context.get_all(), True," - f" {self.dump_local_context(frame)})):" - ) - elif self.environment.is_async: - self.writeline( - "for event in (await template._get_default_module_async())" - "._body_stream:" - ) - else: - self.writeline("yield from template._get_default_module()._body_stream") - skip_event_yield = True - - if not skip_event_yield: - self.indent() - self.simple_write("event", frame) - self.outdent() - - if node.ignore_missing: - self.outdent() - - def _import_common( - self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame - ) -> None: - self.write(f"{self.choose_async('await ')}environment.get_template(") - self.visit(node.template, frame) - self.write(f", {self.name!r}).") - - if node.with_context: - f_name = f"make_module{self.choose_async('_async')}" - self.write( - f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" - ) - else: - self.write(f"_get_default_module{self.choose_async('_async')}(context)") - - def visit_Import(self, node: nodes.Import, frame: Frame) -> None: - """Visit regular imports.""" - self.writeline(f"{frame.symbols.ref(node.target)} = ", node) - if frame.toplevel: - self.write(f"context.vars[{node.target!r}] = ") - - self._import_common(node, frame) - - if frame.toplevel and not node.target.startswith("_"): - self.writeline(f"context.exported_vars.discard({node.target!r})") - - def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: - """Visit named imports.""" - self.newline(node) - self.write("included_template = ") - self._import_common(node, frame) - var_names = [] - discarded_names = [] - for name in node.names: - if isinstance(name, tuple): - name, alias = name - else: - alias = name - self.writeline( - f"{frame.symbols.ref(alias)} =" - f" getattr(included_template, {name!r}, missing)" - ) - self.writeline(f"if {frame.symbols.ref(alias)} is missing:") 
- self.indent() - message = ( - "the template {included_template.__name__!r}" - f" (imported on {self.position(node)})" - f" does not export the requested name {name!r}" - ) - self.writeline( - f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" - ) - self.outdent() - if frame.toplevel: - var_names.append(alias) - if not alias.startswith("_"): - discarded_names.append(alias) - - if var_names: - if len(var_names) == 1: - name = var_names[0] - self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") - else: - names_kv = ", ".join( - f"{name!r}: {frame.symbols.ref(name)}" for name in var_names - ) - self.writeline(f"context.vars.update({{{names_kv}}})") - if discarded_names: - if len(discarded_names) == 1: - self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") - else: - names_str = ", ".join(map(repr, discarded_names)) - self.writeline( - f"context.exported_vars.difference_update(({names_str}))" - ) - - def visit_For(self, node: nodes.For, frame: Frame) -> None: - loop_frame = frame.inner() - loop_frame.loop_frame = True - test_frame = frame.inner() - else_frame = frame.inner() - - # try to figure out if we have an extended loop. An extended loop - # is necessary if the loop is in recursive mode if the special loop - # variable is accessed in the body if the body is a scoped block. - extended_loop = ( - node.recursive - or "loop" - in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) - or any(block.scoped for block in node.find_all(nodes.Block)) - ) - - loop_ref = None - if extended_loop: - loop_ref = loop_frame.symbols.declare_parameter("loop") - - loop_frame.symbols.analyze_node(node, for_branch="body") - if node.else_: - else_frame.symbols.analyze_node(node, for_branch="else") - - if node.test: - loop_filter_func = self.temporary_identifier() - test_frame.symbols.analyze_node(node, for_branch="test") - self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) - self.indent() - self.enter_frame(test_frame) - self.writeline(self.choose_async("async for ", "for ")) - self.visit(node.target, loop_frame) - self.write(" in ") - self.write(self.choose_async("auto_aiter(fiter)", "fiter")) - self.write(":") - self.indent() - self.writeline("if ", node.test) - self.visit(node.test, test_frame) - self.write(":") - self.indent() - self.writeline("yield ") - self.visit(node.target, loop_frame) - self.outdent(3) - self.leave_frame(test_frame, with_python_scope=True) - - # if we don't have an recursive loop we have to find the shadowed - # variables at that point. Because loops can be nested but the loop - # variable is a special one we have to enforce aliasing for it. 
- if node.recursive: - self.writeline( - f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node - ) - self.indent() - self.buffer(loop_frame) - - # Use the same buffer for the else frame - else_frame.buffer = loop_frame.buffer - - # make sure the loop variable is a special one and raise a template - # assertion error if a loop tries to write to loop - if extended_loop: - self.writeline(f"{loop_ref} = missing") - - for name in node.find_all(nodes.Name): - if name.ctx == "store" and name.name == "loop": - self.fail( - "Can't assign to special loop variable in for-loop target", - name.lineno, - ) - - if node.else_: - iteration_indicator = self.temporary_identifier() - self.writeline(f"{iteration_indicator} = 1") - - self.writeline(self.choose_async("async for ", "for "), node) - self.visit(node.target, loop_frame) - if extended_loop: - self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") - else: - self.write(" in ") - - if node.test: - self.write(f"{loop_filter_func}(") - if node.recursive: - self.write("reciter") - else: - if self.environment.is_async and not extended_loop: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async and not extended_loop: - self.write(")") - if node.test: - self.write(")") - - if node.recursive: - self.write(", undefined, loop_render_func, depth):") - else: - self.write(", undefined):" if extended_loop else ":") - - self.indent() - self.enter_frame(loop_frame) - - self.writeline("_loop_vars = {}") - self.blockvisit(node.body, loop_frame) - if node.else_: - self.writeline(f"{iteration_indicator} = 0") - self.outdent() - self.leave_frame( - loop_frame, with_python_scope=node.recursive and not node.else_ - ) - - if node.else_: - self.writeline(f"if {iteration_indicator}:") - self.indent() - self.enter_frame(else_frame) - self.blockvisit(node.else_, else_frame) - self.leave_frame(else_frame) - self.outdent() - - # if the node was recursive we have to return the buffer contents - # and start the iteration code - if node.recursive: - self.return_buffer_contents(loop_frame) - self.outdent() - self.start_write(frame, node) - self.write(f"{self.choose_async('await ')}loop(") - if self.environment.is_async: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async: - self.write(")") - self.write(", loop)") - self.end_write(frame) - - # at the end of the iteration, clear any assignments made in the - # loop from the top level - if self._assign_stack: - self._assign_stack[-1].difference_update(loop_frame.symbols.stores) - - def visit_If(self, node: nodes.If, frame: Frame) -> None: - if_frame = frame.soft() - self.writeline("if ", node) - self.visit(node.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(node.body, if_frame) - self.outdent() - for elif_ in node.elif_: - self.writeline("elif ", elif_) - self.visit(elif_.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(elif_.body, if_frame) - self.outdent() - if node.else_: - self.writeline("else:") - self.indent() - self.blockvisit(node.else_, if_frame) - self.outdent() - - def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: - macro_frame, macro_ref = self.macro_body(node, frame) - self.newline() - if frame.toplevel: - if not node.name.startswith("_"): - self.write(f"context.exported_vars.add({node.name!r})") - self.writeline(f"context.vars[{node.name!r}] = ") - self.write(f"{frame.symbols.ref(node.name)} = ") - self.macro_def(macro_ref, macro_frame) - - def visit_CallBlock(self, 
node: nodes.CallBlock, frame: Frame) -> None: - call_frame, macro_ref = self.macro_body(node, frame) - self.writeline("caller = ") - self.macro_def(macro_ref, call_frame) - self.start_write(frame, node) - self.visit_Call(node.call, frame, forward_caller=True) - self.end_write(frame) - - def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: - filter_frame = frame.inner() - filter_frame.symbols.analyze_node(node) - self.enter_frame(filter_frame) - self.buffer(filter_frame) - self.blockvisit(node.body, filter_frame) - self.start_write(frame, node) - self.visit_Filter(node.filter, filter_frame) - self.end_write(frame) - self.leave_frame(filter_frame) - - def visit_With(self, node: nodes.With, frame: Frame) -> None: - with_frame = frame.inner() - with_frame.symbols.analyze_node(node) - self.enter_frame(with_frame) - for target, expr in zip(node.targets, node.values): - self.newline() - self.visit(target, with_frame) - self.write(" = ") - self.visit(expr, frame) - self.blockvisit(node.body, with_frame) - self.leave_frame(with_frame) - - def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: - self.newline(node) - self.visit(node.node, frame) - - class _FinalizeInfo(t.NamedTuple): - const: t.Optional[t.Callable[..., str]] - src: t.Optional[str] - - @staticmethod - def _default_finalize(value: t.Any) -> t.Any: - """The default finalize function if the environment isn't - configured with one. Or, if the environment has one, this is - called on that function's output for constants. - """ - return str(value) - - _finalize: t.Optional[_FinalizeInfo] = None - - def _make_finalize(self) -> _FinalizeInfo: - """Build the finalize function to be used on constants and at - runtime. Cached so it's only created once for all output nodes. - - Returns a ``namedtuple`` with the following attributes: - - ``const`` - A function to finalize constant data at compile time. - - ``src`` - Source code to output around nodes to be evaluated at - runtime. - """ - if self._finalize is not None: - return self._finalize - - finalize: t.Optional[t.Callable[..., t.Any]] - finalize = default = self._default_finalize - src = None - - if self.environment.finalize: - src = "environment.finalize(" - env_finalize = self.environment.finalize - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(env_finalize) # type: ignore - ) - finalize = None - - if pass_arg is None: - - def finalize(value: t.Any) -> t.Any: - return default(env_finalize(value)) - - else: - src = f"{src}{pass_arg}, " - - if pass_arg == "environment": - - def finalize(value: t.Any) -> t.Any: - return default(env_finalize(self.environment, value)) - - self._finalize = self._FinalizeInfo(finalize, src) - return self._finalize - - def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: - """Given a group of constant values converted from ``Output`` - child nodes, produce a string to write to the template module - source. - """ - return repr(concat(group)) - - def _output_child_to_const( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> str: - """Try to optimize a child of an ``Output`` node by trying to - convert it to constant, finalized data at compile time. - - If :exc:`Impossible` is raised, the node is not constant and - will be evaluated at runtime. Any other exception will also be - evaluated at runtime for easier debugging. 
- """ - const = node.as_const(frame.eval_ctx) - - if frame.eval_ctx.autoescape: - const = escape(const) - - # Template data doesn't go through finalize. - if isinstance(node, nodes.TemplateData): - return str(const) - - return finalize.const(const) # type: ignore - - def _output_child_pre( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code before visiting a child of an - ``Output`` node. - """ - if frame.eval_ctx.volatile: - self.write("(escape if context.eval_ctx.autoescape else str)(") - elif frame.eval_ctx.autoescape: - self.write("escape(") - else: - self.write("str(") - - if finalize.src is not None: - self.write(finalize.src) - - def _output_child_post( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code after visiting a child of an - ``Output`` node. - """ - self.write(")") - - if finalize.src is not None: - self.write(")") - - def visit_Output(self, node: nodes.Output, frame: Frame) -> None: - # If an extends is active, don't render outside a block. - if frame.require_output_check: - # A top-level extends is known to exist at compile time. - if self.has_known_extends: - return - - self.writeline("if parent_template is None:") - self.indent() - - finalize = self._make_finalize() - body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] - - # Evaluate constants at compile time if possible. Each item in - # body will be either a list of static data or a node to be - # evaluated at runtime. - for child in node.nodes: - try: - if not ( - # If the finalize function requires runtime context, - # constants can't be evaluated at compile time. - finalize.const - # Unless it's basic template data that won't be - # finalized anyway. - or isinstance(child, nodes.TemplateData) - ): - raise nodes.Impossible() - - const = self._output_child_to_const(child, frame, finalize) - except (nodes.Impossible, Exception): - # The node was not constant and needs to be evaluated at - # runtime. Or another error was raised, which is easier - # to debug at runtime. - body.append(child) - continue - - if body and isinstance(body[-1], list): - body[-1].append(const) - else: - body.append([const]) - - if frame.buffer is not None: - if len(body) == 1: - self.writeline(f"{frame.buffer}.append(") - else: - self.writeline(f"{frame.buffer}.extend((") - - self.indent() - - for item in body: - if isinstance(item, list): - # A group of constant data to join and output. - val = self._output_const_repr(item) - - if frame.buffer is None: - self.writeline("yield " + val) - else: - self.writeline(val + ",") - else: - if frame.buffer is None: - self.writeline("yield ", item) - else: - self.newline(item) - - # A node to be evaluated at runtime. - self._output_child_pre(item, frame, finalize) - self.visit(item, frame) - self._output_child_post(item, frame, finalize) - - if frame.buffer is not None: - self.write(",") - - if frame.buffer is not None: - self.outdent() - self.writeline(")" if len(body) == 1 else "))") - - if frame.require_output_check: - self.outdent() - - def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: - self.push_assign_tracking() - self.newline(node) - self.visit(node.target, frame) - self.write(" = ") - self.visit(node.node, frame) - self.pop_assign_tracking(frame) - - def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: - self.push_assign_tracking() - block_frame = frame.inner() - # This is a special case. 
Since a set block always captures we - # will disable output checks. This way one can use set blocks - # toplevel even in extended templates. - block_frame.require_output_check = False - block_frame.symbols.analyze_node(node) - self.enter_frame(block_frame) - self.buffer(block_frame) - self.blockvisit(node.body, block_frame) - self.newline(node) - self.visit(node.target, frame) - self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") - if node.filter is not None: - self.visit_Filter(node.filter, block_frame) - else: - self.write(f"concat({block_frame.buffer})") - self.write(")") - self.pop_assign_tracking(frame) - self.leave_frame(block_frame) - - # -- Expression Visitors - - def visit_Name(self, node: nodes.Name, frame: Frame) -> None: - if node.ctx == "store" and ( - frame.toplevel or frame.loop_frame or frame.block_frame - ): - if self._assign_stack: - self._assign_stack[-1].add(node.name) - ref = frame.symbols.ref(node.name) - - # If we are looking up a variable we might have to deal with the - # case where it's undefined. We can skip that case if the load - # instruction indicates a parameter which are always defined. - if node.ctx == "load": - load = frame.symbols.find_load(ref) - if not ( - load is not None - and load[0] == VAR_LOAD_PARAMETER - and not self.parameter_is_undeclared(ref) - ): - self.write( - f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" - ) - return - - self.write(ref) - - def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: - # NSRefs can only be used to store values; since they use the normal - # `foo.bar` notation they will be parsed as a normal attribute access - # when used anywhere but in a `set` context - ref = frame.symbols.ref(node.name) - self.writeline(f"if not isinstance({ref}, Namespace):") - self.indent() - self.writeline( - "raise TemplateRuntimeError" - '("cannot assign attribute on non-namespace object")' - ) - self.outdent() - self.writeline(f"{ref}[{node.attr!r}]") - - def visit_Const(self, node: nodes.Const, frame: Frame) -> None: - val = node.as_const(frame.eval_ctx) - if isinstance(val, float): - self.write(str(val)) - else: - self.write(repr(val)) - - def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: - try: - self.write(repr(node.as_const(frame.eval_ctx))) - except nodes.Impossible: - self.write( - f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" - ) - - def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: - self.write("(") - idx = -1 - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write(",)" if idx == 0 else ")") - - def visit_List(self, node: nodes.List, frame: Frame) -> None: - self.write("[") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write("]") - - def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: - self.write("{") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item.key, frame) - self.write(": ") - self.visit(item.value, frame) - self.write("}") - - visit_Add = _make_binop("+") - visit_Sub = _make_binop("-") - visit_Mul = _make_binop("*") - visit_Div = _make_binop("/") - visit_FloorDiv = _make_binop("//") - visit_Pow = _make_binop("**") - visit_Mod = _make_binop("%") - visit_And = _make_binop("and") - visit_Or = _make_binop("or") - visit_Pos = _make_unop("+") - visit_Neg = _make_unop("-") - visit_Not = _make_unop("not ") - - @optimizeconst - def visit_Concat(self, node: 
nodes.Concat, frame: Frame) -> None: - if frame.eval_ctx.volatile: - func_name = "(markup_join if context.eval_ctx.volatile else str_join)" - elif frame.eval_ctx.autoescape: - func_name = "markup_join" - else: - func_name = "str_join" - self.write(f"{func_name}((") - for arg in node.nodes: - self.visit(arg, frame) - self.write(", ") - self.write("))") - - @optimizeconst - def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: - self.write("(") - self.visit(node.expr, frame) - for op in node.ops: - self.visit(op, frame) - self.write(")") - - def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: - self.write(f" {operators[node.op]} ") - self.visit(node.expr, frame) - - @optimizeconst - def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getattr(") - self.visit(node.node, frame) - self.write(f", {node.attr!r})") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: - # slices bypass the environment getitem method. - if isinstance(node.arg, nodes.Slice): - self.visit(node.node, frame) - self.write("[") - self.visit(node.arg, frame) - self.write("]") - else: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getitem(") - self.visit(node.node, frame) - self.write(", ") - self.visit(node.arg, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: - if node.start is not None: - self.visit(node.start, frame) - self.write(":") - if node.stop is not None: - self.visit(node.stop, frame) - if node.step is not None: - self.write(":") - self.visit(node.step, frame) - - @contextmanager - def _filter_test_common( - self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool - ) -> t.Iterator[None]: - if self.environment.is_async: - self.write("(await auto_await(") - - if is_filter: - self.write(f"{self.filters[node.name]}(") - func = self.environment.filters.get(node.name) - else: - self.write(f"{self.tests[node.name]}(") - func = self.environment.tests.get(node.name) - - # When inside an If or CondExpr frame, allow the filter to be - # undefined at compile time and only raise an error if it's - # actually called at runtime. See pull_dependencies. - if func is None and not frame.soft_frame: - type_name = "filter" if is_filter else "test" - self.fail(f"No {type_name} named {node.name!r}.", node.lineno) - - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(func) # type: ignore - ) - - if pass_arg is not None: - self.write(f"{pass_arg}, ") - - # Back to the visitor function to handle visiting the target of - # the filter or test. 
- yield - - self.signature(node, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: - with self._filter_test_common(node, frame, True): - # if the filter node is None we are inside a filter block - # and want to write to the current buffer - if node.node is not None: - self.visit(node.node, frame) - elif frame.eval_ctx.volatile: - self.write( - f"(Markup(concat({frame.buffer}))" - f" if context.eval_ctx.autoescape else concat({frame.buffer}))" - ) - elif frame.eval_ctx.autoescape: - self.write(f"Markup(concat({frame.buffer}))") - else: - self.write(f"concat({frame.buffer})") - - @optimizeconst - def visit_Test(self, node: nodes.Test, frame: Frame) -> None: - with self._filter_test_common(node, frame, False): - self.visit(node.node, frame) - - @optimizeconst - def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: - frame = frame.soft() - - def write_expr2() -> None: - if node.expr2 is not None: - self.visit(node.expr2, frame) - return - - self.write( - f'cond_expr_undefined("the inline if-expression on' - f" {self.position(node)} evaluated to false and no else" - f' section was defined.")' - ) - - self.write("(") - self.visit(node.expr1, frame) - self.write(" if ") - self.visit(node.test, frame) - self.write(" else ") - write_expr2() - self.write(")") - - @optimizeconst - def visit_Call( - self, node: nodes.Call, frame: Frame, forward_caller: bool = False - ) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - if self.environment.sandboxed: - self.write("environment.call(context, ") - else: - self.write("context.call(") - self.visit(node.node, frame) - extra_kwargs = {"caller": "caller"} if forward_caller else None - loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} - block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} - if extra_kwargs: - extra_kwargs.update(loop_kwargs, **block_kwargs) - elif loop_kwargs or block_kwargs: - extra_kwargs = dict(loop_kwargs, **block_kwargs) - self.signature(node, frame, extra_kwargs) - self.write(")") - if self.environment.is_async: - self.write("))") - - def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: - self.write(node.key + "=") - self.visit(node.value, frame) - - # -- Unused nodes for extensions - - def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: - self.write("Markup(") - self.visit(node.expr, frame) - self.write(")") - - def visit_MarkSafeIfAutoescape( - self, node: nodes.MarkSafeIfAutoescape, frame: Frame - ) -> None: - self.write("(Markup if context.eval_ctx.autoescape else identity)(") - self.visit(node.expr, frame) - self.write(")") - - def visit_EnvironmentAttribute( - self, node: nodes.EnvironmentAttribute, frame: Frame - ) -> None: - self.write("environment." 
+ node.name) - - def visit_ExtensionAttribute( - self, node: nodes.ExtensionAttribute, frame: Frame - ) -> None: - self.write(f"environment.extensions[{node.identifier!r}].{node.name}") - - def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: - self.write(self.import_aliases[node.importname]) - - def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: - self.write(node.name) - - def visit_ContextReference( - self, node: nodes.ContextReference, frame: Frame - ) -> None: - self.write("context") - - def visit_DerivedContextReference( - self, node: nodes.DerivedContextReference, frame: Frame - ) -> None: - self.write(self.derive_context(frame)) - - def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: - self.writeline("continue", node) - - def visit_Break(self, node: nodes.Break, frame: Frame) -> None: - self.writeline("break", node) - - def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: - scope_frame = frame.inner() - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - - def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: - ctx = self.temporary_identifier() - self.writeline(f"{ctx} = {self.derive_context(frame)}") - self.writeline(f"{ctx}.vars = ") - self.visit(node.context, frame) - self.push_context_reference(ctx) - - scope_frame = frame.inner(isolated=True) - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - self.pop_context_reference() - - def visit_EvalContextModifier( - self, node: nodes.EvalContextModifier, frame: Frame - ) -> None: - for keyword in node.options: - self.writeline(f"context.eval_ctx.{keyword.key} = ") - self.visit(keyword.value, frame) - try: - val = keyword.value.as_const(frame.eval_ctx) - except nodes.Impossible: - frame.eval_ctx.volatile = True - else: - setattr(frame.eval_ctx, keyword.key, val) - - def visit_ScopedEvalContextModifier( - self, node: nodes.ScopedEvalContextModifier, frame: Frame - ) -> None: - old_ctx_name = self.temporary_identifier() - saved_ctx = frame.eval_ctx.save() - self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") - self.visit_EvalContextModifier(node, frame) - for child in node.body: - self.visit(child, frame) - frame.eval_ctx.revert(saved_ctx) - self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/server/venv/Lib/site-packages/jinja2/constants.py b/server/venv/Lib/site-packages/jinja2/constants.py deleted file mode 100644 index 41a1c23..0000000 --- a/server/venv/Lib/site-packages/jinja2/constants.py +++ /dev/null @@ -1,20 +0,0 @@ -#: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = """\ -a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at -auctor augue bibendum blandit class commodo condimentum congue consectetuer -consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus -diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend -elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames -faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac -hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum -justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem -luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie -mollis montes 
morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non -nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque -penatibus per pharetra phasellus placerat platea porta porttitor posuere -potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus -ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit -sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor -tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices -ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus -viverra volutpat vulputate""" diff --git a/server/venv/Lib/site-packages/jinja2/debug.py b/server/venv/Lib/site-packages/jinja2/debug.py deleted file mode 100644 index 7ed7e92..0000000 --- a/server/venv/Lib/site-packages/jinja2/debug.py +++ /dev/null @@ -1,191 +0,0 @@ -import sys -import typing as t -from types import CodeType -from types import TracebackType - -from .exceptions import TemplateSyntaxError -from .utils import internal_code -from .utils import missing - -if t.TYPE_CHECKING: - from .runtime import Context - - -def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: - """Rewrite the current exception to replace any tracebacks from - within compiled template code with tracebacks that look like they - came from the template source. - - This must be called within an ``except`` block. - - :param source: For ``TemplateSyntaxError``, the original source if - known. - :return: The original exception with the rewritten traceback. - """ - _, exc_value, tb = sys.exc_info() - exc_value = t.cast(BaseException, exc_value) - tb = t.cast(TracebackType, tb) - - if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: - exc_value.translated = True - exc_value.source = source - # Remove the old traceback, otherwise the frames from the - # compiler still show up. - exc_value.with_traceback(None) - # Outside of runtime, so the frame isn't executing template - # code, but it still needs to point at the template. - tb = fake_traceback( - exc_value, None, exc_value.filename or "", exc_value.lineno - ) - else: - # Skip the frame for the render function. - tb = tb.tb_next - - stack = [] - - # Build the stack of traceback object, replacing any in template - # code with the source file and line information. - while tb is not None: - # Skip frames decorated with @internalcode. These are internal - # calls that aren't useful in template debugging output. - if tb.tb_frame.f_code in internal_code: - tb = tb.tb_next - continue - - template = tb.tb_frame.f_globals.get("__jinja_template__") - - if template is not None: - lineno = template.get_corresponding_lineno(tb.tb_lineno) - fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) - stack.append(fake_tb) - else: - stack.append(tb) - - tb = tb.tb_next - - tb_next = None - - # Assign tb_next in reverse to avoid circular references. - for tb in reversed(stack): - tb.tb_next = tb_next - tb_next = tb - - return exc_value.with_traceback(tb_next) - - -def fake_traceback( # type: ignore - exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int -) -> TracebackType: - """Produce a new traceback object that looks like it came from the - template source instead of the compiled code. The filename, line - number, and location name will point to the template, and the local - variables will be the current template context. 
- - :param exc_value: The original exception to be re-raised to create - the new traceback. - :param tb: The original traceback to get the local variables and - code info from. - :param filename: The template filename. - :param lineno: The line number in the template source. - """ - if tb is not None: - # Replace the real locals with the context that would be - # available at that point in the template. - locals = get_template_locals(tb.tb_frame.f_locals) - locals.pop("__jinja_exception__", None) - else: - locals = {} - - globals = { - "__name__": filename, - "__file__": filename, - "__jinja_exception__": exc_value, - } - # Raise an exception at the correct line number. - code: CodeType = compile( - "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" - ) - - # Build a new code object that points to the template file and - # replaces the location with a block name. - location = "template" - - if tb is not None: - function = tb.tb_frame.f_code.co_name - - if function == "root": - location = "top-level template code" - elif function.startswith("block_"): - location = f"block {function[6:]!r}" - - if sys.version_info >= (3, 8): - code = code.replace(co_name=location) - else: - code = CodeType( - code.co_argcount, - code.co_kwonlyargcount, - code.co_nlocals, - code.co_stacksize, - code.co_flags, - code.co_code, - code.co_consts, - code.co_names, - code.co_varnames, - code.co_filename, - location, - code.co_firstlineno, - code.co_lnotab, - code.co_freevars, - code.co_cellvars, - ) - - # Execute the new code, which is guaranteed to raise, and return - # the new traceback without this frame. - try: - exec(code, globals, locals) - except BaseException: - return sys.exc_info()[2].tb_next # type: ignore - - -def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: - """Based on the runtime locals, get the context that would be - available at that point in the template. - """ - # Start with the current template context. - ctx: "t.Optional[Context]" = real_locals.get("context") - - if ctx is not None: - data: t.Dict[str, t.Any] = ctx.get_all().copy() - else: - data = {} - - # Might be in a derived context that only sets local variables - # rather than pushing a context. Local variables follow the scheme - # l_depth_name. Find the highest-depth local that has a value for - # each name. - local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} - - for name, value in real_locals.items(): - if not name.startswith("l_") or value is missing: - # Not a template variable, or no longer relevant. - continue - - try: - _, depth_str, name = name.split("_", 2) - depth = int(depth_str) - except ValueError: - continue - - cur_depth = local_overrides.get(name, (-1,))[0] - - if cur_depth < depth: - local_overrides[name] = (depth, value) - - # Modify the context with any derived context. 
- for name, (_, value) in local_overrides.items(): - if value is missing: - data.pop(name, None) - else: - data[name] = value - - return data diff --git a/server/venv/Lib/site-packages/jinja2/defaults.py b/server/venv/Lib/site-packages/jinja2/defaults.py deleted file mode 100644 index 638cad3..0000000 --- a/server/venv/Lib/site-packages/jinja2/defaults.py +++ /dev/null @@ -1,48 +0,0 @@ -import typing as t - -from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 -from .tests import TESTS as DEFAULT_TESTS # noqa: F401 -from .utils import Cycler -from .utils import generate_lorem_ipsum -from .utils import Joiner -from .utils import Namespace - -if t.TYPE_CHECKING: - import typing_extensions as te - -# defaults for the parser / lexer -BLOCK_START_STRING = "{%" -BLOCK_END_STRING = "%}" -VARIABLE_START_STRING = "{{" -VARIABLE_END_STRING = "}}" -COMMENT_START_STRING = "{#" -COMMENT_END_STRING = "#}" -LINE_STATEMENT_PREFIX: t.Optional[str] = None -LINE_COMMENT_PREFIX: t.Optional[str] = None -TRIM_BLOCKS = False -LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" -KEEP_TRAILING_NEWLINE = False - -# default filters, tests and namespace - -DEFAULT_NAMESPACE = { - "range": range, - "dict": dict, - "lipsum": generate_lorem_ipsum, - "cycler": Cycler, - "joiner": Joiner, - "namespace": Namespace, -} - -# default policies -DEFAULT_POLICIES: t.Dict[str, t.Any] = { - "compiler.ascii_str": True, - "urlize.rel": "noopener", - "urlize.target": None, - "urlize.extra_schemes": None, - "truncate.leeway": 5, - "json.dumps_function": None, - "json.dumps_kwargs": {"sort_keys": True}, - "ext.i18n.trimmed": False, -} diff --git a/server/venv/Lib/site-packages/jinja2/environment.py b/server/venv/Lib/site-packages/jinja2/environment.py deleted file mode 100644 index ea04e8b..0000000 --- a/server/venv/Lib/site-packages/jinja2/environment.py +++ /dev/null @@ -1,1667 +0,0 @@ -"""Classes for managing templates and their runtime and compile time -options. -""" -import os -import typing -import typing as t -import weakref -from collections import ChainMap -from functools import lru_cache -from functools import partial -from functools import reduce -from types import CodeType - -from markupsafe import Markup - -from . 
import nodes -from .compiler import CodeGenerator -from .compiler import generate -from .defaults import BLOCK_END_STRING -from .defaults import BLOCK_START_STRING -from .defaults import COMMENT_END_STRING -from .defaults import COMMENT_START_STRING -from .defaults import DEFAULT_FILTERS -from .defaults import DEFAULT_NAMESPACE -from .defaults import DEFAULT_POLICIES -from .defaults import DEFAULT_TESTS -from .defaults import KEEP_TRAILING_NEWLINE -from .defaults import LINE_COMMENT_PREFIX -from .defaults import LINE_STATEMENT_PREFIX -from .defaults import LSTRIP_BLOCKS -from .defaults import NEWLINE_SEQUENCE -from .defaults import TRIM_BLOCKS -from .defaults import VARIABLE_END_STRING -from .defaults import VARIABLE_START_STRING -from .exceptions import TemplateNotFound -from .exceptions import TemplateRuntimeError -from .exceptions import TemplatesNotFound -from .exceptions import TemplateSyntaxError -from .exceptions import UndefinedError -from .lexer import get_lexer -from .lexer import Lexer -from .lexer import TokenStream -from .nodes import EvalContext -from .parser import Parser -from .runtime import Context -from .runtime import new_context -from .runtime import Undefined -from .utils import _PassArg -from .utils import concat -from .utils import consume -from .utils import import_string -from .utils import internalcode -from .utils import LRUCache -from .utils import missing - -if t.TYPE_CHECKING: - import typing_extensions as te - from .bccache import BytecodeCache - from .ext import Extension - from .loaders import BaseLoader - -_env_bound = t.TypeVar("_env_bound", bound="Environment") - - -# for direct template usage we have up to ten living environments -@lru_cache(maxsize=10) -def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: - """Return a new spontaneous environment. A spontaneous environment - is used for templates created directly rather than through an - existing environment. - - :param cls: Environment class to create. - :param args: Positional arguments passed to environment. - """ - env = cls(*args) - env.shared = True - return env - - -def create_cache( - size: int, -) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: - """Return the cache class for the given size.""" - if size == 0: - return None - - if size < 0: - return {} - - return LRUCache(size) # type: ignore - - -def copy_cache( - cache: t.Optional[t.MutableMapping], -) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: - """Create an empty copy of the given cache.""" - if cache is None: - return None - - if type(cache) is dict: - return {} - - return LRUCache(cache.capacity) # type: ignore - - -def load_extensions( - environment: "Environment", - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], -) -> t.Dict[str, "Extension"]: - """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated extensions. - """ - result = {} - - for extension in extensions: - if isinstance(extension, str): - extension = t.cast(t.Type["Extension"], import_string(extension)) - - result[extension.identifier] = extension(environment) - - return result - - -def _environment_config_check(environment: "Environment") -> "Environment": - """Perform a sanity check on the environment.""" - assert issubclass( - environment.undefined, Undefined - ), "'undefined' must be a subclass of 'jinja2.Undefined'." 
- assert ( - environment.block_start_string - != environment.variable_start_string - != environment.comment_start_string - ), "block, variable and comment start strings must be different." - assert environment.newline_sequence in { - "\r", - "\r\n", - "\n", - }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." - return environment - - -class Environment: - r"""The core component of Jinja is the `Environment`. It contains - important shared variables like configuration, filters, tests, - globals and others. Instances of this class may be modified if - they are not shared and if no template was loaded so far. - Modifications on environments after the first template was loaded - will lead to surprising effects and undefined behavior. - - Here are the possible initialization parameters: - - `block_start_string` - The string marking the beginning of a block. Defaults to ``'{%'``. - - `block_end_string` - The string marking the end of a block. Defaults to ``'%}'``. - - `variable_start_string` - The string marking the beginning of a print statement. - Defaults to ``'{{'``. - - `variable_end_string` - The string marking the end of a print statement. Defaults to - ``'}}'``. - - `comment_start_string` - The string marking the beginning of a comment. Defaults to ``'{#'``. - - `comment_end_string` - The string marking the end of a comment. Defaults to ``'#}'``. - - `line_statement_prefix` - If given and a string, this will be used as prefix for line based - statements. See also :ref:`line-statements`. - - `line_comment_prefix` - If given and a string, this will be used as prefix for line based - comments. See also :ref:`line-statements`. - - .. versionadded:: 2.2 - - `trim_blocks` - If this is set to ``True`` the first newline after a block is - removed (block, not variable tag!). Defaults to `False`. - - `lstrip_blocks` - If this is set to ``True`` leading spaces and tabs are stripped - from the start of a line to a block. Defaults to `False`. - - `newline_sequence` - The sequence that starts a newline. Must be one of ``'\r'``, - ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a - useful default for Linux and OS X systems as well as web - applications. - - `keep_trailing_newline` - Preserve the trailing newline when rendering templates. - The default is ``False``, which causes a single newline, - if present, to be stripped from the end of the template. - - .. versionadded:: 2.7 - - `extensions` - List of Jinja extensions to use. This can either be import paths - as strings or extension classes. For more information have a - look at :ref:`the extensions documentation `. - - `optimized` - should the optimizer be enabled? Default is ``True``. - - `undefined` - :class:`Undefined` or a subclass of it that is used to represent - undefined values in the template. - - `finalize` - A callable that can be used to process the result of a variable - expression before it is output. For example one can convert - ``None`` implicitly into an empty string here. - - `autoescape` - If set to ``True`` the XML/HTML autoescaping feature is enabled by - default. For more details about autoescaping see - :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also - be a callable that is passed the template name and has to - return ``True`` or ``False`` depending on autoescape should be - enabled by default. - - .. versionchanged:: 2.4 - `autoescape` can now be a function - - `loader` - The template loader for this environment. - - `cache_size` - The size of the cache. 
By default this is ``400`` which means - that if more than 400 templates are loaded the loader will clean - out the least recently used template. If the cache size is set to - ``0`` templates are recompiled all the time; if the cache size is - ``-1`` the cache will not be cleaned. - - .. versionchanged:: 2.8 - The cache size was increased to 400 from a low 50. - - `auto_reload` - Some loaders load templates from locations where the template - sources may change (i.e. file system or database). If - ``auto_reload`` is set to ``True`` (default) every time a template is - requested the loader checks if the source changed and if yes, it - will reload the template. For higher performance it's possible to - disable that. - - `bytecode_cache` - If set to a bytecode cache object, this object will provide a - cache for the internal Jinja bytecode so that templates don't - have to be parsed if they were not changed. - - See :ref:`bytecode-cache` for more information. - - `enable_async` - If set to true this enables async template execution which - allows using async functions and generators. - """ - - #: if this environment is sandboxed. Modifying this variable won't make - #: the environment sandboxed though. For a real sandboxed environment - #: have a look at jinja2.sandbox. This flag alone controls the code - #: generation by the compiler. - sandboxed = False - - #: True if the environment is just an overlay - overlayed = False - - #: the environment this environment is linked to if it is an overlay - linked_to: t.Optional["Environment"] = None - - #: shared environments have this set to `True`. A shared environment - #: must not be modified - shared = False - - #: the class that is used for code generation. See - #: :class:`~jinja2.compiler.CodeGenerator` for more information. - code_generator_class: t.Type["CodeGenerator"] = CodeGenerator - - concat = "".join - - #: the context class that is used for templates. See - #: :class:`~jinja2.runtime.Context` for more information. - context_class: t.Type[Context] = Context - - template_class: t.Type["Template"] - - def __init__( - self, - block_start_string: str = BLOCK_START_STRING, - block_end_string: str = BLOCK_END_STRING, - variable_start_string: str = VARIABLE_START_STRING, - variable_end_string: str = VARIABLE_END_STRING, - comment_start_string: str = COMMENT_START_STRING, - comment_end_string: str = COMMENT_END_STRING, - line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, - line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, - trim_blocks: bool = TRIM_BLOCKS, - lstrip_blocks: bool = LSTRIP_BLOCKS, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, - keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), - optimized: bool = True, - undefined: t.Type[Undefined] = Undefined, - finalize: t.Optional[t.Callable[..., t.Any]] = None, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, - loader: t.Optional["BaseLoader"] = None, - cache_size: int = 400, - auto_reload: bool = True, - bytecode_cache: t.Optional["BytecodeCache"] = None, - enable_async: bool = False, - ): - # !!Important notice!! - # The constructor accepts quite a few arguments that should be - # passed by keyword rather than position.
However it's important to - # not change the order of arguments because it's used at least - # internally in those cases: - # - spontaneous environments (i18n extension and Template) - # - unittests - # If parameter changes are required only add parameters at the end - # and don't change the arguments (or the defaults!) of the arguments - # existing already. - - # lexer / parser information - self.block_start_string = block_start_string - self.block_end_string = block_end_string - self.variable_start_string = variable_start_string - self.variable_end_string = variable_end_string - self.comment_start_string = comment_start_string - self.comment_end_string = comment_end_string - self.line_statement_prefix = line_statement_prefix - self.line_comment_prefix = line_comment_prefix - self.trim_blocks = trim_blocks - self.lstrip_blocks = lstrip_blocks - self.newline_sequence = newline_sequence - self.keep_trailing_newline = keep_trailing_newline - - # runtime information - self.undefined: t.Type[Undefined] = undefined - self.optimized = optimized - self.finalize = finalize - self.autoescape = autoescape - - # defaults - self.filters = DEFAULT_FILTERS.copy() - self.tests = DEFAULT_TESTS.copy() - self.globals = DEFAULT_NAMESPACE.copy() - - # set the loader provided - self.loader = loader - self.cache = create_cache(cache_size) - self.bytecode_cache = bytecode_cache - self.auto_reload = auto_reload - - # configurable policies - self.policies = DEFAULT_POLICIES.copy() - - # load extensions - self.extensions = load_extensions(self, extensions) - - self.is_async = enable_async - _environment_config_check(self) - - def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: - """Adds an extension after the environment was created. - - .. versionadded:: 2.5 - """ - self.extensions.update(load_extensions(self, [extension])) - - def extend(self, **attributes: t.Any) -> None: - """Add the items to the instance of the environment if they do not exist - yet. This is used by :ref:`extensions ` to register - callbacks and configuration values without breaking inheritance. - """ - for key, value in attributes.items(): - if not hasattr(self, key): - setattr(self, key, value) - - def overlay( - self, - block_start_string: str = missing, - block_end_string: str = missing, - variable_start_string: str = missing, - variable_end_string: str = missing, - comment_start_string: str = missing, - comment_end_string: str = missing, - line_statement_prefix: t.Optional[str] = missing, - line_comment_prefix: t.Optional[str] = missing, - trim_blocks: bool = missing, - lstrip_blocks: bool = missing, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, - keep_trailing_newline: bool = missing, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, - optimized: bool = missing, - undefined: t.Type[Undefined] = missing, - finalize: t.Optional[t.Callable[..., t.Any]] = missing, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, - loader: t.Optional["BaseLoader"] = missing, - cache_size: int = missing, - auto_reload: bool = missing, - bytecode_cache: t.Optional["BytecodeCache"] = missing, - enable_async: bool = False, - ) -> "Environment": - """Create a new overlay environment that shares all the data with the - current environment except for cache and the overridden attributes. - Extensions cannot be removed for an overlayed environment. 
An overlayed - environment automatically gets all the extensions of the environment it - is linked to plus optional extra extensions. - - Creating overlays should happen after the initial environment was set - up completely. Not all attributes are truly linked; some are just - copied over so modifications on the original environment may not shine - through. - - .. versionchanged:: 3.1.2 - Added the ``newline_sequence``, ``keep_trailing_newline``, - and ``enable_async`` parameters to match ``__init__``. - """ - args = dict(locals()) - del args["self"], args["cache_size"], args["extensions"], args["enable_async"] - - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.overlayed = True - rv.linked_to = self - - for key, value in args.items(): - if value is not missing: - setattr(rv, key, value) - - if cache_size is not missing: - rv.cache = create_cache(cache_size) - else: - rv.cache = copy_cache(self.cache) - - rv.extensions = {} - for key, value in self.extensions.items(): - rv.extensions[key] = value.bind(rv) - if extensions is not missing: - rv.extensions.update(load_extensions(rv, extensions)) - - if enable_async is not missing: - rv.is_async = enable_async - - return _environment_config_check(rv) - - @property - def lexer(self) -> Lexer: - """The lexer for this environment.""" - return get_lexer(self) - - def iter_extensions(self) -> t.Iterator["Extension"]: - """Iterates over the extensions by priority.""" - return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) - - def getitem( - self, obj: t.Any, argument: t.Union[str, t.Any] - ) -> t.Union[t.Any, Undefined]: - """Get an item or attribute of an object but prefer the item.""" - try: - return obj[argument] - except (AttributeError, TypeError, LookupError): - if isinstance(argument, str): - try: - attr = str(argument) - except Exception: - pass - else: - try: - return getattr(obj, attr) - except AttributeError: - pass - return self.undefined(obj=obj, name=argument) - - def getattr(self, obj: t.Any, attribute: str) -> t.Any: - """Get an item or attribute of an object but prefer the attribute. - Unlike :meth:`getitem` the attribute *must* be a string. - """ - try: - return getattr(obj, attribute) - except AttributeError: - pass - try: - return obj[attribute] - except (TypeError, LookupError, AttributeError): - return self.undefined(obj=obj, name=attribute) - - def _filter_test_common( - self, - name: t.Union[str, Undefined], - value: t.Any, - args: t.Optional[t.Sequence[t.Any]], - kwargs: t.Optional[t.Mapping[str, t.Any]], - context: t.Optional[Context], - eval_ctx: t.Optional[EvalContext], - is_filter: bool, - ) -> t.Any: - if is_filter: - env_map = self.filters - type_name = "filter" - else: - env_map = self.tests - type_name = "test" - - func = env_map.get(name) # type: ignore - - if func is None: - msg = f"No {type_name} named {name!r}." - - if isinstance(name, Undefined): - try: - name._fail_with_undefined_error() - except Exception as e: - msg = f"{msg} ({e}; did you forget to quote the callable name?)" - - raise TemplateRuntimeError(msg) - - args = [value, *(args if args is not None else ())] - kwargs = kwargs if kwargs is not None else {} - pass_arg = _PassArg.from_obj(func) - - if pass_arg is _PassArg.context: - if context is None: - raise TemplateRuntimeError( - f"Attempted to invoke a context {type_name} without context."
- ) - - args.insert(0, context) - elif pass_arg is _PassArg.eval_context: - if eval_ctx is None: - if context is not None: - eval_ctx = context.eval_ctx - else: - eval_ctx = EvalContext(self) - - args.insert(0, eval_ctx) - elif pass_arg is _PassArg.environment: - args.insert(0, self) - - return func(*args, **kwargs) - - def call_filter( - self, - name: str, - value: t.Any, - args: t.Optional[t.Sequence[t.Any]] = None, - kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - context: t.Optional[Context] = None, - eval_ctx: t.Optional[EvalContext] = None, - ) -> t.Any: - """Invoke a filter on a value the same way the compiler does. - - This might return a coroutine if the filter is running from an - environment in async mode and the filter supports async - execution. It's your responsibility to await this if needed. - - .. versionadded:: 2.7 - """ - return self._filter_test_common( - name, value, args, kwargs, context, eval_ctx, True - ) - - def call_test( - self, - name: str, - value: t.Any, - args: t.Optional[t.Sequence[t.Any]] = None, - kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - context: t.Optional[Context] = None, - eval_ctx: t.Optional[EvalContext] = None, - ) -> t.Any: - """Invoke a test on a value the same way the compiler does. - - This might return a coroutine if the test is running from an - environment in async mode and the test supports async execution. - It's your responsibility to await this if needed. - - .. versionchanged:: 3.0 - Tests support ``@pass_context``, etc. decorators. Added - the ``context`` and ``eval_ctx`` parameters. - - .. versionadded:: 2.7 - """ - return self._filter_test_common( - name, value, args, kwargs, context, eval_ctx, False - ) - - @internalcode - def parse( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> nodes.Template: - """Parse the sourcecode and return the abstract syntax tree. This - tree of nodes is used by the compiler to convert the template into - executable source- or bytecode. This is useful for debugging or to - extract information from templates. - - If you are :ref:`developing Jinja extensions ` - this gives you a good overview of the node tree generated. - """ - try: - return self._parse(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def _parse( - self, source: str, name: t.Optional[str], filename: t.Optional[str] - ) -> nodes.Template: - """Internal parsing function used by `parse` and `compile`.""" - return Parser(self, source, name, filename).parse() - - def lex( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> t.Iterator[t.Tuple[int, str, str]]: - """Lex the given sourcecode and return a generator that yields - tokens as tuples in the form ``(lineno, token_type, value)``. - This can be useful for :ref:`extension development ` - and debugging templates. - - This does not perform preprocessing. If you want the preprocessing - of the extensions to be applied you have to filter source through - the :meth:`preprocess` method. - """ - source = str(source) - try: - return self.lexer.tokeniter(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def preprocess( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> str: - """Preprocesses the source with all extensions. 
This is automatically - called for all parsing and compiling methods but *not* for :meth:`lex` - because there you usually only want the actual source tokenized. - """ - return reduce( - lambda s, e: e.preprocess(s, name, filename), - self.iter_extensions(), - str(source), - ) - - def _tokenize( - self, - source: str, - name: t.Optional[str], - filename: t.Optional[str] = None, - state: t.Optional[str] = None, - ) -> TokenStream: - """Called by the parser to do the preprocessing and filtering - for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. - """ - source = self.preprocess(source, name, filename) - stream = self.lexer.tokenize(source, name, filename, state) - - for ext in self.iter_extensions(): - stream = ext.filter_stream(stream) # type: ignore - - if not isinstance(stream, TokenStream): - stream = TokenStream(stream, name, filename) # type: ignore - - return stream - - def _generate( - self, - source: nodes.Template, - name: t.Optional[str], - filename: t.Optional[str], - defer_init: bool = False, - ) -> str: - """Internal hook that can be overridden to hook a different generate - method in. - - .. versionadded:: 2.5 - """ - return generate( # type: ignore - source, - self, - name, - filename, - defer_init=defer_init, - optimized=self.optimized, - ) - - def _compile(self, source: str, filename: str) -> CodeType: - """Internal hook that can be overridden to hook a different compile - method in. - - .. versionadded:: 2.5 - """ - return compile(source, filename, "exec") # type: ignore - - @typing.overload - def compile( # type: ignore - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: "te.Literal[False]" = False, - defer_init: bool = False, - ) -> CodeType: - ... - - @typing.overload - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: "te.Literal[True]" = ..., - defer_init: bool = False, - ) -> str: - ... - - @internalcode - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: bool = False, - defer_init: bool = False, - ) -> t.Union[str, CodeType]: - """Compile a node or template source code. The `name` parameter is - the load name of the template after it was joined using - :meth:`join_path` if necessary, not the filename on the file system. - The `filename` parameter is the estimated filename of the template on - the file system. If the template came from a database or memory this - can be omitted. - - The return value of this method is a Python code object. If the `raw` - parameter is `True` the return value will be a string with Python - code equivalent to the bytecode returned otherwise. This method is - mainly used internally. - - `defer_init` is used internally to aid the module code generator. This - allows the generated code to be imported without the global - environment variable being set. - - .. versionadded:: 2.4 - `defer_init` parameter added. - """ - source_hint = None - try: - if isinstance(source, str): - source_hint = source - source = self._parse(source, name, filename) - source = self._generate(source, name, filename, defer_init=defer_init) - if raw: - return source - if filename is None: - filename = "